Diffstat (limited to 'src')
-rw-r--r--src/actors-migration/scala/actors/Pattern.scala4
-rw-r--r--src/actors-migration/scala/actors/StashingActor.scala2
-rw-r--r--src/actors-migration/scala/actors/Timeout.scala2
-rw-r--r--src/actors/scala/actors/AbstractActor.scala2
-rw-r--r--src/actors/scala/actors/Actor.scala2
-rw-r--r--src/actors/scala/actors/CanReply.scala2
-rw-r--r--src/actors/scala/actors/Combinators.scala2
-rw-r--r--src/actors/scala/actors/Future.scala2
-rw-r--r--src/actors/scala/actors/Reactor.scala2
-rw-r--r--src/actors/scala/actors/scheduler/ThreadPoolConfig.scala2
-rw-r--r--src/build/InnerObjectTestGen.scala2
-rw-r--r--src/build/genprod.scala2
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Aliases.scala8
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Evals.scala2
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Exprs.scala2
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Infrastructure.scala6
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Parsers.scala6
-rw-r--r--src/compiler/scala/reflect/macros/runtime/TypeTags.scala2
-rw-r--r--src/compiler/scala/reflect/reify/Errors.scala4
-rw-r--r--src/compiler/scala/reflect/reify/States.scala8
-rw-r--r--src/compiler/scala/reflect/reify/Taggers.scala4
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala2
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenSymbols.scala62
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenTrees.scala2
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenTypes.scala2
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenUtils.scala5
-rw-r--r--src/compiler/scala/reflect/reify/package.scala22
-rw-r--r--src/compiler/scala/reflect/reify/phases/Metalevels.scala4
-rw-r--r--src/compiler/scala/reflect/reify/phases/Reshape.scala8
-rw-r--r--src/compiler/scala/reflect/reify/utils/Extractors.scala22
-rw-r--r--src/compiler/scala/reflect/reify/utils/NodePrinters.scala8
-rw-r--r--src/compiler/scala/reflect/reify/utils/StdAttachments.scala10
-rw-r--r--src/compiler/scala/reflect/reify/utils/SymbolTables.scala61
-rw-r--r--src/compiler/scala/tools/ant/ClassloadVerify.scala2
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Compilers.scala4
-rw-r--r--src/compiler/scala/tools/cmd/Property.scala2
-rw-r--r--src/compiler/scala/tools/cmd/Reference.scala2
-rw-r--r--src/compiler/scala/tools/cmd/gen/AnyVals.scala11
-rw-r--r--src/compiler/scala/tools/cmd/gen/Codegen.scala2
-rw-r--r--src/compiler/scala/tools/cmd/package.scala4
-rw-r--r--src/compiler/scala/tools/nsc/EvalLoop.scala2
-rw-r--r--src/compiler/scala/tools/nsc/Global.scala32
-rw-r--r--src/compiler/scala/tools/nsc/PhaseAssembly.scala2
-rw-r--r--src/compiler/scala/tools/nsc/Phases.scala4
-rw-r--r--src/compiler/scala/tools/nsc/ast/NodePrinters.scala4
-rw-r--r--src/compiler/scala/tools/nsc/ast/Printers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeDSL.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeGen.scala4
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeInfo.scala6
-rw-r--r--src/compiler/scala/tools/nsc/ast/Trees.scala12
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala26
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Scanners.scala12
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala8
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Tokens.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala9
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/GenICode.scala151
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala8
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala4
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala45
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala12
-rw-r--r--src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala27
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala21
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala5
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/Inliners.scala245
-rw-r--r--src/compiler/scala/tools/nsc/dependencies/Changes.scala2
-rw-r--r--src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala2
-rw-r--r--src/compiler/scala/tools/nsc/doc/Settings.scala4
-rw-r--r--src/compiler/scala/tools/nsc/doc/Uncompilable.scala4
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala20
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala6
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala2
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Source.scala2
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Template.scala72
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala4
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala6
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala2
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/BuildManager.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/CompilerControl.scala16
-rw-r--r--src/compiler/scala/tools/nsc/interactive/ContextTrees.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Global.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Picklers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/REPL.scala46
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala73
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/Tester.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala6
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ILoop.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/IMain.scala5
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala6
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/NamedParam.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Phased.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Power.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ReplVals.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/package.scala6
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/session/package.scala2
-rw-r--r--src/compiler/scala/tools/nsc/io/Jar.scala4
-rw-r--r--src/compiler/scala/tools/nsc/io/Pickler.scala6
-rw-r--r--src/compiler/scala/tools/nsc/io/package.scala4
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaParsers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaScanners.scala2
-rw-r--r--src/compiler/scala/tools/nsc/matching/MatchSupport.scala4
-rw-r--r--src/compiler/scala/tools/nsc/matching/Matrix.scala2
-rw-r--r--src/compiler/scala/tools/nsc/matching/ParallelMatching.scala4
-rw-r--r--src/compiler/scala/tools/nsc/matching/PatternBindings.scala2
-rw-r--r--src/compiler/scala/tools/nsc/scratchpad/CommentOutputStream.scala18
-rw-r--r--src/compiler/scala/tools/nsc/scratchpad/CommentWriter.scala42
-rw-r--r--src/compiler/scala/tools/nsc/scratchpad/Mixer.scala2
-rw-r--r--src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala92
-rw-r--r--src/compiler/scala/tools/nsc/settings/AbsSettings.scala2
-rw-r--r--src/compiler/scala/tools/nsc/settings/MutableSettings.scala2
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaSettings.scala4
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala8
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolTable.scala2
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala4
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala2
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala13
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala8
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/package.scala2
-rw-r--r--src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala2
-rw-r--r--src/compiler/scala/tools/nsc/symtab/package.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/AddInterfaces.scala22
-rw-r--r--src/compiler/scala/tools/nsc/transform/CleanUp.scala153
-rw-r--r--src/compiler/scala/tools/nsc/transform/Constructors.scala7
-rw-r--r--src/compiler/scala/tools/nsc/transform/Erasure.scala22
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala3
-rw-r--r--src/compiler/scala/tools/nsc/transform/Flatten.scala16
-rw-r--r--src/compiler/scala/tools/nsc/transform/LambdaLift.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/Mixin.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/OverridingPairs.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala23
-rw-r--r--src/compiler/scala/tools/nsc/transform/TailCalls.scala4
-rw-r--r--src/compiler/scala/tools/nsc/transform/UnCurry.scala27
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Analyzer.scala4
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala131
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Contexts.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala4
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Duplicators.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala8
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala24
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Infer.scala26
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Macros.scala39
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala6
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala69
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala40
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala35
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala17
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala4
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Tags.scala6
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala35
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala333
-rw-r--r--src/compiler/scala/tools/nsc/util/Exceptional.scala2
-rw-r--r--src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala2
-rw-r--r--src/compiler/scala/tools/nsc/util/StatisticsInfo.scala6
-rw-r--r--src/compiler/scala/tools/nsc/util/package.scala2
-rw-r--r--src/compiler/scala/tools/reflect/FastTrack.scala8
-rw-r--r--src/compiler/scala/tools/reflect/FrontEnds.scala10
-rw-r--r--src/compiler/scala/tools/reflect/ToolBox.scala15
-rw-r--r--src/compiler/scala/tools/reflect/ToolBoxFactory.scala60
-rw-r--r--src/compiler/scala/tools/reflect/package.scala4
-rw-r--r--src/compiler/scala/tools/util/Javap.scala2
-rw-r--r--src/compiler/scala/tools/util/PathResolver.scala2
-rw-r--r--src/compiler/scala/tools/util/VerifyClass.scala2
-rw-r--r--src/continuations/library/scala/util/continuations/ControlContext.scala2
-rw-r--r--src/detach/library/scala/remoting/Channel.scala8
-rw-r--r--src/library/scala/App.scala10
-rw-r--r--src/library/scala/Array.scala14
-rw-r--r--src/library/scala/Boolean.scala2
-rw-r--r--src/library/scala/Byte.scala2
-rw-r--r--src/library/scala/Char.scala2
-rw-r--r--src/library/scala/Double.scala2
-rw-r--r--src/library/scala/Dynamic.scala10
-rw-r--r--src/library/scala/Float.scala2
-rw-r--r--src/library/scala/Function.scala4
-rw-r--r--src/library/scala/Int.scala2
-rw-r--r--src/library/scala/Long.scala2
-rw-r--r--src/library/scala/LowPriorityImplicits.scala23
-rw-r--r--src/library/scala/Option.scala2
-rw-r--r--src/library/scala/PartialFunction.scala139
-rw-r--r--src/library/scala/Predef.scala8
-rw-r--r--src/library/scala/Product.scala2
-rw-r--r--src/library/scala/SerialVersionUID.scala2
-rw-r--r--src/library/scala/Short.scala2
-rw-r--r--src/library/scala/StringContext.scala11
-rw-r--r--src/library/scala/Unit.scala2
-rw-r--r--src/library/scala/annotation/bridge.scala2
-rw-r--r--src/library/scala/annotation/cloneable.scala2
-rw-r--r--src/library/scala/annotation/elidable.scala8
-rw-r--r--src/library/scala/annotation/implicitNotFound.scala2
-rw-r--r--src/library/scala/annotation/meta/beanGetter.scala2
-rw-r--r--src/library/scala/annotation/meta/beanSetter.scala2
-rw-r--r--src/library/scala/annotation/meta/companionClass.scala4
-rw-r--r--src/library/scala/annotation/meta/companionMethod.scala4
-rw-r--r--src/library/scala/annotation/meta/companionObject.scala2
-rw-r--r--src/library/scala/annotation/meta/field.scala2
-rw-r--r--src/library/scala/annotation/meta/getter.scala2
-rw-r--r--src/library/scala/annotation/meta/languageFeature.scala2
-rw-r--r--src/library/scala/annotation/meta/param.scala2
-rw-r--r--src/library/scala/annotation/meta/setter.scala2
-rw-r--r--src/library/scala/annotation/migration.scala4
-rw-r--r--src/library/scala/annotation/serializable.scala2
-rw-r--r--src/library/scala/annotation/static.scala20
-rw-r--r--src/library/scala/annotation/strictfp.scala2
-rw-r--r--src/library/scala/annotation/switch.scala2
-rw-r--r--src/library/scala/annotation/tailrec.scala2
-rw-r--r--src/library/scala/annotation/unchecked/uncheckedStable.scala2
-rw-r--r--src/library/scala/annotation/unchecked/uncheckedVariance.scala2
-rw-r--r--src/library/scala/annotation/unspecialized.scala2
-rw-r--r--src/library/scala/annotation/varargs.scala2
-rw-r--r--src/library/scala/beans/BeanDescription.scala2
-rw-r--r--src/library/scala/beans/BeanDisplayName.scala2
-rw-r--r--src/library/scala/beans/BeanInfo.scala2
-rw-r--r--src/library/scala/beans/BeanInfoSkip.scala2
-rw-r--r--src/library/scala/beans/BeanProperty.scala2
-rw-r--r--src/library/scala/beans/BooleanBeanProperty.scala2
-rw-r--r--src/library/scala/collection/CustomParallelizable.scala2
-rw-r--r--src/library/scala/collection/GenIterableViewLike.scala1
-rw-r--r--src/library/scala/collection/GenMapLike.scala2
-rw-r--r--src/library/scala/collection/GenSeqLike.scala12
-rw-r--r--src/library/scala/collection/GenSetLike.scala2
-rw-r--r--src/library/scala/collection/GenTraversableLike.scala2
-rw-r--r--src/library/scala/collection/GenTraversableOnce.scala6
-rw-r--r--src/library/scala/collection/IndexedSeqLike.scala2
-rwxr-xr-xsrc/library/scala/collection/IndexedSeqOptimized.scala5
-rw-r--r--src/library/scala/collection/IterableLike.scala6
-rw-r--r--src/library/scala/collection/IterableViewLike.scala2
-rw-r--r--src/library/scala/collection/Iterator.scala11
-rw-r--r--src/library/scala/collection/LinearSeqLike.scala4
-rw-r--r--src/library/scala/collection/MapLike.scala10
-rw-r--r--src/library/scala/collection/SeqLike.scala5
-rw-r--r--src/library/scala/collection/SetLike.scala2
-rw-r--r--src/library/scala/collection/TraversableLike.scala8
-rw-r--r--src/library/scala/collection/TraversableOnce.scala16
-rw-r--r--src/library/scala/collection/TraversableProxyLike.scala2
-rw-r--r--src/library/scala/collection/TraversableViewLike.scala4
-rw-r--r--src/library/scala/collection/concurrent/TrieMap.scala51
-rw-r--r--src/library/scala/collection/convert/DecorateAsJava.scala2
-rw-r--r--src/library/scala/collection/convert/DecorateAsScala.scala2
-rw-r--r--src/library/scala/collection/convert/WrapAsJava.scala4
-rw-r--r--src/library/scala/collection/convert/WrapAsScala.scala2
-rw-r--r--src/library/scala/collection/convert/Wrappers.scala11
-rw-r--r--src/library/scala/collection/generic/ClassTagTraversableFactory.scala4
-rwxr-xr-xsrc/library/scala/collection/generic/FilterMonadic.scala2
-rw-r--r--src/library/scala/collection/generic/GenMapFactory.scala2
-rw-r--r--src/library/scala/collection/generic/GenSeqFactory.scala2
-rw-r--r--src/library/scala/collection/generic/GenSetFactory.scala2
-rw-r--r--src/library/scala/collection/generic/GenTraversableFactory.scala2
-rw-r--r--src/library/scala/collection/generic/GenericClassTagCompanion.scala4
-rw-r--r--src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala6
-rw-r--r--src/library/scala/collection/generic/GenericCompanion.scala2
-rw-r--r--src/library/scala/collection/generic/GenericOrderedCompanion.scala2
-rw-r--r--src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala4
-rw-r--r--src/library/scala/collection/generic/GenericParCompanion.scala2
-rw-r--r--src/library/scala/collection/generic/GenericParTemplate.scala6
-rw-r--r--src/library/scala/collection/generic/GenericSeqCompanion.scala4
-rw-r--r--src/library/scala/collection/generic/GenericSetTemplate.scala2
-rw-r--r--src/library/scala/collection/generic/GenericTraversableTemplate.scala10
-rw-r--r--src/library/scala/collection/generic/ImmutableMapFactory.scala2
-rw-r--r--src/library/scala/collection/generic/ImmutableSetFactory.scala2
-rw-r--r--src/library/scala/collection/generic/ImmutableSortedMapFactory.scala2
-rw-r--r--src/library/scala/collection/generic/ImmutableSortedSetFactory.scala2
-rw-r--r--src/library/scala/collection/generic/IsTraversableLike.scala4
-rw-r--r--src/library/scala/collection/generic/IsTraversableOnce.scala4
-rw-r--r--src/library/scala/collection/generic/IterableForwarder.scala2
-rw-r--r--src/library/scala/collection/generic/MapFactory.scala2
-rw-r--r--src/library/scala/collection/generic/MutableMapFactory.scala2
-rw-r--r--src/library/scala/collection/generic/MutableSetFactory.scala2
-rw-r--r--src/library/scala/collection/generic/MutableSortedSetFactory.scala2
-rw-r--r--src/library/scala/collection/generic/OrderedTraversableFactory.scala2
-rw-r--r--src/library/scala/collection/generic/ParFactory.scala4
-rw-r--r--src/library/scala/collection/generic/ParMapFactory.scala2
-rw-r--r--src/library/scala/collection/generic/ParSetFactory.scala10
-rw-r--r--src/library/scala/collection/generic/SeqFactory.scala2
-rw-r--r--src/library/scala/collection/generic/SetFactory.scala4
-rw-r--r--src/library/scala/collection/generic/SliceInterval.scala2
-rw-r--r--src/library/scala/collection/generic/SortedMapFactory.scala2
-rw-r--r--src/library/scala/collection/generic/SortedSetFactory.scala2
-rw-r--r--src/library/scala/collection/generic/TraversableFactory.scala2
-rw-r--r--src/library/scala/collection/generic/TraversableForwarder.scala2
-rw-r--r--src/library/scala/collection/generic/package.scala2
-rw-r--r--src/library/scala/collection/immutable/HashMap.scala99
-rw-r--r--src/library/scala/collection/immutable/HashSet.scala46
-rw-r--r--src/library/scala/collection/immutable/IntMap.scala3
-rw-r--r--src/library/scala/collection/immutable/List.scala6
-rw-r--r--src/library/scala/collection/immutable/ListMap.scala16
-rw-r--r--src/library/scala/collection/immutable/ListSet.scala2
-rw-r--r--src/library/scala/collection/immutable/LongMap.scala5
-rw-r--r--src/library/scala/collection/immutable/Map.scala2
-rw-r--r--src/library/scala/collection/immutable/PagedSeq.scala4
-rw-r--r--src/library/scala/collection/immutable/Queue.scala2
-rw-r--r--src/library/scala/collection/immutable/Range.scala12
-rw-r--r--src/library/scala/collection/immutable/RedBlack.scala3
-rw-r--r--src/library/scala/collection/immutable/RedBlackTree.scala7
-rw-r--r--src/library/scala/collection/immutable/SortedMap.scala14
-rw-r--r--src/library/scala/collection/immutable/Stream.scala4
-rw-r--r--src/library/scala/collection/immutable/StreamViewLike.scala12
-rw-r--r--src/library/scala/collection/immutable/StringLike.scala4
-rw-r--r--src/library/scala/collection/immutable/TrieIterator.scala2
-rw-r--r--src/library/scala/collection/immutable/Vector.scala9
-rw-r--r--src/library/scala/collection/immutable/package.scala4
-rw-r--r--src/library/scala/collection/mutable/AVLTree.scala3
-rw-r--r--src/library/scala/collection/mutable/ArrayBuffer.scala10
-rw-r--r--src/library/scala/collection/mutable/ArrayLike.scala2
-rw-r--r--src/library/scala/collection/mutable/ArrayOps.scala35
-rw-r--r--src/library/scala/collection/mutable/ArraySeq.scala7
-rw-r--r--src/library/scala/collection/mutable/ArrayStack.scala5
-rw-r--r--src/library/scala/collection/mutable/BufferLike.scala14
-rw-r--r--src/library/scala/collection/mutable/Builder.scala3
-rw-r--r--src/library/scala/collection/mutable/Cloneable.scala2
-rw-r--r--src/library/scala/collection/mutable/DoubleLinkedList.scala7
-rw-r--r--src/library/scala/collection/mutable/DoubleLinkedListLike.scala2
-rw-r--r--src/library/scala/collection/mutable/FlatHashTable.scala20
-rw-r--r--src/library/scala/collection/mutable/HashMap.scala33
-rw-r--r--src/library/scala/collection/mutable/HashSet.scala8
-rw-r--r--src/library/scala/collection/mutable/HashTable.scala62
-rw-r--r--src/library/scala/collection/mutable/ImmutableMapAdaptor.scala8
-rw-r--r--src/library/scala/collection/mutable/IndexedSeqLike.scala2
-rwxr-xr-xsrc/library/scala/collection/mutable/IndexedSeqOptimized.scala2
-rw-r--r--src/library/scala/collection/mutable/IndexedSeqView.scala2
-rw-r--r--src/library/scala/collection/mutable/LinkedHashMap.scala43
-rw-r--r--src/library/scala/collection/mutable/LinkedHashSet.scala85
-rw-r--r--src/library/scala/collection/mutable/LinkedListLike.scala12
-rw-r--r--src/library/scala/collection/mutable/Map.scala2
-rw-r--r--src/library/scala/collection/mutable/MapLike.scala2
-rw-r--r--src/library/scala/collection/mutable/MutableList.scala7
-rw-r--r--src/library/scala/collection/mutable/ObservableBuffer.scala6
-rw-r--r--src/library/scala/collection/mutable/OpenHashMap.scala5
-rw-r--r--src/library/scala/collection/mutable/PriorityQueue.scala10
-rw-r--r--src/library/scala/collection/mutable/Queue.scala6
-rw-r--r--src/library/scala/collection/mutable/ResizableArray.scala7
-rw-r--r--src/library/scala/collection/mutable/SetBuilder.scala2
-rw-r--r--src/library/scala/collection/mutable/SetLike.scala2
-rw-r--r--src/library/scala/collection/mutable/SortedSet.scala2
-rw-r--r--src/library/scala/collection/mutable/Stack.scala6
-rw-r--r--src/library/scala/collection/mutable/StringBuilder.scala2
-rw-r--r--src/library/scala/collection/mutable/SynchronizedMap.scala8
-rw-r--r--src/library/scala/collection/mutable/TreeSet.scala2
-rw-r--r--src/library/scala/collection/mutable/UnrolledBuffer.scala24
-rw-r--r--src/library/scala/collection/mutable/WrappedArray.scala2
-rw-r--r--src/library/scala/collection/parallel/ParIterableLike.scala20
-rw-r--r--src/library/scala/collection/parallel/ParIterableViewLike.scala2
-rw-r--r--src/library/scala/collection/parallel/ParMap.scala37
-rw-r--r--src/library/scala/collection/parallel/ParMapLike.scala2
-rw-r--r--src/library/scala/collection/parallel/RemainsIterator.scala2
-rw-r--r--src/library/scala/collection/parallel/Tasks.scala18
-rw-r--r--src/library/scala/collection/parallel/immutable/ParHashMap.scala16
-rw-r--r--src/library/scala/collection/parallel/immutable/ParHashSet.scala6
-rw-r--r--src/library/scala/collection/parallel/immutable/ParIterable.scala6
-rw-r--r--src/library/scala/collection/parallel/immutable/ParMap.scala10
-rw-r--r--src/library/scala/collection/parallel/immutable/ParSeq.scala6
-rw-r--r--src/library/scala/collection/parallel/immutable/ParSet.scala4
-rw-r--r--src/library/scala/collection/parallel/mutable/LazyCombiner.scala2
-rw-r--r--src/library/scala/collection/parallel/mutable/ParArray.scala5
-rw-r--r--src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala6
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashMap.scala51
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashSet.scala32
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashTable.scala8
-rw-r--r--src/library/scala/collection/parallel/mutable/ParIterable.scala6
-rw-r--r--src/library/scala/collection/parallel/mutable/ParMap.scala18
-rw-r--r--src/library/scala/collection/parallel/mutable/ParMapLike.scala16
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSeq.scala8
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSet.scala8
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSetLike.scala4
-rw-r--r--src/library/scala/collection/parallel/mutable/ParTrieMap.scala4
-rw-r--r--src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala2
-rw-r--r--src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala14
-rw-r--r--src/library/scala/collection/parallel/package.scala19
-rw-r--r--src/library/scala/compat/Platform.scala2
-rw-r--r--src/library/scala/concurrent/Awaitable.scala23
-rw-r--r--src/library/scala/concurrent/BlockContext.scala3
-rw-r--r--src/library/scala/concurrent/ExecutionContext.scala1
-rw-r--r--src/library/scala/concurrent/Future.scala17
-rw-r--r--src/library/scala/concurrent/FutureTaskRunner.scala2
-rw-r--r--src/library/scala/concurrent/JavaConversions.scala2
-rw-r--r--src/library/scala/concurrent/TaskRunner.scala2
-rw-r--r--src/library/scala/concurrent/ThreadPoolRunner.scala2
-rw-r--r--src/library/scala/concurrent/ThreadRunner.scala2
-rw-r--r--src/library/scala/concurrent/impl/ExecutionContextImpl.scala1
-rw-r--r--src/library/scala/concurrent/impl/Promise.scala42
-rw-r--r--src/library/scala/concurrent/package.scala33
-rw-r--r--src/library/scala/concurrent/util/Duration.scala1000
-rw-r--r--src/library/scala/concurrent/util/duration/package.scala2
-rw-r--r--src/library/scala/deprecated.scala4
-rw-r--r--src/library/scala/deprecatedInheritance.scala22
-rw-r--r--src/library/scala/deprecatedName.scala4
-rw-r--r--src/library/scala/deprecatedOverriding.scala21
-rw-r--r--src/library/scala/inline.scala2
-rw-r--r--src/library/scala/io/BytePickle.scala1
-rw-r--r--src/library/scala/io/Codec.scala6
-rw-r--r--src/library/scala/io/Position.scala3
-rw-r--r--src/library/scala/io/UTF8Codec.scala1
-rw-r--r--src/library/scala/language.scala22
-rw-r--r--src/library/scala/languageFeature.scala9
-rw-r--r--src/library/scala/math/BigDecimal.scala5
-rw-r--r--src/library/scala/math/BigInt.scala7
-rw-r--r--src/library/scala/math/Fractional.scala4
-rw-r--r--src/library/scala/math/Integral.scala4
-rw-r--r--src/library/scala/math/Numeric.scala2
-rw-r--r--src/library/scala/math/Ordered.scala2
-rw-r--r--src/library/scala/math/Ordering.scala7
-rw-r--r--src/library/scala/math/ScalaNumericConversions.scala12
-rw-r--r--src/library/scala/native.scala2
-rw-r--r--src/library/scala/noinline.scala2
-rw-r--r--src/library/scala/reflect/ClassTag.scala96
-rw-r--r--src/library/scala/reflect/Manifest.scala23
-rwxr-xr-xsrc/library/scala/reflect/NameTransformer.scala3
-rw-r--r--src/library/scala/reflect/base/AnnotationInfos.scala44
-rw-r--r--src/library/scala/reflect/base/Annotations.scala106
-rw-r--r--src/library/scala/reflect/base/Attachments.scala32
-rw-r--r--src/library/scala/reflect/base/Base.scala54
-rw-r--r--src/library/scala/reflect/base/BuildUtils.scala29
-rw-r--r--src/library/scala/reflect/base/Constants.scala16
-rw-r--r--src/library/scala/reflect/base/Exprs.scala77
-rw-r--r--src/library/scala/reflect/base/FlagSets.scala2
-rw-r--r--src/library/scala/reflect/base/MirrorOf.scala23
-rw-r--r--src/library/scala/reflect/base/Mirrors.scala14
-rw-r--r--src/library/scala/reflect/base/Names.scala38
-rw-r--r--src/library/scala/reflect/base/Positions.scala11
-rw-r--r--src/library/scala/reflect/base/Scopes.scala17
-rw-r--r--src/library/scala/reflect/base/StandardDefinitions.scala50
-rw-r--r--src/library/scala/reflect/base/StandardNames.scala4
-rw-r--r--src/library/scala/reflect/base/Symbols.scala43
-rw-r--r--src/library/scala/reflect/base/TagInterop.scala16
-rw-r--r--src/library/scala/reflect/base/TreeCreator.scala20
-rw-r--r--src/library/scala/reflect/base/Trees.scala178
-rw-r--r--src/library/scala/reflect/base/TypeCreator.scala20
-rw-r--r--src/library/scala/reflect/base/TypeTags.scala302
-rw-r--r--src/library/scala/reflect/base/Types.scala33
-rw-r--r--src/library/scala/reflect/base/Universe.scala44
-rw-r--r--src/library/scala/reflect/macros/internal/macroImpl.scala2
-rw-r--r--src/library/scala/reflect/macros/internal/package.scala5
-rw-r--r--src/library/scala/reflect/package.scala69
-rw-r--r--src/library/scala/remote.scala2
-rw-r--r--src/library/scala/runtime/AbstractPartialFunction.scala40
-rw-r--r--src/library/scala/runtime/RichBoolean.scala4
-rw-r--r--src/library/scala/runtime/RichByte.scala5
-rw-r--r--src/library/scala/runtime/RichChar.scala5
-rw-r--r--src/library/scala/runtime/RichDouble.scala9
-rw-r--r--src/library/scala/runtime/RichException.scala2
-rw-r--r--src/library/scala/runtime/RichFloat.scala9
-rw-r--r--src/library/scala/runtime/RichInt.scala4
-rw-r--r--src/library/scala/runtime/RichLong.scala5
-rw-r--r--src/library/scala/runtime/RichShort.scala5
-rw-r--r--src/library/scala/runtime/ScalaNumberProxy.scala21
-rw-r--r--src/library/scala/runtime/ScalaRunTime.scala56
-rw-r--r--src/library/scala/runtime/SeqCharSequence.scala5
-rw-r--r--src/library/scala/runtime/StringAdd.scala10
-rw-r--r--src/library/scala/runtime/StringFormat.scala10
-rw-r--r--src/library/scala/runtime/Tuple2Zipped.scala51
-rw-r--r--src/library/scala/runtime/Tuple3Zipped.scala64
-rw-r--r--src/library/scala/runtime/WorksheetSupport.scala6
-rw-r--r--src/library/scala/specialized.scala4
-rw-r--r--src/library/scala/sys/BooleanProp.scala2
-rw-r--r--src/library/scala/sys/Prop.scala3
-rw-r--r--src/library/scala/sys/SystemProperties.scala2
-rw-r--r--src/library/scala/sys/package.scala4
-rw-r--r--src/library/scala/sys/process/BasicIO.scala2
-rw-r--r--src/library/scala/sys/process/Process.scala2
-rw-r--r--src/library/scala/sys/process/ProcessBuilderImpl.scala4
-rw-r--r--src/library/scala/sys/process/ProcessImpl.scala2
-rw-r--r--src/library/scala/sys/process/package.scala2
-rw-r--r--src/library/scala/testing/Benchmark.scala3
-rw-r--r--src/library/scala/testing/Show.scala1
-rw-r--r--src/library/scala/throws.scala2
-rw-r--r--src/library/scala/transient.scala4
-rw-r--r--src/library/scala/unchecked.scala2
-rw-r--r--src/library/scala/util/Either.scala2
-rw-r--r--src/library/scala/util/MurmurHash.scala2
-rw-r--r--src/library/scala/util/Random.scala6
-rw-r--r--src/library/scala/util/Sorting.scala5
-rw-r--r--src/library/scala/util/Try.scala67
-rw-r--r--src/library/scala/util/automata/SubsetConstruction.scala4
-rw-r--r--src/library/scala/util/control/Exception.scala10
-rw-r--r--src/library/scala/util/control/NoStackTrace.scala3
-rw-r--r--src/library/scala/util/hashing/Hashing.scala15
-rw-r--r--src/library/scala/util/hashing/MurmurHash3.scala30
-rw-r--r--src/library/scala/util/logging/ConsoleLogger.scala1
-rw-r--r--src/library/scala/util/logging/Logged.scala1
-rw-r--r--src/library/scala/util/parsing/ast/Binders.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/ImplicitConversions.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/JavaTokenParsers.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/PackratParsers.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/Parsers.scala6
-rw-r--r--src/library/scala/util/parsing/combinator/RegexParsers.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/testing/RegexTest.scala2
-rw-r--r--src/library/scala/util/parsing/input/OffsetPosition.scala2
-rw-r--r--src/library/scala/volatile.scala4
-rwxr-xr-xsrc/library/scala/xml/Elem.scala2
-rw-r--r--src/library/scala/xml/Equality.scala2
-rw-r--r--src/library/scala/xml/MetaData.scala2
-rw-r--r--src/library/scala/xml/NodeSeq.scala4
-rwxr-xr-xsrc/library/scala/xml/Utility.scala2
-rw-r--r--src/library/scala/xml/dtd/ContentModel.scala6
-rw-r--r--src/library/scala/xml/dtd/ContentModelParser.scala15
-rw-r--r--src/library/scala/xml/dtd/Scanner.scala4
-rw-r--r--src/library/scala/xml/factory/NodeFactory.scala2
-rw-r--r--src/library/scala/xml/include/sax/XIncluder.scala2
-rwxr-xr-xsrc/library/scala/xml/parsing/MarkupParser.scala4
-rw-r--r--src/library/scala/xml/parsing/MarkupParserCommon.scala4
-rwxr-xr-xsrc/library/scala/xml/pull/XMLEventReader.scala2
-rw-r--r--src/partest/scala/tools/partest/PartestDefaults.scala2
-rw-r--r--src/partest/scala/tools/partest/TestUtil.scala12
-rw-r--r--src/partest/scala/tools/partest/nest/FileManager.scala2
-rw-r--r--src/partest/scala/tools/partest/nest/RunnerManager.scala5
-rw-r--r--src/partest/scala/tools/partest/nest/SBTRunner.scala12
-rw-r--r--src/partest/scala/tools/partest/package.scala35
-rw-r--r--src/reflect/scala/reflect/api/AnnotationInfos.scala27
-rw-r--r--src/reflect/scala/reflect/api/Annotations.scala29
-rw-r--r--src/reflect/scala/reflect/api/FlagSets.scala2
-rw-r--r--src/reflect/scala/reflect/api/FrontEnds.scala4
-rw-r--r--src/reflect/scala/reflect/api/Mirrors.scala2
-rw-r--r--src/reflect/scala/reflect/api/Printers.scala2
-rw-r--r--src/reflect/scala/reflect/api/Symbols.scala68
-rw-r--r--src/reflect/scala/reflect/api/Trees.scala9
-rw-r--r--src/reflect/scala/reflect/api/Types.scala14
-rw-r--r--src/reflect/scala/reflect/api/Universe.scala2
-rw-r--r--src/reflect/scala/reflect/internal/AbstractFileApi.scala7
-rw-r--r--src/reflect/scala/reflect/internal/AnnotationInfos.scala87
-rw-r--r--src/reflect/scala/reflect/internal/BaseTypeSeqs.scala4
-rw-r--r--src/reflect/scala/reflect/internal/BuildUtils.scala11
-rw-r--r--src/reflect/scala/reflect/internal/Chars.scala4
-rw-r--r--src/reflect/scala/reflect/internal/ClassfileConstants.scala2
-rw-r--r--src/reflect/scala/reflect/internal/Constants.scala34
-rw-r--r--src/reflect/scala/reflect/internal/Definitions.scala66
-rw-r--r--src/reflect/scala/reflect/internal/FlagSets.scala2
-rw-r--r--src/reflect/scala/reflect/internal/Importers.scala8
-rw-r--r--src/reflect/scala/reflect/internal/Names.scala20
-rw-r--r--src/reflect/scala/reflect/internal/Printers.scala4
-rw-r--r--src/reflect/scala/reflect/internal/Required.scala2
-rw-r--r--src/reflect/scala/reflect/internal/StdAttachments.scala2
-rw-r--r--src/reflect/scala/reflect/internal/StdNames.scala12
-rw-r--r--src/reflect/scala/reflect/internal/SymbolTable.scala2
-rw-r--r--src/reflect/scala/reflect/internal/Symbols.scala139
-rw-r--r--src/reflect/scala/reflect/internal/TreeInfo.scala2
-rw-r--r--src/reflect/scala/reflect/internal/Trees.scala18
-rw-r--r--src/reflect/scala/reflect/internal/Types.scala103
-rw-r--r--src/reflect/scala/reflect/internal/pickling/UnPickler.scala8
-rw-r--r--src/reflect/scala/reflect/internal/transform/Transforms.scala4
-rw-r--r--src/reflect/scala/reflect/internal/util/HashSet.scala2
-rw-r--r--src/reflect/scala/reflect/internal/util/Position.scala8
-rw-r--r--src/reflect/scala/reflect/internal/util/SourceFile.scala20
-rw-r--r--src/reflect/scala/reflect/internal/util/Statistics.scala2
-rw-r--r--src/reflect/scala/reflect/internal/util/TableDef.scala2
-rw-r--r--src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala6
-rw-r--r--src/reflect/scala/reflect/macros/Aliases.scala8
-rw-r--r--src/reflect/scala/reflect/macros/Exprs.scala2
-rw-r--r--src/reflect/scala/reflect/macros/Infrastructure.scala21
-rw-r--r--src/reflect/scala/reflect/macros/Parsers.scala2
-rw-r--r--src/reflect/scala/reflect/macros/Reifiers.scala6
-rw-r--r--src/reflect/scala/reflect/macros/TypeTags.scala2
-rw-r--r--src/reflect/scala/reflect/macros/Universe.scala10
-rw-r--r--src/reflect/scala/reflect/runtime/AbstractFile.scala7
-rw-r--r--src/reflect/scala/reflect/runtime/JavaMirrors.scala208
-rw-r--r--src/reflect/scala/reflect/runtime/JavaUniverse.scala4
-rw-r--r--src/reflect/scala/reflect/runtime/ReflectionUtils.scala4
-rw-r--r--src/reflect/scala/reflect/runtime/SymbolLoaders.scala2
-rw-r--r--src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala12
-rw-r--r--src/reflect/scala/reflect/runtime/TwoWayCache.scala2
-rw-r--r--src/reflect/scala/reflect/runtime/package.scala5
-rw-r--r--src/reflect/scala/tools/nsc/io/AbstractFile.scala2
-rw-r--r--src/reflect/scala/tools/nsc/io/File.scala2
-rw-r--r--src/reflect/scala/tools/nsc/io/Path.scala2
-rw-r--r--src/reflect/scala/tools/nsc/io/Streamable.scala2
-rw-r--r--src/reflect/scala/tools/nsc/io/VirtualFile.scala2
-rw-r--r--src/reflect/scala/tools/nsc/io/ZipArchive.scala8
-rw-r--r--src/scalacheck/org/scalacheck/Commands.scala4
-rw-r--r--src/scalacheck/org/scalacheck/Pretty.scala2
-rw-r--r--src/scalacheck/org/scalacheck/util/CmdLineParser.scala2
-rw-r--r--src/scalap/scala/tools/scalap/CodeWriter.scala2
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala2
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/package.scala8
-rw-r--r--src/swing/scala/swing/package.scala4
589 files changed, 4875 insertions, 3899 deletions
diff --git a/src/actors-migration/scala/actors/Pattern.scala b/src/actors-migration/scala/actors/Pattern.scala
index 26e9d1bb64..fa2db79152 100644
--- a/src/actors-migration/scala/actors/Pattern.scala
+++ b/src/actors-migration/scala/actors/Pattern.scala
@@ -1,7 +1,7 @@
package scala.actors
import scala.concurrent.util.Duration
-import language.implicitConversions
+import scala.language.implicitConversions
object pattern {
@@ -23,4 +23,4 @@ class AskableActorRef(val ar: ActorRef) extends ActorRef {
def forward(message: Any) = ar.forward(message)
private[actors] def localActor: AbstractActor = ar.localActor
-}
\ No newline at end of file
+}
diff --git a/src/actors-migration/scala/actors/StashingActor.scala b/src/actors-migration/scala/actors/StashingActor.scala
index 8f96e1b002..29f370a3e2 100644
--- a/src/actors-migration/scala/actors/StashingActor.scala
+++ b/src/actors-migration/scala/actors/StashingActor.scala
@@ -3,7 +3,7 @@ package scala.actors
import scala.collection._
import scala.concurrent.util.Duration
import java.util.concurrent.TimeUnit
-import language.implicitConversions
+import scala.language.implicitConversions
object StashingActor extends Combinators {
implicit def mkBody[A](body: => A) = new InternalActor.Body[A] {
diff --git a/src/actors-migration/scala/actors/Timeout.scala b/src/actors-migration/scala/actors/Timeout.scala
index 7e400ab140..5540d2880e 100644
--- a/src/actors-migration/scala/actors/Timeout.scala
+++ b/src/actors-migration/scala/actors/Timeout.scala
@@ -10,7 +10,7 @@ package scala.actors
import scala.concurrent.util.Duration
import java.util.concurrent.TimeUnit
-import language.implicitConversions
+import scala.language.implicitConversions
case class Timeout(duration: Duration) {
def this(timeout: Long) = this(Duration(timeout, TimeUnit.MILLISECONDS))
diff --git a/src/actors/scala/actors/AbstractActor.scala b/src/actors/scala/actors/AbstractActor.scala
index dec91859bb..fd11b9a0a8 100644
--- a/src/actors/scala/actors/AbstractActor.scala
+++ b/src/actors/scala/actors/AbstractActor.scala
@@ -8,7 +8,7 @@
package scala.actors
-import language.higherKinds
+import scala.language.higherKinds
/**
* @author Philipp Haller
diff --git a/src/actors/scala/actors/Actor.scala b/src/actors/scala/actors/Actor.scala
index 0b1e7fb1e9..8869165062 100644
--- a/src/actors/scala/actors/Actor.scala
+++ b/src/actors/scala/actors/Actor.scala
@@ -10,7 +10,7 @@ package scala.actors
import scala.util.control.ControlThrowable
import java.util.{Timer, TimerTask}
-import language.implicitConversions
+import scala.language.implicitConversions
/**
* Provides functions for the definition of actors, as well as actor
diff --git a/src/actors/scala/actors/CanReply.scala b/src/actors/scala/actors/CanReply.scala
index 9bf0022247..92ab23dae1 100644
--- a/src/actors/scala/actors/CanReply.scala
+++ b/src/actors/scala/actors/CanReply.scala
@@ -8,7 +8,7 @@
package scala.actors
-import language.higherKinds
+import scala.language.higherKinds
/**
* Defines result-bearing message send operations.
diff --git a/src/actors/scala/actors/Combinators.scala b/src/actors/scala/actors/Combinators.scala
index dd704436fc..aef01b67a8 100644
--- a/src/actors/scala/actors/Combinators.scala
+++ b/src/actors/scala/actors/Combinators.scala
@@ -10,7 +10,7 @@
package scala.actors
-import language.implicitConversions
+import scala.language.implicitConversions
private[actors] trait Combinators {
diff --git a/src/actors/scala/actors/Future.scala b/src/actors/scala/actors/Future.scala
index 735c13190b..fb7bb488a2 100644
--- a/src/actors/scala/actors/Future.scala
+++ b/src/actors/scala/actors/Future.scala
@@ -174,7 +174,7 @@ object Futures {
* or timeout + `System.currentTimeMillis()` is negative.
*/
def awaitAll(timeout: Long, fts: Future[Any]*): List[Option[Any]] = {
- var resultsMap: collection.mutable.Map[Int, Option[Any]] = new collection.mutable.HashMap[Int, Option[Any]]
+ var resultsMap: scala.collection.mutable.Map[Int, Option[Any]] = new scala.collection.mutable.HashMap[Int, Option[Any]]
var cnt = 0
val mappedFts = fts.map(ft =>
diff --git a/src/actors/scala/actors/Reactor.scala b/src/actors/scala/actors/Reactor.scala
index c962bb9d3d..11c910e577 100644
--- a/src/actors/scala/actors/Reactor.scala
+++ b/src/actors/scala/actors/Reactor.scala
@@ -12,7 +12,7 @@ package scala.actors
import scala.actors.scheduler.{DelegatingScheduler, ExecutorScheduler,
ForkJoinScheduler, ThreadPoolConfig}
import java.util.concurrent.{ThreadPoolExecutor, TimeUnit, LinkedBlockingQueue}
-import language.implicitConversions
+import scala.language.implicitConversions
private[actors] object Reactor {
diff --git a/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala b/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
index a7bf8ec2ba..59f4afccc4 100644
--- a/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
+++ b/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
@@ -10,7 +10,7 @@
package scala.actors
package scheduler
-import util.Properties.{ javaVersion, javaVmVendor, isJavaAtLeast, propIsSetTo, propOrNone }
+import scala.util.Properties.{ javaVersion, javaVmVendor, isJavaAtLeast, propIsSetTo, propOrNone }
/**
* @author Erik Engbrecht
diff --git a/src/build/InnerObjectTestGen.scala b/src/build/InnerObjectTestGen.scala
index 5945bc17c7..b66112609c 100644
--- a/src/build/InnerObjectTestGen.scala
+++ b/src/build/InnerObjectTestGen.scala
@@ -1,4 +1,4 @@
-import collection.mutable
+import scala.collection.mutable
/** All contexts where objects can be embedded. */
object Contexts extends Enumeration {
diff --git a/src/build/genprod.scala b/src/build/genprod.scala
index 83a65e6876..b9511c1ad2 100644
--- a/src/build/genprod.scala
+++ b/src/build/genprod.scala
@@ -6,7 +6,7 @@
** |/ **
\* */
-import language.postfixOps
+import scala.language.postfixOps
/** This program generates the ProductN, TupleN, FunctionN,
* and AbstractFunctionN, where 0 <= N <= MAX_ARITY.
diff --git a/src/compiler/scala/reflect/macros/runtime/Aliases.scala b/src/compiler/scala/reflect/macros/runtime/Aliases.scala
index 5e15b61dbd..30e72997f7 100644
--- a/src/compiler/scala/reflect/macros/runtime/Aliases.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Aliases.scala
@@ -17,12 +17,12 @@ trait Aliases {
override type Expr[+T] = universe.Expr[T]
override val Expr = universe.Expr
- override type AbsTypeTag[T] = universe.AbsTypeTag[T]
+ override type WeakTypeTag[T] = universe.WeakTypeTag[T]
override type TypeTag[T] = universe.TypeTag[T]
- override val AbsTypeTag = universe.AbsTypeTag
+ override val WeakTypeTag = universe.WeakTypeTag
override val TypeTag = universe.TypeTag
- override def absTypeTag[T](implicit attag: AbsTypeTag[T]) = attag
+ override def weakTypeTag[T](implicit attag: WeakTypeTag[T]) = attag
override def typeTag[T](implicit ttag: TypeTag[T]) = ttag
- override def absTypeOf[T](implicit attag: AbsTypeTag[T]): Type = attag.tpe
+ override def weakTypeOf[T](implicit attag: WeakTypeTag[T]): Type = attag.tpe
override def typeOf[T](implicit ttag: TypeTag[T]): Type = ttag.tpe
}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/runtime/Evals.scala b/src/compiler/scala/reflect/macros/runtime/Evals.scala
index 348e29cdd7..acafeb5b02 100644
--- a/src/compiler/scala/reflect/macros/runtime/Evals.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Evals.scala
@@ -13,6 +13,6 @@ trait Evals {
def eval[T](expr: Expr[T]): T = {
val imported = evalImporter.importTree(expr.tree)
- evalToolBox.runExpr(imported).asInstanceOf[T]
+ evalToolBox.eval(imported).asInstanceOf[T]
}
}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/runtime/Exprs.scala b/src/compiler/scala/reflect/macros/runtime/Exprs.scala
index 4217a6a404..ebf8fa2b96 100644
--- a/src/compiler/scala/reflect/macros/runtime/Exprs.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Exprs.scala
@@ -4,5 +4,5 @@ package runtime
trait Exprs {
self: Context =>
- def Expr[T: AbsTypeTag](tree: Tree): Expr[T] = universe.Expr[T](mirror, universe.FixedMirrorTreeCreator(mirror, tree))
+ def Expr[T: WeakTypeTag](tree: Tree): Expr[T] = universe.Expr[T](mirror, universe.FixedMirrorTreeCreator(mirror, tree))
}
diff --git a/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala b/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala
index 0a8a8d015d..a8cc61e0f9 100644
--- a/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala
@@ -33,8 +33,4 @@ trait Infrastructure {
}
val currentMacro: Symbol = expandee.symbol
-
- val globalCache: collection.mutable.Map[Any, Any] = universe.analyzer.globalMacroCache
-
- val cache: collection.mutable.Map[Any, Any] = universe.analyzer.perRunMacroCache.getOrElseUpdate(currentMacro, collection.mutable.Map[Any, Any]())
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/macros/runtime/Parsers.scala b/src/compiler/scala/reflect/macros/runtime/Parsers.scala
index 6d89b71f39..5096526fdb 100644
--- a/src/compiler/scala/reflect/macros/runtime/Parsers.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Parsers.scala
@@ -1,7 +1,7 @@
package scala.reflect.macros
package runtime
-import language.existentials
+import scala.language.existentials
import scala.tools.reflect.ToolBox
import scala.tools.reflect.ToolBoxError
@@ -12,7 +12,7 @@ trait Parsers {
// todo. provide decent implementation
try {
import scala.reflect.runtime.{universe => ru}
- val parsed = ru.rootMirror.mkToolBox().parseExpr(code)
+ val parsed = ru.rootMirror.mkToolBox().parse(code)
val importer = universe.mkImporter(ru)
importer.importTree(parsed)
} catch {
@@ -22,4 +22,4 @@ trait Parsers {
case class ParseError(val pos: Position, val msg: String) extends Throwable(msg)
object ParseError extends ParseErrorExtractor
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/macros/runtime/TypeTags.scala b/src/compiler/scala/reflect/macros/runtime/TypeTags.scala
index 2bc2fe6384..19b60159de 100644
--- a/src/compiler/scala/reflect/macros/runtime/TypeTags.scala
+++ b/src/compiler/scala/reflect/macros/runtime/TypeTags.scala
@@ -4,6 +4,6 @@ package runtime
trait TypeTags {
self: Context =>
- def AbsTypeTag[T](tpe: Type): AbsTypeTag[T] = universe.AbsTypeTag[T](mirror, universe.FixedMirrorTypeCreator(mirror, tpe))
+ def WeakTypeTag[T](tpe: Type): WeakTypeTag[T] = universe.WeakTypeTag[T](mirror, universe.FixedMirrorTypeCreator(mirror, tpe))
def TypeTag[T](tpe: Type): TypeTag[T] = universe.TypeTag[T](mirror, universe.FixedMirrorTypeCreator(mirror, tpe))
}
diff --git a/src/compiler/scala/reflect/reify/Errors.scala b/src/compiler/scala/reflect/reify/Errors.scala
index 5e15c5ad3a..73c13901b6 100644
--- a/src/compiler/scala/reflect/reify/Errors.scala
+++ b/src/compiler/scala/reflect/reify/Errors.scala
@@ -27,8 +27,8 @@ trait Errors {
throw new ReificationError(defaultErrorPosition, msg)
}
- def CannotReifyTypeTagHavingUnresolvedTypeParameters(tpe: Type) = {
- val msg = "cannot reify TypeTag having unresolved type parameter %s".format(tpe)
+ def CannotReifyWeakType(details: Any) = {
+ val msg = "cannot create a TypeTag" + details
throw new ReificationError(defaultErrorPosition, msg)
}
diff --git a/src/compiler/scala/reflect/reify/States.scala b/src/compiler/scala/reflect/reify/States.scala
index a01cfe5d74..58455c9f3c 100644
--- a/src/compiler/scala/reflect/reify/States.scala
+++ b/src/compiler/scala/reflect/reify/States.scala
@@ -34,9 +34,11 @@ trait States {
def reificationIsConcrete_=(value: Boolean): Unit = {
_reificationIsConcrete = value
if (!value && concrete) {
- assert(current.isInstanceOf[Type], current)
- val offender = current.asInstanceOf[Type]
- CannotReifyTypeTagHavingUnresolvedTypeParameters(offender)
+ current match {
+ case tpe: Type => CannotReifyWeakType(s" having unresolved type parameter $tpe")
+ case sym: Symbol => CannotReifyWeakType(s" referring to local ${sym.kindString} ${sym.fullName}")
+ case _ => CannotReifyWeakType("")
+ }
}
}
var reifyStack = reifee :: Nil
diff --git a/src/compiler/scala/reflect/reify/Taggers.scala b/src/compiler/scala/reflect/reify/Taggers.scala
index a8523fe686..bc12d383a4 100644
--- a/src/compiler/scala/reflect/reify/Taggers.scala
+++ b/src/compiler/scala/reflect/reify/Taggers.scala
@@ -37,7 +37,7 @@ abstract class Taggers {
}
def materializeTypeTag(universe: Tree, mirror: Tree, tpe: Type, concrete: Boolean): Tree = {
- val tagType = if (concrete) TypeTagClass else AbsTypeTagClass
+ val tagType = if (concrete) TypeTagClass else WeakTypeTagClass
// what we need here is to compose a type BaseUniverse # TypeTag[$tpe]
// to look for an implicit that conforms to this type
// that's why neither appliedType(tagType, List(tpe)) aka TypeRef(TypeTagsClass.thisType, tagType, List(tpe))
@@ -50,7 +50,7 @@ abstract class Taggers {
case success if !success.isEmpty =>
Apply(Select(success, nme.in), List(mirror orElse mkDefaultMirrorRef(c.universe)(universe, c.callsiteTyper)))
case _ =>
- val tagModule = if (concrete) TypeTagModule else AbsTypeTagModule
+ val tagModule = if (concrete) TypeTagModule else WeakTypeTagModule
materializeTag(universe, tpe, tagModule, c.reifyType(universe, mirror, tpe, concrete = concrete))
}
}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
index 5f4296f54f..dec491aabe 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
@@ -50,6 +50,6 @@ trait GenAnnotationInfos {
// if you reify originals of anns, you get SO when trying to reify AnnotatedTypes, so screw it - after all, it's not that important
val reifiedAssocs = ann.assocs map (assoc => scalaFactoryCall(nme.Tuple2, reify(assoc._1), reifyClassfileAnnotArg(assoc._2)))
- mirrorFactoryCall(nme.AnnotationInfo, reify(ann.atp), mkList(reifiedArgs), mkList(reifiedAssocs))
+ mirrorFactoryCall(nme.Annotation, reify(ann.atp), mkList(reifiedArgs), mkListMap(reifiedAssocs))
}
}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
index ca6e14cfd3..22a834d2e4 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
@@ -90,49 +90,61 @@ trait GenSymbols {
}
} else {
// todo. make sure that free methods and free local defs work correctly
- if (sym.isTerm) reifyFreeTerm(sym, Ident(sym))
- else reifyFreeType(sym, Ident(sym))
+ if (sym.isExistential) reifySymDef(sym)
+ else if (sym.isTerm) reifyFreeTerm(Ident(sym))
+ else reifyFreeType(Ident(sym))
}
}
- def reifyFreeTerm(sym: Symbol, value: Tree): Tree =
- reifyIntoSymtab(sym) {
+ def reifyFreeTerm(binding: Tree): Tree =
+ reifyIntoSymtab(binding.symbol) { sym =>
if (reifyDebug) println("Free term" + (if (sym.isCapturedVariable) " (captured)" else "") + ": " + sym + "(" + sym.accurateKindString + ")")
- var name = newTermName(nme.REIFY_FREE_PREFIX + sym.name)
- if (sym.isType) name = name.append(nme.REIFY_FREE_THIS_SUFFIX)
+ val name = newTermName(nme.REIFY_FREE_PREFIX + sym.name + (if (sym.isType) nme.REIFY_FREE_THIS_SUFFIX else ""))
if (sym.isCapturedVariable) {
- assert(value.isInstanceOf[Ident], showRaw(value))
- val capturedTpe = capturedVariableType(sym)
- val capturedValue = referenceCapturedVariable(sym)
- (name, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), reify(capturedTpe), capturedValue, mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
+ assert(binding.isInstanceOf[Ident], showRaw(binding))
+ val capturedBinding = referenceCapturedVariable(sym)
+ Reification(name, capturedBinding, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), capturedBinding, mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
} else {
- (name, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), reify(sym.tpe), value, mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
+ Reification(name, binding, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), binding, mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
}
}
- def reifyFreeType(sym: Symbol, value: Tree): Tree =
- reifyIntoSymtab(sym) {
+ def reifyFreeType(binding: Tree): Tree =
+ reifyIntoSymtab(binding.symbol) { sym =>
if (reifyDebug) println("Free type: %s (%s)".format(sym, sym.accurateKindString))
- var name = newTermName(nme.REIFY_FREE_PREFIX + sym.name)
- val phantomTypeTag = Apply(TypeApply(Select(Ident(nme.UNIVERSE_SHORT), nme.TypeTag), List(value)), List(Literal(Constant(null)), Literal(Constant(null))))
- val flavor = if (sym.isExistential) nme.newFreeExistential else nme.newFreeType
- (name, mirrorBuildCall(flavor, reify(sym.name.toString), reify(sym.info), phantomTypeTag, mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
+ state.reificationIsConcrete = false
+ val name = newTermName(nme.REIFY_FREE_PREFIX + sym.name)
+ Reification(name, binding, mirrorBuildCall(nme.newFreeType, reify(sym.name.toString), mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
}
def reifySymDef(sym: Symbol): Tree =
- reifyIntoSymtab(sym) {
+ reifyIntoSymtab(sym) { sym =>
if (reifyDebug) println("Sym def: %s (%s)".format(sym, sym.accurateKindString))
- assert(!sym.isLocatable, sym) // if this assertion fires, then tough type reification needs to be rethought
- sym.owner.ownersIterator find (!_.isLocatable) foreach reifySymDef
- var name = newTermName(nme.REIFY_SYMDEF_PREFIX + sym.name)
- (name, mirrorBuildCall(nme.newNestedSymbol, reify(sym.owner), reify(sym.name), reify(sym.pos), mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(sym.isClass)))
+ val name = newTermName(nme.REIFY_SYMDEF_PREFIX + sym.name)
+ def reifiedOwner = if (sym.owner.isLocatable) reify(sym.owner) else reifySymDef(sym.owner)
+ Reification(name, Ident(sym), mirrorBuildCall(nme.newNestedSymbol, reifiedOwner, reify(sym.name), reify(sym.pos), mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(sym.isClass)))
}
- private def reifyIntoSymtab(sym: Symbol)(reificode: => (TermName, Tree)): Tree ={
+ case class Reification(name: Name, binding: Tree, tree: Tree)
+
+ private def reifyIntoSymtab(sym: Symbol)(reificode: Symbol => Reification): Tree = {
def fromSymtab = symtab symRef sym
if (fromSymtab == EmptyTree) {
- val reification = reificode
- state.symtab += (sym, reification._1, reification._2)
+ // reification is lazy, so that we can carefully choose where to evaluate it
+ // and we choose this place to be exactly here:
+ //
+ // reasons:
+ // 1) reification happens at maximum once per symbol to prevent repeated reifications
+ // 2) reification happens before putting the symbol itself into the symbol table to ensure correct initialization order:
+ // for example, if reification of symbol A refers to reification of symbol B
+ // (this might happen when we're doing `reifySymDef`, which expands into `newNestedSymbol`, which needs `sym.owner`)
+ // then we have to put reification-B into the symbol table before reification-A
+ // so that subsequent code generation that traverses the symbol table in the first-added first-codegenned order
+ // produces valid Scala code (with vals in a block depending only on lexically preceding vals)
+ val reification = reificode(sym)
+ import reification.{name, binding}
+ val tree = reification.tree updateAttachment ReifyBindingAttachment(binding)
+ state.symtab += (sym, name, tree)
}
fromSymtab
}
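The hunk above threads `reificode` as a function and evaluates it just before inserting the symbol into the table, so any reification it triggers for a dependency (for example the owner in `reifySymDef`) lands in the table first. A minimal standalone sketch of that ordering idea, with illustrative names only (none of these are the reifier's real APIs):

    import scala.collection.mutable.LinkedHashMap

    // Toy model: a definition is computed (possibly registering its
    // dependencies first) *before* it is inserted, so rendering the table
    // in insertion order yields vals that refer only to earlier vals.
    object OrderedDefs {
      private val table = LinkedHashMap[String, String]()

      def define(name: String)(compute: String => String): String = {
        if (!table.contains(name)) {
          val rhs = compute(name) // may recursively define owners first
          table += name -> rhs    // inserted only after its dependencies
        }
        name
      }

      def render: List[String] = table.toList.map { case (n, rhs) => s"val $n = $rhs" }
    }

    // Usage: defining "b" first pulls in "a", so "a" precedes "b" in the output.
    //   OrderedDefs.define("b")(_ => s"combine(${OrderedDefs.define("a")(_ => "mkA()")})")
    //   OrderedDefs.render == List("val a = mkA()", "val b = combine(a)")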
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
index f48df8df65..bdcc7383b0 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
@@ -124,7 +124,7 @@ trait GenTrees {
val sym = tree.symbol
if (reifyDebug) println("This for %s, reified as freeVar".format(sym))
if (reifyDebug) println("Free: " + sym)
- mirrorBuildCall(nme.Ident, reifyFreeTerm(sym, This(sym)))
+ mirrorBuildCall(nme.Ident, reifyFreeTerm(This(sym)))
case tree @ This(_) if !tree.symbol.isLocalToReifee =>
if (reifyDebug) println("This for %s, reified as This".format(tree.symbol))
mirrorBuildCall(nme.This, reify(tree.symbol))
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
index 1d2e177688..7aa87dc2f8 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
@@ -73,7 +73,7 @@ trait GenTypes {
if (tpe.isSpliceable && !(quantified contains tpe.typeSymbol)) {
if (reifyDebug) println("splicing " + tpe)
- val tagFlavor = if (concrete) tpnme.TypeTag.toString else tpnme.AbsTypeTag.toString
+ val tagFlavor = if (concrete) tpnme.TypeTag.toString else tpnme.WeakTypeTag.toString
val key = (tagFlavor, tpe.typeSymbol)
// if this fails, it might produce the dreaded "erroneous or inaccessible type" error
// to find out the whereabouts of the error run scalac with -Ydebug
diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
index 2b7733fb6c..49877b4286 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
@@ -70,6 +70,9 @@ trait GenUtils {
def mkList(args: List[Tree]): Tree =
scalaFactoryCall("collection.immutable.List", args: _*)
+ def mkListMap(args: List[Tree]): Tree =
+ scalaFactoryCall("collection.immutable.ListMap", args: _*)
+
/**
* An (unreified) path that refers to definition with given fully qualified name
* @param mkName Creator for last portion of name (either TermName or TypeName)
@@ -131,7 +134,7 @@ trait GenUtils {
def isCrossStageTypeBearer(tree: Tree): Boolean = tree match {
case TypeApply(hk, _) => isCrossStageTypeBearer(hk)
- case Select(sym @ Select(_, ctor), nme.apply) if ctor == nme.AbsTypeTag || ctor == nme.TypeTag || ctor == nme.Expr => true
+ case Select(sym @ Select(_, ctor), nme.apply) if ctor == nme.WeakTypeTag || ctor == nme.TypeTag || ctor == nme.Expr => true
case _ => false
}
diff --git a/src/compiler/scala/reflect/reify/package.scala b/src/compiler/scala/reflect/reify/package.scala
index a253effc1c..a76f147dc4 100644
--- a/src/compiler/scala/reflect/reify/package.scala
+++ b/src/compiler/scala/reflect/reify/package.scala
@@ -1,6 +1,6 @@
package scala.reflect
-import language.implicitConversions
+import scala.language.implicitConversions
import scala.reflect.base.{Universe => BaseUniverse}
import scala.reflect.macros.{Context, ReificationError, UnexpectedReificationError}
import scala.tools.nsc.Global
@@ -26,7 +26,14 @@ package object reify {
private[reify] def mkDefaultMirrorRef(global: Global)(universe: global.Tree, typer0: global.analyzer.Typer): global.Tree = {
import global._
import definitions._
- val enclosingErasure = reifyEnclosingRuntimeClass(global)(typer0)
+ val enclosingErasure = {
+ val rClassTree = reifyEnclosingRuntimeClass(global)(typer0)
+ // HACK around SI-6259
+ // If we're in the constructor of an object, or otherwise don't have easy access to `this`, we have no good way to grab
+ // the class of that object. Instead, we construct an anonymous class and grab its class, assuming
+ // this is enough to get the correct class loader for the class we *want* a mirror for, the object itself.
+ rClassTree orElse Apply(Select(treeBuilder.makeAnonymousNew(Nil), sn.GetClass), Nil)
+ }
// JavaUniverse is defined in scala-reflect.jar, so we must be very careful in case someone reifies stuff having only scala-library.jar on the classpath
val isJavaUniverse = JavaUniverseClass != NoSymbol && universe.tpe <:< JavaUniverseClass.toTypeConstructor
if (isJavaUniverse && !enclosingErasure.isEmpty) Apply(Select(universe, nme.runtimeMirror), List(Select(enclosingErasure, sn.GetClassLoader)))
@@ -61,6 +68,8 @@ package object reify {
}
}
+ // Note: If the current context is inside the constructor of an object, or otherwise not inside
+ // a class/object body, this will return EmptyTree.
def reifyEnclosingRuntimeClass(global: Global)(typer0: global.analyzer.Typer): global.Tree = {
import global._
import definitions._
@@ -68,8 +77,15 @@ package object reify {
if (isThisInScope) {
val enclosingClasses = typer0.context.enclosingContextChain map (_.tree) collect { case classDef: ClassDef => classDef }
val classInScope = enclosingClasses.headOption getOrElse EmptyTree
+ def isUnsafeToUseThis = {
+ val isInsideConstructorSuper = typer0.context.enclosingContextChain exists (_.inSelfSuperCall)
+ // Note: It's ok to check for any object here, because if we were in an enclosing class, we'd already have returned its classOf
+ val isInsideObject = typer0.context.enclosingContextChain map (_.tree) exists { case _: ModuleDef => true; case _ => false }
+ isInsideConstructorSuper && isInsideObject
+ }
if (!classInScope.isEmpty) reifyRuntimeClass(global)(typer0, classInScope.symbol.toTypeConstructor, concrete = true)
- else Select(This(tpnme.EMPTY), sn.GetClass)
+ else if(!isUnsafeToUseThis) Select(This(tpnme.EMPTY), sn.GetClass)
+ else EmptyTree
} else EmptyTree
}
}
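At the source level, the fallback generated by the SI-6259 workaround boils down to instantiating an anonymous class where the mirror is needed and taking that class's loader. A plain-Scala illustration of the trick (hand-written, not compiler output):

    // An anonymous class created here is loaded by the same classloader as the
    // surrounding code, so its getClass is a usable stand-in when `this.getClass`
    // cannot be referenced yet (e.g. inside an object's constructor super call).
    object ClassLoaderProbe {
      def enclosingLoader(): ClassLoader = (new AnyRef { () }).getClass.getClassLoader
    }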
diff --git a/src/compiler/scala/reflect/reify/phases/Metalevels.scala b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
index 1624bbe951..fbbd12a42f 100644
--- a/src/compiler/scala/reflect/reify/phases/Metalevels.scala
+++ b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
@@ -102,7 +102,7 @@ trait Metalevels {
*/
val metalevels = new Transformer {
var insideSplice = false
- var inlineableBindings = collection.mutable.Map[TermName, Tree]()
+ var inlineableBindings = scala.collection.mutable.Map[TermName, Tree]()
def withinSplice[T](op: => T) = {
val old = insideSplice
@@ -147,4 +147,4 @@ trait Metalevels {
super.transform(tree)
}
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala
index fcf3c0e65c..baeea8cd9d 100644
--- a/src/compiler/scala/reflect/reify/phases/Reshape.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala
@@ -103,8 +103,8 @@ trait Reshape {
// of, say, ClassTag or TypeTag
case Apply(TypeApply(_, List(tt)), _) if original.symbol == MacroInternal_materializeClassTag =>
gen.mkNullaryCall(Predef_implicitly, List(appliedType(ClassTagClass, tt.tpe)))
- case Apply(TypeApply(_, List(tt)), List(pre)) if original.symbol == MacroInternal_materializeAbsTypeTag =>
- gen.mkNullaryCall(Predef_implicitly, List(typeRef(pre.tpe, AbsTypeTagClass, List(tt.tpe))))
+ case Apply(TypeApply(_, List(tt)), List(pre)) if original.symbol == MacroInternal_materializeWeakTypeTag =>
+ gen.mkNullaryCall(Predef_implicitly, List(typeRef(pre.tpe, WeakTypeTagClass, List(tt.tpe))))
case Apply(TypeApply(_, List(tt)), List(pre)) if original.symbol == MacroInternal_materializeTypeTag =>
gen.mkNullaryCall(Predef_implicitly, List(typeRef(pre.tpe, TypeTagClass, List(tt.tpe))))
case _ =>
@@ -250,7 +250,7 @@ trait Reshape {
private def trimAccessors(deff: Tree, stats: List[Tree]): List[Tree] = {
val symdefs = (stats collect { case vodef: ValOrDefDef => vodef } map (vodeff => vodeff.symbol -> vodeff)).toMap
- val accessors = collection.mutable.Map[ValDef, List[DefDef]]()
+ val accessors = scala.collection.mutable.Map[ValDef, List[DefDef]]()
stats collect { case ddef: DefDef => ddef } foreach (defdef => {
val valdef = symdefs get defdef.symbol.accessedOrSelf collect { case vdef: ValDef => vdef } getOrElse null
if (valdef != null) accessors(valdef) = accessors.getOrElse(valdef, Nil) :+ defdef
@@ -323,4 +323,4 @@ trait Reshape {
isSynthetic && isCaseCompanion
}))
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/utils/Extractors.scala b/src/compiler/scala/reflect/reify/utils/Extractors.scala
index ebe3957e69..1df9efbb82 100644
--- a/src/compiler/scala/reflect/reify/utils/Extractors.scala
+++ b/src/compiler/scala/reflect/reify/utils/Extractors.scala
@@ -94,7 +94,7 @@ trait Extractors {
object ReifiedTree {
def apply(universe: Tree, mirror: Tree, symtab: SymbolTable, rtree: Tree, tpe: Type, rtpe: Tree, concrete: Boolean): Tree = {
- val tagFactory = if (concrete) nme.TypeTag else nme.AbsTypeTag
+ val tagFactory = if (concrete) nme.TypeTag else nme.WeakTypeTag
val tagCtor = TypeApply(Select(Select(Ident(nme.UNIVERSE_SHORT), tagFactory), nme.apply), List(TypeTree(tpe)))
val exprCtor = TypeApply(Select(Select(Ident(nme.UNIVERSE_SHORT), nme.Expr), nme.apply), List(TypeTree(tpe)))
val tagArgs = List(Ident(nme.MIRROR_SHORT), mkCreator(tpnme.REIFY_TYPECREATOR_PREFIX, symtab, rtpe))
@@ -122,7 +122,7 @@ trait Extractors {
object ReifiedType {
def apply(universe: Tree, mirror: Tree, symtab: SymbolTable, tpe: Type, rtpe: Tree, concrete: Boolean) = {
- val tagFactory = if (concrete) nme.TypeTag else nme.AbsTypeTag
+ val tagFactory = if (concrete) nme.TypeTag else nme.WeakTypeTag
val ctor = TypeApply(Select(Select(Ident(nme.UNIVERSE_SHORT), tagFactory), nme.apply), List(TypeTree(tpe)))
val args = List(Ident(nme.MIRROR_SHORT), mkCreator(tpnme.REIFY_TYPECREATOR_PREFIX, symtab, rtpe))
val unwrapped = Apply(ctor, args)
@@ -176,12 +176,11 @@ trait Extractors {
List(
_,
_,
- binding,
Apply(Select(Select(uref2 @ Ident(_), build2), flagsFromBits), List(Literal(Constant(flags: Long)))),
Literal(Constant(origin: String)))))
if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && newFreeTerm == nme.newFreeTerm &&
uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
- Some(uref1, name, binding, flags, origin)
+ Some(uref1, name, reifyBinding(tree), flags, origin)
case _ =>
None
}
@@ -194,22 +193,11 @@ trait Extractors {
Select(Select(uref1 @ Ident(_), build1), newFreeType),
List(
_,
- _,
- value,
Apply(Select(Select(uref2 @ Ident(_), build2), flagsFromBits), List(Literal(Constant(flags: Long)))),
Literal(Constant(origin: String)))))
- if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && (newFreeType == nme.newFreeType || newFreeType == nme.newFreeExistential) &&
+ if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && newFreeType == nme.newFreeType &&
uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
- value match {
- case Apply(TypeApply(Select(Select(uref3 @ Ident(_), typeTag), apply), List(binding)), List(Literal(Constant(null)), _))
- if uref3.name == nme.UNIVERSE_SHORT && typeTag == nme.TypeTag && apply == nme.apply =>
- Some(uref1, name, binding, flags, origin)
- case Apply(TypeApply(Select(uref3 @ Ident(_), typeTag), List(binding)), List(Literal(Constant(null)), _))
- if uref3.name == nme.UNIVERSE_SHORT && typeTag == nme.TypeTag =>
- Some(uref1, name, binding, flags, origin)
- case _ =>
- throw new Error("unsupported free type def: %s%n%s".format(value, showRaw(value)))
- }
+ Some(uref1, name, reifyBinding(tree), flags, origin)
case _ =>
None
}
diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
index ec1f132c1b..b2999c3c1c 100644
--- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
+++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
@@ -40,7 +40,7 @@ trait NodePrinters {
})
s = s.replace("Modifiers(0L, newTypeName(\"\"), List())", "Modifiers()")
s = """Modifiers\((\d+)[lL], newTypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => {
- val buf = new collection.mutable.ListBuffer[String]
+ val buf = new scala.collection.mutable.ListBuffer[String]
val annotations = m.group(3)
if (buf.nonEmpty || annotations != "")
@@ -73,10 +73,10 @@ trait NodePrinters {
s.trim
})
- val printout = collection.mutable.ListBuffer[String]();
+ val printout = scala.collection.mutable.ListBuffer[String]();
printout += universe.trim
if (mirrorIsUsed) printout += mirror.replace("MirrorOf[", "scala.reflect.base.MirrorOf[").trim
- val imports = collection.mutable.ListBuffer[String]();
+ val imports = scala.collection.mutable.ListBuffer[String]();
imports += nme.UNIVERSE_SHORT
// if (buildIsUsed) imports += nme.build
if (mirrorIsUsed) imports += nme.MIRROR_SHORT
@@ -94,7 +94,7 @@ trait NodePrinters {
if (isExpr) {
if (mirror contains ".getClassLoader") {
printout += "import scala.tools.reflect.ToolBox"
- printout += s"println(${nme.MIRROR_SHORT}.mkToolBox().runExpr(tree))"
+ printout += s"println(${nme.MIRROR_SHORT}.mkToolBox().eval(tree))"
} else {
printout += "println(tree)"
}
diff --git a/src/compiler/scala/reflect/reify/utils/StdAttachments.scala b/src/compiler/scala/reflect/reify/utils/StdAttachments.scala
index abbed814e0..0b9cf58c89 100644
--- a/src/compiler/scala/reflect/reify/utils/StdAttachments.scala
+++ b/src/compiler/scala/reflect/reify/utils/StdAttachments.scala
@@ -6,7 +6,13 @@ trait StdAttachments {
import global._
- case class ReifyBindingAttachment(binding: Symbol)
+ case class ReifyBindingAttachment(binding: Tree)
- case class ReifyAliasAttachment(binding: Symbol, alias: TermName)
+ def reifyBinding(tree: Tree): Tree =
+ tree.attachments.get[ReifyBindingAttachment] match {
+ case Some(ReifyBindingAttachment(binding)) => binding
+ case other => Ident(NoSymbol)
+ }
+
+ case class ReifyAliasAttachment(sym: Symbol, alias: TermName)
} \ No newline at end of file
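The `reifyBinding` helper added here reads a type-keyed attachment off a tree and falls back to a sentinel when it is absent. A self-contained sketch of that lookup pattern, using a toy attachment store rather than the compiler's real attachments API (all names below are illustrative):

    import scala.reflect.ClassTag

    // Minimal type-indexed store: one value per runtime class, looked up by type.
    final class Atts private (store: Map[Class[_], Any]) {
      def updated[T: ClassTag](value: T): Atts =
        new Atts(store.updated(implicitly[ClassTag[T]].runtimeClass, value))
      def get[T: ClassTag]: Option[T] =
        store.get(implicitly[ClassTag[T]].runtimeClass).map(_.asInstanceOf[T])
    }
    object Atts { val empty = new Atts(Map.empty) }

    object BindingLookup {
      case class BindingAtt(binding: String)

      // Mirrors the shape of reifyBinding: return the attached binding, or a sentinel.
      def bindingOf(atts: Atts, sentinel: String): String =
        atts.get[BindingAtt].map(_.binding).getOrElse(sentinel)
    }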
diff --git a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
index 3892c86dd3..3ec43c863d 100644
--- a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
+++ b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
@@ -17,6 +17,7 @@ trait SymbolTables {
private[SymbolTable] val original: Option[List[Tree]] = None) {
def syms: List[Symbol] = symtab.keys.toList
+ def isConcrete: Boolean = symtab.values forall (sym => !FreeTypeDef.unapply(sym).isDefined)
// def aliases: Map[Symbol, List[TermName]] = aliases.distinct groupBy (_._1) mapValues (_ map (_._2))
@@ -45,28 +46,27 @@ trait SymbolTables {
def symRef(sym: Symbol): Tree =
symtab.get(sym) match {
- case Some(FreeDef(_, name, _, _, _)) => Ident(name) addAttachment ReifyBindingAttachment(sym)
- case Some(SymDef(_, name, _, _)) => Ident(name) addAttachment ReifyBindingAttachment(sym)
+ case Some(FreeDef(_, name, binding, _, _)) => Ident(name) updateAttachment binding
+ case Some(SymDef(_, name, _, _)) => Ident(name) updateAttachment ReifyBindingAttachment(Ident(sym))
case None => EmptyTree
}
def +(sym: Symbol, name: TermName, reification: Tree): SymbolTable = add(sym, name, reification)
- def +(sym: Symbol, name: TermName): SymbolTable = add(sym, name)
def +(symDef: Tree): SymbolTable = add(symDef)
def ++(symDefs: TraversableOnce[Tree]): SymbolTable = (this /: symDefs)((symtab, symDef) => symtab.add(symDef))
def ++(symtab: SymbolTable): SymbolTable = { val updated = this ++ symtab.symtab.values; new SymbolTable(updated.symtab, updated.aliases ++ symtab.aliases) }
def -(sym: Symbol): SymbolTable = remove(sym)
def -(name: TermName): SymbolTable = remove(name)
- def -(symDef: Tree): SymbolTable = remove(binding(symDef))
+ def -(symDef: Tree): SymbolTable = remove(reifyBinding(symDef).symbol)
def --(syms: GenTraversableOnce[Symbol]): SymbolTable = (this /: syms)((symtab, sym) => symtab.remove(sym))
def --(names: Iterable[TermName]): SymbolTable = (this /: names)((symtab, name) => symtab.remove(name))
- def --(symDefs: TraversableOnce[Tree]): SymbolTable = this -- (symDefs map (binding(_)))
+ def --(symDefs: TraversableOnce[Tree]): SymbolTable = this -- (symDefs map (reifyBinding(_)))
def --(symtab: SymbolTable): SymbolTable = { val updated = this -- symtab.symtab.values; new SymbolTable(updated.symtab, updated.aliases diff symtab.aliases) }
def filterSyms(p: Symbol => Boolean): SymbolTable = this -- (syms filterNot p)
def filterAliases(p: (Symbol, TermName) => Boolean): SymbolTable = this -- (aliases filterNot (tuple => p(tuple._1, tuple._2)) map (_._2))
private def add(symDef: Tree): SymbolTable = {
- val sym = binding(symDef)
+ val sym = reifyBinding(symDef).symbol
assert(sym != NoSymbol, showRaw(symDef))
val name = symDef match {
case FreeDef(_, name, _, _, _) => name
@@ -85,7 +85,8 @@ trait SymbolTables {
val fresh = typer.context.unit.fresh
newTermName(fresh.newName(name))
}
- add(ValDef(NoMods, freshName(name0), TypeTree(), reification) addAttachment ReifyBindingAttachment(sym))
+ val bindingAttachment = reification.attachments.get[ReifyBindingAttachment].get
+ add(ValDef(NoMods, freshName(name0), TypeTree(), reification) updateAttachment bindingAttachment)
}
private def add(sym: Symbol, name: TermName): SymbolTable = {
@@ -115,12 +116,6 @@ trait SymbolTables {
new SymbolTable(newSymtab, newAliases)
}
- private def binding(tree: Tree): Symbol =
- tree.attachments.get[ReifyBindingAttachment] match {
- case Some(ReifyBindingAttachment(binding)) => binding
- case other => NoSymbol
- }
-
private val cache = mutable.Map[SymbolTable, List[Tree]]()
def encode: List[Tree] = cache.getOrElseUpdate(this, SymbolTable.encode(this)) map (_.duplicate)
@@ -147,7 +142,7 @@ trait SymbolTables {
def apply(encoded: List[Tree]): SymbolTable = {
var result = new SymbolTable(original = Some(encoded))
encoded foreach (entry => (entry.attachments.get[ReifyBindingAttachment], entry.attachments.get[ReifyAliasAttachment]) match {
- case (Some(ReifyBindingAttachment(sym)), _) => result += entry
+ case (Some(ReifyBindingAttachment(_)), _) => result += entry
case (_, Some(ReifyAliasAttachment(sym, alias))) => result = new SymbolTable(result.symtab, result.aliases :+ (sym, alias))
case _ => // do nothing, this is boilerplate that can easily be recreated by subsequent `result.encode`
})
@@ -169,30 +164,26 @@ trait SymbolTables {
def fillInSymbol(sym: Symbol): Tree = {
if (reifyDebug) println("Filling in: %s (%s)".format(sym, sym.accurateKindString))
- val isFree = currtab.symName(sym) startsWith nme.REIFY_FREE_PREFIX
- if (isFree) {
- if (sym.annotations.isEmpty) EmptyTree
- else Apply(Select(currtab.symRef(sym), nme.setAnnotations), List(reifier.reify(sym.annotations)))
- } else {
- // SI-6204 don't reify signatures for incomplete symbols, because this might lead to cyclic reference errors
- val signature =
- if (sym.isInitialized) {
- if (sym.isCapturedVariable) capturedVariableType(sym)
- else sym.info
- } else NoType
- val rset = reifier.mirrorBuildCall(nme.setTypeSignature, currtab.symRef(sym), reifier.reify(signature))
- // `Symbol.annotations` doesn't initialize the symbol, so we don't need to do anything special here
- // also since we call `sym.info` a few lines above, by now the symbol will be initialized (if possible)
- // so the annotations will be filled in and will be waiting to be reified (unless symbol initialization is prohibited as described above)
- if (sym.annotations.isEmpty) rset
- else reifier.mirrorBuildCall(nme.setAnnotations, rset, reifier.mkList(sym.annotations map reifier.reifyAnnotationInfo))
- }
+ val isFreeTerm = FreeTermDef.unapply(currtab.symDef(sym)).isDefined
+ // SI-6204 don't reify signatures for incomplete symbols, because this might lead to cyclic reference errors
+ val signature =
+ if (sym.isInitialized) {
+ if (sym.isCapturedVariable) capturedVariableType(sym)
+ else if (isFreeTerm) sym.tpe
+ else sym.info
+ } else NoType
+ val rset = reifier.mirrorBuildCall(nme.setTypeSignature, currtab.symRef(sym), reifier.reify(signature))
+ // `Symbol.annotations` doesn't initialize the symbol, so we don't need to do anything special here
+ // also since we call `sym.info` a few lines above, by now the symbol will be initialized (if possible)
+ // so the annotations will be filled in and will be waiting to be reified (unless symbol initialization is prohibited as described above)
+ if (sym.annotations.isEmpty) rset
+ else reifier.mirrorBuildCall(nme.setAnnotations, rset, reifier.mkList(sym.annotations map reifier.reifyAnnotationInfo))
}
// `fillInSymbol` might add symbols to `symtab`, that's why this is done iteratively
var progress = 0
while (progress < cumulativeSymtab.length) {
- val sym = currtab.binding(cumulativeSymtab(progress))
+ val sym = reifyBinding(cumulativeSymtab(progress)).symbol
if (sym != NoSymbol) {
val symtabProgress = currtab.symtab.size
val aliasesProgress = currtab.aliases.length
@@ -207,12 +198,12 @@ trait SymbolTables {
val withAliases = cumulativeSymtab flatMap (entry => {
val result = mutable.ListBuffer[Tree]()
result += entry
- val sym = currtab.binding(entry)
+ val sym = reifyBinding(entry).symbol
if (sym != NoSymbol)
result ++= cumulativeAliases.distinct filter (alias => alias._1 == sym && alias._2 != currtab.symName(sym)) map (alias => {
val canonicalName = currtab.symName(sym)
val aliasName = alias._2
- ValDef(NoMods, aliasName, TypeTree(), Ident(canonicalName)) addAttachment ReifyAliasAttachment(sym, aliasName)
+ ValDef(NoMods, aliasName, TypeTree(), Ident(canonicalName)) updateAttachment ReifyAliasAttachment(sym, aliasName)
})
result.toList
})
diff --git a/src/compiler/scala/tools/ant/ClassloadVerify.scala b/src/compiler/scala/tools/ant/ClassloadVerify.scala
index 33a20f6894..d1d557b9d3 100644
--- a/src/compiler/scala/tools/ant/ClassloadVerify.scala
+++ b/src/compiler/scala/tools/ant/ClassloadVerify.scala
@@ -10,7 +10,7 @@ package scala.tools.ant
import org.apache.tools.ant.Project
import org.apache.tools.ant.types.{Path, Reference}
-import collection.JavaConverters._
+import scala.collection.JavaConverters._
import scala.tools.util.VerifyClass
class ClassloadVerify extends ScalaMatchingTask {
diff --git a/src/compiler/scala/tools/ant/sabbus/Compilers.scala b/src/compiler/scala/tools/ant/sabbus/Compilers.scala
index 843ee043ea..7165474345 100644
--- a/src/compiler/scala/tools/ant/sabbus/Compilers.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Compilers.scala
@@ -11,11 +11,11 @@ package scala.tools.ant.sabbus
import java.net.URL
-object Compilers extends collection.DefaultMap[String, Compiler] {
+object Compilers extends scala.collection.DefaultMap[String, Compiler] {
val debug = false
- private val container = new collection.mutable.HashMap[String, Compiler]
+ private val container = new scala.collection.mutable.HashMap[String, Compiler]
def iterator = container.iterator
diff --git a/src/compiler/scala/tools/cmd/Property.scala b/src/compiler/scala/tools/cmd/Property.scala
index aae5bebcc8..bde7bb8cb8 100644
--- a/src/compiler/scala/tools/cmd/Property.scala
+++ b/src/compiler/scala/tools/cmd/Property.scala
@@ -64,7 +64,7 @@ trait Property extends Reference {
propertiesToOptions(loadProperties(file))
def propertiesToOptions(props: java.util.Properties): List[String] = {
- import collection.JavaConversions._
+ import scala.collection.JavaConversions._
propertiesToOptions(props.toList)
}
def propertiesToOptions(props: List[(String, String)]) = props flatMap propMapper
diff --git a/src/compiler/scala/tools/cmd/Reference.scala b/src/compiler/scala/tools/cmd/Reference.scala
index 77fe01051e..b6c564e9fb 100644
--- a/src/compiler/scala/tools/cmd/Reference.scala
+++ b/src/compiler/scala/tools/cmd/Reference.scala
@@ -6,7 +6,7 @@
package scala.tools
package cmd
-import collection.mutable.ListBuffer
+import scala.collection.mutable.ListBuffer
import nsc.Properties.envOrNone
/** Mixes in the specification trait and uses the vals therein to
diff --git a/src/compiler/scala/tools/cmd/gen/AnyVals.scala b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
index 7842603af7..6d652ffdfe 100644
--- a/src/compiler/scala/tools/cmd/gen/AnyVals.scala
+++ b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
@@ -14,7 +14,7 @@ trait AnyValReps {
sealed abstract class AnyValNum(name: String, repr: Option[String], javaEquiv: String) extends AnyValRep(name,repr,javaEquiv) {
case class Op(val op : String, val doc : String)
-
+
private def companionCoercions(tos: AnyValRep*) = {
tos.toList map (to =>
"""implicit def @javaequiv@2%s(x: @name@): %s = x.to%s""".format(to.javaEquiv, to.name, to.name)
@@ -24,7 +24,7 @@ trait AnyValReps {
def coercionComment = """
/** Language mandated coercions from @name@ to "wider" types.%s
*/""".format(coercionCommentExtra)
-
+
def implicitCoercions: List[String] = {
val coercions = this match {
case B => companionCoercions(S, I, L, F, D)
@@ -247,7 +247,7 @@ trait AnyValReps {
def classDoc = interpolate(classDocTemplate)
def objectDoc = ""
def mkImports = ""
-
+
def mkClass = assemble("final abstract class " + name + " private extends AnyVal", classLines)
def mkObject = assemble("object " + name + " extends AnyValCompanion", objectLines)
def make() = List[String](
@@ -281,7 +281,7 @@ trait AnyValTemplates {
%s
package scala
-import language.implicitConversions
+import scala.language.implicitConversions
""".trim.format(timestampString) + "\n\n")
@@ -341,9 +341,6 @@ final val NaN = @boxed@.NaN
final val PositiveInfinity = @boxed@.POSITIVE_INFINITY
final val NegativeInfinity = @boxed@.NEGATIVE_INFINITY
-@deprecated("use @name@.MinPositiveValue instead", "2.9.0")
-final val Epsilon = MinPositiveValue
-
/** The negative number with the greatest (finite) absolute value which is representable
* by a @name@. Note that it differs from [[java.lang.@name@.MIN_VALUE]], which
* is the smallest positive value representable by a @name@. In Scala that number
diff --git a/src/compiler/scala/tools/cmd/gen/Codegen.scala b/src/compiler/scala/tools/cmd/gen/Codegen.scala
index b94c640f1c..ff3d41c8b7 100644
--- a/src/compiler/scala/tools/cmd/gen/Codegen.scala
+++ b/src/compiler/scala/tools/cmd/gen/Codegen.scala
@@ -6,7 +6,7 @@
package scala.tools.cmd
package gen
-import language.postfixOps
+import scala.language.postfixOps
class Codegen(args: List[String]) extends {
val parsed = CodegenSpec(args: _*)
diff --git a/src/compiler/scala/tools/cmd/package.scala b/src/compiler/scala/tools/cmd/package.scala
index 5be98a460a..8c6716be78 100644
--- a/src/compiler/scala/tools/cmd/package.scala
+++ b/src/compiler/scala/tools/cmd/package.scala
@@ -9,8 +9,8 @@ package object cmd {
def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
// make some language features in this package compile without warning
- implicit def implicitConversions = language.implicitConversions
- implicit def postfixOps = language.postfixOps
+ implicit def implicitConversions = scala.language.implicitConversions
+ implicit def postfixOps = scala.language.postfixOps
private[cmd] def debug(msg: String) = println(msg)
diff --git a/src/compiler/scala/tools/nsc/EvalLoop.scala b/src/compiler/scala/tools/nsc/EvalLoop.scala
index da03419d8a..bd1381faf5 100644
--- a/src/compiler/scala/tools/nsc/EvalLoop.scala
+++ b/src/compiler/scala/tools/nsc/EvalLoop.scala
@@ -5,7 +5,7 @@
package scala.tools.nsc
-import annotation.tailrec
+import scala.annotation.tailrec
import java.io.EOFException
trait EvalLoop {
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 80e9ede271..6fb6b1736b 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -7,13 +7,13 @@ package scala.tools.nsc
import java.io.{ File, FileOutputStream, PrintWriter, IOException, FileNotFoundException }
import java.nio.charset.{ Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException }
-import compat.Platform.currentTime
+import scala.compat.Platform.currentTime
import scala.tools.util.PathResolver
import scala.collection.{ mutable, immutable }
import io.{ SourceReader, AbstractFile, Path }
import reporters.{ Reporter, ConsoleReporter }
import util.{ Exceptional, ClassPath, MergedClassPath, StatisticsInfo, ScalaClassLoader, returning }
-import scala.reflect.internal.util.{ NoPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
+import scala.reflect.internal.util.{ NoPosition, OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
import settings.{ AestheticSettings }
import symtab.{ Flags, SymbolTable, SymbolLoaders, SymbolTrackers }
@@ -29,8 +29,8 @@ import backend.{ ScalaPrimitives, Platform, MSILPlatform, JavaPlatform }
import backend.jvm.{GenJVM, GenASM}
import backend.opt.{ Inliners, InlineExceptionHandlers, ClosureElimination, DeadCodeElimination }
import backend.icode.analysis._
-import language.postfixOps
-import reflect.internal.StdAttachments
+import scala.language.postfixOps
+import scala.reflect.internal.StdAttachments
import scala.reflect.ClassTag
class Global(var currentSettings: Settings, var reporter: Reporter)
@@ -74,11 +74,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def this(settings: Settings) =
this(settings, new ConsoleReporter(settings))
- // fulfilling requirements
- // Renamed AbstractFile to AbstractFileType for backward compatibility:
- // it is difficult for sbt to work around the ambiguity errors which result.
- type AbstractFileType = scala.tools.nsc.io.AbstractFile
-
def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree = gen.mkAttributedQualifier(tpe, termSym)
def picklerPhase: Phase = if (currentRun.isDefined) currentRun.picklerPhase else NoPhase
@@ -101,6 +96,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** Generate ASTs */
type TreeGen = scala.tools.nsc.ast.TreeGen
+ /** Tree generation, usually based on existing symbols. */
override object gen extends {
val global: Global.this.type = Global.this
} with TreeGen {
@@ -108,6 +104,17 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
typer.typed(mkCast(tree, pt))
}
+ /** Trees fresh from the oven, mostly for use by the parser. */
+ object treeBuilder extends {
+ val global: Global.this.type = Global.this
+ } with TreeBuilder {
+ def freshName(prefix: String): Name = freshTermName(prefix)
+ def freshTermName(prefix: String): TermName = currentUnit.freshTermName(prefix)
+ def freshTypeName(prefix: String): TypeName = currentUnit.freshTypeName(prefix)
+ def o2p(offset: Int): Position = new OffsetPosition(currentUnit.source, offset)
+ def r2p(start: Int, mid: Int, end: Int): Position = rangePos(currentUnit.source, start, mid, end)
+ }
+
/** Fold constants */
object constfold extends {
val global: Global.this.type = Global.this
@@ -271,9 +278,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
log("Running operation '%s' after every phase.\n".format(msg) + describeAfterEveryPhase(op))
}
- def shouldLogAtThisPhase = (
- (settings.log.isSetByUser)
- && ((settings.log containsPhase globalPhase) || (settings.log containsPhase phase))
+ override def shouldLogAtThisPhase = settings.log.isSetByUser && (
+ (settings.log containsPhase globalPhase) || (settings.log containsPhase phase)
)
// Over 200 closure objects are eliminated by inlining this.
@inline final def log(msg: => AnyRef) {
@@ -934,7 +940,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
reSync(RootClass, Some(classPath), Some(oldEntries), Some(newEntries), invalidated, failed)
}
}
- def show(msg: String, syms: collection.Traversable[Symbol]) =
+ def show(msg: String, syms: scala.collection.Traversable[Symbol]) =
if (syms.nonEmpty)
informProgress(s"$msg: ${syms map (_.fullName) mkString ","}")
show("invalidated packages", invalidated)
diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
index e69382be21..46cdc6a4a0 100644
--- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala
+++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
@@ -8,7 +8,7 @@ package scala.tools.nsc
import java.io.{ BufferedWriter, FileWriter }
import scala.collection.mutable
-import language.postfixOps
+import scala.language.postfixOps
/**
* PhaseAssembly
diff --git a/src/compiler/scala/tools/nsc/Phases.scala b/src/compiler/scala/tools/nsc/Phases.scala
index d2274b108b..c80be474a6 100644
--- a/src/compiler/scala/tools/nsc/Phases.scala
+++ b/src/compiler/scala/tools/nsc/Phases.scala
@@ -6,8 +6,8 @@
package scala.tools.nsc
import symtab.Flags
-import reflect.internal.util.TableDef
-import language.postfixOps
+import scala.reflect.internal.util.TableDef
+import scala.language.postfixOps
object Phases {
val MaxPhases = 64
diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
index 0b54eda66d..d1faa4d219 100644
--- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
@@ -6,9 +6,9 @@
package scala.tools.nsc
package ast
-import compat.Platform.EOL
+import scala.compat.Platform.EOL
import symtab.Flags._
-import language.postfixOps
+import scala.language.postfixOps
/** The object `nodePrinter` converts the internal tree
* representation to a string.
diff --git a/src/compiler/scala/tools/nsc/ast/Printers.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala
index 885fc3f518..3392b78595 100644
--- a/src/compiler/scala/tools/nsc/ast/Printers.scala
+++ b/src/compiler/scala/tools/nsc/ast/Printers.scala
@@ -10,7 +10,7 @@ import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
import symtab.Flags._
import symtab.SymbolTable
-trait Printers extends reflect.internal.Printers { this: Global =>
+trait Printers extends scala.reflect.internal.Printers { this: Global =>
import treeInfo.{ IsTrue, IsFalse }
diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
index 043834ae55..267a5dcefd 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
@@ -18,7 +18,7 @@ import scala.concurrent.Lock
import scala.text._
import symtab.Flags._
import symtab.SymbolTable
-import language.implicitConversions
+import scala.language.implicitConversions
/**
* Tree browsers can show the AST in a graphical and interactive
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index e90d779885..01bd0bbb06 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -9,7 +9,7 @@ package ast
import PartialFunction._
import symtab.Flags
-import language.implicitConversions
+import scala.language.implicitConversions
/** A DSL for generating scala code. The goal is that the
* code generating code should look a lot like the code it
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index b22681e52b..fc8228f644 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -9,12 +9,12 @@ package ast
import scala.collection.mutable.ListBuffer
import symtab.Flags._
import symtab.SymbolTable
-import language.postfixOps
+import scala.language.postfixOps
/** XXX to resolve: TreeGen only assumes global is a SymbolTable, but
* TreeDSL at the moment expects a Global. Can we get by with SymbolTable?
*/
-abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
+abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
val global: Global
import global._
diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
index e755553e25..9e46155d14 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
@@ -6,8 +6,8 @@
package scala.tools.nsc
package ast
-import reflect.internal.HasFlags
-import reflect.internal.Flags._
+import scala.reflect.internal.HasFlags
+import scala.reflect.internal.Flags._
import symtab._
/** This class ...
@@ -15,7 +15,7 @@ import symtab._
* @author Martin Odersky
* @version 1.0
*/
-abstract class TreeInfo extends reflect.internal.TreeInfo {
+abstract class TreeInfo extends scala.reflect.internal.TreeInfo {
val global: Global
import global._
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 085ce82025..dec7b648ee 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -15,7 +15,7 @@ import scala.reflect.internal.Flags.PRESUPER
import scala.reflect.internal.Flags.TRAIT
import scala.compat.Platform.EOL
-trait Trees extends reflect.internal.Trees { self: Global =>
+trait Trees extends scala.reflect.internal.Trees { self: Global =>
def treeLine(t: Tree): String =
if (t.pos.isDefined && t.pos.isRange) t.pos.lineContent.drop(t.pos.column - 1).take(t.pos.end - t.pos.start + 1)
@@ -48,12 +48,12 @@ trait Trees extends reflect.internal.Trees { self: Global =>
override def isType = definition.isType
}
- /** Array selection <qualifier> . <name> only used during erasure */
+ /** Array selection `<qualifier> . <name>` only used during erasure */
case class SelectFromArray(qualifier: Tree, name: Name, erasure: Type)
extends RefTree with TermTree
- /** Derived value class injection (equivalent to: new C(arg) after easure); only used during erasure
- * The class C is stored as the symbol of the tree node.
+ /** Derived value class injection (equivalent to: `new C(arg)` after erasure); only used during erasure.
+ * The class `C` is stored as a tree attachment.
*/
case class InjectDerivedValue(arg: Tree)
extends SymTree
@@ -178,7 +178,7 @@ trait Trees extends reflect.internal.Trees { self: Global =>
case _ => super.xtraverse(traverser, tree)
}
- trait TreeCopier extends super.TreeCopierOps {
+ trait TreeCopier extends super.InternalTreeCopierOps {
def DocDef(tree: Tree, comment: DocComment, definition: Tree): DocDef
def SelectFromArray(tree: Tree, qualifier: Tree, selector: Name, erasure: Type): SelectFromArray
def InjectDerivedValue(tree: Tree, arg: Tree): InjectDerivedValue
@@ -281,7 +281,7 @@ trait Trees extends reflect.internal.Trees { self: Global =>
val trace = scala.tools.nsc.util.trace when debug
val locals = util.HashSet[Symbol](8)
- val orderedLocals = collection.mutable.ListBuffer[Symbol]()
+ val orderedLocals = scala.collection.mutable.ListBuffer[Symbol]()
def registerLocal(sym: Symbol) {
if (sym != null && sym != NoSymbol) {
if (debug && !(locals contains sym)) orderedLocals append sym
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 17bea7f796..eaee39d7e6 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -280,14 +280,6 @@ self =>
/** whether a non-continuable syntax error has been seen */
private var lastErrorOffset : Int = -1
- object treeBuilder extends TreeBuilder {
- val global: self.global.type = self.global
- def freshName(prefix: String): Name = freshTermName(prefix)
- def freshTermName(prefix: String): TermName = Parser.this.freshTermName(prefix)
- def freshTypeName(prefix: String): TypeName = Parser.this.freshTypeName(prefix)
- def o2p(offset: Int) = Parser.this.o2p(offset)
- def r2p(start: Int, point: Int, end: Int) = Parser.this.r2p(start, point, end)
- }
import treeBuilder.{global => _, _}
/** The types of the context bounds of type parameters of the surrounding class
@@ -404,8 +396,7 @@ self =>
def mainParamType = AppliedTypeTree(Ident(tpnme.Array), List(Ident(tpnme.String)))
def mainParameter = List(ValDef(Modifiers(Flags.PARAM), nme.argv, mainParamType, EmptyTree))
def mainSetArgv = List(ValDef(NoMods, nme.args, TypeTree(), Ident(nme.argv)))
- def mainNew = makeNew(Nil, emptyValDef, stmts, ListOfNil, NoPosition, NoPosition)
- def mainDef = DefDef(NoMods, nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), Block(mainSetArgv, mainNew))
+ def mainDef = DefDef(NoMods, nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), Block(mainSetArgv, makeAnonymousNew(stmts)))
// object Main
def moduleName = newTermName(ScriptRunner scriptMain settings)
@@ -477,7 +468,7 @@ self =>
/* ------------- ERROR HANDLING ------------------------------------------- */
- var assumedClosingParens = collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+ var assumedClosingParens = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
private var inFunReturnType = false
@inline private def fromWithinReturnType[T](body: => T): T = {
@@ -1038,7 +1029,7 @@ self =>
val tok = in.token
val name = ident()
t = atPos(start) {
- if (tok == BACKQUOTED_IDENT) Ident(name) addAttachment BackquotedIdentifierAttachment
+ if (tok == BACKQUOTED_IDENT) Ident(name) updateAttachment BackquotedIdentifierAttachment
else Ident(name)
}
if (in.token == DOT) {
@@ -1302,7 +1293,7 @@ self =>
placeholderParams = placeholderParams ::: savedPlaceholderParams
res
}
-
+
def expr0(location: Int): Tree = (in.token: @scala.annotation.switch) match {
case IF =>
@@ -1366,7 +1357,8 @@ self =>
}
parseDo
case FOR =>
- def parseFor = atPos(in.skipToken()) {
+ val start = in.skipToken()
+ def parseFor = atPos(start) {
val enums =
if (in.token == LBRACE) inBracesOrNil(enumerators())
else inParensOrNil(enumerators())
@@ -1378,7 +1370,11 @@ self =>
makeFor(enums, expr())
}
}
- parseFor
+ def adjustStart(tree: Tree) =
+ if (tree.pos.isRange && start < tree.pos.start)
+ tree setPos tree.pos.withStart(start)
+ else tree
+ adjustStart(parseFor)
case RETURN =>
def parseReturn =
atPos(in.skipToken()) {
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index e6bf43fe93..ba8da3b0ec 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -754,8 +754,12 @@ trait Scanners extends ScannersCommon {
} else {
val isUnclosedLiteral = !isUnicodeEscape && (ch == SU || (!multiLine && (ch == CR || ch == LF)))
if (isUnclosedLiteral) {
- syntaxError(if (!multiLine) "unclosed string literal" else "unclosed multi-line string literal")
- } else {
+ if (multiLine)
+ incompleteInputError("unclosed multi-line string literal")
+ else
+ syntaxError("unclosed string literal")
+ }
+ else {
putChar(ch)
nextRawChar()
getStringPart(multiLine)
@@ -1298,7 +1302,7 @@ trait Scanners extends ScannersCommon {
}
class ParensAnalyzer(unit: CompilationUnit, patches: List[BracePatch]) extends UnitScanner(unit, patches) {
- var balance = collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+ var balance = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
init()
@@ -1313,7 +1317,7 @@ trait Scanners extends ScannersCommon {
var lineCount = 1
var lastOffset = 0
var indent = 0
- val oldBalance = collection.mutable.Map[Int, Int]()
+ val oldBalance = scala.collection.mutable.Map[Int, Int]()
def markBalance() = for ((k, v) <- balance) oldBalance(k) = v
markBalance()
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index 146329183c..9466b7222d 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -7,11 +7,11 @@ package scala.tools.nsc
package ast.parser
import scala.collection.{ mutable, immutable }
-import xml.{ EntityRef, Text }
-import xml.XML.{ xmlns }
+import scala.xml.{ EntityRef, Text }
+import scala.xml.XML.{ xmlns }
import symtab.Flags.MUTABLE
import scala.reflect.internal.util.StringOps.splitWhere
-import language.implicitConversions
+import scala.language.implicitConversions
/** This class builds instance of `Tree` that represent XML.
*
@@ -144,7 +144,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
(buf map convertToTextPat).toList
def parseAttribute(pos: Position, s: String): Tree = {
- val ts = xml.Utility.parseAttributeValue(s) map {
+ val ts = scala.xml.Utility.parseAttributeValue(s) map {
case Text(s) => text(pos, s)
case EntityRef(s) => entityRef(pos, s)
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
index 7e7972f9dd..9ce74b2b17 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc
package ast.parser
-import annotation.switch
+import scala.annotation.switch
/** Common code between JavaTokens and Tokens. Not as much (and not as concrete)
* as one might like because JavaTokens for no clear reason chose new numbers for
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index edf747486a..afafff4a64 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -199,6 +199,15 @@ abstract class TreeBuilder {
}
}
+ /** Creates a tree representing `new Object { stats }`.
+ * To make sure an anonymous subclass of Object is created,
+ * if there are no stats, a unit literal `()` is added to the body.
+ */
+ def makeAnonymousNew(stats: List[Tree]): Tree = {
+ val stats1 = if (stats.isEmpty) List(Literal(Constant(()))) else stats
+ makeNew(Nil, emptyValDef, stats1, ListOfNil, NoPosition, NoPosition)
+ }
+
/** Create positioned tree representing an object creation <new parents { stats }
* @param npos the position of the new
* @param cpos the position of the anonymous class starting with parents
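For a sense of what `makeAnonymousNew` buys the script runner (see the `mainDef` change in Parsers.scala above), the synthesized wrapper now has roughly the following source-level shape; this is a hand-written approximation, not actual compiler output:

    object Main {
      def main(argv: Array[String]): Unit = {
        val args = argv
        // the script statements run inside the constructor of an anonymous class;
        // with no statements, a bare () keeps the template body non-empty
        new AnyRef {
          println("script ran with " + args.length + " args")
        }
      }
    }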
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index 486a43614b..06492e4ac6 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -11,7 +11,7 @@ import scala.collection.{ mutable, immutable }
import mutable.{ ListBuffer, ArrayBuffer }
import scala.reflect.internal.util.{ Position, NoPosition }
import backend.icode.analysis.ProgramPoint
-import language.postfixOps
+import scala.language.postfixOps
trait BasicBlocks {
self: ICodes =>
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index 431802d185..2fa9c076dd 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -13,7 +13,7 @@ import scala.collection.mutable.{ ListBuffer, Buffer }
import scala.tools.nsc.symtab._
import scala.annotation.switch
import PartialFunction._
-import language.postfixOps
+import scala.language.postfixOps
/** This class ...
*
@@ -121,42 +121,26 @@ abstract class GenICode extends SubComponent {
m.native = m.symbol.hasAnnotation(definitions.NativeAttr)
if (!m.isAbstractMethod && !m.native) {
- if (m.symbol.isAccessor && m.symbol.accessed.hasStaticAnnotation) {
- // in companion object accessors to @static fields, we access the static field directly
- val hostClass = m.symbol.owner.companionClass
- val staticfield = hostClass.info.findMember(m.symbol.accessed.name, NoFlags, NoFlags, false)
-
- if (m.symbol.isGetter) {
- ctx1.bb.emit(LOAD_FIELD(staticfield, true) setHostClass hostClass, tree.pos)
- ctx1.bb.closeWith(RETURN(m.returnType))
- } else if (m.symbol.isSetter) {
- ctx1.bb.emit(LOAD_LOCAL(m.locals.head), tree.pos)
- ctx1.bb.emit(STORE_FIELD(staticfield, true), tree.pos)
+ ctx1 = genLoad(rhs, ctx1, m.returnType);
+
+ // reverse the order of the local variables, to match the source-order
+ m.locals = m.locals.reverse
+
+ rhs match {
+ case Block(_, Return(_)) => ()
+ case Return(_) => ()
+ case EmptyTree =>
+ globalError("Concrete method has no definition: " + tree + (
+ if (settings.debug.value) "(found: " + m.symbol.owner.info.decls.toList.mkString(", ") + ")"
+ else "")
+ )
+ case _ => if (ctx1.bb.isEmpty)
+ ctx1.bb.closeWith(RETURN(m.returnType), rhs.pos)
+ else
ctx1.bb.closeWith(RETURN(m.returnType))
- } else assert(false, "unreachable")
- } else {
- ctx1 = genLoad(rhs, ctx1, m.returnType);
-
- // reverse the order of the local variables, to match the source-order
- m.locals = m.locals.reverse
-
- rhs match {
- case Block(_, Return(_)) => ()
- case Return(_) => ()
- case EmptyTree =>
- globalError("Concrete method has no definition: " + tree + (
- if (settings.debug.value) "(found: " + m.symbol.owner.info.decls.toList.mkString(", ") + ")"
- else "")
- )
- case _ =>
- if (ctx1.bb.isEmpty)
- ctx1.bb.closeWith(RETURN(m.returnType), rhs.pos)
- else
- ctx1.bb.closeWith(RETURN(m.returnType))
- }
- if (!ctx1.bb.closed) ctx1.bb.close
- prune(ctx1.method)
}
+ if (!ctx1.bb.closed) ctx1.bb.close
+ prune(ctx1.method)
} else
ctx1.method.setCode(NoCode)
ctx1
@@ -659,16 +643,16 @@ abstract class GenICode extends SubComponent {
} else {
val sym = tree.symbol
val local = ctx.method.addLocal(new Local(sym, toTypeKind(sym.info), false))
-
+
if (rhs == EmptyTree) {
debuglog("Uninitialized variable " + tree + " at: " + (tree.pos));
ctx.bb.emit(getZeroOf(local.kind))
}
-
+
var ctx1 = ctx
if (rhs != EmptyTree)
ctx1 = genLoad(rhs, ctx, local.kind);
-
+
ctx1.bb.emit(STORE_LOCAL(local), tree.pos)
ctx1.scope.add(local)
ctx1.bb.emit(SCOPE_ENTER(local))
@@ -727,10 +711,10 @@ abstract class GenICode extends SubComponent {
ctx1.bb.enterIgnoreMode
generatedType = expectedType
ctx1
- }
+ }
genLoadReturn
- case t @ Try(_, _, _) =>
+ case t @ Try(_, _, _) =>
genLoadTry(t, ctx, generatedType = _)
case Throw(expr) =>
@@ -750,7 +734,7 @@ abstract class GenICode extends SubComponent {
case Object_asInstanceOf => true
case _ => abort("Unexpected type application " + fun + "[sym: " + sym.fullName + "]" + " in: " + tree)
}
-
+
val Select(obj, _) = fun
val l = toTypeKind(obj.tpe)
val r = toTypeKind(targs.head.tpe)
@@ -794,7 +778,7 @@ abstract class GenICode extends SubComponent {
ctx.bb.emit(THIS(ctx.clazz.symbol), tree.pos)
val ctx1 = genLoadArguments(args, fun.symbol.info.paramTypes, ctx)
-
+
ctx1.bb.emit(CALL_METHOD(fun.symbol, invokeStyle), tree.pos)
generatedType =
if (fun.symbol.isConstructor) UNIT
@@ -812,7 +796,7 @@ abstract class GenICode extends SubComponent {
val ctor = fun.symbol
debugassert(ctor.isClassConstructor,
"'new' call to non-constructor: " + ctor.name)
-
+
generatedType = toTypeKind(tpt.tpe)
debugassert(generatedType.isReferenceType || generatedType.isArrayType,
"Non reference type cannot be instantiated: " + generatedType)
@@ -858,7 +842,7 @@ abstract class GenICode extends SubComponent {
ctx1
}
ctx2
-
+
case _ =>
abort("Cannot instantiate " + tpt + " of kind: " + generatedType)
}
@@ -898,51 +882,10 @@ abstract class GenICode extends SubComponent {
generatedType = toTypeKind(fun.symbol.tpe.resultType)
ctx1
- case app @ Apply(fun @ Select(qual, _), args)
- if !ctx.method.symbol.isStaticConstructor
- && fun.symbol.isAccessor && fun.symbol.accessed.hasStaticAnnotation
- && qual.tpe.typeSymbol.orElse(fun.symbol.owner).companionClass != NoSymbol =>
- // bypass the accessor to the companion object and load the static field directly
- // this bypass is not done:
- // - if the static intializer for the static field itself
- // - if there is no companion class of the object owner - this happens in the REPL
- def genLoadApply5 = {
- val sym = fun.symbol
- generatedType = toTypeKind(sym.accessed.info)
- val hostOwner = qual.tpe.typeSymbol.orElse(sym.owner)
- val hostClass = hostOwner.companionClass
- val staticfield = hostClass.info.findMember(sym.accessed.name, NoFlags, NoFlags, false) orElse {
- if (!currentRun.compiles(hostOwner)) {
- // hostOwner was separately compiled -- the static field symbol needs to be recreated in hostClass
- import Flags._
- debuglog("recreating sym.accessed.name: " + sym.accessed.name)
- val objectfield = hostOwner.info.findMember(sym.accessed.name, NoFlags, NoFlags, false)
- val staticfield = hostClass.newVariable(newTermName(sym.accessed.name.toString), tree.pos, STATIC | SYNTHETIC | FINAL) setInfo objectfield.tpe
- staticfield.addAnnotation(definitions.StaticClass)
- hostClass.info.decls enter staticfield
- staticfield
- } else NoSymbol
- }
-
- if (sym.isGetter) {
- ctx.bb.emit(LOAD_FIELD(staticfield, true) setHostClass hostClass, tree.pos)
- ctx
- } else if (sym.isSetter) {
- val ctx1 = genLoadArguments(args, sym.info.paramTypes, ctx)
- ctx1.bb.emit(STORE_FIELD(staticfield, true), tree.pos)
- ctx1.bb.emit(CONSTANT(Constant(false)), tree.pos)
- ctx1
- } else {
- assert(false, "supposedly unreachable")
- ctx
- }
- }
- genLoadApply5
-
case app @ Apply(fun, args) =>
def genLoadApply6 = {
val sym = fun.symbol
-
+
if (sym.isLabel) { // jump to a label
val label = ctx.labels.getOrElse(sym, {
// it is a forward jump, scan for labels
@@ -979,7 +922,7 @@ abstract class GenICode extends SubComponent {
Static(true)
else
Dynamic
-
+
var ctx1 =
if (invokeStyle.hasInstance) {
if (forMSIL && !(invokeStyle.isInstanceOf[SuperCall]) && msil_IsValuetypeInstMethod(sym))
@@ -987,24 +930,26 @@ abstract class GenICode extends SubComponent {
else
genLoadQualifier(fun, ctx)
} else ctx
-
+
ctx1 = genLoadArguments(args, sym.info.paramTypes, ctx1)
val cm = CALL_METHOD(sym, invokeStyle)
-
+
/** In a couple cases, squirrel away a little extra information in the
* CALL_METHOD for use by GenJVM.
*/
fun match {
case Select(qual, _) =>
val qualSym = findHostClass(qual.tpe, sym)
-
- if (qualSym == ArrayClass) cm setTargetTypeKind toTypeKind(qual.tpe)
- else cm setHostClass qualSym
-
- log(
- if (qualSym == ArrayClass) "Stored target type kind " + toTypeKind(qual.tpe) + " for " + sym.fullName
- else s"Set more precise host class for ${sym.fullName} hostClass: $qualSym"
- )
+ if (qualSym == ArrayClass) {
+ val kind = toTypeKind(qual.tpe)
+ cm setTargetTypeKind kind
+ log(s"Stored target type kind for {$sym.fullName} as $kind")
+ }
+ else {
+ cm setHostClass qualSym
+ if (qual.tpe.typeSymbol != qualSym)
+ log(s"Precisified host class for $sym from ${qual.tpe.typeSymbol.fullName} to ${qualSym.fullName}")
+ }
case _ =>
}
ctx1.bb.emit(cm, tree.pos)
@@ -1140,7 +1085,7 @@ abstract class GenICode extends SubComponent {
val elmKind = toTypeKind(tpt.tpe)
generatedType = ARRAY(elmKind)
val elems = _elems.toIndexedSeq
-
+
ctx1.bb.emit(CONSTANT(new Constant(elems.length)), tree.pos)
ctx1.bb.emit(CREATE_ARRAY(elmKind, 1))
// inline array literals
@@ -1163,7 +1108,7 @@ abstract class GenICode extends SubComponent {
val afterCtx = ctx1.newBlock
var caseCtx: Context = null
generatedType = toTypeKind(tree.tpe)
-
+
var targets: List[BasicBlock] = Nil
var tags: List[Int] = Nil
var default: BasicBlock = afterCtx.bb
@@ -1190,7 +1135,7 @@ abstract class GenICode extends SubComponent {
abort("Invalid case statement in switch-like pattern match: " +
tree + " at: " + (tree.pos))
}
-
+
caseCtx = genLoad(body, tmpCtx, generatedType)
// close the block unless it's already been closed by the body, which closes the block if it ends in a jump (which is emitted to have alternatives share their body)
caseCtx.bb.closeWith(JUMP(afterCtx.bb) setPos caze.pos)
@@ -1728,12 +1673,8 @@ abstract class GenICode extends SubComponent {
* backend emits them as static).
* No code is needed for this module symbol.
*/
- for (
- f <- cls.info.decls;
- if !f.isMethod && f.isTerm && !f.isModule && !(f.owner.isModuleClass && f.hasStaticAnnotation)
- ) {
+ for (f <- cls.info.decls ; if !f.isMethod && f.isTerm && !f.isModule)
ctx.clazz addField new IField(f)
- }
}
/**
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
index 1c5c9224f2..4739750daa 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
@@ -231,7 +231,7 @@ trait Linearizers {
val handlersByCovered = m.exh.groupBy(_.covered)
// number of basic blocks covered by the entire try-catch expression
- def size(covered: collection.immutable.Set[BasicBlock]) = {
+ def size(covered: scala.collection.immutable.Set[BasicBlock]) = {
val hs = handlersByCovered(covered)
covered.size + (hs :\ 0)((h, s) => h.blocks.length + s)
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
index 1fcb406e96..63f0ab683b 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
@@ -128,7 +128,7 @@ trait Opcodes { self: ICodes =>
}
/** Clone this instruction. */
- override def clone: Instruction =
+ override def clone(): Instruction =
super.clone.asInstanceOf[Instruction]
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
index d1d8e4a385..df158a29ea 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
@@ -200,7 +200,7 @@ abstract class CopyPropagation {
in(b) = lattice.bottom
out(b) = lattice.bottom
assert(out.contains(b), out)
- log("Added point: " + b)
+ debuglog("CopyAnalysis added point: " + b)
}
m.exh foreach { e =>
in(e.startBlock) = new copyLattice.State(copyLattice.emptyBinding, copyLattice.exceptionHandlerStack);
@@ -531,11 +531,11 @@ abstract class CopyPropagation {
case 0 => ()
case 1 if ctor.tpe.paramTypes.head == ctor.owner.rawowner.tpe =>
// it's an unused outer
- log("considering unused outer at position 0 in " + ctor.tpe.paramTypes)
+ debuglog("considering unused outer at position 0 in " + ctor.tpe.paramTypes)
paramTypes = paramTypes.tail
values = values.tail
case _ =>
- log("giving up on " + ctor + "(diff: " + diff + ")")
+ debuglog("giving up on " + ctor + "(diff: " + diff + ")")
return bindings
}
@@ -566,7 +566,7 @@ abstract class CopyPropagation {
method.blocks map { b =>
"\nIN(%s):\t Bindings: %s".format(b.label, in(b).bindings) +
"\nIN(%s):\t Stack: %s".format(b.label, in(b).stack)
- }
+ }
).mkString
} /* class CopyAnalysis */
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index 6bd3ac5791..31c2077097 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -640,7 +640,7 @@ abstract class TypeFlowAnalysis {
For each of them, its `lastInstruction` (after which no more typeflows are needed) is found.
*/
- def reinit(m: icodes.IMethod, staleOut: List[BasicBlock], inlined: collection.Set[BasicBlock], staleIn: collection.Set[BasicBlock]) {
+ def reinit(m: icodes.IMethod, staleOut: List[BasicBlock], inlined: scala.collection.Set[BasicBlock], staleIn: scala.collection.Set[BasicBlock]) {
if (this.method == null || this.method.symbol != m.symbol) {
init(m)
return
@@ -691,7 +691,7 @@ abstract class TypeFlowAnalysis {
bs foreach enqueue
}
- private def blankOut(blocks: collection.Set[BasicBlock]) {
+ private def blankOut(blocks: scala.collection.Set[BasicBlock]) {
blocks foreach { b =>
in(b) = typeFlowLattice.bottom
out(b) = typeFlowLattice.bottom
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
index 485864d8e3..ef3e82a75a 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
@@ -12,7 +12,7 @@ import scala.tools.nsc.util.ScalaClassLoader
import scala.tools.util.JavapClass
import java.util.jar.{ JarEntry, JarOutputStream, Attributes }
import Attributes.Name
-import language.postfixOps
+import scala.language.postfixOps
/** For the last mile: turning generated bytecode in memory into
* something you can use. Has implementations for writing to class
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index b57f5e86a3..28966eef08 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -154,8 +154,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
inform("[running phase " + name + " on icode]")
if (settings.Xdce.value)
- for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym))
+ for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) {
+ log(s"Optimizer eliminated ${sym.fullNameString}")
icodes.classes -= sym
+ }
// For predictably ordered error messages.
var sortedClasses = classes.values.toList sortBy ("" + _.symbol.fullName)
@@ -1188,9 +1190,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
log(s"No forwarder for non-public member $m")
else {
log("Adding static forwarder for '%s' from %s to '%s'".format(m, jclassName, moduleClass))
- if (m.isAccessor && m.accessed.hasStaticAnnotation) {
- log("@static: accessor " + m + ", accessed: " + m.accessed)
- } else addForwarder(isRemoteClass, jclass, moduleClass, m)
+ addForwarder(isRemoteClass, jclass, moduleClass, m)
}
}
}
@@ -1695,7 +1695,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
jmethod = clinitMethod
jMethodName = CLASS_CONSTRUCTOR_NAME
jmethod.visitCode()
- computeLocalVarsIndex(m)
genCode(m, false, true)
jmethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
jmethod.visitEnd()
@@ -2042,12 +2041,12 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
var isModuleInitialized = false
- val labels: collection.Map[BasicBlock, asm.Label] = mutable.HashMap(linearization map (_ -> new asm.Label()) : _*)
+ val labels: scala.collection.Map[BasicBlock, asm.Label] = mutable.HashMap(linearization map (_ -> new asm.Label()) : _*)
val onePastLast = new asm.Label // token for the mythical instruction past the last instruction in the method being emitted
// maps a BasicBlock b to the Label that corresponds to b's successor in the linearization. The last BasicBlock is mapped to the onePastLast label.
- val linNext: collection.Map[BasicBlock, asm.Label] = {
+ val linNext: scala.collection.Map[BasicBlock, asm.Label] = {
val result = mutable.HashMap.empty[BasicBlock, asm.Label]
var rest = linearization
var prev = rest.head
@@ -2225,7 +2224,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
}
}
- def getMerged(): collection.Map[Local, List[Interval]] = {
+ def getMerged(): scala.collection.Map[Local, List[Interval]] = {
// TODO should but isn't: unbalanced start(s) of scope(s)
val shouldBeEmpty = pending filter { p => val Pair(k, st) = p; st.nonEmpty };
val merged = mutable.Map[Local, List[Interval]]()
@@ -2410,7 +2409,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
import asm.Opcodes
(instr.category: @scala.annotation.switch) match {
- case icodes.localsCat =>
+ case icodes.localsCat =>
def genLocalInstr = (instr: @unchecked) match {
case THIS(_) => jmethod.visitVarInsn(Opcodes.ALOAD, 0)
case LOAD_LOCAL(local) => jcode.load(indexOf(local), local.kind)
@@ -2443,7 +2442,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
}
genLocalInstr
- case icodes.stackCat =>
+ case icodes.stackCat =>
def genStackInstr = (instr: @unchecked) match {
case LOAD_MODULE(module) =>
@@ -2471,7 +2470,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
case icodes.arilogCat => genPrimitive(instr.asInstanceOf[CALL_PRIMITIVE].primitive, instr.pos)
- case icodes.castsCat =>
+ case icodes.castsCat =>
def genCastInstr = (instr: @unchecked) match {
case IS_INSTANCE(tpe) =>
@@ -2501,7 +2500,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
}
genCastInstr
- case icodes.objsCat =>
+ case icodes.objsCat =>
def genObjsInstr = (instr: @unchecked) match {
case BOX(kind) =>
@@ -2521,7 +2520,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
}
genObjsInstr
- case icodes.fldsCat =>
+ case icodes.fldsCat =>
def genFldsInstr = (instr: @unchecked) match {
case lf @ LOAD_FIELD(field, isStatic) =>
@@ -2542,7 +2541,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
}
genFldsInstr
- case icodes.mthdsCat =>
+ case icodes.mthdsCat =>
def genMethodsInstr = (instr: @unchecked) match {
/** Special handling to access native Array.clone() */
@@ -2555,7 +2554,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
}
genMethodsInstr
- case icodes.arraysCat =>
+ case icodes.arraysCat =>
def genArraysInstr = (instr: @unchecked) match {
case LOAD_ARRAY_ITEM(kind) => jcode.aload(kind)
case STORE_ARRAY_ITEM(kind) => jcode.astore(kind)
@@ -2564,7 +2563,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
}
genArraysInstr
- case icodes.jumpsCat =>
+ case icodes.jumpsCat =>
def genJumpInstr = (instr: @unchecked) match {
case sw @ SWITCH(tagss, branches) =>
@@ -2694,7 +2693,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
}
genJumpInstr
- case icodes.retCat =>
+ case icodes.retCat =>
def genRetInstr = (instr: @unchecked) match {
case RETURN(kind) => jcode emitRETURN kind
case THROW(_) => emit(Opcodes.ATHROW)
@@ -2814,9 +2813,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
// TODO Logical's 2nd elem should be declared ValueTypeKind, to better approximate its allowed values (isIntSized, as its comments appear to convey)
// TODO GenICode uses `toTypeKind` to define that elem, `toValueTypeKind` would be needed instead.
// TODO How about adding some asserts to Logical and similar ones to capture the remaining constraint (UNIT not allowed).
- case Logical(op, kind) =>
+ case Logical(op, kind) =>
def genLogical = op match {
- case AND =>
+ case AND =>
kind match {
case LONG => emit(Opcodes.LAND)
case INT => emit(Opcodes.IAND)
@@ -2842,8 +2841,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
}
}
genLogical
-
- case Shift(op, kind) =>
+
+ case Shift(op, kind) =>
def genShift = op match {
case LSL =>
kind match {
@@ -2872,7 +2871,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
}
genShift
- case Comparison(op, kind) =>
+ case Comparison(op, kind) =>
def genCompare = op match {
case CMP =>
(kind: @unchecked) match {
@@ -2887,7 +2886,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
(kind: @unchecked) match {
case FLOAT => emit(Opcodes.FCMPG)
case DOUBLE => emit(Opcodes.DCMPL) // TODO bug? why not DCMPG? http://docs.oracle.com/javase/specs/jvms/se5.0/html/Instructions2.doc3.html
-
+
}
}
genCompare
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index 930791d88d..62c281b82f 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -18,7 +18,7 @@ import JAccessFlags._
import JObjectType.{ JAVA_LANG_STRING, JAVA_LANG_OBJECT }
import java.util.jar.{ JarEntry, JarOutputStream }
import scala.tools.nsc.io.AbstractFile
-import language.postfixOps
+import scala.language.postfixOps
/** This class ...
*
@@ -122,8 +122,10 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
inform("[running phase " + name + " on icode]")
if (settings.Xdce.value)
- for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym))
+ for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) {
+ log(s"Optimizer eliminated ${sym.fullNameString}")
icodes.classes -= sym
+ }
// For predictably ordered error messages.
val sortedClasses = classes.values.toList sortBy ("" + _.symbol.fullName)
@@ -1021,8 +1023,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
method = m
jmethod = clinitMethod
-
- computeLocalVarsIndex(m)
genCode(m)
case None =>
legacyStaticInitializer(cls, clinit)
@@ -1124,9 +1124,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
log("No forwarder for " + m + " due to conflict with " + linkedClass.info.member(m.name))
else {
log("Adding static forwarder for '%s' from %s to '%s'".format(m, className, moduleClass))
- if (m.isAccessor && m.accessed.hasStaticAnnotation) {
- log("@static: accessor " + m + ", accessed: " + m.accessed)
- } else addForwarder(jclass, moduleClass, m)
+ addForwarder(jclass, moduleClass, m)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
index 465a0c61e8..f56aa74d53 100644
--- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
+++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
@@ -15,7 +15,7 @@ import scala.tools.nsc.symtab._
import ch.epfl.lamp.compiler.msil.{Type => MsilType, _}
import ch.epfl.lamp.compiler.msil.emit._
import ch.epfl.lamp.compiler.msil.util.PECustomMod
-import language.postfixOps
+import scala.language.postfixOps
abstract class GenMSIL extends SubComponent {
import global._
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
index 7772ccbdd5..eb2da72401 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
@@ -35,7 +35,7 @@ abstract class ClosureElimination extends SubComponent {
case (STORE_LOCAL(x), LOAD_LOCAL(y)) if (x == y) =>
var liveOut = liveness.out(bb)
if (!liveOut(x)) {
- log("store/load to a dead local? " + x)
+ debuglog("store/load to a dead local? " + x)
val instrs = bb.getArray
var idx = instrs.length - 1
while (idx > 0 && (instrs(idx) ne i2)) {
@@ -43,7 +43,7 @@ abstract class ClosureElimination extends SubComponent {
idx -= 1
}
if (!liveOut(x)) {
- log("removing dead store/load " + x)
+ log("Removing dead store/load of " + x.sym.initialize.defString)
Some(Nil)
} else None
} else
@@ -84,6 +84,7 @@ abstract class ClosureElimination extends SubComponent {
*/
class ClosureElim {
def analyzeClass(cls: IClass): Unit = if (settings.Xcloselim.value) {
+ log(s"Analyzing ${cls.methods.size} methods in $cls.")
cls.methods foreach { m =>
analyzeMethod(m)
peephole(m)
@@ -95,7 +96,6 @@ abstract class ClosureElimination extends SubComponent {
/* Some embryonic copy propagation. */
def analyzeMethod(m: IMethod): Unit = try {if (m.hasCode) {
- log("Analyzing " + m)
cpp.init(m)
cpp.run
@@ -110,23 +110,20 @@ abstract class ClosureElimination extends SubComponent {
t match {
case Deref(This) | Const(_) =>
bb.replaceInstruction(i, valueToInstruction(t));
- log("replaced " + i + " with " + t)
+ debuglog(s"replaced $i with $t")
case _ =>
- bb.replaceInstruction(i, LOAD_LOCAL(info.getAlias(l)))
- log("replaced " + i + " with " + info.getAlias(l))
-
+ val t = info.getAlias(l)
+ bb.replaceInstruction(i, LOAD_LOCAL(t))
+ debuglog(s"replaced $i with $t")
}
case LOAD_FIELD(f, false) /* if accessible(f, m.symbol) */ =>
def replaceFieldAccess(r: Record) {
val Record(cls, bindings) = r
- info.getFieldNonRecordValue(r, f) match {
- case Some(v) =>
- bb.replaceInstruction(i,
- DROP(REFERENCE(cls)) :: valueToInstruction(v) :: Nil);
- log("Replaced " + i + " with " + info.getFieldNonRecordValue(r, f));
- case None =>
+ info.getFieldNonRecordValue(r, f) foreach { v =>
+ bb.replaceInstruction(i, DROP(REFERENCE(cls)) :: valueToInstruction(v) :: Nil)
+ debuglog(s"replaced $i with $v")
}
}
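The rewrite above replaces a Some/None match with `Option#foreach`, which runs its body only when a non-record value is actually known. A minimal sketch of the same idiom (hypothetical names):

    // foreach on an Option runs the body for Some(v) and does nothing for None,
    // which is what the old match expressed more verbosely.
    def replaceIfKnown(known: Option[Int], replace: Int => Unit): Unit =
      known foreach replace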
@@ -157,14 +154,14 @@ abstract class ClosureElimination extends SubComponent {
value match {
case Boxed(LocalVar(loc2)) =>
bb.replaceInstruction(i, DROP(icodes.ObjectReference) :: valueToInstruction(info.getBinding(loc2)) :: Nil)
- log("replaced " + i + " with " + info.getBinding(loc2))
+ debuglog("replaced " + i + " with " + info.getBinding(loc2))
case _ =>
()
}
case Boxed(LocalVar(loc1)) :: _ =>
val loc2 = info.getAlias(loc1)
bb.replaceInstruction(i, DROP(icodes.ObjectReference) :: valueToInstruction(Deref(LocalVar(loc2))) :: Nil)
- log("replaced " + i + " with " + LocalVar(loc2))
+ debuglog("replaced " + i + " with " + LocalVar(loc2))
case _ =>
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index fd949576e1..36a5d61cfb 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -44,6 +44,7 @@ abstract class DeadCodeElimination extends SubComponent {
class DeadCode {
def analyzeClass(cls: IClass) {
+ log(s"Analyzing ${cls.methods.size} methods in $cls.")
cls.methods.foreach { m =>
this.method = m
dieCodeDie(m)
@@ -73,7 +74,7 @@ abstract class DeadCodeElimination extends SubComponent {
def dieCodeDie(m: IMethod) {
if (m.hasCode) {
- log("dead code elimination on " + m);
+ debuglog("dead code elimination on " + m);
dropOf.clear()
m.code.blocks.clear()
accessedLocals = m.params.reverse
@@ -82,8 +83,10 @@ abstract class DeadCodeElimination extends SubComponent {
mark
sweep(m)
accessedLocals = accessedLocals.distinct
- if ((m.locals diff accessedLocals).nonEmpty) {
- log("Removed dead locals: " + (m.locals diff accessedLocals))
+ val diff = m.locals diff accessedLocals
+ if (diff.nonEmpty) {
+ val msg = diff.map(_.sym.name).mkString(", ")
+ log(s"Removed ${diff.size} dead locals: $msg")
m.locals = accessedLocals.reverse
}
}
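The dead-locals report above relies on `List#diff`, which removes one occurrence of each element of its argument, so any local never recorded in `accessedLocals` survives into the reported list. A small sketch with hypothetical values:

    // Locals that were never accessed remain after the difference and are reported as dead.
    val locals   = List("x", "y", "tmp$1")
    val accessed = List("x")
    val dead     = locals diff accessed   // List("y", "tmp$1")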
@@ -126,7 +129,7 @@ abstract class DeadCodeElimination extends SubComponent {
case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) |
THROW(_) | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) |
LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() => worklist += ((bb, idx))
- case CALL_METHOD(m1, _) if isSideEffecting(m1) => worklist += ((bb, idx)); log("marking " + m1)
+ case CALL_METHOD(m1, _) if isSideEffecting(m1) => worklist += ((bb, idx)); debuglog("marking " + m1)
case CALL_METHOD(m1, SuperCall(_)) =>
worklist += ((bb, idx)) // super calls to constructor
case DROP(_) =>
@@ -173,7 +176,7 @@ abstract class DeadCodeElimination extends SubComponent {
instr match {
case LOAD_LOCAL(l1) =>
for ((l2, bb1, idx1) <- defs((bb, idx)) if l1 == l2; if !useful(bb1)(idx1)) {
- log("\tAdding " + bb1(idx1))
+ debuglog("\tAdding " + bb1(idx1))
worklist += ((bb1, idx1))
}
@@ -197,7 +200,7 @@ abstract class DeadCodeElimination extends SubComponent {
case _ =>
for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) {
- log("\tAdding " + bb1(idx1))
+ debuglog("\tAdding " + bb1(idx1))
worklist += ((bb1, idx1))
}
}
@@ -232,7 +235,7 @@ abstract class DeadCodeElimination extends SubComponent {
} else {
i match {
case NEW(REFERENCE(sym)) =>
- log("skipped object creation: " + sym + "inside " + m)
+ log(s"Eliminated instantation of $sym inside $m")
case _ => ()
}
debuglog("Skipped: bb_" + bb + ": " + idx + "( " + i + ")")
@@ -240,7 +243,7 @@ abstract class DeadCodeElimination extends SubComponent {
}
if (bb.nonEmpty) bb.close
- else log("empty block encountered")
+ else log(s"empty block encountered in $m")
}
}
@@ -252,7 +255,7 @@ abstract class DeadCodeElimination extends SubComponent {
foreachWithIndex(bb.toList) { (i, idx) =>
if (!useful(bb)(idx)) {
foreachWithIndex(i.consumedTypes.reverse) { (consumedType, depth) =>
- log("Finding definitions of: " + i + "\n\t" + consumedType + " at depth: " + depth)
+ debuglog("Finding definitions of: " + i + "\n\t" + consumedType + " at depth: " + depth)
val defs = rdef.findDefs(bb, idx, 1, depth)
for (d <- defs) {
val (bb, idx) = d
diff --git a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
index f1f597322e..98120f0614 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
@@ -93,10 +93,9 @@ abstract class InlineExceptionHandlers extends SubComponent {
val startTime = System.currentTimeMillis
currentClass = c
- log("Starting " + c)
+ debuglog("Starting InlineExceptionHandlers on " + c)
c.methods foreach applyMethod
-
- log("Finished " + c + "... " + (System.currentTimeMillis - startTime) + "ms")
+ debuglog("Finished InlineExceptionHandlers on " + c + "... " + (System.currentTimeMillis - startTime) + "ms")
currentClass = null
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 5464b6fc3b..e9fb060dda 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -44,7 +44,7 @@ abstract class Inliners extends SubComponent {
import definitions.{
NullClass, NothingClass, ObjectClass,
PredefModule, RuntimePackage, ScalaInlineClass, ScalaNoInlineClass,
- isFunctionType
+ isFunctionType, isByNameParamType
}
val phaseName = "inliner"
@@ -143,7 +143,6 @@ abstract class Inliners extends SubComponent {
}
def isBottomType(sym: Symbol) = sym == NullClass || sym == NothingClass
- def posToStr(pos: scala.reflect.internal.util.Position) = if (pos.isDefined) pos.point.toString else "<nopos>"
/** Is the given class a closure? */
def isClosureClass(cls: Symbol): Boolean =
@@ -194,6 +193,27 @@ abstract class Inliners extends SubComponent {
private var currentIClazz: IClass = _
private def warn(pos: Position, msg: String) = currentIClazz.cunit.inlinerWarning(pos, msg)
+ private def ownedName(sym: Symbol): String = afterUncurry {
+ val count = (
+ if (!sym.isMethod) 1
+ else if (sym.owner.isAnonymousFunction) 3
+ else 2
+ )
+ (sym.ownerChain take count filterNot (_.isPackageClass)).reverseMap(_.nameString).mkString(".")
+ }
+ private def inlineLog(what: String, main: => String, comment: => String) {
+ def cstr = comment match {
+ case "" => ""
+ case str => " // " + str
+ }
+ val width = if (currentIClazz eq null) 40 else currentIClazz.symbol.enclosingPackage.fullName.length + 25
+ val fmt = "%8s %-" + width + "s" + cstr
+ log(fmt.format(what, main))
+ }
+ private def inlineLog(what: String, main: Symbol, comment: => String) {
+ inlineLog(what, ownedName(main), comment)
+ }
+
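The `inlineLog` helpers added above build a fixed-width format string so that repeated log lines stay column-aligned. A minimal sketch of the alignment they produce (hypothetical values; the real width is derived from the enclosing package name):

    // "%8s" right-aligns the tag into 8 columns and "%-40s" left-aligns the subject
    // into 40, so successive log lines keep their columns lined up.
    val fmt  = "%8s %-40s"
    val line = fmt.format("access", "foo.bar$1") + " // making public"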
val recentTFAs = mutable.Map.empty[Symbol, Tuple2[Boolean, analysis.MethodTFA]]
private def getRecentTFA(incm: IMethod, forceable: Boolean): (Boolean, analysis.MethodTFA) = {
@@ -244,14 +264,15 @@ abstract class Inliners extends SubComponent {
def analyzeClass(cls: IClass): Unit =
if (settings.inline.value) {
- debuglog("Analyzing " + cls)
+ inlineLog("class", s"${cls.symbol.decodedName}", s"analyzing ${cls.methods.size} methods in $cls")
this.currentIClazz = cls
val ms = cls.methods sorted imethodOrdering
ms foreach { im =>
- if(hasInline(im.symbol)) {
- log("Not inlining into " + im.symbol.originalName.decode + " because it is marked @inline.")
- } else if(im.hasCode && !im.symbol.isBridge) {
+ if (hasInline(im.symbol)) {
+ inlineLog("skip", im.symbol, "no inlining into @inline methods")
+ }
+ else if(im.hasCode && !im.symbol.isBridge) {
analyzeMethod(im)
}
}
@@ -296,6 +317,8 @@ abstract class Inliners extends SubComponent {
* */
def analyzeMethod(m: IMethod): Unit = {
// m.normalize
+ if (settings.debug.value)
+ inlineLog("caller", ownedName(m.symbol), "in " + m.symbol.owner.fullName)
var sizeBeforeInlining = m.code.blockCount
var instrBeforeInlining = m.code.instructionCount
@@ -306,8 +329,8 @@ abstract class Inliners extends SubComponent {
val fresh = mutable.HashMap.empty[String, Int] withDefaultValue 0
// how many times have we already inlined this method here?
val inlinedMethodCount = mutable.HashMap.empty[Symbol, Int] withDefaultValue 0
-
val caller = new IMethodInfo(m)
+ def analyzeMessage = s"Analyzing ${caller.length} blocks of $m for inlining sites."
def preInline(isFirstRound: Boolean): Int = {
val inputBlocks = caller.m.linearizedBlocks()
@@ -354,15 +377,17 @@ abstract class Inliners extends SubComponent {
*/
def analyzeInc(i: CALL_METHOD, bb: BasicBlock, receiver: Symbol, stackLength: Int, concreteMethod: Symbol): Boolean = {
assert(bb.toList contains i, "Candidate callsite does not belong to BasicBlock.")
-
- var inlined = false
val shouldWarn = hasInline(i.method)
- def warnNoInline(reason: String) = {
- if (shouldWarn) {
- warn(i.pos, "Could not inline required method %s because %s.".format(i.method.originalName.decode, reason))
- }
- }
+ def warnNoInline(reason: String): Boolean = {
+ def msg = "Could not inline required method %s because %s.".format(i.method.originalName.decode, reason)
+ if (settings.debug.value)
+ inlineLog("fail", i.method.fullName, reason)
+ if (shouldWarn)
+ warn(i.pos, msg)
+
+ false
+ }
var isAvailable = icodes available concreteMethod.enclClass
@@ -378,92 +403,69 @@ abstract class Inliners extends SubComponent {
isAvailable = icodes.load(concreteMethod.enclClass)
}
- def isCandidate = (
- isClosureClass(receiver)
- || concreteMethod.isEffectivelyFinal
- || receiver.isEffectivelyFinal
- )
+ def isCandidate = (
+ isClosureClass(receiver)
+ || concreteMethod.isEffectivelyFinal
+ || receiver.isEffectivelyFinal
+ )
- def isApply = concreteMethod.name == nme.apply
+ def isApply = concreteMethod.name == nme.apply
- def isCountable = !(
- isClosureClass(receiver)
- || isApply
- || isMonadicMethod(concreteMethod)
- || receiver.enclosingPackage == definitions.RuntimePackage
- ) // only count non-closures
+ def isCountable = !(
+ isClosureClass(receiver)
+ || isApply
+ || isMonadicMethod(concreteMethod)
+ || receiver.enclosingPackage == definitions.RuntimePackage
+ ) // only count non-closures
debuglog("Treating " + i
+ "\n\treceiver: " + receiver
+ "\n\ticodes.available: " + isAvailable
+ "\n\tconcreteMethod.isEffectivelyFinal: " + concreteMethod.isEffectivelyFinal)
- if (isAvailable && isCandidate) {
- lookupIMethod(concreteMethod, receiver) match {
-
- case Some(callee) if callee.hasCode =>
- val inc = new IMethodInfo(callee)
- val pair = new CallerCalleeInfo(caller, inc, fresh, inlinedMethodCount)
-
- if(inc.hasHandlers && (stackLength == -1)) {
- // no inlining is done, yet don't warn about it, stackLength == -1 indicates we're trying to inlineWithoutTFA.
- // Shortly, a TFA will be computed and an error message reported if indeed inlining not possible.
- return false
- }
-
- (pair isStampedForInlining stackLength) match {
-
- case inlInfo if inlInfo.isSafe =>
-
- (inlInfo: @unchecked) match {
-
- case FeasibleInline(accessNeeded, toBecomePublic) =>
- for(f <- toBecomePublic) {
- debuglog("Making public (synthetic) field-symbol: " + f)
- f setFlag Flags.notPRIVATE
- f setFlag Flags.notPROTECTED
- }
- // only add to `knownSafe` after all `toBecomePublic` fields actually made public.
- if(accessNeeded == NonPublicRefs.Public) { tfa.knownSafe += inc.sym }
-
- case InlineableAtThisCaller => ()
-
- }
-
- retry = true
- inlined = true
- if (isCountable) { count += 1 };
+ if (!isCandidate) warnNoInline("it can be overridden")
+ else if (!isAvailable) warnNoInline("bytecode unavailable")
+ else lookupIMethod(concreteMethod, receiver) filter (callee => callee.hasCode || warnNoInline("callee has no code")) exists { callee =>
+ val inc = new IMethodInfo(callee)
+ val pair = new CallerCalleeInfo(caller, inc, fresh, inlinedMethodCount)
- pair.doInline(bb, i)
- if (!pair.isInlineForced || inc.isMonadic) { caller.inlinedCalls += 1 };
- inlinedMethodCount(inc.sym) += 1
-
- // Remove the caller from the cache (this inlining might have changed its calls-private relation).
- usesNonPublics -= m
- recentTFAs -= m.symbol
-
-
- case DontInlineHere(msg) =>
- debuglog("inline failed, reason: " + msg)
- warnNoInline(msg)
-
- case NeverSafeToInline => ()
- }
-
- case Some(callee) =>
- assert(!callee.hasCode, "The case clause right before this one should have handled this case.")
- warnNoInline("callee (" + callee + ") has no code")
- ()
+ if (inc.hasHandlers && (stackLength == -1)) {
+ // No inlining is done here, but don't warn about it: stackLength == -1 indicates we're trying to inlineWithoutTFA.
+ // Shortly, a TFA will be computed and an error message reported if inlining is indeed not possible.
+ false
+ }
+ else {
+ val isSafe = pair isStampedForInlining stackLength match {
+ case DontInlineHere(msg) => warnNoInline(msg)
+ case NeverSafeToInline => false
+ case InlineableAtThisCaller => true
+ case inl @ FeasibleInline(_, _) if !inl.isSafe => false
+ case FeasibleInline(required, toPublicize) =>
+ for (f <- toPublicize) {
+ inlineLog("access", f, "making public")
+ f setFlag Flags.notPRIVATE
+ f setFlag Flags.notPROTECTED
+ }
+ // only add to `knownSafe` after all `toPublicize` fields actually made public.
+ if (required == NonPublicRefs.Public)
+ tfa.knownSafe += inc.sym
- case None =>
- warnNoInline("bytecode was not available")
- debuglog("could not find icode\n\treceiver: " + receiver + "\n\tmethod: " + concreteMethod)
+ true
+ }
+ isSafe && {
+ retry = true
+ if (isCountable) count += 1
+ pair.doInline(bb, i)
+ if (!pair.isInlineForced || inc.isMonadic) caller.inlinedCalls += 1
+ inlinedMethodCount(inc.sym) += 1
+
+ // Remove the caller from the cache (this inlining might have changed its calls-private relation).
+ usesNonPublics -= m
+ recentTFAs -= m.symbol
+ true
+ }
}
- } else {
- warnNoInline(if(!isAvailable) "bytecode was not available" else "it can be overridden")
}
-
- inlined
}
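The rewritten `analyzeInc` above folds the old nested match into `Option` combinators: `warnNoInline` now returns `false` so it can stand in Boolean positions, `filter` drops callees without code, and `exists` turns the lookup into the "did we inline?" result (`false` for `None`). A minimal sketch of that shape with hypothetical names:

    // The predicate either accepts the callee or logs a reason and yields false;
    // exists evaluates the body only for a surviving Some and is false otherwise.
    def warnAndFail(reason: String): Boolean = { println(reason); false }
    def tryInline(callee: Option[String], hasCode: String => Boolean): Boolean =
      callee filter (c => hasCode(c) || warnAndFail("callee has no code")) exists { c =>
        println(s"inlining $c")
        true
      }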
/* Pre-inlining consists in invoking the usual inlining subroutine with (receiver class, concrete method) pairs as input
@@ -485,7 +487,7 @@ abstract class Inliners extends SubComponent {
do {
retry = false
- log("Analyzing " + m + " count " + count + " with " + caller.length + " blocks")
+ debuglog(analyzeMessage)
/* it's important not to inline in unreachable basic blocks. linearizedBlocks() returns only reachable ones. */
tfa.callerLin = caller.m.linearizedBlocks()
@@ -567,9 +569,16 @@ abstract class Inliners extends SubComponent {
m.normalize
if (sizeBeforeInlining > 0) {
val instrAfterInlining = m.code.instructionCount
- val prefix = if ((instrAfterInlining > 2 * instrBeforeInlining) && (instrAfterInlining > 200)) " !! " else ""
- log(prefix + " %s blocks before inlining: %d (%d) after: %d (%d)".format(
- m.symbol.fullName, sizeBeforeInlining, instrBeforeInlining, m.code.blockCount, instrAfterInlining))
+ val prefix = if ((instrAfterInlining > 2 * instrBeforeInlining) && (instrAfterInlining > 200)) "!!" else ""
+ val inlinings = caller.inlinedCalls
+ if (inlinings > 0) {
+ val s1 = s"instructions $instrBeforeInlining -> $instrAfterInlining"
+ val s2 = if (sizeBeforeInlining == m.code.blockCount) "" else s", blocks $sizeBeforeInlining -> ${m.code.blockCount}"
+ val callees = inlinedMethodCount.toList map { case (k, v) => k.fullNameString + ( if (v == 1) "" else "/" + v ) }
+
+ inlineLog("inlined", m.symbol.fullName, callees.sorted.mkString(inlinings + " inlined: ", ", ", ""))
+ inlineLog("<<tldr>>", m.symbol.fullName, s"${m.symbol.nameString}: $s1$s2")
+ }
}
}
@@ -589,6 +598,8 @@ abstract class Inliners extends SubComponent {
}
class IMethodInfo(val m: IMethod) {
+ override def toString = m.toString
+
val sym = m.symbol
val name = sym.name
def owner = sym.owner
@@ -608,10 +619,11 @@ abstract class Inliners extends SubComponent {
def instructions = m.code.instructions
// def linearized = linearizer linearize m
- def isSmall = (length <= SMALL_METHOD_SIZE) && blocks(0).length < 10
- def isLarge = length > MAX_INLINE_SIZE
- def isRecursive = m.recursive
- def hasHandlers = handlers.nonEmpty || m.bytecodeHasEHs
+ def isSmall = (length <= SMALL_METHOD_SIZE) && blocks(0).length < 10
+ def isLarge = length > MAX_INLINE_SIZE
+ def isRecursive = m.recursive
+ def hasHandlers = handlers.nonEmpty || m.bytecodeHasEHs
+ def hasClosureParam = paramTypes exists (tp => isByNameParamType(tp) || isFunctionType(tp))
def isSynchronized = sym.hasFlag(Flags.SYNCHRONIZED)
def hasNonFinalizerHandler = handlers exists {
@@ -661,9 +673,9 @@ abstract class Inliners extends SubComponent {
*
* TODO handle more robustly the case of a trait var changed at the source-level from public to private[this]
* (eg by having ICodeReader use unpickler, see SI-5442).
-
+
DISABLED
-
+
def potentiallyPublicized(f: Symbol): Boolean = {
(m.sourceFile eq NoSourceFile) && f.name.containsChar('$')
}
@@ -687,7 +699,7 @@ abstract class Inliners extends SubComponent {
val i = iter.next()
getAccess(i) match {
case Private =>
- log("instruction " + i + " requires private access.")
+ inlineLog("access", s"instruction $i requires private access", "pos=" + i.pos)
toBecomePublic = Nil
seen = Private
case Protected => seen = Protected
@@ -734,7 +746,7 @@ abstract class Inliners extends SubComponent {
toBecomePublic: List[Symbol]
)
- final class CallerCalleeInfo(val caller: IMethodInfo, val inc: IMethodInfo, fresh: mutable.Map[String, Int], inlinedMethodCount: collection.Map[Symbol, Int]) {
+ final class CallerCalleeInfo(val caller: IMethodInfo, val inc: IMethodInfo, fresh: mutable.Map[String, Int], inlinedMethodCount: scala.collection.Map[Symbol, Int]) {
assert(!caller.isBridge && inc.m.hasCode,
"A guard in Inliner.analyzeClass() should have prevented from getting here.")
@@ -764,11 +776,10 @@ abstract class Inliners extends SubComponent {
tfa.warnIfInlineFails.remove(instr)
val targetPos = instr.pos
- log("Inlining " + inc.m + " in " + caller.m + " at pos: " + posToStr(targetPos))
def blockEmit(i: Instruction) = block.emit(i, targetPos)
def newLocal(baseName: String, kind: TypeKind) =
- new Local(caller.sym.newVariable(freshName(baseName), targetPos), kind, false)
+ new Local(caller.sym.newVariable(freshName(baseName), targetPos) setInfo kind.toType, kind, false)
val (hasRETURN, a) = getRecentTFA(inc.m, isInlineForced)
@@ -955,6 +966,7 @@ abstract class Inliners extends SubComponent {
if(reasonWhyNever != null) {
tfa.knownNever += inc.sym
+ inlineLog("never", inc.sym, reasonWhyNever)
// next time around NeverSafeToInline is returned, thus skipping (duplicate) msg, this is intended.
return DontInlineHere(inc.m + " " + reasonWhyNever)
}
@@ -977,10 +989,15 @@ abstract class Inliners extends SubComponent {
* As a result of (b), some synthetic private members can be chosen to become public.
*/
- if(!isInlineForced && !isScoreOK) {
+ val score = inlinerScore
+ val scoreStr = if (score > 0) "+" + score else "" + score
+ val what = if (score > 0) "ok to" else "don't"
+ inlineLog(scoreStr, inc.m.symbol, s"$what inline into ${ownedName(caller.m.symbol)}")
+
+ if (!isInlineForced && score <= 0) {
// During inlining retry, a previous caller-callee pair that scored low may pass.
// Thus, adding the callee to tfa.knownUnsafe isn't warranted.
- return DontInlineHere("too low score (heuristics)")
+ return DontInlineHere(s"inliner heuristic")
}
if(inc.hasHandlers && (stackLength > inc.minimumStack)) {
@@ -999,7 +1016,9 @@ abstract class Inliners extends SubComponent {
val accReq = inc.accessRequirements
if(!canAccess(accReq.accessNeeded)) {
tfa.knownUnsafe += inc.sym
- return DontInlineHere("access level required by callee not matched by caller")
+ val msg = "access level required by callee not matched by caller"
+ inlineLog("fail", inc.sym, msg)
+ return DontInlineHere(msg)
}
FeasibleInline(accReq.accessNeeded, accReq.toBecomePublic)
@@ -1021,9 +1040,7 @@ abstract class Inliners extends SubComponent {
* - it's good to inline closures functions.
* - it's bad (useless) to inline inside bridge methods
*/
- def isScoreOK: Boolean = {
- debuglog("shouldInline: " + caller.m + " , callee:" + inc.m)
-
+ def inlinerScore: Int = {
var score = 0
// better not inline inside closures, but hope that the closure itself is repeatedly inlined
@@ -1031,21 +1048,19 @@ abstract class Inliners extends SubComponent {
else if (caller.inlinedCalls < 1) score -= 1 // only monadic methods can trigger the first inline
if (inc.isSmall) score += 1;
+ // if (inc.hasClosureParam) score += 2
if (inc.isLarge) score -= 1;
if (caller.isSmall && isLargeSum) {
score -= 1
- debuglog("shouldInline: score decreased to " + score + " because small " + caller + " would become large")
+ debuglog(s"inliner score decreased to $score because small caller $caller would become large")
}
if (inc.isMonadic) score += 3
else if (inc.isHigherOrder) score += 1
- if (inc.isInClosure) score += 2;
- if (inlinedMethodCount(inc.sym) > 2) score -= 2;
-
- log("shouldInline(" + inc.m + ") score: " + score)
-
- score > 0
+ if (inc.isInClosure) score += 2
+ if (inlinedMethodCount(inc.sym) > 2) score -= 2
+ score
}
}
diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
index 176c00c025..7f5f412a20 100644
--- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
@@ -3,7 +3,7 @@ package dependencies
import symtab.Flags
-import collection._
+import scala.collection._
/** A component that describes the possible changes between successive
* compilations of a class.
diff --git a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
index 317cc28298..cdde768274 100644
--- a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
@@ -2,7 +2,7 @@ package scala.tools.nsc
package dependencies
import io.Path
-import collection._
+import scala.collection._
import symtab.Flags
import scala.tools.nsc.io.AbstractFile
import scala.reflect.internal.util.SourceFile
diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/compiler/scala/tools/nsc/doc/Settings.scala
index dbc34bd7b3..f5df772d7d 100644
--- a/src/compiler/scala/tools/nsc/doc/Settings.scala
+++ b/src/compiler/scala/tools/nsc/doc/Settings.scala
@@ -8,7 +8,7 @@ package doc
import java.io.File
import java.lang.System
-import language.postfixOps
+import scala.language.postfixOps
/** An extended version of compiler settings, with additional Scaladoc-specific options.
* @param error A function that prints a string to the appropriate error stream
@@ -257,7 +257,7 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
("scala.reflect.ClassManifest" -> ((tparam: String) => tparam + " is accompanied by a ClassManifest, which is a runtime representation of its type that survives erasure")) +
("scala.reflect.OptManifest" -> ((tparam: String) => tparam + " is accompanied by an OptManifest, which can be either a runtime representation of its type or the NoManifest, which means the runtime type is not available")) +
("scala.reflect.ClassTag" -> ((tparam: String) => tparam + " is accompanied by a ClassTag, which is a runtime representation of its type that survives erasure")) +
- ("scala.reflect.AbsTypeTag" -> ((tparam: String) => tparam + " is accompanied by an AbsTypeTag, which is a runtime representation of its type that survives erasure")) +
+ ("scala.reflect.WeakTypeTag" -> ((tparam: String) => tparam + " is accompanied by an WeakTypeTag, which is a runtime representation of its type that survives erasure")) +
("scala.reflect.base.TypeTags.TypeTag" -> ((tparam: String) => tparam + " is accompanied by a TypeTag, which is a runtime representation of its type that survives erasure"))
/**
diff --git a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala b/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
index 7e57f9fd9f..812b62a1c6 100644
--- a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
+++ b/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
@@ -5,8 +5,8 @@
package scala.tools.nsc
package doc
-import language.implicitConversions
-import language.postfixOps
+import scala.language.implicitConversions
+import scala.language.postfixOps
/** Some glue between DocParser (which reads source files which can't be compiled)
* and the scaladoc model.
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
index 7a74c569f3..f7c5611b8a 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -10,8 +10,8 @@ package html
import model._
import comment._
-import xml.{XML, NodeSeq}
-import xml.dtd.{DocType, PublicID}
+import scala.xml.{XML, NodeSeq}
+import scala.xml.dtd.{DocType, PublicID}
import scala.collection._
import java.io.Writer
@@ -87,7 +87,7 @@ abstract class HtmlPage extends Page { thisPage =>
case Title(in, _) => <h6>{ inlineToHtml(in) }</h6>
case Paragraph(in) => <p>{ inlineToHtml(in) }</p>
case Code(data) =>
- <pre>{ SyntaxHigh(data) }</pre> //<pre>{ xml.Text(data) }</pre>
+ <pre>{ SyntaxHigh(data) }</pre> //<pre>{ scala.xml.Text(data) }</pre>
case UnorderedList(items) =>
<ul>{ listItemsToHtml(items) }</ul>
case OrderedList(items, listStyle) =>
@@ -119,9 +119,9 @@ abstract class HtmlPage extends Page { thisPage =>
case Subscript(in) => <sub>{ inlineToHtml(in) }</sub>
case Link(raw, title) => <a href={ raw }>{ inlineToHtml(title) }</a>
case Monospace(in) => <code>{ inlineToHtml(in) }</code>
- case Text(text) => xml.Text(text)
+ case Text(text) => scala.xml.Text(text)
case Summary(in) => inlineToHtml(in)
- case HtmlTag(tag) => xml.Unparsed(tag)
+ case HtmlTag(tag) => scala.xml.Unparsed(tag)
case EntityLink(target, link) => linkToHtml(target, link, true)
}
@@ -158,11 +158,11 @@ abstract class HtmlPage extends Page { thisPage =>
if (starts.isEmpty && (inPos == string.length))
NodeSeq.Empty
else if (starts.isEmpty)
- xml.Text(string.slice(inPos, string.length))
+ scala.xml.Text(string.slice(inPos, string.length))
else if (inPos == starts.head)
toLinksIn(inPos, starts)
else {
- xml.Text(string.slice(inPos, starts.head)) ++ toLinksIn(starts.head, starts)
+ scala.xml.Text(string.slice(inPos, starts.head)) ++ toLinksIn(starts.head, starts)
}
}
def toLinksIn(inPos: Int, starts: List[Int]): NodeSeq = {
@@ -173,7 +173,7 @@ abstract class HtmlPage extends Page { thisPage =>
if (hasLinks)
toLinksOut(0, tpe.refEntity.keySet.toList)
else
- xml.Text(string)
+ scala.xml.Text(string)
}
def typesToHtml(tpess: List[model.TypeEntity], hasLinks: Boolean, sep: NodeSeq): NodeSeq = tpess match {
@@ -192,10 +192,10 @@ abstract class HtmlPage extends Page { thisPage =>
if (hasPage(dTpl)) {
<a href={ relativeLinkTo(dTpl) } class="extype" name={ dTpl.qualifiedName }>{ if (name eq null) dTpl.name else name }</a>
} else {
- xml.Text(if (name eq null) dTpl.name else name)
+ scala.xml.Text(if (name eq null) dTpl.name else name)
}
case ndTpl: NoDocTemplate =>
- xml.Text(if (name eq null) ndTpl.name else name)
+ scala.xml.Text(if (name eq null) ndTpl.name else name)
}
/** Returns the HTML code that represents the templates in `tpls` as a list of hyperlinked names. */
diff --git a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
index f1eab841f9..e21ee07963 100644
--- a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
@@ -5,7 +5,7 @@
package scala.tools.nsc.doc.html
-import xml.NodeSeq
+import scala.xml.NodeSeq
/** Highlight the syntax of Scala code appearing in a `{{{` wiki block
* (see method `HtmlPage.blockToHtml`).
@@ -40,7 +40,7 @@ private[html] object SyntaxHigh {
/** Standard library classes/objects, sorted alphabetically */
val standards = Array (
- "AbsTypeTag", "Any", "AnyRef", "AnyVal", "App", "Application", "Array",
+ "WeakTypeTag", "Any", "AnyRef", "AnyVal", "App", "Application", "Array",
"Boolean", "Byte", "Char", "Class", "ClassTag", "ClassManifest",
"Console", "Double", "Enumeration", "Float", "Function", "Int",
"List", "Long", "Manifest", "Map",
@@ -281,6 +281,6 @@ private[html] object SyntaxHigh {
}
parse("", 0)
- xml.Unparsed(out.toString)
+ scala.xml.Unparsed(out.toString)
}
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala b/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
index 6d83b4e6a5..cd76f84a37 100755
--- a/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
@@ -44,7 +44,7 @@ class ReferenceIndex(letter: Char, index: doc.Index, universe: Universe) extends
<strike>{ name }</strike>
}</div>
<div class="occurrences">{
- for (owner <- occurrences) yield owner ++ xml.Text(" ")
+ for (owner <- occurrences) yield owner ++ scala.xml.Text(" ")
}</div>
</div>
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala b/src/compiler/scala/tools/nsc/doc/html/page/Source.scala
index a51d66c6cc..edc0736400 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Source.scala
@@ -10,7 +10,7 @@ package page
import model._
import comment._
-import xml.{NodeSeq, Unparsed}
+import scala.xml.{NodeSeq, Unparsed}
import java.io.File
class Source(sourceFile: File) extends HtmlPage {
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
index 1f68781777..d4e52ba120 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
@@ -13,7 +13,7 @@ import model.diagram._
import diagram._
import scala.xml.{ NodeSeq, Text, UnprefixedAttribute }
-import language.postfixOps
+import scala.language.postfixOps
import model._
import model.diagram._
@@ -49,7 +49,11 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
if(top === self) {{
var url = '{ val p = templateToPath(tpl); "../" * (p.size - 1) + "index.html" }';
var hash = '{ val p = templateToPath(tpl); (p.tail.reverse ::: List(p.head.replace(".html", ""))).mkString(".") }';
- window.location.href = url + '#' + hash;
+ var anchor = window.location.hash;
+ var anchor_opt = '';
+ if (anchor.length { scala.xml.Unparsed(">=") /* unless we use Unparsed, it gets escaped and crashes the script */ } 1)
+ anchor_opt = '@' + anchor.substring(1);
+ window.location.href = url + '#' + hash + anchor_opt;
}}
</script>
</xml:group>
@@ -89,7 +93,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
if (tpl.isRootPackage || tpl.inTemplate.isRootPackage)
NodeSeq.Empty
else
- <p id="owner">{ templatesToHtml(tpl.inTemplate.toRoot.reverse.tail, xml.Text(".")) }</p>
+ <p id="owner">{ templatesToHtml(tpl.inTemplate.toRoot.reverse.tail, scala.xml.Text(".")) }</p>
}
<body class={ if (tpl.isType) "type" else "value" }>
@@ -148,7 +152,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<div id="ancestors">
<span class="filtertype">Implicitly<br/>
</span>
- <ol id="implicits"> {
+ <ol id="implicits"> {
tpl.conversions.map { conv =>
val name = conv.conversionQualifiedName
val hide = universe.settings.hiddenImplicits(name)
@@ -411,14 +415,14 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
case Nil =>
NodeSeq.Empty
case List(constraint) =>
- xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint) ++ xml.Text(".")
+ scala.xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint) ++ scala.xml.Text(".")
case List(constraint1, constraint2) =>
- xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint1) ++
- xml.Text(" and at the same time ") ++ constraintToHtml(constraint2) ++ xml.Text(".")
+ scala.xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint1) ++
+ scala.xml.Text(" and at the same time ") ++ constraintToHtml(constraint2) ++ scala.xml.Text(".")
case constraints =>
<br/> ++ "This conversion will take place only if all of the following constraints are met:" ++ <br/> ++ {
var index = 0
- constraints map { constraint => xml.Text({ index += 1; index } + ". ") ++ constraintToHtml(constraint) ++ <br/> }
+ constraints map { constraint => scala.xml.Text({ index += 1; index } + ". ") ++ constraintToHtml(constraint) ++ <br/> }
}
}
@@ -436,18 +440,18 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
case d: Def => d.valueParams map (_ map (_ name) mkString("(", ", ", ")")) mkString
case _ => "" // no parameters
}
- <br/> ++ xml.Text("To access this member you can use a ") ++
+ <br/> ++ scala.xml.Text("To access this member you can use a ") ++
<a href="http://stackoverflow.com/questions/2087250/what-is-the-purpose-of-type-ascription-in-scala"
- target="_blank">type ascription</a> ++ xml.Text(":") ++
+ target="_blank">type ascription</a> ++ scala.xml.Text(":") ++
<br/> ++ <div class="cmt"><pre>{"(" + Template.lowerFirstLetter(tpl.name) + ": " + conv.targetType.name + ")." + mbr.name + params }</pre></div>
}
val shadowingWarning: NodeSeq =
if (mbr.isShadowedImplicit)
- xml.Text("This implicitly inherited member is shadowed by one or more members in this " +
+ scala.xml.Text("This implicitly inherited member is shadowed by one or more members in this " +
"class.") ++ shadowingSuggestion
else if (mbr.isAmbiguousImplicit)
- xml.Text("This implicitly inherited member is ambiguous. One or more implicitly " +
+ scala.xml.Text("This implicitly inherited member is ambiguous. One or more implicitly " +
"inherited members have similar signatures, so calling this member may produce an ambiguous " +
"implicit conversion compiler error.") ++ shadowingSuggestion
else NodeSeq.Empty
@@ -467,7 +471,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
if (fvs.isEmpty || isReduced) NodeSeq.Empty
else {
<dt>Attributes</dt>
- <dd>{ fvs map { fv => { inlineToHtml(fv.text) ++ xml.Text(" ") } } }</dd>
+ <dd>{ fvs map { fv => { inlineToHtml(fv.text) ++ scala.xml.Text(" ") } } }</dd>
}
}
@@ -476,7 +480,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
if ((inDefTpls.tail.isEmpty && (inDefTpls.head == inTpl)) || isReduced) NodeSeq.Empty
else {
<dt>Definition Classes</dt>
- <dd>{ templatesToHtml(inDefTpls, xml.Text(" → ")) }</dd>
+ <dd>{ templatesToHtml(inDefTpls, scala.xml.Text(" → ")) }</dd>
}
}
@@ -624,7 +628,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<div class="toggleContainer block">
<span class="toggle">Linear Supertypes</span>
<div class="superTypes hiddenContent">{
- typesToHtml(dtpl.linearizationTypes, hasLinks = true, sep = xml.Text(", "))
+ typesToHtml(dtpl.linearizationTypes, hasLinks = true, sep = scala.xml.Text(", "))
}</div>
</div>
case _ => NodeSeq.Empty
@@ -635,7 +639,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<div class="toggleContainer block">
<span class="toggle">Known Subclasses</span>
<div class="subClasses hiddenContent">{
- templatesToHtml(dtpl.allSubClasses.sortBy(_.name), xml.Text(", "))
+ templatesToHtml(dtpl.allSubClasses.sortBy(_.name), scala.xml.Text(", "))
}</div>
</div>
case _ => NodeSeq.Empty
@@ -659,7 +663,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
def boundsToHtml(hi: Option[TypeEntity], lo: Option[TypeEntity], hasLinks: Boolean): NodeSeq = {
def bound0(bnd: Option[TypeEntity], pre: String): NodeSeq = bnd match {
case None => NodeSeq.Empty
- case Some(tpe) => xml.Text(pre) ++ typeToHtml(tpe, hasLinks)
+ case Some(tpe) => scala.xml.Text(pre) ++ typeToHtml(tpe, hasLinks)
}
bound0(lo, " >: ") ++ bound0(hi, " <: ")
}
@@ -690,7 +694,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
def inside(hasLinks: Boolean, nameLink: String = ""): NodeSeq =
<xml:group>
<span class="modifier_kind">
- <span class="modifier">{ mbr.flags.map(flag => inlineToHtml(flag.text) ++ xml.Text(" ")) }</span>
+ <span class="modifier">{ mbr.flags.map(flag => inlineToHtml(flag.text) ++ scala.xml.Text(" ")) }</span>
<span class="kind">{ kindToString(mbr) }</span>
</span>
<span class="symbol">
@@ -868,7 +872,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
def argumentsToHtml0(argss: List[ValueArgument]): NodeSeq = argss match {
case Nil => NodeSeq.Empty
case arg :: Nil => argumentToHtml(arg)
- case arg :: args => argumentToHtml(arg) ++ xml.Text(", ") ++ argumentsToHtml0(args)
+ case arg :: args => argumentToHtml(arg) ++ scala.xml.Text(", ") ++ argumentsToHtml0(args)
}
<span class="args">({ argumentsToHtml0(argss) })</span>
}
@@ -918,29 +922,29 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
private def constraintToHtml(constraint: Constraint): NodeSeq = constraint match {
case ktcc: KnownTypeClassConstraint =>
- xml.Text(ktcc.typeExplanation(ktcc.typeParamName) + " (" + ktcc.typeParamName + ": ") ++
- templateToHtml(ktcc.typeClassEntity) ++ xml.Text(")")
+ scala.xml.Text(ktcc.typeExplanation(ktcc.typeParamName) + " (" + ktcc.typeParamName + ": ") ++
+ templateToHtml(ktcc.typeClassEntity) ++ scala.xml.Text(")")
case tcc: TypeClassConstraint =>
- xml.Text(tcc.typeParamName + " is ") ++
+ scala.xml.Text(tcc.typeParamName + " is ") ++
<a href="http://stackoverflow.com/questions/2982276/what-is-a-context-bound-in-scala" target="_blank">
- context-bounded</a> ++ xml.Text(" by " + tcc.typeClassEntity.qualifiedName + " (" + tcc.typeParamName + ": ") ++
- templateToHtml(tcc.typeClassEntity) ++ xml.Text(")")
+ context-bounded</a> ++ scala.xml.Text(" by " + tcc.typeClassEntity.qualifiedName + " (" + tcc.typeParamName + ": ") ++
+ templateToHtml(tcc.typeClassEntity) ++ scala.xml.Text(")")
case impl: ImplicitInScopeConstraint =>
- xml.Text("an implicit value of type ") ++ typeToHtml(impl.implicitType, true) ++ xml.Text(" is in scope")
+ scala.xml.Text("an implicit value of type ") ++ typeToHtml(impl.implicitType, true) ++ scala.xml.Text(" is in scope")
case eq: EqualTypeParamConstraint =>
- xml.Text(eq.typeParamName + " is " + eq.rhs.name + " (" + eq.typeParamName + " =:= ") ++
- typeToHtml(eq.rhs, true) ++ xml.Text(")")
+ scala.xml.Text(eq.typeParamName + " is " + eq.rhs.name + " (" + eq.typeParamName + " =:= ") ++
+ typeToHtml(eq.rhs, true) ++ scala.xml.Text(")")
case bt: BoundedTypeParamConstraint =>
- xml.Text(bt.typeParamName + " is a superclass of " + bt.lowerBound.name + " and a subclass of " +
+ scala.xml.Text(bt.typeParamName + " is a superclass of " + bt.lowerBound.name + " and a subclass of " +
bt.upperBound.name + " (" + bt.typeParamName + " >: ") ++
- typeToHtml(bt.lowerBound, true) ++ xml.Text(" <: ") ++
- typeToHtml(bt.upperBound, true) ++ xml.Text(")")
+ typeToHtml(bt.lowerBound, true) ++ scala.xml.Text(" <: ") ++
+ typeToHtml(bt.upperBound, true) ++ scala.xml.Text(")")
case lb: LowerBoundedTypeParamConstraint =>
- xml.Text(lb.typeParamName + " is a superclass of " + lb.lowerBound.name + " (" + lb.typeParamName + " >: ") ++
- typeToHtml(lb.lowerBound, true) ++ xml.Text(")")
+ scala.xml.Text(lb.typeParamName + " is a superclass of " + lb.lowerBound.name + " (" + lb.typeParamName + " >: ") ++
+ typeToHtml(lb.lowerBound, true) ++ scala.xml.Text(")")
case ub: UpperBoundedTypeParamConstraint =>
- xml.Text(ub.typeParamName + " is a subclass of " + ub.upperBound.name + " (" + ub.typeParamName + " <: ") ++
- typeToHtml(ub.upperBound, true) ++ xml.Text(")")
+ scala.xml.Text(ub.typeParamName + " is a subclass of " + ub.upperBound.name + " (" + ub.typeParamName + " <: ") ++
+ typeToHtml(ub.upperBound, true) ++ scala.xml.Text(")")
}
def makeDiagramHtml(tpl: DocTemplateEntity, diagram: Option[Diagram], description: String, id: String) = {
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
index c46c33c1ee..304c534bdc 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
@@ -426,7 +426,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
else if (klass.contains("object")) "object"
else ""
- def getPosition(g: xml.Node, axis: String, offset: Double): Option[Double] = {
+ def getPosition(g: scala.xml.Node, axis: String, offset: Double): Option[Double] = {
val node = g \ "a" \ "text" \ ("@" + axis)
if (node.isEmpty)
None
@@ -508,4 +508,4 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
private val graphAttributesStr = graphAttributes.map{ case (key, value) => key + "=\"" + value + "\";\n" }.mkString
private val nodeAttributesStr = flatten(nodeAttributes)
private val edgeAttributesStr = flatten(edgeAttributes)
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index ed8541f692..a987da8ba6 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -154,7 +154,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
if (!sym.isTrait && (sym hasFlag Flags.ABSTRACT)) fgs += Paragraph(Text("abstract"))
/* Resetting the DEFERRED flag is a little trick here for refined types: (example from scala.collections)
* {{{
- * implicit def traversable2ops[T](t: collection.GenTraversableOnce[T]) = new TraversableOps[T] {
+ * implicit def traversable2ops[T](t: scala.collection.GenTraversableOnce[T]) = new TraversableOps[T] {
* def isParallel = ...
* }}}
* the type the method returns is TraversableOps, which has all-abstract symbols. But in reality, it couldn't have
@@ -894,9 +894,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
/** */
- def makeAnnotation(annot: AnnotationInfo): Annotation = {
+ def makeAnnotation(annot: AnnotationInfo): scala.tools.nsc.doc.model.Annotation = {
val aSym = annot.symbol
- new EntityImpl(aSym, makeTemplate(aSym.owner)) with Annotation {
+ new EntityImpl(aSym, makeTemplate(aSym.owner)) with scala.tools.nsc.doc.model.Annotation {
lazy val annotationClass =
makeTemplate(annot.symbol)
val arguments = { // lazy
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
index 59cdf05957..47eea52095 100644
--- a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
@@ -13,7 +13,7 @@ import scala.collection._
import scala.util.matching.Regex
import scala.annotation.switch
import scala.reflect.internal.util.{NoPosition, Position}
-import language.postfixOps
+import scala.language.postfixOps
/** The comment parser transforms raw comment strings into `Comment` objects.
* Call `parse` to run the parser. Note that the parser is stateless and
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
index 2645d8fd14..fa698afaa6 100644
--- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
@@ -4,7 +4,7 @@ package diagram
import model._
import comment.CommentFactory
-import collection.mutable
+import scala.collection.mutable
// statistics
import html.page.diagram.DiagramStats
diff --git a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
index e31c5eda31..7067daec26 100644
--- a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
@@ -13,7 +13,7 @@ import scala.reflect.internal.util.FakePos
import dependencies._
import io.AbstractFile
-import language.implicitConversions
+import scala.language.implicitConversions
trait BuildManager {
diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
index 6acd6d2382..3de2359ce3 100644
--- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
@@ -257,12 +257,18 @@ trait CompilerControl { self: Global =>
*/
def askForResponse[A](op: () => A): Response[A] = {
val r = new Response[A]
- val ir = scheduler askDoQuickly op
- ir onComplete {
- case Left(result) => r set result
- case Right(exc) => r raise exc
+ if (self.onCompilerThread) {
+ try { r set op() }
+ catch { case exc: Throwable => r raise exc }
+ r
+ } else {
+ val ir = scheduler askDoQuickly op
+ ir onComplete {
+ case Left(result) => r set result
+ case Right(exc) => r raise exc
+ }
+ r
}
- r
}
def onCompilerThread = Thread.currentThread == compileRunner
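The new branch in `askForResponse` runs `op` directly when the caller is already on the compiler thread, instead of handing it to the scheduler (where it could never be picked up, since the compiler thread would be waiting on itself). A self-contained sketch of the same pattern with a plain executor and a toy result box; none of these names are the compiler's real classes:

    import java.util.concurrent.Executors

    // toy stand-in for the interactive compiler's Response
    class Box[A] {
      @volatile var result: Option[Either[A, Throwable]] = None
      def set(a: A): Unit           = result = Some(Left(a))
      def raise(t: Throwable): Unit = result = Some(Right(t))
    }

    object AskDemo {
      private val worker = Executors.newSingleThreadExecutor()
      @volatile private var workerThread: Thread = null
      // record which thread the executor uses (initialisation race ignored for brevity)
      worker.execute(new Runnable { def run(): Unit = workerThread = Thread.currentThread })

      def onWorkerThread: Boolean = Thread.currentThread == workerThread

      def askForResponse[A](op: () => A): Box[A] = {
        val r = new Box[A]
        def compute(): Unit = try r.set(op()) catch { case t: Throwable => r.raise(t) }
        if (onWorkerThread) compute()                                      // run synchronously
        else worker.execute(new Runnable { def run(): Unit = compute() })  // schedule on the worker
        r
      }
    }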
diff --git a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala b/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala
index 4a60211254..a906d1454c 100644
--- a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala
+++ b/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala
@@ -5,7 +5,7 @@
package scala.tools.nsc
package interactive
-import collection.mutable.ArrayBuffer
+import scala.collection.mutable.ArrayBuffer
import scala.reflect.internal.util.Position
trait ContextTrees { self: Global =>
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
index cb2e7d641b..5514983d98 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Global.scala
@@ -20,7 +20,7 @@ import scala.tools.nsc.io.Pickler._
import scala.tools.nsc.typechecker.DivergentImplicit
import scala.annotation.tailrec
import symtab.Flags.{ACCESSOR, PARAMACCESSOR}
-import language.implicitConversions
+import scala.language.implicitConversions
/** The main class of the presentation compiler in an interactive environment such as an IDE
*/
diff --git a/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala b/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala
index f57786578a..104a69897d 100644
--- a/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala
+++ b/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala
@@ -5,7 +5,7 @@
package scala.tools.nsc
package interactive
-import collection.mutable.ArrayBuffer
+import scala.collection.mutable.ArrayBuffer
import scala.reflect.internal.util.Position
import reporters.Reporter
diff --git a/src/compiler/scala/tools/nsc/interactive/Picklers.scala b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
index f4ec03bb47..8ed7a67058 100644
--- a/src/compiler/scala/tools/nsc/interactive/Picklers.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
@@ -13,7 +13,7 @@ import util.EmptyAction
import scala.reflect.internal.util.{Position, RangePosition, NoPosition, OffsetPosition, TransparentPosition}
import io.{Pickler, CondPickler}
import io.Pickler._
-import collection.mutable
+import scala.collection.mutable
import mutable.ListBuffer
trait Picklers { self: Global =>
diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/compiler/scala/tools/nsc/interactive/REPL.scala
index 2d93c77ca4..afac5828e5 100644
--- a/src/compiler/scala/tools/nsc/interactive/REPL.scala
+++ b/src/compiler/scala/tools/nsc/interactive/REPL.scala
@@ -133,50 +133,6 @@ object REPL {
iSourceName
}
- /** Compile instrumented source file
- * @param iSourceName The name of the instrumented source file
- * @param arguments Further argumenrs to pass to the compiler
- * @return Optionallu, if no -d option is given, the virtual directory
- * contained the generated bytecode classes
- def compileInstrumented(iSourceName: String, arguments: List[String]): Option[AbstractFile] = {
- println("compiling "+iSourceName)
- val command = new CompilerCommand(iSourceName :: arguments, reporter.error(scala.reflect.internal.util.NoPosition, _))
- val virtualDirectoryOpt =
- if (arguments contains "-d")
- None
- else {
- val vdir = new VirtualDirectory("(memory)", None)
- command.settings.outputDirs setSingleOutput vdir
- Some(vdir)
- }
- val compiler = new scala.tools.nsc.Global(command.settings, reporter)
- val run = new compiler.Run()
- println("compiling: "+command.files)
- run compile command.files
- virtualDirectoryOpt
- }
-
- /** Run instrumented bytecode file
- * @param vdir Optionally, the virtual directory containing the generated bytecode classes
- * @param iFullName The full name of the generated object
- * @param stripped The contents original source file without any right hand column comments.
- * @return The generated file content containing original source in the left column
- * and outputs in the right column
- */
- def runInstrumented(vdirOpt: Option[AbstractFile], iFullName: String, stripped: Array[Char]): Array[Char] = {
- val defaultClassLoader = getClass.getClassLoader
- val classLoader = vdirOpt match {
- case Some(vdir) => new AbstractFileClassLoader(vdir, defaultClassLoader)
- case None => defaultClassLoader
- }
- println("running "+iFullName)
- val si = new SourceInserter(stripped)
- Executor.execute(iFullName, si, classLoader)
- println("done")
- si.currentContents
- }
- */
-
/** The method for implementing worksheet functionality.
* @param arguments a file name, followed by optional command line arguments that are passed
* to the compiler that processes the instrumented source.
@@ -191,7 +147,7 @@ object REPL {
// strip right hand side comment column and any trailing spaces from all lines
val strippedContents = SourceInserter.stripRight(source.content)
val strippedSource = new BatchSourceFile(source.file, strippedContents)
- println("stripped source = "+strippedSource)
+ println("stripped source = "+strippedSource+":"+strippedContents.mkString)
comp.askReload(List(strippedSource), reloadResult)
comp.askInstrumented(strippedSource, line, instrumentedResult)
using(instrumentedResult) {
diff --git a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
index de6974cbb2..40982c62f0 100644
--- a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
@@ -220,7 +220,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
}
/** Return the set of source files that are invalidated by the given changes. */
- def invalidated(files: Set[AbstractFile], changesOf: collection.Map[Symbol, List[Change]],
+ def invalidated(files: Set[AbstractFile], changesOf: scala.collection.Map[Symbol, List[Change]],
processed: Set[AbstractFile] = Set.empty):
Set[AbstractFile] = {
val buf = new mutable.HashSet[AbstractFile]
diff --git a/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala b/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala
index 0f52258b7e..ca5bdd632f 100644
--- a/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala
@@ -6,7 +6,7 @@ package scala.tools.nsc
package interactive
import scala.reflect.internal.util.{SourceFile, Position, NoPosition}
-import collection.mutable.ArrayBuffer
+import scala.collection.mutable.ArrayBuffer
trait RichCompilationUnits { self: Global =>
diff --git a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala b/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala
index efc393c812..dfbbb6ff88 100644
--- a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala
+++ b/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala
@@ -2,8 +2,9 @@ package scala.tools.nsc
package interactive
import scala.reflect.internal.util.{SourceFile, BatchSourceFile, RangePosition}
-import collection.mutable.ArrayBuffer
-import reflect.internal.Chars.{isLineBreakChar, isWhitespace}
+import scala.collection.mutable.ArrayBuffer
+import scala.reflect.internal.Chars.{isLineBreakChar, isWhitespace}
+import ast.parser.Tokens._
trait ScratchPadMaker { self: Global =>
@@ -11,7 +12,7 @@ trait ScratchPadMaker { self: Global =>
private case class Patch(offset: Int, text: String)
- private class Patcher(contents: Array[Char], endOffset: Int) extends Traverser {
+ private class Patcher(contents: Array[Char], lex: LexicalStructure, endOffset: Int) extends Traverser {
var objectName: String = ""
private val patches = new ArrayBuffer[Patch]
@@ -24,9 +25,13 @@ trait ScratchPadMaker { self: Global =>
"res$"+resNum
}
- private def nameType(name: String, tpe: Type): String = name+": "+tpe
+ private def nameType(name: String, tpe: Type): String = {
+ // if name ends in symbol character, add a space to separate it from the following ':'
+ val pad = if (Character.isLetter(name.last) || Character.isDigit(name.last)) "" else " "
+ name+pad+": "+tpe
+ }
- private def nameType(sym: Symbol): String = nameType(sym.name.toString, sym.tpe)
+ private def nameType(sym: Symbol): String = nameType(sym.name.decoded, sym.tpe)
private def literal(str: String) = "\"\"\""+str+"\"\"\""
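The `pad` added to `nameType` keeps a symbolic name from fusing with the following `:` in worksheet output: without the extra space, a value whose name ends in an operator character would render as, say, `++:: List[Int]`. A standalone sketch of just that check (the real method formats a compiler `Type`, here replaced by a plain string):

    object NameTypeDemo {
      def nameType(name: String, tpe: String): String = {
        // add a space when the name ends in an operator character, so ':' stays readable
        val pad = if (Character.isLetter(name.last) || Character.isDigit(name.last)) "" else " "
        name + pad + ": " + tpe
      }
      def main(args: Array[String]): Unit = {
        println(nameType("count", "Int"))      // count: Int
        println(nameType("++:", "List[Int]"))  // ++: : List[Int]  (not "++:: List[Int]")
      }
    }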
@@ -42,19 +47,19 @@ trait ScratchPadMaker { self: Global =>
/** The position where to insert an instrumentation statement in front of a given statement.
* This is at the latest `stat.pos.start`. But in order not to mess with column numbers
- * in position we try to insert it at the end of the preceding line instead.
- * To be safe, this can be done only if there's only whitespace between that position and
- * statement's start position.
+ * in position we try to insert it at the end of the previous token instead.
+ * Furthermore, `(' tokens have to be skipped because they do not show up
+ * in statement range positions.
*/
- private def instrumentPos(stat: Tree): Int = {
- var start = stat.pos.start
- while (start > 0 && isWhitespace(contents(start - 1))) start -= 1
- if (start > 0 && isLineBreakChar(contents(start - 1))) start -= 1
- start
+ private def instrumentPos(start: Int): Int = {
+ val (prevToken, prevStart, prevEnd) = lex.locate(start - 1)
+ if (prevStart >= start) start
+ else if (prevToken == LPAREN) instrumentPos(prevStart)
+ else prevEnd
}
private def addSkip(stat: Tree): Unit = {
- val ipos = instrumentPos(stat)
+ val ipos = instrumentPos(stat.pos.start)
if (stat.pos.start > skipped) applyPendingPatches(ipos)
if (stat.pos.start >= endOffset)
patches += Patch(ipos, ";$stop()")
@@ -98,7 +103,8 @@ trait ScratchPadMaker { self: Global =>
} else {
val resName = nextRes()
val dispResName = resName filter ('$' != _)
- patches += Patch(stat.pos.start, "val " + resName + " = ")
+ val offset = instrumentPos(stat.pos.start)
+ patches += Patch(offset, "val " + resName + " = ")
addSandbox(stat)
toPrint += resultString(nameType(dispResName, stat.tpe), resName)
}
@@ -113,9 +119,11 @@ trait ScratchPadMaker { self: Global =>
val topLevel = objectName.isEmpty
if (topLevel) objectName = tree.symbol.fullName
body foreach traverseStat
- applyPendingPatches(skipped)
- if (topLevel)
- patches += Patch(skipped, epilogue)
+ if (skipped != 0) { // don't issue prologue and epilogue if there are no instrumented statements
+ applyPendingPatches(skipped)
+ if (topLevel)
+ patches += Patch(skipped, epilogue)
+ }
case _ =>
}
@@ -144,6 +152,33 @@ trait ScratchPadMaker { self: Global =>
}
}
+ class LexicalStructure(source: SourceFile) {
+ val token = new ArrayBuffer[Int]
+ val startOffset = new ArrayBuffer[Int]
+ val endOffset = new ArrayBuffer[Int]
+ private val scanner = new syntaxAnalyzer.UnitScanner(new CompilationUnit(source))
+ scanner.init()
+ while (scanner.token != EOF) {
+ startOffset += scanner.offset
+ token += scanner.token
+ scanner.nextToken
+ endOffset += scanner.lastOffset
+ }
+
+ /** @return token that starts before or at offset, its startOffset, its endOffset
+ */
+ def locate(offset: Int): (Int, Int, Int) = {
+ var lo = 0
+ var hi = token.length - 1
+ while (lo < hi) {
+ val mid = (lo + hi + 1) / 2
+ if (startOffset(mid) <= offset) lo = mid
+ else hi = mid - 1
+ }
+ (token(lo), startOffset(lo), endOffset(lo))
+ }
+ }
+
/** Compute an instrumented version of a sourcefile.
* @param source The given sourcefile.
* @param line The line up to which results should be printed, -1 = whole document.
@@ -156,7 +191,7 @@ trait ScratchPadMaker { self: Global =>
protected def instrument(source: SourceFile, line: Int): (String, Array[Char]) = {
val tree = typedTree(source, true)
val endOffset = if (line < 0) source.length else source.lineToOffset(line + 1)
- val patcher = new Patcher(source.content, endOffset)
+ val patcher = new Patcher(source.content, new LexicalStructure(source), endOffset)
patcher.traverse(tree)
(patcher.objectName, patcher.result)
}
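`LexicalStructure.locate` is a binary search over the sorted token start offsets: it returns the last token that starts at or before the given offset, which the new `instrumentPos` then uses to back up to the end of the previous token (skipping `(`). The search in isolation, over a plain array with made-up offsets:

    object LocateDemo {
      // index of the last element of `starts` that is <= offset (`starts` is sorted ascending)
      def locate(starts: Array[Int], offset: Int): Int = {
        var lo = 0
        var hi = starts.length - 1
        while (lo < hi) {
          val mid = (lo + hi + 1) / 2
          if (starts(mid) <= offset) lo = mid
          else hi = mid - 1
        }
        lo
      }
      def main(args: Array[String]): Unit = {
        val starts = Array(0, 4, 10, 15)   // hypothetical token start offsets
        println(locate(starts, 12))        // 2: the token starting at 10
        println(locate(starts, 4))         // 1: the token starting at 4
        println(locate(starts, 0))         // 0
      }
    }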
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
index deeb398d39..cb46c0fdca 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
@@ -15,7 +15,7 @@ import scala.annotation.migration
import scala.reflect.internal.util.Position
import scala.reflect.internal.util.SourceFile
-import collection.mutable.ListBuffer
+import scala.collection.mutable.ListBuffer
/** A base class for writing interactive compiler tests.
*
@@ -127,4 +127,4 @@ abstract class InteractiveTest
// the presentation compiler
sys.exit(0)
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala b/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala
index 5270b1971a..aadffe2da5 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala
@@ -9,7 +9,7 @@ package tests
import scala.reflect.internal.util._
import reporters._
import io.AbstractFile
-import collection.mutable.ArrayBuffer
+import scala.collection.mutable.ArrayBuffer
class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) {
@@ -168,7 +168,7 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) {
}
case class ErrorTrace(
- sfidx: Int, changes: Seq[Change], infos: collection.Set[reporter.Info], content: Array[Char]) {
+ sfidx: Int, changes: Seq[Change], infos: scala.collection.Set[reporter.Info], content: Array[Char]) {
override def toString =
"Sourcefile: "+inputs(sfidx)+
"\nChanges:\n "+changes.mkString("\n ")+
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
index 18a8eb5fc3..b5ae5f2d75 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
@@ -36,7 +36,7 @@ trait PresentationCompilerRequestsWorkingMode extends TestResources {
/** Return all positions of the given str in the given source file. */
private def positionsOf(source: SourceFile, str: String): Seq[Position] = {
- val buf = new collection.mutable.ListBuffer[Position]
+ val buf = new scala.collection.mutable.ListBuffer[Position]
var pos = source.content.indexOfSlice(str)
while (pos >= 0) {
buf += source.position(pos - 1) // we need the position before the first character of this marker
@@ -44,7 +44,7 @@ trait PresentationCompilerRequestsWorkingMode extends TestResources {
}
buf.toList
}
-
+
private def withResponse[T](pos: Position, response: Response[T])(f: (Position, T) => Unit) {
/** Return the filename:line:col version of this position. */
def showPos(pos: Position): String =
@@ -59,4 +59,4 @@ trait PresentationCompilerRequestsWorkingMode extends TestResources {
println("ERROR: " + r)
}
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala
index b5ea6ab7ce..ba1722382b 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala
@@ -3,7 +3,7 @@ package scala.tools.nsc.interactive.tests.core
case class DuplicateTestMarker(msg: String) extends Exception(msg)
object TestMarker {
- import collection.mutable.Map
+ import scala.collection.mutable.Map
private val markers: Map[String, TestMarker] = Map.empty
private def checkForDuplicate(marker: TestMarker) {
@@ -24,4 +24,4 @@ object CompletionMarker extends TestMarker("/*!*/")
object TypeMarker extends TestMarker("/*?*/")
-object HyperlinkMarker extends TestMarker("/*#*/") \ No newline at end of file
+object HyperlinkMarker extends TestMarker("/*#*/")
diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
index abf326e746..5475410a84 100644
--- a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
@@ -77,7 +77,7 @@ object CompletionAware {
/** Convenience factories.
*/
def apply(terms: () => List[String]): CompletionAware = apply(terms, _ => None)
- def apply(map: collection.Map[String, CompletionAware]): CompletionAware =
+ def apply(map: scala.collection.Map[String, CompletionAware]): CompletionAware =
apply(() => map.keys.toList, map.get _)
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
index f49e8d6b59..0f5777d260 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
@@ -37,7 +37,7 @@ trait ExprTyper {
}
/** Parse a line into a sequence of trees. Returns None if the input is incomplete. */
- def parse(line: String): Option[List[Tree]] = {
+ def parse(line: String): Option[List[Tree]] = debugging(s"""parse("$line")""") {
var isIncomplete = false
reporter.withIncompleteHandler((_, _) => isIncomplete = true) {
val trees = codeParser.stmts(line)
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
index 4cc58d3f25..864f9bd073 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
@@ -23,7 +23,7 @@ import scala.reflect.NameTransformer._
import util.ScalaClassLoader
import ScalaClassLoader._
import scala.tools.util._
-import language.{implicitConversions, existentials}
+import scala.language.{implicitConversions, existentials}
import scala.reflect.{ClassTag, classTag}
import scala.tools.reflect.StdRuntimeTags._
@@ -839,7 +839,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
addThunk({
import scala.tools.nsc.io._
import Properties.userHome
- import compat.Platform.EOL
+ import scala.compat.Platform.EOL
val autorun = replProps.replAutorunCode.option flatMap (f => io.File(f).safeSlurp())
if (autorun.isDefined) intp.quietRun(autorun.get)
})
diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
index 96d7dadbd7..e7c56718f7 100644
--- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
@@ -25,7 +25,7 @@ import scala.util.control.Exception.{ ultimately }
import IMain._
import java.util.concurrent.Future
import typechecker.Analyzer
-import language.implicitConversions
+import scala.language.implicitConversions
import scala.reflect.runtime.{ universe => ru }
import scala.reflect.{ ClassTag, classTag }
import scala.tools.reflect.StdRuntimeTags._
@@ -387,8 +387,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
newSym <- req.definedSymbols get name
oldSym <- oldReq.definedSymbols get name.companionName
} {
- replwarn("warning: previously defined %s is not a companion to %s.".format(
- stripString("" + oldSym), stripString("" + newSym)))
+ afterTyper(replwarn(s"warning: previously defined $oldSym is not a companion to $newSym."))
replwarn("Companions must be defined together; you may wish to use :paste mode for this.")
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
index edb95f7526..bab3a1e506 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
@@ -9,7 +9,7 @@ package interpreter
import scala.tools.jline._
import scala.tools.jline.console.completer._
import Completion._
-import collection.mutable.ListBuffer
+import scala.collection.mutable.ListBuffer
// REPL completor - queries supplied interpreter for valid
// completions based on current contents of buffer.
diff --git a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala b/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala
index 2dc394a081..f0e643d572 100644
--- a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala
@@ -6,12 +6,12 @@
package scala.tools.nsc
package interpreter
-import collection.{ mutable, immutable }
+import scala.collection.{ mutable, immutable }
import mutable.ListBuffer
-import language.implicitConversions
+import scala.language.implicitConversions
class ProcessResult(val line: String) {
- import sys.process._
+ import scala.sys.process._
private val buffer = new ListBuffer[String]
val builder = Process(line)
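`ProcessResult` builds on `scala.sys.process`: `Process(line)` yields a `ProcessBuilder`, whose output the class collects into its buffer (in code outside this hunk). A minimal usage sketch of that API; the `echo` command is only an example:

    import scala.collection.mutable.ListBuffer
    import scala.sys.process._

    object ProcessDemo {
      def main(args: Array[String]): Unit = {
        val buffer = new ListBuffer[String]
        // run the command, collecting stdout and stderr line by line
        val exitCode = Process("echo hello") ! ProcessLogger(buffer += _, buffer += _)
        println("exit=" + exitCode + " output=" + buffer.mkString(","))
      }
    }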
diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
index c041f02859..bf7204c754 100644
--- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
@@ -10,7 +10,7 @@ import scala.collection.{ mutable, immutable }
import scala.PartialFunction.cond
import scala.reflect.internal.Chars
import scala.reflect.internal.Flags._
-import language.implicitConversions
+import scala.language.implicitConversions
trait MemberHandlers {
val intp: IMain
diff --git a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala b/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala
index a2b42aeefc..3203e2ba49 100644
--- a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala
@@ -7,7 +7,7 @@ package scala.tools.nsc
package interpreter
import NamedParam._
-import language.implicitConversions
+import scala.language.implicitConversions
import scala.reflect.runtime.{universe => ru}
import scala.reflect.{ClassTag, classTag}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Phased.scala b/src/compiler/scala/tools/nsc/interpreter/Phased.scala
index 9124eace05..66d748a9f1 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Phased.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Phased.scala
@@ -7,7 +7,7 @@ package scala.tools.nsc
package interpreter
import scala.collection.{ mutable, immutable }
-import language.implicitConversions
+import scala.language.implicitConversions
/** Mix this into an object and use it as a phasing
* swiss army knife.
diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/compiler/scala/tools/nsc/interpreter/Power.scala
index 4ba0c59112..244c04bdf4 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Power.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Power.scala
@@ -13,7 +13,7 @@ import session.{ History }
import scala.io.Codec
import java.net.{ URL, MalformedURLException }
import io.{ Path }
-import language.implicitConversions
+import scala.language.implicitConversions
import scala.reflect.runtime.{universe => ru}
import scala.reflect.{ClassTag, classTag}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
index 5b8e4c3d92..f27c4a8123 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc
package interpreter
-import language.implicitConversions
+import scala.language.implicitConversions
import scala.reflect.base.{Universe => BaseUniverse}
import scala.reflect.runtime.{universe => ru}
@@ -65,7 +65,7 @@ object ReplVals {
* I have this forwarder which widens the type and then cast the result back
* to the dependent type.
*/
- def compilerTypeFromTag(t: BaseUniverse # AbsTypeTag[_]): Global#Type =
+ def compilerTypeFromTag(t: BaseUniverse # WeakTypeTag[_]): Global#Type =
definitions.compilerTypeFromTag(t)
class AppliedTypeFromTags(sym: Symbol) {
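`AbsTypeTag` was renamed to `WeakTypeTag` in the final 2.10 reflection API; a weak tag can be materialised even when the type contains abstract, unresolved parts. Basic usage of the renamed tag, independent of the REPL internals above:

    import scala.reflect.runtime.{universe => ru}

    object TagDemo {
      // a WeakTypeTag is available even for abstract types; for concrete ones it
      // behaves like an ordinary TypeTag
      def describe[T](implicit tag: ru.WeakTypeTag[T]): String = tag.tpe.toString
      def main(args: Array[String]): Unit =
        println(describe[List[Int]])   // prints "List[Int]"
    }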
diff --git a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala
index 9dcc4006a3..5642566cf7 100644
--- a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala
@@ -14,7 +14,7 @@ import scala.reflect.runtime.{universe => ru}
import scala.reflect.{ClassTag, classTag}
import typechecker.DestructureTypes
import scala.reflect.internal.util.StringOps.ojoin
-import language.implicitConversions
+import scala.language.implicitConversions
/** A more principled system for turning types into strings.
*/
@@ -256,4 +256,4 @@ trait TypeStrings {
)
}
-object TypeStrings extends TypeStrings { } \ No newline at end of file
+object TypeStrings extends TypeStrings { }
diff --git a/src/compiler/scala/tools/nsc/interpreter/package.scala b/src/compiler/scala/tools/nsc/interpreter/package.scala
index 98129aded8..6a3a2a38ae 100644
--- a/src/compiler/scala/tools/nsc/interpreter/package.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/package.scala
@@ -5,7 +5,7 @@
package scala.tools.nsc
-import language.implicitConversions
+import scala.language.implicitConversions
/** The main REPL related classes and values are as follows.
* In addition to standard compiler classes Global and Settings, there are:
@@ -35,10 +35,10 @@ package object interpreter extends ReplConfig with ReplStrings {
val IR = Results
- implicit def postfixOps = language.postfixOps // make all postfix ops in this package compile without warning
+ implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
private[interpreter] implicit def javaCharSeqCollectionToScala(xs: JCollection[_ <: CharSequence]): List[String] = {
- import collection.JavaConverters._
+ import scala.collection.JavaConverters._
xs.asScala.toList map ("" + _)
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/package.scala b/src/compiler/scala/tools/nsc/interpreter/session/package.scala
index c1cd599941..58232e6b9a 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/package.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/session/package.scala
@@ -5,7 +5,7 @@
package scala.tools.nsc
package interpreter
-import language.implicitConversions
+import scala.language.implicitConversions
/** Files having to do with the state of a repl session:
* lines of text entered, types and terms defined, etc.
diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala
index 12ba3e4bd7..f66f3daa32 100644
--- a/src/compiler/scala/tools/nsc/io/Jar.scala
+++ b/src/compiler/scala/tools/nsc/io/Jar.scala
@@ -8,10 +8,10 @@ package io
import java.io.{ InputStream, OutputStream, IOException, FileNotFoundException, FileInputStream, DataOutputStream }
import java.util.jar._
-import collection.JavaConverters._
+import scala.collection.JavaConverters._
import Attributes.Name
import util.ClassPath
-import language.implicitConversions
+import scala.language.implicitConversions
// Attributes.Name instances:
//
diff --git a/src/compiler/scala/tools/nsc/io/Pickler.scala b/src/compiler/scala/tools/nsc/io/Pickler.scala
index 48361cd157..b03a921e87 100644
--- a/src/compiler/scala/tools/nsc/io/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/io/Pickler.scala
@@ -1,10 +1,10 @@
package scala.tools.nsc.io
-import annotation.unchecked
+import scala.annotation.unchecked
import Lexer._
import java.io.Writer
-import language.implicitConversions
-import reflect.ClassTag
+import scala.language.implicitConversions
+import scala.reflect.ClassTag
/** An abstract class for writing and reading Scala objects to and
* from a legible representation. The presentation follows this grammar:
diff --git a/src/compiler/scala/tools/nsc/io/package.scala b/src/compiler/scala/tools/nsc/io/package.scala
index 3c4f004198..775ad6bde0 100644
--- a/src/compiler/scala/tools/nsc/io/package.scala
+++ b/src/compiler/scala/tools/nsc/io/package.scala
@@ -8,10 +8,10 @@ package scala.tools.nsc
import java.util.concurrent.{ Future, Callable }
import java.util.{ Timer, TimerTask }
import java.util.jar.{ Attributes }
-import language.implicitConversions
+import scala.language.implicitConversions
package object io {
- implicit def postfixOps = language.postfixOps // make all postfix ops in this package compile without warning
+ implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
type JManifest = java.util.jar.Manifest
type JFile = java.io.File
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index c5da8822d5..a30ae1cb36 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -12,7 +12,7 @@ import scala.reflect.internal.util.OffsetPosition
import scala.collection.mutable.ListBuffer
import symtab.Flags
import JavaTokens._
-import language.implicitConversions
+import scala.language.implicitConversions
trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val global : Global
diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
index 0367119547..7aeae485d0 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
@@ -11,7 +11,7 @@ import scala.reflect.internal.util._
import scala.reflect.internal.Chars._
import JavaTokens._
import scala.annotation.switch
-import language.implicitConversions
+import scala.language.implicitConversions
// Todo merge these better with Scanners
trait JavaScanners extends ast.parser.ScannersCommon {
diff --git a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
index 6d8c80d1d0..be8f1e3d9e 100644
--- a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
+++ b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
@@ -9,8 +9,8 @@ package matching
import transform.ExplicitOuter
import ast.{ Printers, Trees }
import java.io.{ StringWriter, PrintWriter }
-import annotation.elidable
-import language.postfixOps
+import scala.annotation.elidable
+import scala.language.postfixOps
/** Ancillary bits of ParallelMatching which are better off
* out of the way.
diff --git a/src/compiler/scala/tools/nsc/matching/Matrix.scala b/src/compiler/scala/tools/nsc/matching/Matrix.scala
index 1cf4bccb40..93e936fe1f 100644
--- a/src/compiler/scala/tools/nsc/matching/Matrix.scala
+++ b/src/compiler/scala/tools/nsc/matching/Matrix.scala
@@ -9,7 +9,7 @@ package matching
import transform.ExplicitOuter
import symtab.Flags
import scala.collection.mutable
-import language.implicitConversions
+import scala.language.implicitConversions
trait Matrix extends MatrixAdditions {
self: ExplicitOuter with ParallelMatching =>
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
index 71deb2d356..1d21e4952f 100644
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
@@ -13,8 +13,8 @@ import scala.reflect.internal.util.Position
import transform.ExplicitOuter
import symtab.Flags
import mutable.ListBuffer
-import annotation.elidable
-import language.postfixOps
+import scala.annotation.elidable
+import scala.language.postfixOps
trait ParallelMatching extends ast.TreeDSL
with MatchSupport
diff --git a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
index 8e043613b8..ee96f15f40 100644
--- a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
+++ b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
@@ -8,7 +8,7 @@ package matching
import transform.ExplicitOuter
import PartialFunction._
-import language.postfixOps
+import scala.language.postfixOps
trait PatternBindings extends ast.TreeDSL
{
diff --git a/src/compiler/scala/tools/nsc/scratchpad/CommentOutputStream.scala b/src/compiler/scala/tools/nsc/scratchpad/CommentOutputStream.scala
deleted file mode 100644
index 92ccd79df9..0000000000
--- a/src/compiler/scala/tools/nsc/scratchpad/CommentOutputStream.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-package scala.tools.nsc.scratchpad
-
-import java.io.OutputStream
-
-class CommentOutputStream(out: CommentWriter, encoding: String = "") extends OutputStream {
-
- override def write(bs: Array[Byte]) =
- out.write(if (encoding.isEmpty) new String(bs) else new String(bs, encoding))
-
- override def write(bs: Array[Byte], off: Int, len: Int) =
- out.write(if (encoding.isEmpty) new String(bs, off, len) else new String(bs, off, len, encoding))
-
- override def write(ch: Int) =
- write(Array(ch.toByte))
-
- override def close() = out.close()
- override def flush() = out.flush()
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/scratchpad/CommentWriter.scala b/src/compiler/scala/tools/nsc/scratchpad/CommentWriter.scala
deleted file mode 100644
index eb8880e437..0000000000
--- a/src/compiler/scala/tools/nsc/scratchpad/CommentWriter.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-package scala.tools.nsc.scratchpad
-
-import java.io.Writer
-import reflect.internal.Chars._
-
-
-class CommentWriter(underlying: SourceInserter, startCol: Int = 40, endCol: Int = 152) extends Writer {
-
- private def rightCol(marker: String) = {
- while (underlying.column < startCol) underlying.write(' ')
- underlying.write(marker)
- }
-
- private var lastWasNL = false
-
- private def writeChar(ch: Char) = {
- if (underlying.column >= endCol) {
- underlying.write('\n'); rightCol("//| ")
- }
- if (underlying.column < startCol) rightCol("//> ")
- underlying.write(ch)
- lastWasNL = isLineBreakChar(ch)
- }
-
- override def write(chs: Array[Char], off: Int, len: Int) = {
- for (i <- off until off + len) writeChar(chs(i))
- flush()
- }
-
- def skip(len: Int) {
- if (lastWasNL) {
- underlying.backspace()
- lastWasNL = false
- }
- underlying.skip(len)
- if (underlying.column >= startCol) underlying.write('\n')
- }
-
- override def close() = underlying.close()
- override def flush() = underlying.flush()
-}
-
diff --git a/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala b/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala
index 67ff916b11..f7ad39bd95 100644
--- a/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala
+++ b/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala
@@ -5,7 +5,7 @@ import java.io.{FileInputStream, InputStreamReader, IOException}
import scala.runtime.ScalaRunTime.stringOf
import java.lang.reflect.InvocationTargetException
import scala.reflect.runtime.ReflectionUtils._
-import collection.mutable.ArrayBuffer
+import scala.collection.mutable.ArrayBuffer
class Mixer {
diff --git a/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala b/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala
index 42a35dc642..5eeab53fca 100644
--- a/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala
+++ b/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala
@@ -3,8 +3,7 @@ package scratchpad
import java.io.Writer
import scala.reflect.internal.util.SourceFile
-
-import reflect.internal.Chars._
+import scala.reflect.internal.Chars._
object SourceInserter {
def stripRight(cs: Array[Char]): Array[Char] = {
@@ -21,92 +20,3 @@ object SourceInserter {
(prefixes mkString "\n").toArray
}
}
-class SourceInserter(contents: Array[Char], start: Int = 0, tabInc: Int = 8) extends Writer {
-
- private var buf = contents
- private var offset = start
- private var hilen = contents.length
-
- def length = offset + hilen
-
- private def currentColumn: Int = {
- var i = offset
- while (i > 0 && !isLineBreakChar(buf(i - 1))) i -= 1
- var col = 0
- while (i < offset) {
- col = if (buf(i) == '\t') (col + tabInc) / tabInc * tabInc else col + 1
- i += 1
- }
- col
- }
-
- private var col = currentColumn
-
- def column = synchronized { col }
-
- private def addCapacity(n: Int) = {
- val newlength = length + n
- while (newlength > buf.length) {
- val buf1 = Array.ofDim[Char](buf.length * 2)
- Array.copy(buf, 0, buf1, 0, offset)
- Array.copy(buf, buf.length - hilen, buf1, buf1.length - hilen, hilen)
- buf = buf1
- }
- }
-
- private def insertChar(ch: Char) = {
-// Console.err.print("["+ch+"]")
- buf(offset) = ch
- offset += 1
- ch match {
- case LF => col = 0
- case '\t' => col = (col + tabInc) / tabInc * tabInc
- case _ => col += 1
- }
- }
-
- override def write(ch: Int) = synchronized {
- addCapacity(1)
- insertChar(ch.toChar)
- }
-
- override def write(chs: Array[Char], off: Int, len: Int) = synchronized {
- addCapacity(len)
- for (i <- off until off + len) insertChar(chs(i))
- }
-
- override def close() {
- }
-
- override def flush() {
- // signal buffer change
- }
-
- def currentContents = synchronized {
- if (length == buf.length) buf
- else {
- val res = Array.ofDim[Char](length)
- Array.copy(buf, 0, res, 0, offset)
- Array.copy(buf, buf.length - hilen, res, offset, hilen)
- res
- }
- }
-
- def backspace() = synchronized {
- offset -= 1
- if (offset > 0 && buf(offset) == LF && buf(offset - 1) == CR) offset -=1
- }
-
- def currentChar = synchronized {
- buf(buf.length - hilen)
- }
-
- def skip(len: Int) = synchronized {
- for (i <- 0 until len) {
- val ch = currentChar
- hilen -= 1
- insertChar(ch)
- }
- }
-}
-
diff --git a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
index 120ada965a..78b56a8596 100644
--- a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
@@ -15,7 +15,7 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
type Setting <: AbsSetting // Fix to the concrete Setting type
type ResultOfTryToSet // List[String] in mutable, (Settings, List[String]) in immutable
def errorFn: String => Unit
- protected def allSettings: collection.Set[Setting]
+ protected def allSettings: scala.collection.Set[Setting]
// settings minus internal usage settings
def visibleSettings = allSettings filterNot (_.isInternalOnly)
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index 116eed0f31..2ff81ae603 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -536,7 +536,7 @@ class MutableSettings(val errorFn: String => Unit)
}
override def tryToSetColon(args: List[String]) = tryToSet(args)
override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList)
- def unparse: List[String] = name :: value
+ def unparse: List[String] = value map (name + ":" + _)
withHelpSyntax(name + ":<" + arg + ">")
}
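The old `unparse` returned the setting name and its arguments as separate strings (`name :: value`), which does not round-trip through the `name:<arg>` syntax advertised by `withHelpSyntax`; the new version emits one `name:value` string per argument. The difference, with a made-up multi-string setting:

    object UnparseDemo {
      val name  = "-Xplugin"              // hypothetical multi-string setting
      val value = List("a.jar", "b.jar")
      def main(args: Array[String]): Unit = {
        println(name :: value)              // List(-Xplugin, a.jar, b.jar)            -- old behaviour
        println(value map (name + ":" + _)) // List(-Xplugin:a.jar, -Xplugin:b.jar)    -- new behaviour
      }
    }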
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 19392ec23a..3ff7af791b 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -8,10 +8,10 @@ package scala.tools
package nsc
package settings
-import annotation.elidable
+import scala.annotation.elidable
import scala.tools.util.PathResolver.Defaults
import scala.collection.mutable
-import language.{implicitConversions, existentials}
+import scala.language.{implicitConversions, existentials}
trait ScalaSettings extends AbsScalaSettings
with StandardScalaSettings
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 8e77f8b6f4..f7c3a55954 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -10,9 +10,9 @@ import java.io.IOException
import scala.compat.Platform.currentTime
import scala.tools.nsc.util.{ ClassPath }
import classfile.ClassfileParser
-import reflect.internal.Flags._
-import reflect.internal.MissingRequirementError
-import reflect.internal.util.Statistics
+import scala.reflect.internal.Flags._
+import scala.reflect.internal.MissingRequirementError
+import scala.reflect.internal.util.Statistics
import scala.tools.nsc.io.{ AbstractFile, MsilFile }
/** This class ...
@@ -300,6 +300,6 @@ abstract class SymbolLoaders {
}
object SymbolLoadersStats {
- import reflect.internal.TypesStats.typerNanos
+ import scala.reflect.internal.TypesStats.typerNanos
val classReadNanos = Statistics.newSubTimer ("time classfilereading", typerNanos)
}
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
index c6bd236e8a..7e2741f6bc 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
@@ -6,4 +6,4 @@
package scala.tools.nsc
package symtab
-abstract class SymbolTable extends reflect.internal.SymbolTable \ No newline at end of file
+abstract class SymbolTable extends scala.reflect.internal.SymbolTable
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
index c596eb014a..d9d25bf95a 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
@@ -7,8 +7,8 @@ package scala.tools.nsc
package symtab
import scala.collection.{ mutable, immutable }
-import language.implicitConversions
-import language.postfixOps
+import scala.language.implicitConversions
+import scala.language.postfixOps
/** Printing the symbol graph (for those symbols attached to an AST node)
* after each phase.
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index fcd5e369f5..903b3095de 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -1164,7 +1164,7 @@ abstract class ClassfileParser {
originalName + " in " + outerName + "(" + externalName +")"
}
- object innerClasses extends collection.mutable.HashMap[Name, InnerClassEntry] {
+ object innerClasses extends scala.collection.mutable.HashMap[Name, InnerClassEntry] {
/** Return the Symbol of the top level class enclosing `name`,
* or 'name's symbol if no entry found for `name`.
*/
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index 437a5e1434..175c322786 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -37,7 +37,7 @@ abstract class ICodeReader extends ClassfileParser {
cls.info // ensure accurate type information
isScalaModule = cls.isModule && !cls.isJavaDefined
- log("Reading class: " + cls + " isScalaModule?: " + isScalaModule)
+ log("ICodeReader reading " + cls)
val name = cls.javaClassName
classPath.findSourceFile(name) match {
@@ -99,11 +99,9 @@ abstract class ICodeReader extends ClassfileParser {
if (sym == NoSymbol)
sym = owner.info.findMember(newTermName(name + nme.LOCAL_SUFFIX_STRING), 0, 0, false).suchThat(_.tpe =:= tpe)
if (sym == NoSymbol) {
- log("Could not find symbol for " + name + ": " + tpe)
- log(owner.info.member(name).tpe + " : " + tpe)
sym = if (field) owner.newValue(name, owner.pos, toScalaFieldFlags(jflags)) else dummySym
sym setInfoAndEnter tpe
- log("added " + sym + ": " + sym.tpe)
+ log(s"ICodeReader could not locate ${name.decode} in $owner. Created ${sym.defString}.")
}
(jflags, sym)
}
@@ -172,10 +170,7 @@ abstract class ICodeReader extends ClassfileParser {
}
else if (nme.isModuleName(name)) {
val strippedName = nme.stripModuleSuffix(name)
- val sym = forceMangledName(newTermName(strippedName.decode), true)
-
- if (sym == NoSymbol) rootMirror.getModule(strippedName)
- else sym
+ forceMangledName(newTermName(strippedName.decode), true) orElse rootMirror.getModule(strippedName)
}
else {
forceMangledName(name, false)
@@ -956,7 +951,7 @@ abstract class ICodeReader extends ClassfileParser {
case None =>
checkValidIndex
val l = freshLocal(idx, kind, false)
- log("Added new local for idx " + idx + ": " + kind)
+ debuglog("Added new local for idx " + idx + ": " + kind)
locals += (idx -> List((l, kind)))
l
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index 3c34cf1c80..29b238c4cb 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -69,7 +69,11 @@ abstract class Pickler extends SubComponent {
}
if (!t.isDef && t.hasSymbol && t.symbol.isTermMacro) {
- unit.error(t.pos, "macro has not been expanded")
+ unit.error(t.pos, t.symbol.typeParams.length match {
+ case 0 => "macro has not been expanded"
+ case 1 => "type parameter not specified"
+ case _ => "type parameters not specified"
+ })
return
}
}
@@ -512,7 +516,7 @@ abstract class Pickler extends SubComponent {
private def writeName(name: Name) {
ensureCapacity(name.length * 3)
val utfBytes = Codec toUTF8 name.toString
- compat.Platform.arraycopy(utfBytes, 0, bytes, writeIndex, utfBytes.length)
+ scala.compat.Platform.arraycopy(utfBytes, 0, bytes, writeIndex, utfBytes.length)
writeIndex += utfBytes.length
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/package.scala b/src/compiler/scala/tools/nsc/symtab/classfile/package.scala
index fe66c515de..1f9a823bb4 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/package.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/package.scala
@@ -2,6 +2,6 @@ package scala.tools.nsc.symtab
package object classfile {
- val ClassfileConstants = reflect.internal.ClassfileConstants
+ val ClassfileConstants = scala.reflect.internal.ClassfileConstants
}
diff --git a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
index 5e52415ab2..18b95ba191 100644
--- a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
@@ -12,7 +12,7 @@ import ch.epfl.lamp.compiler.msil.{Type => MSILType, Attribute => MSILAttribute,
import scala.collection.{ mutable, immutable }
import scala.reflect.internal.pickling.UnPickler
import ch.epfl.lamp.compiler.msil.Type.TMVarUsage
-import language.implicitConversions
+import scala.language.implicitConversions
/**
* @author Nikolay Mihaylov
diff --git a/src/compiler/scala/tools/nsc/symtab/package.scala b/src/compiler/scala/tools/nsc/symtab/package.scala
index 1cf0d2c2ae..0e6719f225 100644
--- a/src/compiler/scala/tools/nsc/symtab/package.scala
+++ b/src/compiler/scala/tools/nsc/symtab/package.scala
@@ -2,6 +2,6 @@ package scala.tools.nsc
package object symtab {
- val Flags = reflect.internal.Flags
+ val Flags = scala.reflect.internal.Flags
}
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index a8cdee7154..ab0a8756a4 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -9,7 +9,7 @@ package transform
import symtab._
import Flags._
import scala.collection.{ mutable, immutable }
-import collection.mutable.ListBuffer
+import scala.collection.mutable.ListBuffer
abstract class AddInterfaces extends InfoTransform { self: Erasure =>
import global._ // the global environment
@@ -79,12 +79,11 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
// -optimise and not otherwise, but the classpath can use arbitrary
// logic so the classpath must be queried.
if (classPath.context.isValidName(implName + ".class")) {
- log("unlinking impl class " + implSym)
iface.owner.info.decls unlink implSym
NoSymbol
}
else {
- log("not unlinking existing " + implSym + " as the impl class is not visible on the classpath.")
+ log(s"not unlinking $iface's existing implClass ${implSym.name} because it is not on the classpath.")
implSym
}
}
@@ -113,9 +112,10 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
iface.info
implClassMap.getOrElse(iface, atPhase(implClassPhase) {
- log("Creating implClass for " + iface)
- if (iface.implClass ne NoSymbol)
- log("%s.implClass already exists: %s".format(iface, iface.implClass))
+ if (iface.implClass eq NoSymbol)
+ debuglog(s"${iface.fullLocationString} has no implClass yet, creating it now.")
+ else
+ log(s"${iface.fullLocationString} impl class is ${iface.implClass.nameString}")
newImplClass(iface)
})
@@ -137,7 +137,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
* given the decls ifaceDecls of its interface.
*/
private def implDecls(implClass: Symbol, ifaceDecls: Scope): Scope = {
- log("LazyImplClassType calculating decls for " + implClass)
+ debuglog("LazyImplClassType calculating decls for " + implClass)
val decls = newScope
if ((ifaceDecls lookup nme.MIXIN_CONSTRUCTOR) == NoSymbol) {
@@ -152,16 +152,16 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
for (sym <- ifaceDecls) {
if (isInterfaceMember(sym)) {
if (needsImplMethod(sym)) {
- log("Cloning " + sym + " for implementation method in " + implClass)
val clone = sym.cloneSymbol(implClass).resetFlag(lateDEFERRED)
if (currentRun.compiles(implClass)) implMethodMap(sym) = clone
decls enter clone
sym setFlag lateDEFERRED
+ if (!sym.isSpecialized)
+ log(s"Cloned ${sym.name} from ${sym.owner} into implClass ${implClass.fullName}")
}
- else log(sym + " needs no implementation method in " + implClass)
}
else {
- log("Destructively modifying owner of %s from %s to %s".format(sym, sym.owner, implClass))
+ log(s"Destructively modifying owner of $sym from ${sym.owner} to $implClass")
sym.owner = implClass
// note: OK to destructively modify the owner here,
// because symbol will not be accessible from outside the sourcefile.
@@ -174,7 +174,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
}
override def complete(implSym: Symbol) {
- log("LazyImplClassType completing " + implSym)
+ debuglog("LazyImplClassType completing " + implSym)
/** If `tp` refers to a non-interface trait, return a
* reference to its implementation class. Otherwise return `tp`.
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index 570704f049..fa7a53f888 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -9,7 +9,7 @@ package transform
import symtab._
import Flags._
import scala.collection._
-import language.postfixOps
+import scala.language.postfixOps
abstract class CleanUp extends Transform with ast.TreeDSL {
import global._
@@ -23,12 +23,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
new CleanUpTransformer(unit)
class CleanUpTransformer(unit: CompilationUnit) extends Transformer {
- private val newStaticMembers = mutable.Buffer.empty[Tree]
- private val newStaticInits = mutable.Buffer.empty[Tree]
- private val symbolsStoredAsStatic = mutable.Map.empty[String, Symbol]
- private val staticBodies = mutable.Map.empty[(Symbol, Symbol), Tree]
- private val syntheticClasses = mutable.Map.empty[Symbol, mutable.Set[Tree]] // package and trees
- private val classNames = mutable.Map.empty[Symbol, Set[Name]]
+ private val newStaticMembers = mutable.Buffer.empty[Tree]
+ private val newStaticInits = mutable.Buffer.empty[Tree]
+ private val symbolsStoredAsStatic = mutable.Map.empty[String, Symbol]
private def clearStatics() {
newStaticMembers.clear()
newStaticInits.clear()
@@ -48,9 +45,8 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
result
}
private def transformTemplate(tree: Tree) = {
- val t @ Template(parents, self, body) = tree
+ val Template(parents, self, body) = tree
clearStatics()
-
val newBody = transformTrees(body)
val templ = deriveTemplate(tree)(_ => transformTrees(newStaticMembers.toList) ::: newBody)
try addStaticInits(templ) // postprocess to include static ctors
@@ -351,8 +347,8 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/** Normal non-Array call */
def genDefaultCall = {
// reflective method call machinery
- val invokeName = MethodClass.tpe member nme.invoke_ // reflect.Method.invoke(...)
- def cache = REF(reflectiveMethodCache(ad.symbol.name.toString, paramTypes)) // cache Symbol
+ val invokeName = MethodClass.tpe member nme.invoke_ // scala.reflect.Method.invoke(...)
+ def cache = REF(reflectiveMethodCache(ad.symbol.name.toString, paramTypes)) // cache Symbol
def lookup = Apply(cache, List(qual1() GETCLASS)) // get Method object from cache
def invokeArgs = ArrayValue(TypeTree(ObjectClass.tpe), params) // args for invocation
def invocation = (lookup DOT invokeName)(qual1(), invokeArgs) // .invoke(qual1, ...)
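The comments in `genDefaultCall` outline the code being generated: fetch a cached `java.lang.reflect.Method` for the receiver's class and `invoke` it with the boxed arguments. A hand-written, much simplified equivalent of what that generated code does at runtime (the per-call-site caching and the Array/NoSuchMethod handling of the real machinery are not shown; the method name is only an example):

    import java.lang.reflect.Method
    import scala.collection.mutable

    object ReflectiveCallDemo {
      // a very small "method cache", keyed by the receiver's runtime class
      private val cache = mutable.Map.empty[Class[_], Method]

      def reflectiveLength(qual: AnyRef): Any = {
        val m = cache.getOrElseUpdate(qual.getClass, qual.getClass.getMethod("length"))
        m.invoke(qual)                        // .invoke(qual, args...)
      }

      def main(args: Array[String]): Unit = {
        println(reflectiveLength("hello"))                   // 5
        println(reflectiveLength(new StringBuilder("ab")))   // 2
      }
    }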
@@ -550,75 +546,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
else tree
}
- case ValDef(mods, name, tpt, rhs) if tree.symbol.hasStaticAnnotation =>
- def transformStaticValDef = {
- log("moving @static valdef field: " + name + ", in: " + tree.symbol.owner)
- val sym = tree.symbol
- val owner = sym.owner
-
- val staticBeforeLifting = atPhase(currentRun.erasurePhase) { owner.isStatic }
- val isPrivate = atPhase(currentRun.typerPhase) { sym.getter(owner).hasFlag(PRIVATE) }
- val isProtected = atPhase(currentRun.typerPhase) { sym.getter(owner).hasFlag(PROTECTED) }
- val isLazy = atPhase(currentRun.typerPhase) { sym.getter(owner).hasFlag(LAZY) }
- if (!owner.isModuleClass || !staticBeforeLifting) {
- if (!sym.isSynthetic) {
- reporter.error(tree.pos, "Only members of top-level objects and their nested objects can be annotated with @static.")
- tree.symbol.removeAnnotation(StaticClass)
- }
- super.transform(tree)
- } else if (isPrivate || isProtected) {
- reporter.error(tree.pos, "The @static annotation is only allowed on public members.")
- tree.symbol.removeAnnotation(StaticClass)
- super.transform(tree)
- } else if (isLazy) {
- reporter.error(tree.pos, "The @static annotation is not allowed on lazy members.")
- tree.symbol.removeAnnotation(StaticClass)
- super.transform(tree)
- } else if (owner.isModuleClass) {
- val linkedClass = owner.companionClass match {
- case NoSymbol =>
- // create the companion class if it does not exist
- val enclosing = owner.owner
- val compclass = enclosing.newClass(newTypeName(owner.name.toString))
- compclass setInfo ClassInfoType(List(ObjectClass.tpe), newScope, compclass)
- enclosing.info.decls enter compclass
-
- val compclstree = ClassDef(compclass, NoMods, ListOfNil, ListOfNil, List(), tree.pos)
-
- syntheticClasses.getOrElseUpdate(enclosing, mutable.Set()) += compclstree
-
- compclass
- case comp => comp
- }
-
- // create a static field in the companion class for this @static field
- val stfieldSym = linkedClass.newVariable(newTermName(name), tree.pos, STATIC | SYNTHETIC | FINAL) setInfo sym.tpe
- stfieldSym.addAnnotation(StaticClass)
-
- val names = classNames.getOrElseUpdate(linkedClass, linkedClass.info.decls.collect {
- case sym if sym.name.isTermName => sym.name
- } toSet)
- if (names(stfieldSym.name)) {
- reporter.error(
- tree.pos,
- "@static annotated field " + tree.symbol.name + " has the same name as a member of class " + linkedClass.name
- )
- } else {
- linkedClass.info.decls enter stfieldSym
-
- val initializerBody = rhs
-
- // static field was previously initialized in the companion object itself, like this:
- // staticBodies((linkedClass, stfieldSym)) = Select(This(owner), sym.getter(owner))
- // instead, we move the initializer to the static ctor of the companion class
- // we save the entire ValDef/DefDef to extract the rhs later
- staticBodies((linkedClass, stfieldSym)) = tree
- }
- }
- super.transform(tree)
- }
- transformStaticValDef
-
/* MSIL requires that the stack is empty at the end of a try-block.
* Hence, we here rewrite all try blocks with a result != {Unit, All} such that they
* store their result in a local variable. The catch blocks are adjusted as well.
@@ -733,11 +660,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
if (newStaticInits.isEmpty)
template
else {
- val ctorBody = newStaticInits.toList flatMap {
- case Block(stats, expr) => stats :+ expr
- case t => List(t)
- }
-
val newCtor = findStaticCtor(template) match {
// in case there already were static ctors - augment existing ones
// currently, however, static ctors aren't being generated anywhere else
@@ -746,15 +668,15 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
deriveDefDef(ctor) {
case block @ Block(stats, expr) =>
// need to add inits to existing block
- treeCopy.Block(block, ctorBody ::: stats, expr)
+ treeCopy.Block(block, newStaticInits.toList ::: stats, expr)
case term: TermTree =>
// need to create a new block with inits and the old term
- treeCopy.Block(term, ctorBody, term)
+ treeCopy.Block(term, newStaticInits.toList, term)
}
case _ =>
// create new static ctor
val staticCtorSym = currentClass.newStaticConstructor(template.pos)
- val rhs = Block(ctorBody, Literal(Constant(())))
+ val rhs = Block(newStaticInits.toList, Literal(Constant(())))
localTyper.typedPos(template.pos)(DefDef(staticCtorSym, rhs))
}
@@ -762,61 +684,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
}
}
- private def addStaticDeclarations(tree: Template, clazz: Symbol) {
- // add static field initializer statements for each static field in clazz
- if (!clazz.isModuleClass) for {
- staticSym <- clazz.info.decls
- if staticSym.hasStaticAnnotation
- } staticSym match {
- case stfieldSym if stfieldSym.isVariable =>
- val valdef = staticBodies((clazz, stfieldSym))
- val ValDef(_, _, _, rhs) = valdef
- val fixedrhs = rhs.changeOwner((valdef.symbol, clazz.info.decl(nme.CONSTRUCTOR)))
-
- val stfieldDef = localTyper.typedPos(tree.pos)(VAL(stfieldSym) === EmptyTree)
- val flattenedInit = fixedrhs match {
- case Block(stats, expr) => Block(stats, REF(stfieldSym) === expr)
- case rhs => REF(stfieldSym) === rhs
- }
- val stfieldInit = localTyper.typedPos(tree.pos)(flattenedInit)
-
- // add field definition to new defs
- newStaticMembers append stfieldDef
- newStaticInits append stfieldInit
- case _ => // ignore @static on other members
- }
- }
-
-
-
- override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
- super.transformStats(stats, exprOwner) ++ {
- // flush pending synthetic classes created in this owner
- val synthclassdefs = syntheticClasses.get(exprOwner).toList.flatten
- syntheticClasses -= exprOwner
- synthclassdefs map {
- cdef => localTyper.typedPos(cdef.pos)(cdef)
- }
- } map {
- case clsdef @ ClassDef(mods, name, tparams, t @ Template(parent, self, body)) =>
- // process all classes in the package again to add static initializers
- clearStatics()
-
- addStaticDeclarations(t, clsdef.symbol)
-
- val templ = deriveTemplate(t)(_ => transformTrees(newStaticMembers.toList) ::: body)
- val ntempl =
- try addStaticInits(templ)
- finally clearStatics()
-
- val derived = deriveClassDef(clsdef)(_ => ntempl)
- classNames.remove(clsdef.symbol)
- derived
-
- case stat => stat
- }
- }
-
} // CleanUpTransformer
}
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index afc109c47a..23b15a9033 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -186,15 +186,12 @@ abstract class Constructors extends Transform with ast.TreeDSL {
// before the superclass constructor call, otherwise it goes after.
// Lazy vals don't get the assignment in the constructor.
if (!stat.symbol.tpe.isInstanceOf[ConstantType]) {
- if (stat.symbol.hasStaticAnnotation) {
- debuglog("@static annotated field initialization skipped.")
- defBuf += deriveValDef(stat)(tree => tree)
- } else if (rhs != EmptyTree && !stat.symbol.isLazy) {
+ if (rhs != EmptyTree && !stat.symbol.isLazy) {
val rhs1 = intoConstructor(stat.symbol, rhs);
(if (canBeMoved(stat)) constrPrefixBuf else constrStatBuf) += mkAssign(
stat.symbol, rhs1)
- defBuf += deriveValDef(stat)(_ => EmptyTree)
}
+ defBuf += deriveValDef(stat)(_ => EmptyTree)
}
case ClassDef(_, _, _, _) =>
// classes are treated recursively, and left in the template
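A user-level sketch of the field handling visible in this hunk, now unconditional for non-constant fields: the initializer is moved into the constructor (the mkAssign into constrStatBuf/constrPrefixBuf) and the field definition itself is left with an empty right-hand side. The class and member names below are illustrative.

```scala
class Point(px: Int, py: Int) {
  val x: Int = px        // after this phase: a bare field plus `this.x = px` inside <init>
  val y: Int = py        // likewise; lazy vals are excluded by the !stat.symbol.isLazy test
  final val tag = 3      // ConstantType: guarded out by the isInstanceOf[ConstantType] check
}
```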
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index d97fbf5daa..b3b0c82d38 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -12,7 +12,7 @@ import symtab._
import Flags._
abstract class Erasure extends AddInterfaces
- with reflect.internal.transform.Erasure
+ with scala.reflect.internal.transform.Erasure
with typechecker.Analyzer
with TypingTransformers
with ast.TreeDSL
@@ -326,7 +326,7 @@ abstract class Erasure extends AddInterfaces
}
// Methods on Any/Object which we rewrite here while we still know what
// is a primitive and what arrived boxed.
- private lazy val interceptedMethods = Set[Symbol](Any_##, Object_##, Any_getClass) ++ (
+ private lazy val interceptedMethods = Set[Symbol](Any_##, Object_##, Any_getClass, AnyVal_getClass) ++ (
// Each value class has its own getClass for ultra-precise class object typing.
ScalaValueClasses map (_.tpe member nme.getClass_)
)
@@ -490,7 +490,7 @@ abstract class Erasure extends AddInterfaces
@inline private def box(tree: Tree, target: => String): Tree = {
val result = box1(tree)
- log("boxing "+tree+":"+tree.tpe+" to "+target+" = "+result+":"+result.tpe)
+ log(s"boxing ${tree.summaryString}: ${tree.tpe} into $target: ${result.tpe}")
result
}
@@ -525,7 +525,7 @@ abstract class Erasure extends AddInterfaces
* fields (see TupleX). (ID)
*/
case Apply(boxFun, List(arg)) if isUnbox(tree.symbol) && safeToRemoveUnbox(arg.tpe.typeSymbol) =>
- log("boxing an unbox: " + tree + "/" + tree.symbol + " and replying with " + arg + " of type " + arg.tpe)
+ log(s"boxing an unbox: ${tree.symbol} -> ${arg.tpe}")
arg
case _ =>
(REF(boxMethod(x)) APPLY tree) setPos (tree.pos) setType ObjectClass.tpe
@@ -537,7 +537,7 @@ abstract class Erasure extends AddInterfaces
private def unbox(tree: Tree, pt: Type): Tree = {
val result = unbox1(tree, pt)
- log("unboxing "+tree+":"+tree.tpe+" to "+pt+" = "+result+":"+result.tpe)
+ log(s"unboxing ${tree.summaryString}: ${tree.tpe} with pt=$pt as type ${result.tpe}")
result
}
@@ -614,7 +614,7 @@ abstract class Erasure extends AddInterfaces
* @return the adapted tree
*/
private def adaptToType(tree: Tree, pt: Type): Tree = {
- //if (settings.debug.value && pt != WildcardType)
+ if (settings.debug.value && pt != WildcardType)
log("adapting " + tree + ":" + tree.tpe + " : " + tree.tpe.parents + " to " + pt)//debug
if (tree.tpe <:< pt)
tree
@@ -959,7 +959,7 @@ abstract class Erasure extends AddInterfaces
case TypeApply(sel @ Select(qual, name), List(targ)) =>
if (qual.tpe != null && isPrimitiveValueClass(qual.tpe.typeSymbol) && targ.tpe != null && targ.tpe <:< AnyRefClass.tpe)
unit.error(sel.pos, "isInstanceOf cannot test if value types are references.")
-
+
def mkIsInstanceOf(q: () => Tree)(tp: Type): Tree =
Apply(
TypeApply(
@@ -1069,9 +1069,11 @@ abstract class Erasure extends AddInterfaces
case _ =>
global.typer.typed(gen.mkRuntimeCall(nme.hash_, List(qual)))
}
- } else if (isPrimitiveValueClass(qual.tpe.typeSymbol)) {
+ } else if (isPrimitiveValueClass(qual.tpe.typeSymbol)) {
// Rewrite 5.getClass to ScalaRunTime.anyValClass(5)
global.typer.typed(gen.mkRuntimeCall(nme.anyValClass, List(qual, typer.resolveClassTag(tree.pos, qual.tpe.widen))))
+ } else if (fn.symbol == AnyVal_getClass) {
+ tree setSymbol Object_getClass
} else {
tree
}
@@ -1079,8 +1081,8 @@ abstract class Erasure extends AddInterfaces
case New(tpt) if name == nme.CONSTRUCTOR && tpt.tpe.typeSymbol.isDerivedValueClass =>
// println("inject derived: "+arg+" "+tpt.tpe)
val List(arg) = args
- InjectDerivedValue(arg) addAttachment //@@@ setSymbol tpt.tpe.typeSymbol
- new TypeRefAttachment(tree.tpe.asInstanceOf[TypeRef])
+ val attachment = new TypeRefAttachment(tree.tpe.asInstanceOf[TypeRef])
+ InjectDerivedValue(arg) updateAttachment attachment
case _ =>
preEraseNormalApply(tree)
}
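A user-level sketch of the getClass handling this hunk extends: primitive receivers already go through ScalaRunTime.anyValClass, and the new branch sends calls resolved against AnyVal_getClass to Object_getClass. The printed values are what I would expect under 2.10 boxing rules; the object and method names are illustrative.

```scala
object GetClassDemo {
  def classOfAnyVal(x: AnyVal): Class[_] = x.getClass   // resolves via AnyVal_getClass -> Object_getClass

  def main(args: Array[String]): Unit = {
    println(5.getClass)          // primitive receiver: rewritten to ScalaRunTime.anyValClass(5); expect "int"
    println(classOfAnyVal(5))    // value is boxed at the call site; expect "class java.lang.Integer"
  }
}
```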
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 2831afc48e..0820d3e714 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -111,7 +111,8 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
}
def extensionMethInfo(extensionMeth: Symbol, origInfo: Type, clazz: Symbol): Type = {
- var newTypeParams = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth)
+ // No variance for method type parameters
+ var newTypeParams = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT)
val thisParamType = appliedType(clazz.typeConstructor, newTypeParams map (_.tpeHK))
val thisParam = extensionMeth.newValueParameter(nme.SELF, extensionMeth.pos) setInfo thisParamType
def transform(clonedType: Type): Type = clonedType match {
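A minimal sketch of why the cloned type parameters need their variance flags cleared: a value class may declare a variant type parameter, and that parameter is copied onto the extension method, where method type parameters cannot be variant. The Wrap/head names and the spelled-out forwarder are illustrative.

```scala
class Wrap[+A](val xs: List[A]) extends AnyVal {
  def head: A = xs.head
}
// conceptually, the extension-methods phase moves the body into the companion as
//   def head$extension[A](self: Wrap[A]): A = self.xs.head
// where the cloned A must come out invariant, hence `resetFlag COVARIANT | CONTRAVARIANT`
```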
diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala
index 94eaba67d7..3bbf429fc2 100644
--- a/src/compiler/scala/tools/nsc/transform/Flatten.scala
+++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala
@@ -22,12 +22,14 @@ abstract class Flatten extends InfoTransform {
*/
private def replaceSymbolInCurrentScope(sym: Symbol): Symbol = afterFlatten {
val scope = sym.owner.info.decls
- val old = scope lookup sym.name
- if (old ne NoSymbol)
- scope unlink old
-
+ val old = scope lookup sym.name andAlso scope.unlink
scope enter sym
- log("lifted " + sym.fullLocationString)
+
+ if (old eq NoSymbol)
+ log(s"lifted ${sym.fullLocationString}")
+ else
+ log(s"lifted ${sym.fullLocationString} after unlinking existing $old from scope.")
+
old
}
@@ -35,9 +37,7 @@ abstract class Flatten extends InfoTransform {
if (!sym.isLifted) {
sym setFlag LIFTED
debuglog("re-enter " + sym.fullLocationString)
- val old = replaceSymbolInCurrentScope(sym)
- if (old ne NoSymbol)
- log("unlinked " + old.fullLocationString + " after lifting " + sym)
+ replaceSymbolInCurrentScope(sym)
}
}
private def liftSymbol(sym: Symbol) {
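The refactor above folds the lookup/unlink pair into one expression via Symbol#andAlso. A standalone sketch of that combinator's behaviour, using a toy map-backed scope rather than compiler internals (Sym, NoSym and the helper names are illustrative):

```scala
object ScopeSketch {
  final case class Sym(name: String)
  val NoSym = Sym("<none>")

  // andAlso: run a side effect only when the lookup actually hit something, then return the result
  implicit class SymOps(val sym: Sym) {
    def andAlso(f: Sym => Unit): Sym = { if (sym != NoSym) f(sym); sym }
  }

  private val scope = scala.collection.mutable.Map.empty[String, Sym]
  private def lookup(name: String): Sym = scope.getOrElse(name, NoSym)
  private def unlink(s: Sym): Unit = scope -= s.name

  def replaceInScope(sym: Sym): Sym = {
    val old = lookup(sym.name) andAlso unlink   // unlink the previous entry, if any
    scope(sym.name) = sym                       // enter the new symbol
    old                                         // NoSym when there was nothing to replace
  }
}
```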
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index b6d54f114e..c41ff20229 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -154,7 +154,7 @@ abstract class LambdaLift extends InfoTransform {
private def markCalled(sym: Symbol, owner: Symbol) {
debuglog("mark called: " + sym + " of " + sym.owner + " is called by " + owner)
symSet(called, owner) addEntry sym
- if (sym.enclClass != owner.enclClass) calledFromInner addEntry sym
+ if (sym.enclClass != owner.enclClass) calledFromInner += sym
}
/** The traverse function */
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 79b24e826d..2b0520592b 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -430,7 +430,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
*
* Such fields will be nulled after the initializer has memoized the lazy value.
*/
- def singleUseFields(templ: Template): collection.Map[Symbol, List[Symbol]] = {
+ def singleUseFields(templ: Template): scala.collection.Map[Symbol, List[Symbol]] = {
val usedIn = mutable.HashMap[Symbol, List[Symbol]]() withDefaultValue Nil
object SingleUseTraverser extends Traverser {
diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
index 4401e3bd3e..f9d8d19b10 100644
--- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
+++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
@@ -9,7 +9,7 @@ package transform
import scala.collection.mutable
import symtab.Flags._
import util.HashSet
-import annotation.tailrec
+import scala.annotation.tailrec
/** A class that yields a kind of iterator (`Cursor`),
* which yields all pairs of overriding/overridden symbols
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index fc9e611d20..0fa50a255b 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -8,8 +8,8 @@ package transform
import scala.tools.nsc.symtab.Flags
import scala.collection.{ mutable, immutable }
-import language.postfixOps
-import language.existentials
+import scala.language.postfixOps
+import scala.language.existentials
/** Specialize code on types.
*
@@ -436,7 +436,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val sClassMap = anyrefSpecCache.getOrElseUpdate(sClass, mutable.Map[Symbol, Symbol]())
sClassMap.getOrElseUpdate(tparam,
- tparam.cloneSymbol(sClass, tparam.flags, (tparam.name append tpnme.SPECIALIZED_SUFFIX).asInstanceOf[Name]) // [Eugene] why do we need this cast?
+ tparam.cloneSymbol(sClass, tparam.flags, tparam.name append tpnme.SPECIALIZED_SUFFIX)
modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefClass.tpe))
).tpe
}
@@ -811,12 +811,17 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
specializingOn = specializingOn filterNot (unusedStvars contains)
}
for (env0 <- specializations(specializingOn) if needsSpecialization(env0, sym)) yield {
+ // !!! Can't this logic be structured so that the new symbol's name is
+ // known when the symbol is cloned? It is much cleaner not to be mutating
+ // names after the fact. And it adds about a billion lines of
+ // "Renaming value _1 in class Tuple2 to _1$mcZ$sp" to obscure the small
+ // number of other (important) actual symbol renamings.
val tps = survivingParams(sym.info.typeParams, env0)
- val specMember = sym.cloneSymbol(owner, (sym.flags | SPECIALIZED) & ~DEFERRED)
+ val specMember = sym.cloneSymbol(owner, (sym.flags | SPECIALIZED) & ~DEFERRED) // <-- this needs newName = ...
val env = mapAnyRefsInSpecSym(env0, sym, specMember)
val (keys, vals) = env.toList.unzip
- specMember setName specializedName(sym, env)
+ specMember setName specializedName(sym, env) // <-- but the name is calculated based on the cloned symbol
// debuglog("%s normalizes to %s%s".format(sym, specMember,
// if (tps.isEmpty) "" else " with params " + tps.mkString(", ")))
@@ -897,7 +902,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private def specializedOverload(owner: Symbol, sym: Symbol, env: TypeEnv): Symbol = {
val newFlags = (sym.flags | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR)
// this method properly duplicates the symbol's info
- ( sym.cloneSymbol(owner, newFlags, specializedName(sym, env))
+ ( sym.cloneSymbol(owner, newFlags, newName = specializedName(sym, env))
modifyInfo (info => subst(env, info.asSeenFrom(owner.thisType, sym.owner)))
)
}
@@ -912,7 +917,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*
* this method will return List('apply$mcII$sp')
*/
- private def specialOverrides(clazz: Symbol) = logResultIf[List[Symbol]]("specialOverrides(" + clazz + ")", _.nonEmpty) {
+ private def specialOverrides(clazz: Symbol) = logResultIf[List[Symbol]]("specialized overrides in " + clazz, _.nonEmpty) {
/** Return the overridden symbol in syms that needs a specialized overriding symbol,
* together with its specialization environment. The overridden symbol may not be
* the closest to 'overriding', in a given hierarchy.
@@ -1469,13 +1474,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case Select(qual, name) =>
def transformSelect = {
qual match {
- case _: Super if illegalSpecializedInheritance(currentClass) =>
+ case _: Super if illegalSpecializedInheritance(currentClass) =>
val pos = tree.pos
debuglog(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.")
debuglog(pos.lineContent)
tree
case _ =>
-
+
debuglog("specializing Select %s [tree.tpe: %s]".format(symbol.defString, tree.tpe))
//log("!!! select " + tree + " -> " + symbol.info + " specTypeVars: " + specializedTypeVars(symbol.info))
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index b1b930ca2d..0ad6d6c677 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -90,7 +90,7 @@ abstract class TailCalls extends Transform {
private val defaultReason = "it contains a recursive call not in tail position"
/** Has the label been accessed? Then its symbol is in this set. */
- private val accessed = new collection.mutable.HashSet[Symbol]()
+ private val accessed = new scala.collection.mutable.HashSet[Symbol]()
// `accessed` was stored as boolean in the current context -- this is no longer tenable
// with jumps to labels in tailpositions now considered in tailposition,
// a downstream context may access the label, and the upstream one will be none the wiser
@@ -373,7 +373,7 @@ abstract class TailCalls extends Transform {
// the labels all look like: matchEnd(x) {x}
// then, in a forward jump `matchEnd(expr)`, `expr` is considered in tail position (and the matchEnd jump is replaced by the jump generated by expr)
class TailPosLabelsTraverser extends Traverser {
- val tailLabels = new collection.mutable.HashSet[Symbol]()
+ val tailLabels = new scala.collection.mutable.HashSet[Symbol]()
private var maybeTail: Boolean = true // since we start in the rhs of a DefDef
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 181463657b..5a3db26e30 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -8,7 +8,7 @@ package transform
import symtab.Flags._
import scala.collection.{ mutable, immutable }
-import language.postfixOps
+import scala.language.postfixOps
/*<export> */
/** - uncurry all symbol and tree types (@see UnCurryPhase) -- this includes normalizing all proper types.
@@ -44,7 +44,7 @@ import language.postfixOps
*/
/*</export> */
abstract class UnCurry extends InfoTransform
- with reflect.internal.transform.UnCurry
+ with scala.reflect.internal.transform.UnCurry
with TypingTransformers with ast.TreeDSL {
val global: Global // need to repeat here because otherwise last mixin defines global as
// SymbolTable. If we had DOT this would not be an issue
@@ -212,11 +212,6 @@ abstract class UnCurry extends InfoTransform
/** Undo eta expansion for parameterless and nullary methods */
def deEta(fun: Function): Tree = fun match {
- case Function(List(), Apply(expr, List())) if treeInfo.isExprSafeToInline(expr) =>
- if (expr hasSymbolWhich (_.isLazy))
- fun
- else
- expr
case Function(List(), expr) if isByNameRef(expr) =>
noApply += expr
expr
@@ -484,11 +479,7 @@ abstract class UnCurry extends InfoTransform
arg setType functionType(Nil, arg.tpe)
}
else {
- log("byname | %s | %s | %s".format(
- arg.pos.source.path + ":" + arg.pos.line, fun.fullName,
- if (fun.isPrivate) "private" else "")
- )
-
+ log(s"Argument '$arg' at line ${arg.pos.safeLine} is $formal from ${fun.fullName}")
arg match {
// don't add a thunk for by-name argument if argument already is an application of
// a Function0. We can then remove the application and use the existing Function0.
@@ -693,16 +684,16 @@ abstract class UnCurry extends InfoTransform
else
tree
}
-
+
def isThrowable(pat: Tree): Boolean = pat match {
- case Typed(Ident(nme.WILDCARD), tpt) =>
+ case Typed(Ident(nme.WILDCARD), tpt) =>
tpt.tpe =:= ThrowableClass.tpe
- case Bind(_, pat) =>
+ case Bind(_, pat) =>
isThrowable(pat)
case _ =>
false
}
-
+
def isDefaultCatch(cdef: CaseDef) = isThrowable(cdef.pat) && cdef.guard.isEmpty
def postTransformTry(tree: Try) = {
@@ -766,10 +757,10 @@ abstract class UnCurry extends InfoTransform
case tree: Try =>
postTransformTry(tree)
-
+
case Apply(Apply(fn, args), args1) =>
treeCopy.Apply(tree, fn, args ::: args1)
-
+
case Ident(name) =>
assert(name != tpnme.WILDCARD_STAR, tree)
applyUnary()
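A user-level sketch of the by-name thunking that the rewritten log line above reports on: unless the argument is already an application of a Function0, uncurry wraps it in a thunk and every use of the parameter becomes an apply() call. Names are illustrative.

```scala
object ByNameDemo {
  def twice(x: => Int): Int = x + x          // after uncurry: x is roughly a () => Int, used as x.apply()

  def main(args: Array[String]): Unit = {
    def compute(): Int = { println("evaluating"); 21 }
    println(twice(compute()))                // the argument becomes roughly `() => compute()`;
  }                                          // prints "evaluating" twice, then 42
}
```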
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index ab8836f339..399f9a1eac 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc
package typechecker
-import reflect.internal.util.Statistics
+import scala.reflect.internal.util.Statistics
/** The main attribution phase.
*/
@@ -72,7 +72,7 @@ trait Analyzer extends AnyRef
}
object typerFactory extends SubComponent {
- import reflect.internal.TypesStats.typerNanos
+ import scala.reflect.internal.TypesStats.typerNanos
val global: Analyzer.this.global.type = Analyzer.this.global
val phaseName = "typer"
val runsAfter = List[String]()
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 773d9a6f50..e34988af1a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -85,10 +85,33 @@ trait ContextErrors {
def typeErrorMsg(found: Type, req: Type, possiblyMissingArgs: Boolean) = {
def missingArgsMsg = if (possiblyMissingArgs) "\n possible cause: missing arguments for method or constructor" else ""
+
"type mismatch" + foundReqMsg(found, req) + missingArgsMsg
}
}
+ def notAnyRefMessage(found: Type): String = {
+ val tp = found.widen
+ def name = tp.typeSymbol.nameString
+ def parents = tp.parents filterNot isTrivialTopType
+ def onlyAny = tp.parents forall (_.typeSymbol == AnyClass)
+ def parents_s = ( if (parents.isEmpty) tp.parents else parents ) mkString ", "
+ def what = (
+ if (tp.typeSymbol.isAbstractType) {
+ val descr = if (onlyAny) "unbounded" else "bounded only by " + parents_s
+ s"$name is $descr, which means AnyRef is not a known parent"
+ }
+ else if (tp.typeSymbol.isAnonOrRefinementClass)
+ s"the parents of this type ($parents_s) extend Any, not AnyRef"
+ else
+ s"$name extends Any, not AnyRef"
+ )
+ if (isPrimitiveValueType(found) || isTrivialTopType(tp)) "" else "\n" +
+ s"""|Note that $what.
+ |Such types can participate in value classes, but instances
+ |cannot appear in singleton types or in reference comparisons.""".stripMargin
+ }
+
import ErrorUtils._
trait TyperContextErrors {
@@ -296,12 +319,17 @@ trait ContextErrors {
else
""
)
- companion + semicolon
+ val notAnyRef = (
+ if (ObjectClass.info.member(name).exists) notAnyRefMessage(target)
+ else ""
+ )
+ companion + notAnyRef + semicolon
}
+ def targetStr = targetKindString + target.directObjectString
withAddendum(qual.pos)(
- if (name == nme.CONSTRUCTOR) target + " does not have a constructor"
- else nameString + " is not a member of " + targetKindString + target.directObjectString + addendum
- )
+ if (name == nme.CONSTRUCTOR) s"$target does not have a constructor"
+ else s"$nameString is not a member of $targetStr$addendum"
+ )
}
issueNormalTypeError(sel, errMsg)
// the error has to be set for the copied tree, otherwise
@@ -442,9 +470,6 @@ trait ContextErrors {
def NamedAndDefaultArgumentsNotSupportedForMacros(tree: Tree, fun: Tree) =
NormalTypeError(tree, "macros application do not support named and/or default arguments")
- def WrongNumberOfArgsError(tree: Tree, fun: Tree) =
- NormalTypeError(tree, "wrong number of arguments for "+ treeSymTypeMsg(fun))
-
def TooManyArgsNamesDefaultsError(tree: Tree, fun: Tree) =
NormalTypeError(tree, "too many arguments for "+treeSymTypeMsg(fun))
@@ -484,15 +509,22 @@ trait ContextErrors {
def TooManyArgsPatternError(fun: Tree) =
NormalTypeError(fun, "too many arguments for unapply pattern, maximum = "+definitions.MaxTupleArity)
- def WrongNumberArgsPatternError(tree: Tree, fun: Tree) =
- NormalTypeError(tree, "wrong number of arguments for "+treeSymTypeMsg(fun))
+ def WrongNumberOfArgsError(tree: Tree, fun: Tree) =
+ NormalTypeError(tree, "wrong number of arguments for "+ treeSymTypeMsg(fun))
def ApplyWithoutArgsError(tree: Tree, fun: Tree) =
NormalTypeError(tree, fun.tpe+" does not take parameters")
+ // Dynamic
def DynamicVarArgUnsupported(tree: Tree, name: String) =
issueNormalTypeError(tree, name+ " does not support passing a vararg parameter")
+ def DynamicRewriteError(tree: Tree, err: AbsTypeError) = {
+ issueTypeError(PosAndMsgTypeError(err.errPos, err.errMsg +
+ s"\nerror after rewriting to $tree\npossible cause: maybe a wrong Dynamic method signature?"))
+ setError(tree)
+ }
+
//checkClassType
def TypeNotAStablePrefixError(tpt: Tree, pre: Type) = {
issueNormalTypeError(tpt, "type "+pre+" is not a stable prefix")
@@ -692,7 +724,8 @@ trait ContextErrors {
Some(EOL + stackTraceString(realex))
}
} catch {
- // if the magic above goes boom, just fall back to uninformative, but better than nothing, getMessage
+ // the code above tries various tricks to detect the relevant portion of the stack trace
+ // if these tricks fail, just fall back to uninformative, but better than nothing, getMessage
case NonFatal(ex) =>
macroLogVerbose("got an exception when processing a macro generated exception\n" +
"offender = " + stackTraceString(realex) + "\n" +
@@ -713,7 +746,7 @@ trait ContextErrors {
)
val forgotten = (
if (sym.isTerm) "splice when splicing this variable into a reifee"
- else "c.AbsTypeTag annotation for this type parameter"
+ else "c.WeakTypeTag annotation for this type parameter"
)
macroExpansionError(expandee, template(sym.name.nameKind).format(sym.name + " " + sym.origin, forgotten))
}
@@ -1095,44 +1128,42 @@ trait ContextErrors {
pre1: String, pre2: String, trailer: String)
(isView: Boolean, pt: Type, tree: Tree)(implicit context0: Context) = {
if (!info1.tpe.isErroneous && !info2.tpe.isErroneous) {
- val coreMsg =
- pre1+" "+info1.sym.fullLocationString+" of type "+info1.tpe+"\n "+
- pre2+" "+info2.sym.fullLocationString+" of type "+info2.tpe+"\n "+
- trailer
- val errMsg =
- if (isView) {
- val found = pt.typeArgs(0)
- val req = pt.typeArgs(1)
- def defaultExplanation =
- "Note that implicit conversions are not applicable because they are ambiguous:\n "+
- coreMsg+"are possible conversion functions from "+ found+" to "+req
-
- def explanation = {
- val sym = found.typeSymbol
- // Explain some common situations a bit more clearly.
- if (AnyRefClass.tpe <:< req) {
- if (sym == AnyClass || sym == UnitClass) {
- "Note: " + sym.name + " is not implicitly converted to AnyRef. You can safely\n" +
- "pattern match `x: AnyRef` or cast `x.asInstanceOf[AnyRef]` to do so."
- }
- else boxedClass get sym match {
- case Some(boxed) =>
- "Note: an implicit exists from " + sym.fullName + " => " + boxed.fullName + ", but\n" +
- "methods inherited from Object are rendered ambiguous. This is to avoid\n" +
- "a blanket implicit which would convert any " + sym.fullName + " to any AnyRef.\n" +
- "You may wish to use a type ascription: `x: " + boxed.fullName + "`."
- case _ =>
- defaultExplanation
- }
- }
- else defaultExplanation
- }
-
- typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req)) + "\n" + explanation
- } else {
- "ambiguous implicit values:\n "+coreMsg + "match expected type "+pt
+ def coreMsg =
+ s"""| $pre1 ${info1.sym.fullLocationString} of type ${info1.tpe}
+ | $pre2 ${info2.sym.fullLocationString} of type ${info2.tpe}
+ | $trailer""".stripMargin
+ def viewMsg = {
+ val found :: req :: _ = pt.typeArgs
+ def explanation = {
+ val sym = found.typeSymbol
+ // Explain some common situations a bit more clearly. Some other
+ // failures which have nothing to do with implicit conversions
+ // per se, but which manifest as implicit conversion conflicts
+ // involving Any, are further explained from foundReqMsg.
+ if (AnyRefClass.tpe <:< req) (
+ if (sym == AnyClass || sym == UnitClass) (
+ s"""|Note: ${sym.name} is not implicitly converted to AnyRef. You can safely
+ |pattern match `x: AnyRef` or cast `x.asInstanceOf[AnyRef]` to do so.""".stripMargin
+ )
+ else boxedClass get sym map (boxed =>
+ s"""|Note: an implicit exists from ${sym.fullName} => ${boxed.fullName}, but
+ |methods inherited from Object are rendered ambiguous. This is to avoid
+ |a blanket implicit which would convert any ${sym.fullName} to any AnyRef.
+ |You may wish to use a type ascription: `x: ${boxed.fullName}`.""".stripMargin
+ ) getOrElse ""
+ )
+ else
+ s"""|Note that implicit conversions are not applicable because they are ambiguous:
+ |${coreMsg}are possible conversion functions from $found to $req""".stripMargin
}
- context.issueAmbiguousError(AmbiguousTypeError(tree, tree.pos, errMsg))
+ typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req)) + (
+ if (explanation == "") "" else "\n" + explanation
+ )
+ }
+ context.issueAmbiguousError(AmbiguousTypeError(tree, tree.pos,
+ if (isView) viewMsg
+ else s"ambiguous implicit values:\n${coreMsg}match expected type $pt")
+ )
}
}
@@ -1211,7 +1242,7 @@ trait ContextErrors {
message + suffix
}
- private def abbreviateCoreAliases(s: String): String = List("AbsTypeTag", "Expr").foldLeft(s)((res, x) => res.replace("c.universe." + x, "c." + x))
+ private def abbreviateCoreAliases(s: String): String = List("WeakTypeTag", "Expr").foldLeft(s)((res, x) => res.replace("c.universe." + x, "c." + x))
private def showMeth(pss: List[List[Symbol]], restpe: Type, abbreviate: Boolean) = {
var argsPart = (pss map (ps => ps map (_.defString) mkString ("(", ", ", ")"))).mkString
@@ -1290,7 +1321,7 @@ trait ContextErrors {
// aXXX (e.g. aparams) => characteristics of the macro impl ("a" stands for "actual")
// rXXX (e.g. rparams) => characteristics of a reference macro impl signature synthesized from the macro def ("r" stands for "reference")
- def MacroImplNonTagImplicitParameters(params: List[Symbol]) = compatibilityError("macro implementations cannot have implicit parameters other than AbsTypeTag evidences")
+ def MacroImplNonTagImplicitParameters(params: List[Symbol]) = compatibilityError("macro implementations cannot have implicit parameters other than WeakTypeTag evidences")
def MacroImplParamssMismatchError() = compatibilityError("number of parameter sections differ")
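A sketch of the kind of code the new notAnyRefMessage addendum is aimed at: invoking an AnyRef-only member on an unbounded type parameter. The wording in the comments follows the strings added above; the object and method names are illustrative.

```scala
object NotAnyRefDemo {
  def same[A](x: A, y: AnyRef): Boolean = x eq y
  // expected: "value eq is not a member of type parameter A", now followed by the note
  //   "A is unbounded, which means AnyRef is not a known parent"
  // plus the hint that such types cannot appear in reference comparisons
}
```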
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 6a908c6c65..dbf769c79f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -8,7 +8,7 @@ package typechecker
import symtab.Flags._
import scala.collection.mutable.{LinkedHashSet, Set}
-import annotation.tailrec
+import scala.annotation.tailrec
/**
* @author Martin Odersky
diff --git a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
index 5802d36878..e8865964b0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc
package typechecker
-import language.implicitConversions
+import scala.language.implicitConversions
/** A generic means of breaking down types into their subcomponents.
* Types are decomposed top down, and recognizable substructure is
@@ -39,7 +39,7 @@ trait DestructureTypes {
private implicit def liftToTerm(name: String): TermName = newTermName(name)
- private val openSymbols = collection.mutable.Set[Symbol]()
+ private val openSymbols = scala.collection.mutable.Set[Symbol]()
private def nodeList[T](elems: List[T], mkNode: T => Node): Node =
if (elems.isEmpty) wrapEmpty else list(elems map mkNode)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index 070f083a89..97e86d183e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -29,7 +29,7 @@ abstract class Duplicators extends Analyzer {
* the old class with the new class, and map symbols through the given 'env'. The
* environment is a map from type skolems to concrete types (see SpecializedTypes).
*/
- def retyped(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol, env: collection.Map[Symbol, Type]): Tree = {
+ def retyped(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol, env: scala.collection.Map[Symbol, Type]): Tree = {
if (oldThis ne newThis) {
oldClassOwner = oldThis
newClassOwner = newThis
diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
index 9e175fa516..b04a736fd3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
@@ -47,7 +47,7 @@ trait EtaExpansion { self: Analyzer =>
* tree is already attributed
* </p>
*/
- def etaExpand(unit : CompilationUnit, tree: Tree): Tree = {
+ def etaExpand(unit : CompilationUnit, tree: Tree, typer: Typer): Tree = {
val tpe = tree.tpe
var cnt = 0 // for NoPosition
def freshName() = {
@@ -69,7 +69,11 @@ trait EtaExpansion { self: Analyzer =>
val vname: Name = freshName()
// Problem with ticket #2351 here
defs += atPos(tree.pos) {
- val rhs = if (byName) Function(List(), tree) else tree
+ val rhs = if (byName) {
+ val res = typer.typed(Function(List(), tree))
+ new ChangeOwnerTraverser(typer.context.owner, res.symbol) traverse tree // SI-6274
+ res
+ } else tree
ValDef(Modifiers(SYNTHETIC), vname.toTermName, TypeTree(), rhs)
}
atPos(tree.pos.focus) {
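A hypothetical shape of program exercising the SI-6274 path patched above (illustrative, not taken from the ticket): eta-expansion lifts the by-name argument into a synthetic val whose rhs becomes a Function, so any definition inside that argument has to be re-owned by the new function's symbol, which is what the ChangeOwnerTraverser call does.

```scala
object EtaDemo {
  def accept(x: => Int)(y: Int): Int = x + y

  // `accept { ... } _` eta-expands the trailing parameter list; the by-name argument
  // is wrapped as roughly `() => { val tmp = 21; tmp * 2 }`, and `tmp` must now be
  // owned by that synthetic function rather than by the enclosing context
  val partial: Int => Int = accept { val tmp = 21; tmp * 2 } _
}
```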
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 226e17f605..dd7f26861f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -11,13 +11,13 @@
package scala.tools.nsc
package typechecker
-import annotation.tailrec
+import scala.annotation.tailrec
import scala.collection.{ mutable, immutable }
import mutable.{ LinkedHashMap, ListBuffer }
import scala.util.matching.Regex
import symtab.Flags._
import scala.reflect.internal.util.Statistics
-import language.implicitConversions
+import scala.language.implicitConversions
/** This trait provides methods to find various kinds of implicits.
*
@@ -593,7 +593,7 @@ trait Implicits {
typed1(itree, EXPRmode, wildPt)
if (context.hasErrors)
- return fail("typed implicit %s has errors".format(info.sym.fullLocationString))
+ return fail(context.errBuffer.head.errMsg)
if (Statistics.canEnable) Statistics.incCounter(typedImplicits)
@@ -615,7 +615,7 @@ trait Implicits {
}
if (context.hasErrors)
- fail("hasMatchingSymbol reported threw error(s)")
+ fail("hasMatchingSymbol reported error: " + context.errBuffer.head.errMsg)
else if (isLocal && !hasMatchingSymbol(itree1))
fail("candidate implicit %s is shadowed by %s".format(
info.sym.fullLocationString, itree1.symbol.fullLocationString))
@@ -639,7 +639,7 @@ trait Implicits {
// #2421: check that we correctly instantiated type parameters outside of the implicit tree:
checkBounds(itree2, NoPrefix, NoSymbol, undetParams, targs, "inferred ")
if (context.hasErrors)
- return fail("type parameters weren't correctly instantiated outside of the implicit tree")
+ return fail("type parameters weren't correctly instantiated outside of the implicit tree: " + context.errBuffer.head.errMsg)
// filter out failures from type inference, don't want to remove them from undetParams!
// we must be conservative in leaving type params in undetparams
@@ -675,7 +675,7 @@ trait Implicits {
}
if (context.hasErrors)
- fail("typing TypeApply reported errors for the implicit tree")
+ fail("typing TypeApply reported errors for the implicit tree: " + context.errBuffer.head.errMsg)
else {
val result = new SearchResult(itree2, subst)
if (Statistics.canEnable) Statistics.incCounter(foundImplicits)
@@ -1151,7 +1151,7 @@ trait Implicits {
private def TagSymbols = TagMaterializers.keySet
private val TagMaterializers = Map[Symbol, Symbol](
ClassTagClass -> MacroInternal_materializeClassTag,
- AbsTypeTagClass -> MacroInternal_materializeAbsTypeTag,
+ WeakTypeTagClass -> MacroInternal_materializeWeakTypeTag,
TypeTagClass -> MacroInternal_materializeTypeTag
)
@@ -1217,14 +1217,14 @@ trait Implicits {
private val ManifestSymbols = Set[Symbol](PartialManifestClass, FullManifestClass, OptManifestClass)
/** Creates a tree that calls the relevant factory method in object
- * reflect.Manifest for type 'tp'. An EmptyTree is returned if
+ * scala.reflect.Manifest for type 'tp'. An EmptyTree is returned if
* no manifest is found. todo: make this instantiate take type params as well?
*/
private def manifestOfType(tp: Type, flavor: Symbol): SearchResult = {
val full = flavor == FullManifestClass
val opt = flavor == OptManifestClass
- /** Creates a tree that calls the factory method called constructor in object reflect.Manifest */
+ /** Creates a tree that calls the factory method called constructor in object scala.reflect.Manifest */
def manifestFactoryCall(constructor: String, tparg: Type, args: Tree*): Tree =
if (args contains EmptyTree) EmptyTree
else typedPos(tree.pos.focus) {
@@ -1334,7 +1334,7 @@ trait Implicits {
def wrapResult(tree: Tree): SearchResult =
if (tree == EmptyTree) SearchFailure else new SearchResult(tree, EmptyTreeTypeSubstituter)
- /** Materializes implicits of magic types (currently, manifests and tags).
+ /** Materializes implicits of predefined types (currently, manifests and tags).
* Will be replaced by implicit macros once we fix them.
*/
private def materializeImplicit(pt: Type): SearchResult =
@@ -1472,7 +1472,7 @@ trait Implicits {
interpolate(msg, Map((typeParamNames zip typeArgs): _*)) // TODO: give access to the name and type of the implicit argument, etc?
def validate: Option[String] = {
- import scala.util.matching.Regex; import collection.breakOut
+ import scala.util.matching.Regex; import scala.collection.breakOut
// is there a shorter way to avoid the intermediate toList?
val refs = """\$\{([^}]+)\}""".r.findAllIn(msg).matchData.map(_ group 1).toSet
val decls = typeParamNames.toSet
@@ -1492,7 +1492,7 @@ trait Implicits {
object ImplicitsStats {
- import reflect.internal.TypesStats._
+ import scala.reflect.internal.TypesStats._
val rawTypeImpl = Statistics.newSubCounter (" of which in implicits", rawTypeCount)
val subtypeImpl = Statistics.newSubCounter(" of which in implicit", subtypeCount)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 28636fc76e..294470d40e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -280,7 +280,16 @@ trait Infer {
def issue(err: AbsTypeError): Unit = context.issue(err)
- def isPossiblyMissingArgs(found: Type, req: Type) = (found.resultApprox ne found) && isWeaklyCompatible(found.resultApprox, req)
+ def isPossiblyMissingArgs(found: Type, req: Type) = (
+ false
+ /** However it is that this condition is expected to imply
+ * "is possibly missing args", it is too weak. It is
+ * better to say nothing than to offer misleading guesses.
+
+ (found.resultApprox ne found)
+ && isWeaklyCompatible(found.resultApprox, req)
+ */
+ )
def explainTypes(tp1: Type, tp2: Type) =
withDisambiguation(List(), tp1, tp2)(global.explainTypes(tp1, tp2))
@@ -517,8 +526,8 @@ trait Infer {
* and the code is not exactly readable.
*/
object AdjustedTypeArgs {
- val Result = collection.mutable.LinkedHashMap
- type Result = collection.mutable.LinkedHashMap[Symbol, Option[Type]]
+ val Result = scala.collection.mutable.LinkedHashMap
+ type Result = scala.collection.mutable.LinkedHashMap[Symbol, Option[Type]]
def unapply(m: Result): Some[(List[Symbol], List[Type])] = Some(toLists(
(m collect {case (p, Some(a)) => (p, a)}).unzip ))
@@ -914,10 +923,13 @@ trait Infer {
/** Is sym1 (or its companion class in case it is a module) a subclass of
* sym2 (or its companion class in case it is a module)?
*/
- def isProperSubClassOrObject(sym1: Symbol, sym2: Symbol): Boolean =
- sym1 != sym2 && sym1 != NoSymbol && (sym1 isSubClass sym2) ||
- sym1.isModuleClass && isProperSubClassOrObject(sym1.linkedClassOfClass, sym2) ||
- sym2.isModuleClass && isProperSubClassOrObject(sym1, sym2.linkedClassOfClass)
+ def isProperSubClassOrObject(sym1: Symbol, sym2: Symbol): Boolean = (
+ (sym1 != sym2) && (sym1 != NoSymbol) && (
+ (sym1 isSubClass sym2)
+ || (sym1.isModuleClass && isProperSubClassOrObject(sym1.linkedClassOfClass, sym2))
+ || (sym2.isModuleClass && isProperSubClassOrObject(sym1, sym2.linkedClassOfClass))
+ )
+ )
/** is symbol `sym1` defined in a proper subclass of symbol `sym2`?
*/
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 01e773e528..9adf86e44b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -7,7 +7,7 @@ import scala.tools.nsc.util.ClassPath._
import scala.reflect.runtime.ReflectionUtils
import scala.collection.mutable.ListBuffer
import scala.compat.Platform.EOL
-import reflect.internal.util.Statistics
+import scala.reflect.internal.util.Statistics
import scala.reflect.macros.util._
import java.lang.{Class => jClass}
import java.lang.reflect.{Array => jArray, Method => jMethod}
@@ -26,7 +26,7 @@ import scala.reflect.macros.runtime.AbortMacroException
*
* Then fooBar needs to point to a static method of the following form:
*
- * def fooBar[T: c.AbsTypeTag] // type tag annotation is optional
+ * def fooBar[T: c.WeakTypeTag] // type tag annotation is optional
* (c: scala.reflect.macros.Context)
* (xs: c.Expr[List[T]])
* : c.Expr[T] = {
@@ -49,9 +49,6 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
import MacrosStats._
def globalSettings = global.settings
- val globalMacroCache = collection.mutable.Map[Any, Any]()
- val perRunMacroCache = perRunCaches.newMap[Symbol, collection.mutable.Map[Any, Any]]
-
/** `MacroImplBinding` and its companion module are responsible for
* serialization/deserialization of macro def -> impl bindings.
*
@@ -84,7 +81,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
val methName: String,
// flattens the macro impl's parameter lists having symbols replaced with metadata
// currently metadata is an index of the type parameter corresponding to that type tag (if applicable)
- // f.ex. for: def impl[T: AbsTypeTag, U: AbsTypeTag, V](c: Context)(x: c.Expr[T]): (U, V) = ???
+ // f.ex. for: def impl[T: WeakTypeTag, U: WeakTypeTag, V](c: Context)(x: c.Expr[T]): (U, V) = ???
// `signature` will be equal to List(-1, -1, 0, 1)
val signature: List[Int],
// type arguments part of a macro impl ref (the right-hand side of a macro definition)
@@ -216,7 +213,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
}
/** Transforms parameters lists of a macro impl.
- * The `transform` function is invoked only for AbsTypeTag evidence parameters.
+ * The `transform` function is invoked only for WeakTypeTag evidence parameters.
*
* The transformer takes two arguments: a value parameter from the parameter list
* and a type parameter that is witnesses by the value parameter.
@@ -232,7 +229,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
if (paramss.isEmpty || paramss.last.isEmpty) return paramss // no implicit parameters in the signature => nothing to do
if (paramss.head.isEmpty || !(paramss.head.head.tpe <:< MacroContextClass.tpe)) return paramss // no context parameter in the signature => nothing to do
def transformTag(param: Symbol): Symbol = param.tpe.dealias match {
- case TypeRef(SingleType(SingleType(NoPrefix, c), universe), AbsTypeTagClass, targ :: Nil)
+ case TypeRef(SingleType(SingleType(NoPrefix, c), universe), WeakTypeTagClass, targ :: Nil)
if c == paramss.head.head && universe == MacroContextUniverse =>
transform(param, targ.typeSymbol)
case _ =>
@@ -336,10 +333,10 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
RepeatedParamClass.typeConstructor,
List(implType(isType, sigma(origTpe.typeArgs.head))))
else {
- val tsym = getMember(MacroContextClass, if (isType) tpnme.AbsTypeTag else tpnme.Expr)
+ val tsym = getMember(MacroContextClass, if (isType) tpnme.WeakTypeTag else tpnme.Expr)
typeRef(singleType(NoPrefix, ctxParam), tsym, List(sigma(origTpe)))
}
- val paramCache = collection.mutable.Map[Symbol, Symbol]()
+ val paramCache = scala.collection.mutable.Map[Symbol, Symbol]()
def param(tree: Tree): Symbol =
paramCache.getOrElseUpdate(tree.symbol, {
val sym = tree.symbol
@@ -630,7 +627,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
macroDef.owner)
} else
targ.tpe
- if (tpe.isConcrete) context.TypeTag(tpe) else context.AbsTypeTag(tpe)
+ context.WeakTypeTag(tpe)
})
macroTraceVerbose("tags: ")(tags)
@@ -713,7 +710,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
if (isNullaryInvocation(expandee)) expectedTpe = expectedTpe.finalResultType
var typechecked = typecheck("macro def return type", expanded, expectedTpe)
typechecked = typecheck("expected type", typechecked, pt)
- typechecked addAttachment MacroExpansionAttachment(expandee)
+ typechecked updateAttachment MacroExpansionAttachment(expandee)
} finally {
popMacroContext()
}
@@ -762,7 +759,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
case (false, true) =>
macroLogLite("macro expansion is delayed: %s".format(expandee))
delayed += expandee -> undetparams
- expandee addAttachment MacroRuntimeAttachment(delayed = true, typerContext = typer.context, macroContext = Some(macroArgs(typer, expandee).c))
+ expandee updateAttachment MacroRuntimeAttachment(delayed = true, typerContext = typer.context, macroContext = Some(macroArgs(typer, expandee).c))
Delay(expandee)
case (false, false) =>
import typer.TyperErrorGen._
@@ -830,11 +827,11 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
* 2) undetparams (sym.isTypeParameter && !sym.isSkolem)
*/
var hasPendingMacroExpansions = false
- private val delayed = perRunCaches.newWeakMap[Tree, collection.mutable.Set[Int]]
+ private val delayed = perRunCaches.newWeakMap[Tree, scala.collection.mutable.Set[Int]]
private def isDelayed(expandee: Tree) = delayed contains expandee
- private def calculateUndetparams(expandee: Tree): collection.mutable.Set[Int] =
+ private def calculateUndetparams(expandee: Tree): scala.collection.mutable.Set[Int] =
delayed.get(expandee).getOrElse {
- val calculated = collection.mutable.Set[Symbol]()
+ val calculated = scala.collection.mutable.Set[Symbol]()
expandee foreach (sub => {
def traverse(sym: Symbol) = if (sym != null && (undetparams contains sym.id)) calculated += sym
if (sub.symbol != null) traverse(sub.symbol)
@@ -872,13 +869,13 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
new Transformer {
override def transform(tree: Tree) = super.transform(tree match {
// todo. expansion should work from the inside out
- case wannabe if (delayed contains wannabe) && calculateUndetparams(wannabe).isEmpty =>
- val context = wannabe.attachments.get[MacroRuntimeAttachment].get.typerContext
- delayed -= wannabe
+ case tree if (delayed contains tree) && calculateUndetparams(tree).isEmpty =>
+ val context = tree.attachments.get[MacroRuntimeAttachment].get.typerContext
+ delayed -= tree
context.implicitsEnabled = typer.context.implicitsEnabled
context.enrichmentEnabled = typer.context.enrichmentEnabled
context.macrosEnabled = typer.context.macrosEnabled
- macroExpand(newTyper(context), wannabe, EXPRmode, WildcardType)
+ macroExpand(newTyper(context), tree, EXPRmode, WildcardType)
case _ =>
tree
})
@@ -886,7 +883,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
}
object MacrosStats {
- import reflect.internal.TypesStats.typerNanos
+ import scala.reflect.internal.TypesStats.typerNanos
val macroExpandCount = Statistics.newCounter ("#macro expansions", "typer")
val macroExpandNanos = Statistics.newSubTimer("time spent in macroExpand", typerNanos)
}
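A minimal sketch of a macro implementation under the AbsTypeTag to WeakTypeTag rename above, written against the 2.10-era scala.reflect.macros.Context. TypeNameMacro and its members are illustrative names; as with any 2.10 macro, the object has to be compiled before its clients.

```scala
import scala.language.experimental.macros
import scala.reflect.macros.Context

object TypeNameMacro {
  def typeName[T]: String = macro typeNameImpl[T]

  // the implicit evidence is now a c.WeakTypeTag, the renamed c.AbsTypeTag
  def typeNameImpl[T: c.WeakTypeTag](c: Context): c.Expr[String] = {
    import c.universe._
    val tpe = weakTypeOf[T]                           // usable even when T is abstract
    c.Expr[String](Literal(Constant(tpe.toString)))
  }
}
```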
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index 4f597f97c9..91dcd90962 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -10,7 +10,7 @@ import scala.collection.{ mutable, immutable }
import scala.reflect.internal.util.StringOps.{ ojoin }
import scala.reflect.ClassTag
import scala.reflect.runtime.{ universe => ru }
-import language.higherKinds
+import scala.language.higherKinds
/** Logic related to method synthesis which involves cooperation between
* Namer and Typer.
@@ -369,7 +369,7 @@ trait MethodSynthesis {
}
/** A synthetic method which performs the implicit conversion implied by
- * the declaration of an implicit class. Yet to be written.
+ * the declaration of an implicit class.
*/
case class ImplicitClassWrapper(tree: ClassDef) extends DerivedFromClassDef {
def completer(sym: Symbol): Type = ??? // not needed
@@ -377,7 +377,7 @@ trait MethodSynthesis {
def derivedSym: Symbol = {
// Only methods will do! Don't want to pick up any stray
// companion objects of the same name.
- val result = enclClass.info decl name suchThat (_.isMethod)
+ val result = enclClass.info decl name suchThat (x => x.isMethod && x.isSynthetic)
assert(result != NoSymbol, "not found: "+name+" in "+enclClass+" "+enclClass.info.decls)
result
}
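A sketch of the synthetic method ImplicitClassWrapper.derivedSym now insists on: an implicit class gets a compiler-generated conversion of the same name, and the added isSynthetic filter keeps a hand-written overload of that name from being picked up instead. Names and the spelled-out conversion are illustrative.

```scala
object Enrich {
  implicit class RichInt(val self: Int) {
    def squared: Int = self * self
  }
  // the compiler synthesizes, roughly: implicit def RichInt(self: Int): RichInt = new RichInt(self)
  // derivedSym must find that synthetic method, not this same-named, non-synthetic overload:
  def RichInt(label: String): Int = label.length
}
```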
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 77d1260564..9e66d696cb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -192,6 +192,10 @@ trait Namers extends MethodSynthesis {
if (!allowsOverload(sym)) {
val prev = scope.lookupEntry(sym.name)
if ((prev ne null) && prev.owner == scope && conflict(sym, prev.sym)) {
+ if (sym.isSynthetic || prev.sym.isSynthetic) {
+ handleSyntheticNameConflict(sym, prev.sym)
+ handleSyntheticNameConflict(prev.sym, sym)
+ }
DoubleDefError(sym, prev.sym)
sym setInfo ErrorType
scope unlink prev.sym // let them co-exist...
@@ -202,6 +206,14 @@ trait Namers extends MethodSynthesis {
scope enter sym
}
+ /** Logic to handle name conflicts of synthetically generated symbols
+ * We handle right now: t6227
+ */
+ def handleSyntheticNameConflict(sym1: Symbol, sym2: Symbol) = {
+ if (sym1.isImplicit && sym1.isMethod && sym2.isModule && sym2.companionClass.isCaseClass)
+ validate(sym2.companionClass)
+ }
+
def enterSym(tree: Tree): Context = {
def dispatch() = {
var returnContext = this.context
@@ -347,10 +359,39 @@ trait Namers extends MethodSynthesis {
}
}
+ /** Given a ClassDef or ModuleDef, verifies there isn't a companion which
+ * has been defined in a separate file.
+ */
+ private def validateCompanionDefs(tree: ImplDef) {
+ val sym = tree.symbol
+ if (sym eq NoSymbol) return
+
+ val ctx = if (context.owner.isPackageObjectClass) context.outer else context
+ val module = if (sym.isModule) sym else ctx.scope lookup tree.name.toTermName
+ val clazz = if (sym.isClass) sym else ctx.scope lookup tree.name.toTypeName
+ val fails = (
+ module.isModule
+ && clazz.isClass
+ && !module.isSynthetic
+ && !clazz.isSynthetic
+ && (clazz.sourceFile ne null)
+ && (module.sourceFile ne null)
+ && !(module isCoDefinedWith clazz)
+ )
+ if (fails) {
+ context.unit.error(tree.pos, (
+ s"Companions '$clazz' and '$module' must be defined in same file:\n"
+ + s" Found in ${clazz.sourceFile.canonicalPath} and ${module.sourceFile.canonicalPath}")
+ )
+ }
+ }
+
def enterModuleDef(tree: ModuleDef) = {
val sym = enterModuleSymbol(tree)
sym.moduleClass setInfo namerOf(sym).moduleClassTypeCompleter(tree)
sym setInfo completerOf(tree)
+ validateCompanionDefs(tree)
+ sym
}
/** Enter a module symbol. The tree parameter can be either
@@ -455,7 +496,6 @@ trait Namers extends MethodSynthesis {
// for Java code importing Scala objects
if (!nme.isModuleName(from) || isValid(nme.stripModuleSuffix(from))) {
typer.TyperErrorGen.NotAMemberError(tree, expr, from)
- typer.infer.setError(tree)
}
}
// Setting the position at the import means that if there is
@@ -597,7 +637,7 @@ trait Namers extends MethodSynthesis {
MaxParametersCaseClassError(tree)
val m = ensureCompanionObject(tree, caseModuleDef)
- m.moduleClass.addAttachment(new ClassForCaseCompanionAttachment(tree))
+ m.moduleClass.updateAttachment(new ClassForCaseCompanionAttachment(tree))
}
val hasDefault = impl.body exists {
case DefDef(_, nme.CONSTRUCTOR, _, vparamss, _, _) => mexists(vparamss)(_.mods.hasDefault)
@@ -605,7 +645,7 @@ trait Namers extends MethodSynthesis {
}
if (hasDefault) {
val m = ensureCompanionObject(tree)
- m.addAttachment(new ConstructorDefaultsAttachment(tree, null))
+ m.updateAttachment(new ConstructorDefaultsAttachment(tree, null))
}
val owner = tree.symbol.owner
if (settings.lint.value && owner.isPackageObjectClass && !mods.isImplicit) {
@@ -623,6 +663,7 @@ trait Namers extends MethodSynthesis {
}
else context.unit.error(tree.pos, "implicit classes must accept exactly one primary constructor parameter")
}
+ validateCompanionDefs(tree)
}
// this logic is needed in case typer was interrupted half
@@ -687,7 +728,7 @@ trait Namers extends MethodSynthesis {
// }
}
- def moduleClassTypeCompleter(tree: Tree) = {
+ def moduleClassTypeCompleter(tree: ModuleDef) = {
mkTypeCompleter(tree) { sym =>
val moduleSymbol = tree.symbol
assert(moduleSymbol.moduleClass == sym, moduleSymbol.moduleClass)
@@ -1131,7 +1172,7 @@ trait Namers extends MethodSynthesis {
// symbol will be re-entered in the scope but the default parameter will not.
val att = meth.attachments.get[DefaultsOfLocalMethodAttachment] match {
case Some(att) => att.defaultGetters += default
- case None => meth.addAttachment(new DefaultsOfLocalMethodAttachment(default))
+ case None => meth.updateAttachment(new DefaultsOfLocalMethodAttachment(default))
}
}
} else if (baseHasDefault) {
@@ -1390,6 +1431,7 @@ trait Namers extends MethodSynthesis {
fail(ImplicitAtToplevel)
}
if (sym.isClass) {
+ checkNoConflict(IMPLICIT, CASE)
if (sym.isAnyOverride && !sym.hasFlag(TRAIT))
fail(OverrideClass)
} else {
@@ -1532,18 +1574,11 @@ trait Namers extends MethodSynthesis {
* call this method?
*/
def companionSymbolOf(original: Symbol, ctx: Context): Symbol = {
- try {
- original.companionSymbol orElse {
- ctx.lookup(original.name.companionName, original.owner).suchThat(sym =>
- (original.isTerm || sym.hasModuleFlag) &&
- (sym isCoDefinedWith original)
- )
- }
- }
- catch {
- case e: InvalidCompanions =>
- ctx.unit.error(original.pos, e.getMessage)
- NoSymbol
+ original.companionSymbol orElse {
+ ctx.lookup(original.name.companionName, original.owner).suchThat(sym =>
+ (original.isTerm || sym.hasModuleFlag) &&
+ (sym isCoDefinedWith original)
+ )
}
}
}
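For illustration only (not part of the patch): a hypothetical sketch of code shapes the Namers checks above accept and reject. The names are made up; the rules are the ones added in this file — an implicit class needs exactly one primary-constructor parameter, the new checkNoConflict(IMPLICIT, CASE) rules out implicit case classes, and validateCompanionDefs requires companions to share a source file.

    object Enrich {
      // accepted: exactly one primary-constructor parameter
      implicit class RichInt(val self: Int) {
        def squared: Int = self * self
      }
      // implicit case class Bad(x: Int)         // rejected: IMPLICIT and CASE now conflict
      // implicit class AlsoBad(a: Int, b: Int)  // rejected: exactly one parameter required
    }
    // Splitting `class Thing` and `object Thing` across two source files now reports
    // "Companions ... must be defined in same file" instead of failing later.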
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
index c60118a8b4..2dc3dc3dbd 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -9,12 +9,12 @@ package typechecker
import symtab._
import Flags.{MUTABLE, METHOD, LABEL, SYNTHETIC, ARTIFACT}
-import language.postfixOps
+import scala.language.postfixOps
import scala.tools.nsc.transform.TypingTransformers
import scala.tools.nsc.transform.Transform
import scala.collection.mutable.HashSet
import scala.collection.mutable.HashMap
-import reflect.internal.util.Statistics
+import scala.reflect.internal.util.Statistics
import scala.reflect.internal.Types
/** Translate pattern matching.
@@ -1558,7 +1558,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
var currId = 0
}
case class Test(cond: Cond, treeMaker: TreeMaker) {
- // private val reusedBy = new collection.mutable.HashSet[Test]
+ // private val reusedBy = new scala.collection.mutable.HashSet[Test]
var reuses: Option[Test] = None
def registerReuseBy(later: Test): Unit = {
assert(later.reuses.isEmpty, later.reuses)
@@ -1587,7 +1587,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case class OrCond(a: Cond, b: Cond) extends Cond {override def toString = "("+a+") \\/ ("+ b +")"}
object EqualityCond {
- private val uniques = new collection.mutable.HashMap[(Tree, Tree), EqualityCond]
+ private val uniques = new scala.collection.mutable.HashMap[(Tree, Tree), EqualityCond]
def apply(testedPath: Tree, rhs: Tree): EqualityCond = uniques getOrElseUpdate((testedPath, rhs), new EqualityCond(testedPath, rhs))
def unapply(c: EqualityCond) = Some(c.testedPath, c.rhs)
}
@@ -1596,7 +1596,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
object NonNullCond {
- private val uniques = new collection.mutable.HashMap[Tree, NonNullCond]
+ private val uniques = new scala.collection.mutable.HashMap[Tree, NonNullCond]
def apply(testedPath: Tree): NonNullCond = uniques getOrElseUpdate(testedPath, new NonNullCond(testedPath))
def unapply(c: NonNullCond) = Some(c.testedPath)
}
@@ -1605,7 +1605,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
object TypeCond {
- private val uniques = new collection.mutable.HashMap[(Tree, Type), TypeCond]
+ private val uniques = new scala.collection.mutable.HashMap[(Tree, Type), TypeCond]
def apply(testedPath: Tree, pt: Type): TypeCond = uniques getOrElseUpdate((testedPath, pt), new TypeCond(testedPath, pt))
def unapply(c: TypeCond) = Some(c.testedPath, c.pt)
}
@@ -1654,8 +1654,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// returns (tree, tests), where `tree` will be used to refer to `root` in `tests`
class TreeMakersToConds(val root: Symbol) {
// a variable in this set should never be replaced by a tree that "does not consist of a selection on a variable in this set" (intuitively)
- private val pointsToBound = collection.mutable.HashSet(root)
- private val trees = collection.mutable.HashSet.empty[Tree]
+ private val pointsToBound = scala.collection.mutable.HashSet(root)
+ private val trees = scala.collection.mutable.HashSet.empty[Tree]
// the substitution that renames variables to variables in pointsToBound
private var normalize: Substitution = EmptySubstitution
@@ -1956,7 +1956,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Prop, List[Prop]) = {
val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaVarEq) else null
- val vars = new collection.mutable.HashSet[Var]
+ val vars = new scala.collection.mutable.HashSet[Var]
object gatherEqualities extends PropTraverser {
override def apply(p: Prop) = p match {
@@ -2261,7 +2261,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def nextId = {_nextId += 1; _nextId}
def resetUniques() = {_nextId = 0; uniques.clear()}
- private val uniques = new collection.mutable.HashMap[Tree, Var]
+ private val uniques = new scala.collection.mutable.HashMap[Tree, Var]
def apply(x: Tree): Var = uniques getOrElseUpdate(x, new Var(x, x.tpe))
}
class Var(val path: Tree, staticTp: Type) extends AbsVar {
@@ -2273,7 +2273,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
@inline private[this] def observed = {} //canModify = Some(Thread.currentThread.getStackTrace)
// don't access until all potential equalities have been registered using registerEquality
- private[this] val symForEqualsTo = new collection.mutable.HashMap[Const, Sym]
+ private[this] val symForEqualsTo = new scala.collection.mutable.HashMap[Const, Sym]
// when looking at the domain, we only care about types we can check at run time
val staticTpCheckable: Type = checkableType(staticTp)
@@ -2386,7 +2386,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
but we can safely pretend types are mutually exclusive as long as there are no counter-examples in the match we're analyzing}
*/
- val excludedPair = new collection.mutable.HashSet[ExcludedPair]
+ val excludedPair = new scala.collection.mutable.HashSet[ExcludedPair]
case class ExcludedPair(a: Const, b: Const) {
override def equals(o: Any) = o match {
@@ -2440,7 +2440,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
private var _nextValueId = 0
def nextValueId = {_nextValueId += 1; _nextValueId}
- private val uniques = new collection.mutable.HashMap[Type, Const]
+ private val uniques = new scala.collection.mutable.HashMap[Type, Const]
private[SymbolicMatchAnalysis] def unique(tp: Type, mkFresh: => Const): Const =
uniques.get(tp).getOrElse(
uniques.find {case (oldTp, oldC) => oldTp =:= tp} match {
@@ -2454,7 +2454,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
fresh
})
- private val trees = collection.mutable.HashSet.empty[Tree]
+ private val trees = scala.collection.mutable.HashSet.empty[Tree]
// hashconsing trees (modulo value-equality)
private[SymbolicMatchAnalysis] def uniqueTpForTree(t: Tree): Type =
@@ -2915,7 +2915,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case _ => varAssignment.find{case (v, a) => chop(v.path) == path}.map(_._1)
}
- private val uniques = new collection.mutable.HashMap[Var, VariableAssignment]
+ private val uniques = new scala.collection.mutable.HashMap[Var, VariableAssignment]
private def unique(variable: Var): VariableAssignment =
uniques.getOrElseUpdate(variable, {
val (eqTo, neqTo) = varAssignment.getOrElse(variable, (Nil, Nil)) // TODO
@@ -2941,7 +2941,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
// node in the tree that describes how to construct a counter-example
- case class VariableAssignment(variable: Var, equalTo: List[Const], notEqualTo: List[Const], fields: collection.mutable.Map[Symbol, VariableAssignment]) {
+ case class VariableAssignment(variable: Var, equalTo: List[Const], notEqualTo: List[Const], fields: scala.collection.mutable.Map[Symbol, VariableAssignment]) {
// need to prune since the model now incorporates all super types of a constant (needed for reachability)
private lazy val uniqueEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && instanceOfTpImplies(better.tp, subsumed.tp)))
private lazy val prunedEqualTo = uniqueEqualTo filterNot (subsumed => variable.staticTpCheckable <:< subsumed.tp)
@@ -3034,8 +3034,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val testss = approximateMatchConservative(prevBinder, cases)
// interpret:
- val dependencies = new collection.mutable.LinkedHashMap[Test, Set[Cond]]
- val tested = new collection.mutable.HashSet[Cond]
+ val dependencies = new scala.collection.mutable.LinkedHashMap[Test, Set[Cond]]
+ val tested = new scala.collection.mutable.HashSet[Cond]
def storeDependencies(test: Test) = {
val cond = test.cond
@@ -3083,7 +3083,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// then, collapse these contiguous sequences of reusing tests
// store the result of the final test and the intermediate results in hoisted mutable variables (TODO: optimize: don't store intermediate results that aren't used)
// replace each reference to a variable originally bound by a collapsed test by a reference to the hoisted variable
- val reused = new collection.mutable.HashMap[TreeMaker, ReusedCondTreeMaker]
+ val reused = new scala.collection.mutable.HashMap[TreeMaker, ReusedCondTreeMaker]
var okToCall = false
val reusedOrOrig = (tm: TreeMaker) => {assert(okToCall); reused.getOrElse(tm, tm)}
@@ -3317,7 +3317,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// requires cases.exists(isGuardedCase) (otherwise the rewrite is pointless)
var remainingCases = cases
- val collapsed = collection.mutable.ListBuffer.empty[CaseDef]
+ val collapsed = scala.collection.mutable.ListBuffer.empty[CaseDef]
// when some of the collapsed cases (except for the default case itself) did not include an un-guarded case
// we'll need to emit a labeldef for the default case
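A brief aside on why the hunks above spell out scala.collection in full (a sketch with hypothetical package names, not code from the patch): a relative reference such as collection.mutable.HashMap can be captured by an enclosing package member named collection, while the fully qualified form cannot.

    package compilerdemo {
      package collection { object mutable }   // shadows the relative name `collection`
      object Uses {
        // new collection.mutable.HashMap[Int, Int]()             // resolves to compilerdemo.collection: no HashMap there
        val m = new scala.collection.mutable.HashMap[Int, Int]()  // unambiguous under any enclosing package
      }
    }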
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 9201981635..84ec86b194 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -7,10 +7,10 @@ package scala.tools.nsc
package typechecker
import symtab.Flags._
-import collection.{ mutable, immutable }
+import scala.collection.{ mutable, immutable }
import transform.InfoTransform
import scala.collection.mutable.ListBuffer
-import language.postfixOps
+import scala.language.postfixOps
/** <p>
* Post-attribution checking and transformation.
@@ -38,7 +38,7 @@ import language.postfixOps
*
* @todo Check whether we always check type parameter bounds.
*/
-abstract class RefChecks extends InfoTransform with reflect.internal.transform.RefChecks {
+abstract class RefChecks extends InfoTransform with scala.reflect.internal.transform.RefChecks {
val global: Global // need to repeat here because otherwise last mixin defines global as
// SymbolTable. If we had DOT this would not be an issue
@@ -430,6 +430,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
overrideError("cannot override a macro")
} else {
checkOverrideTypes()
+ checkOverrideDeprecated()
if (settings.warnNullaryOverride.value) {
if (other.paramss.isEmpty && !member.paramss.isEmpty) {
unit.warning(member.pos, "non-nullary method overrides nullary method")
@@ -508,6 +509,14 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
}
}
}
+
+ def checkOverrideDeprecated() {
+ if (other.hasDeprecatedOverridingAnnotation) {
+ val suffix = other.deprecatedOverridingMessage map (": " + _) getOrElse ""
+ val msg = s"overriding ${other.fullLocationString} is deprecated$suffix"
+ unit.deprecationWarning(member.pos, msg)
+ }
+ }
}
val opc = new overridingPairs.Cursor(clazz)
@@ -1197,6 +1206,23 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
case _ =>
}
+ // SI-6276 warn for `def foo = foo` or `val bar: X = bar`, which come up more frequently than you might think.
+ def checkInfiniteLoop(valOrDef: ValOrDefDef) {
+ def callsSelf = valOrDef.rhs match {
+ case t @ (Ident(_) | Select(This(_), _)) =>
+ t hasSymbolWhich (_.accessedOrSelf == valOrDef.symbol)
+ case _ => false
+ }
+ val trivialInfiniteLoop = (
+ !valOrDef.isErroneous
+ && !valOrDef.symbol.isValueParameter
+ && valOrDef.symbol.paramss.isEmpty
+ && callsSelf
+ )
+ if (trivialInfiniteLoop)
+ unit.warning(valOrDef.rhs.pos, s"${valOrDef.symbol.fullLocationString} does nothing other than call itself recursively")
+ }
+
// Transformation ------------------------------------------------------------
/* Convert a reference to a case factory of type `tpe` to a new of the class it produces. */
@@ -1618,6 +1644,8 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
if ((clazz isSubClass AnyValClass) && !isPrimitiveValueClass(clazz)) {
if (clazz.isTrait)
unit.error(clazz.pos, "Only classes (not traits) are allowed to extend AnyVal")
+ else if ((clazz != AnyValClass) && clazz.hasFlag(ABSTRACT))
+ unit.error(clazz.pos, "`abstract' modifier cannot be used with value classes")
}
}
@@ -1638,6 +1666,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
case ValDef(_, _, _, _) | DefDef(_, _, _, _, _, _) =>
checkDeprecatedOvers(tree)
+ checkInfiniteLoop(tree.asInstanceOf[ValOrDefDef])
if (settings.warnNullaryUnit.value)
checkNullaryMethodReturnType(sym)
if (settings.warnInaccessible.value) {
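To make the two RefChecks additions above concrete, here is a hedged sketch (illustrative names only) of code that would now draw both new warnings:

    class Base {
      @deprecatedOverriding("call the template method instead", "2.10.0")
      def hook(): Unit = ()
    }
    class Child extends Base {
      override def hook(): Unit = ()  // warning: overriding method hook in class Base is deprecated
      def loop: Int = loop            // SI-6276 warning: does nothing other than call itself recursively
    }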
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index 63050bc032..981ba10183 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -287,16 +287,18 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
// FIXME - this should be unified with needsProtectedAccessor, but some
// subtlety which presently eludes me is foiling my attempts.
val shouldEnsureAccessor = (
- currentClass.isTrait
+ currentClass.isTrait
&& sym.isProtected
&& sym.enclClass != currentClass
&& !sym.owner.isTrait
&& (sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass)
- && (qual.symbol.info.member(sym.name) ne NoSymbol))
+ && (qual.symbol.info.member(sym.name) ne NoSymbol)
+ && !needsProtectedAccessor(sym, tree.pos))
if (shouldEnsureAccessor) {
log("Ensuring accessor for call to protected " + sym.fullLocationString + " from " + currentClass)
ensureAccessor(sel)
- } else
+ }
+ else
mayNeedProtectedAccessor(sel, EmptyTree.asList, false)
}
@@ -525,7 +527,14 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
)
true
}
- isCandidate && !host.isPackageClass && !isSelfType
+ def isJavaProtected = host.isTrait && sym.isJavaDefined && {
+ restrictionError(pos, unit,
+ s"""|$clazz accesses protected $sym inside a concrete trait method.
+ |Add an accessor in a class extending ${sym.enclClass} as a workaround.""".stripMargin
+ )
+ true
+ }
+ isCandidate && !host.isPackageClass && !isSelfType && !isJavaProtected
}
/** Return the innermost enclosing class C of referencingClass for which either
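The restriction added above can be pictured with a standard JDK protected member (a hypothetical sketch, not taken from the patch): ClassLoader.findClass is Java-protected, so a concrete trait method may not touch it directly, while a class in the hierarchy may.

    trait Probing extends ClassLoader {
      // def probe(name: String) = findClass(name)  // now a restriction error: protected Java member used in a concrete trait method
    }
    class ProbingLoader extends ClassLoader {
      def probe(name: String) = findClass(name)     // the suggested workaround: add the accessor in a class
    }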
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index d227f485c2..67afb0c118 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -63,7 +63,7 @@ trait SyntheticMethods extends ast.TreeDSL {
// in the original order.
def accessors = clazz.caseFieldAccessors sortBy { acc =>
originalAccessors indexWhere { orig =>
- (acc.name == orig.name) || (acc.name startsWith (orig.name append "$").asInstanceOf[Name]) // [Eugene] why do we need this cast?
+ (acc.name == orig.name) || (acc.name startsWith (orig.name append "$"))
}
}
val arity = accessors.size
@@ -87,7 +87,7 @@ trait SyntheticMethods extends ast.TreeDSL {
)
def forwardToRuntime(method: Symbol): Tree =
- forwardMethod(method, getMember(ScalaRunTimeModule, (method.name prepend "_").asInstanceOf[Name]))(mkThis :: _) // [Eugene] why do we need this cast?
+ forwardMethod(method, getMember(ScalaRunTimeModule, (method.name prepend "_")))(mkThis :: _)
def callStaticsMethod(name: String)(args: Tree*): Tree = {
val method = termMember(RuntimeStaticsModule, name)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Tags.scala b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
index f82e009be8..167bf5c857 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Tags.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
@@ -26,7 +26,7 @@ trait Tags {
/** Finds in scope or materializes a ClassTag.
* Should be used instead of ClassManifest every time compiler needs to persist an erasure.
*
- * Once upon a time, we had an `ErasureTag` which was to `ClassTag` the same that `AbsTypeTag` is for `TypeTag`.
+ * Once upon a time, we had an `ErasureTag` which was to `ClassTag` what `WeakTypeTag` is to `TypeTag`.
* However we found out that we don't really need this concept, so it got removed.
*
* @param pos Position for error reporting. Please, provide meaningful value.
@@ -43,7 +43,7 @@ trait Tags {
resolveTag(pos, taggedTp, allowMaterialization)
}
- /** Finds in scope or materializes an AbsTypeTag (if `concrete` is false) or a TypeTag (if `concrete` is true).
+ /** Finds in scope or materializes a WeakTypeTag (if `concrete` is false) or a TypeTag (if `concrete` is true).
*
* @param pos Position for error reporting. Please, provide meaningful value.
* @param pre Prefix that represents a universe this type tag will be bound to.
@@ -60,7 +60,7 @@ trait Tags {
* EmptyTree if `allowMaterialization` is false, and there is no array tag in scope.
*/
def resolveTypeTag(pos: Position, pre: Type, tp: Type, concrete: Boolean, allowMaterialization: Boolean = true): Tree = {
- val tagSym = if (concrete) TypeTagClass else AbsTypeTagClass
+ val tagSym = if (concrete) TypeTagClass else WeakTypeTagClass
val tagTp = if (pre == NoType) TypeRef(BaseUniverseClass.toTypeConstructor, tagSym, List(tp)) else singleType(pre, pre member tagSym.name)
val taggedTp = appliedType(tagTp, List(tp))
resolveTag(pos, taggedTp, allowMaterialization)
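A short usage sketch of the WeakTypeTag/TypeTag distinction the renamed references above rely on (demo code, not from the patch): a TypeTag demands a fully known type, while a WeakTypeTag can still be materialized when the type is partly abstract.

    import scala.reflect.runtime.universe._

    object TagDemo extends App {
      def concrete[T: TypeTag]: Type          = typeOf[T]
      def maybeAbstract[T: WeakTypeTag]: Type = weakTypeOf[T]

      println(concrete[List[Int]])        // List[Int]
      println(maybeAbstract[List[Int]])   // List[Int]
      // def generic[T] = concrete[T]     // would not compile: no TypeTag available for abstract T
      def generic[T]: Type = maybeAbstract[T]  // fine: the weak tag keeps T abstract
    }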
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index f0dca64a00..e5c0f5767c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -59,6 +59,19 @@ trait TypeDiagnostics {
* the map, the addendum should also be printed.
*/
private var addendums = perRunCaches.newMap[Position, () => String]()
+ private var isTyperInPattern = false
+
+ /** Devising new ways of communicating error info out of
+ * desperation to work on error messages. This is used
+ * by typedPattern to wrap its business so we can generate
+ * a sensible error message when things go south.
+ */
+ def typingInPattern[T](body: => T): T = {
+ val saved = isTyperInPattern
+ isTyperInPattern = true
+ try body
+ finally isTyperInPattern = saved
+ }
def setAddendum(pos: Position, msg: () => String) =
if (pos != NoPosition)
@@ -138,13 +151,17 @@ trait TypeDiagnostics {
def hasParams = tree.tpe.paramSectionCount > 0
def preResultString = if (hasParams) ": " else " of type "
- def nullMessage = "expression of type " + tree.tpe
- def overloadedMessage = "overloaded method " + sym + " with alternatives:\n" + alternativesString(tree)
+ def patternMessage = "pattern " + tree.tpe.finalResultType + valueParamsString(tree.tpe)
+ def exprMessage = "expression of type " + tree.tpe
+ def overloadedMessage = s"overloaded method $sym with alternatives:\n" + alternativesString(tree)
def moduleMessage = "" + sym
def defaultMessage = moduleMessage + preResultString + tree.tpe
def applyMessage = defaultMessage + tree.symbol.locationString
- if (sym == null) nullMessage
+ if ((sym eq null) || (sym eq NoSymbol)) {
+ if (isTyperInPattern) patternMessage
+ else exprMessage
+ }
else if (sym.isOverloaded) overloadedMessage
else if (sym.isModule) moduleMessage
else if (sym.name == nme.apply) applyMessage
@@ -252,6 +269,13 @@ trait TypeDiagnostics {
}
"" // no elaborable variance situation found
}
+
+ // For found/required errors where AnyRef would have sufficed:
+ // explain in greater detail.
+ def explainAnyVsAnyRef(found: Type, req: Type): String = {
+ if (AnyRefClass.tpe <:< req) notAnyRefMessage(found) else ""
+ }
+
// TODO - figure out how to avoid doing any work at all
// when the message will never be seen. I thought context.reportErrors
// being false would do that, but if I return "<suppressed>" under
@@ -261,7 +285,10 @@ trait TypeDiagnostics {
";\n found : " + found.toLongString + existentialContext(found) + explainAlias(found) +
"\n required: " + req + existentialContext(req) + explainAlias(req)
)
- withDisambiguation(Nil, found, req)(baseMessage) + explainVariance(found, req)
+ ( withDisambiguation(Nil, found, req)(baseMessage)
+ + explainVariance(found, req)
+ + explainAnyVsAnyRef(found, req)
+ )
}
case class TypeDiag(tp: Type, sym: Symbol) extends Ordered[TypeDiag] {
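To show the kind of mismatch the new explainAnyVsAnyRef addendum above elaborates on (purely illustrative):

    object AnyVsAnyRef extends App {
      def needsRef(x: AnyRef): Int = x.hashCode
      val a: Any = "hello"
      // needsRef(a)                      // error: found Any, required AnyRef; the new addendum explains the difference
      needsRef(a.asInstanceOf[AnyRef])    // an explicit cast (or a more precise static type) resolves it
    }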
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index f6baf02c3e..b374ff53a5 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -13,10 +13,9 @@ package scala.tools.nsc
package typechecker
import scala.collection.mutable
-import scala.reflect.internal.util.BatchSourceFile
+import scala.reflect.internal.util.{ BatchSourceFile, Statistics }
import mutable.ListBuffer
import symtab.Flags._
-import reflect.internal.util.Statistics
// Suggestion check whether we can do without priming scopes with symbols of outer scopes,
// like the IDE does.
@@ -233,10 +232,11 @@ trait Typers extends Modes with Adaptations with Tags {
* @param tree ...
* @return ...
*/
- def checkStable(tree: Tree): Tree =
+ def checkStable(tree: Tree): Tree = (
if (treeInfo.isExprSafeToInline(tree)) tree
else if (tree.isErrorTyped) tree
else UnstableTreeError(tree)
+ )
/** Would tree be a stable (i.e. a pure expression) if the type
* of its symbol was not volatile?
@@ -578,7 +578,7 @@ trait Typers extends Modes with Adaptations with Tags {
// to notice exhaustiveness and to generate good code when
// List extractors are mixed with :: patterns. See Test5 in lists.scala.
def dealias(sym: Symbol) =
- (atPos(tree.pos) {gen.mkAttributedRef(sym)}, sym.owner.thisType)
+ (atPos(tree.pos.makeTransparent) {gen.mkAttributedRef(sym)} setPos tree.pos, sym.owner.thisType)
sym.name match {
case nme.List => return dealias(ListModule)
case nme.Seq => return dealias(SeqModule)
@@ -778,7 +778,7 @@ trait Typers extends Modes with Adaptations with Tags {
var raw = featureDesc + " " + req + " be enabled\n" +
"by making the implicit value language." + featureName + " visible."
if (!(currentRun.reportedFeature contains featureTrait))
- raw += "\nThis can be achieved by adding the import clause 'import language." + featureName + "'\n" +
+ raw += "\nThis can be achieved by adding the import clause 'import scala.language." + featureName + "'\n" +
"or by setting the compiler option -language:" + featureName + ".\n" +
"See the Scala docs for value scala.language." + featureName + " for a discussion\n" +
"why the feature " + req + " be explicitly enabled."
@@ -885,7 +885,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (!meth.isConstructor && !meth.isTermMacro && isFunctionType(pt)) { // (4.2)
debuglog("eta-expanding " + tree + ":" + tree.tpe + " to " + pt)
checkParamsConvertible(tree, tree.tpe)
- val tree0 = etaExpand(context.unit, tree)
+ val tree0 = etaExpand(context.unit, tree, this)
// println("eta "+tree+" ---> "+tree0+":"+tree0.tpe+" undet: "+context.undetparams+ " mode: "+Integer.toHexString(mode))
if (context.undetparams.nonEmpty) {
@@ -1056,7 +1056,7 @@ trait Typers extends Modes with Adaptations with Tags {
case other =>
other
}
- typed(atPos(tree.pos)(Select(qual, nme.apply)), mode, pt)
+ typed(atPos(tree.pos)(Select(qual setPos tree.pos.makeTransparent, nme.apply)), mode, pt)
}
// begin adapt
@@ -1107,10 +1107,12 @@ trait Typers extends Modes with Adaptations with Tags {
case _ =>
def applyPossible = {
def applyMeth = member(adaptToName(tree, nme.apply), nme.apply)
- if ((mode & TAPPmode) != 0)
- tree.tpe.typeParams.isEmpty && applyMeth.filter(!_.tpe.typeParams.isEmpty) != NoSymbol
- else
- applyMeth.filter(_.tpe.paramSectionCount > 0) != NoSymbol
+ dyna.acceptsApplyDynamic(tree.tpe) || (
+ if ((mode & TAPPmode) != 0)
+ tree.tpe.typeParams.isEmpty && applyMeth.filter(!_.tpe.typeParams.isEmpty) != NoSymbol
+ else
+ applyMeth.filter(_.tpe.paramSectionCount > 0) != NoSymbol
+ )
}
if (tree.isType)
adaptType()
@@ -1404,14 +1406,15 @@ trait Typers extends Modes with Adaptations with Tags {
unit.error(clazz.pos, "value class needs to have exactly one public val parameter")
}
}
- body foreach {
- case md: ModuleDef =>
- unit.error(md.pos, "value class may not have nested module definitions")
- case cd: ClassDef =>
- unit.error(cd.pos, "value class may not have nested class definitions")
- case md: DefDef if md.symbol.isConstructor && !md.symbol.isPrimaryConstructor =>
- unit.error(md.pos, "value class may not have secondary constructors")
- case _ =>
+
+ def valueClassMayNotHave(at: Tree, what: String) = unit.error(at.pos, s"value class may not have $what")
+ body.foreach {
+ case dd: DefDef if dd.symbol.isAuxiliaryConstructor => valueClassMayNotHave(dd, "secondary constructors")
+ case t => t.foreach {
+ case md: ModuleDef => valueClassMayNotHave(md, "nested module definitions")
+ case cd: ClassDef => valueClassMayNotHave(cd, "nested class definitions")
+ case _ =>
+ }
}
for (tparam <- clazz.typeParams)
if (tparam hasAnnotation definitions.SpecializedClass)
@@ -1576,6 +1579,12 @@ trait Typers extends Modes with Adaptations with Tags {
if (psym.isFinal)
pending += ParentFinalInheritanceError(parent, psym)
+ if (psym.hasDeprecatedInheritanceAnnotation) {
+ val suffix = psym.deprecatedInheritanceMessage map (": " + _) getOrElse ""
+ val msg = s"inheritance from ${psym.fullLocationString} is deprecated$suffix"
+ unit.deprecationWarning(parent.pos, msg)
+ }
+
if (psym.isSealed && !phase.erasedTypes)
if (context.unit.source.file == psym.sourceFile)
psym addChild context.owner
@@ -1870,7 +1879,7 @@ trait Typers extends Modes with Adaptations with Tags {
* @param rhs ...
*/
def computeParamAliases(clazz: Symbol, vparamss: List[List[ValDef]], rhs: Tree) {
- log("computing param aliases for "+clazz+":"+clazz.primaryConstructor.tpe+":"+rhs)//debug
+ debuglog(s"computing param aliases for $clazz:${clazz.primaryConstructor.tpe}:$rhs")
def decompose(call: Tree): (Tree, List[Tree]) = call match {
case Apply(fn, args) =>
val (superConstr, args1) = decompose(fn)
@@ -1977,18 +1986,21 @@ trait Typers extends Modes with Adaptations with Tags {
case PolyType(_, restpe) => restpe
case _ => NoType
}
-
+ def failStruct(what: String) =
+ fail(s"Parameter type in structural refinement may not refer to $what")
for (paramType <- tp.paramTypes) {
val sym = paramType.typeSymbol
if (sym.isAbstractType) {
if (!sym.hasTransOwner(meth.owner))
- fail("Parameter type in structural refinement may not refer to an abstract type defined outside that refinement")
+ failStruct("an abstract type defined outside that refinement")
else if (!sym.hasTransOwner(meth))
- fail("Parameter type in structural refinement may not refer to a type member of that refinement")
+ failStruct("a type member of that refinement")
}
+ if (sym.isDerivedValueClass)
+ failStruct("a user-defined value class")
if (paramType.isInstanceOf[ThisType] && sym == meth.owner)
- fail("Parameter type in structural refinement may not refer to the type of that refinement (self type)")
+ failStruct("the type of that refinement (self type)")
}
}
def typedUseCase(useCase: UseCase) {
@@ -2394,7 +2406,7 @@ trait Typers extends Modes with Adaptations with Tags {
else targs.init
def mkParams(methodSym: Symbol, formals: List[Type] = deriveFormals) =
- if (formals.isEmpty) { MissingParameterTypeAnonMatchError(tree, pt); Nil }
+ if (formals.isEmpty || !formals.forall(isFullyDefined)) { MissingParameterTypeAnonMatchError(tree, pt); Nil }
else methodSym newSyntheticValueParams formals
def mkSel(params: List[Symbol]) =
@@ -2475,7 +2487,7 @@ trait Typers extends Modes with Adaptations with Tags {
match_ setType B1.tpe
// the default uses applyOrElse's first parameter since the scrut's type has been widened
- val body = methodBodyTyper.virtualizedMatch(match_ addAttachment DefaultOverrideMatchAttachment(REF(default) APPLY (REF(x))), mode, B1.tpe)
+ val body = methodBodyTyper.virtualizedMatch(match_ updateAttachment DefaultOverrideMatchAttachment(REF(default) APPLY (REF(x))), mode, B1.tpe)
DefDef(methodSym, body)
}
@@ -2493,7 +2505,7 @@ trait Typers extends Modes with Adaptations with Tags {
methodSym setInfoAndEnter MethodType(paramSyms, BooleanClass.tpe)
val match_ = methodBodyTyper.typedMatch(gen.mkUnchecked(selector), casesTrue, mode, BooleanClass.tpe)
- val body = methodBodyTyper.virtualizedMatch(match_ addAttachment DefaultOverrideMatchAttachment(FALSE_typed), mode, BooleanClass.tpe)
+ val body = methodBodyTyper.virtualizedMatch(match_ updateAttachment DefaultOverrideMatchAttachment(FALSE_typed), mode, BooleanClass.tpe)
DefDef(methodSym, body)
}
@@ -2531,7 +2543,7 @@ trait Typers extends Modes with Adaptations with Tags {
* @param pt ...
* @return ...
*/
- def typedFunction(fun: Function, mode: Int, pt: Type): Tree = {
+ private def typedFunction(fun: Function, mode: Int, pt: Type): Tree = {
val numVparams = fun.vparams.length
if (numVparams > definitions.MaxFunctionArity)
return MaxFunctionArityError(fun)
@@ -2615,7 +2627,7 @@ trait Typers extends Modes with Adaptations with Tags {
// todo. investigate whether something can be done about this
val att = templ.attachments.get[CompoundTypeTreeOriginalAttachment].getOrElse(CompoundTypeTreeOriginalAttachment(Nil, Nil))
templ.removeAttachment[CompoundTypeTreeOriginalAttachment]
- templ addAttachment att.copy(stats = stats1)
+ templ updateAttachment att.copy(stats = stats1)
for (stat <- stats1 if stat.isDef) {
val member = stat.symbol
if (!(context.owner.ancestors forall
@@ -2742,12 +2754,18 @@ trait Typers extends Modes with Adaptations with Tags {
// this code by associating defaults and companion objects
// with the original tree instead of the new symbol.
def matches(stat: Tree, synt: Tree) = (stat, synt) match {
+ // synt is default arg for stat
case (DefDef(_, statName, _, _, _, _), DefDef(mods, syntName, _, _, _, _)) =>
mods.hasDefaultFlag && syntName.toString.startsWith(statName.toString)
+ // synt is companion module
case (ClassDef(_, className, _, _), ModuleDef(_, moduleName, _)) =>
className.toTermName == moduleName
+ // synt is implicit def for implicit class (#6278)
+ case (ClassDef(cmods, cname, _, _), DefDef(dmods, dname, _, _, _, _)) =>
+ cmods.isImplicit && dmods.isImplicit && cname.toTermName == dname
+
case _ => false
}
@@ -3224,7 +3242,7 @@ trait Typers extends Modes with Adaptations with Tags {
val nbSubPats = args.length
val (formals, formalsExpanded) = extractorFormalTypes(resTp, nbSubPats, fun1.symbol)
- if (formals == null) duplErrorTree(WrongNumberArgsPatternError(tree, fun))
+ if (formals == null) duplErrorTree(WrongNumberOfArgsError(tree, fun))
else {
val args1 = typedArgs(args, mode, formals, formalsExpanded)
// This used to be the following (failing) assert:
@@ -3400,7 +3418,7 @@ trait Typers extends Modes with Adaptations with Tags {
else argss.head
val annScope = annType.decls
.filter(sym => sym.isMethod && !sym.isConstructor && sym.isJavaDefined)
- val names = new collection.mutable.HashSet[Symbol]
+ val names = new scala.collection.mutable.HashSet[Symbol]
names ++= (if (isJava) annScope.iterator
else typedFun.tpe.params.iterator)
val nvPairs = args map {
@@ -3431,7 +3449,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
if (hasError) annotationError
- else AnnotationInfo(annType, List(), nvPairs map {p => (p._1.asInstanceOf[Name], p._2.get)}).setOriginal(Apply(typedFun, args).setPos(ann.pos)) // [Eugene] why do we need this cast?
+ else AnnotationInfo(annType, List(), nvPairs map {p => (p._1, p._2.get)}).setOriginal(Apply(typedFun, args).setPos(ann.pos))
}
} else if (requireJava) {
reportAnnotationError(NestedAnnotationError(ann, annType))
@@ -3614,8 +3632,8 @@ trait Typers extends Modes with Adaptations with Tags {
while (o != owner && o != NoSymbol && !o.hasPackageFlag) o = o.owner
o == owner && !isVisibleParameter(sym)
}
- var localSyms = collection.immutable.Set[Symbol]()
- var boundSyms = collection.immutable.Set[Symbol]()
+ var localSyms = scala.collection.immutable.Set[Symbol]()
+ var boundSyms = scala.collection.immutable.Set[Symbol]()
def isLocal(sym: Symbol): Boolean =
if (sym == NoSymbol || sym.isRefinementClass || sym.isLocalDummy) false
else if (owner == NoSymbol) tree exists (defines(_, sym))
@@ -3788,7 +3806,8 @@ trait Typers extends Modes with Adaptations with Tags {
case AssignOrNamedArg(Ident(name), rhs) => gen.mkTuple(List(CODE.LIT(name.toString), rhs))
case _ => gen.mkTuple(List(CODE.LIT(""), arg))
}
- typed(treeCopy.Apply(orig, fun, args map argToBinding), mode, pt)
+ val t = treeCopy.Apply(orig, fun, args map argToBinding)
+ wrapErrors(t, _.typed(t, mode, pt))
}
/** Translate selection that does not typecheck according to the normal rules into a selectDynamic/applyDynamic.
@@ -3810,25 +3829,34 @@ trait Typers extends Modes with Adaptations with Tags {
* - simplest solution: have two method calls
*
*/
- def mkInvoke(cxTree: Tree, tree: Tree, qual: Tree, name: Name): Option[Tree] =
+ def mkInvoke(cxTree: Tree, tree: Tree, qual: Tree, name: Name): Option[Tree] = {
+ debuglog(s"mkInvoke($cxTree, $tree, $qual, $name)")
acceptsApplyDynamicWithType(qual, name) map { tp =>
// tp eq NoType => can call xxxDynamic, but not passing any type args (unless specified explicitly by the user)
// in scala-virtualized, when not NoType, tp is passed as type argument (for selection on a staged Struct)
- // strip off type application -- we're not doing much with outer, so don't bother preserving cxTree's attributes etc
- val (outer, explicitTargs) = cxTree match {
- case TypeApply(fun, targs) => (fun, targs)
- case Apply(TypeApply(fun, targs), args) => (Apply(fun, args), targs)
- case t => (t, Nil)
+ // strip off type application -- we're not doing much with outer,
+ // so don't bother preserving cxTree's attributes etc
+ val cxTree1 = cxTree match {
+ case t: ValOrDefDef => t.rhs
+ case t => t
+ }
+ val (outer, explicitTargs) = cxTree1 match {
+ case TypeApply(fun, targs) => (fun, targs)
+ case Apply(TypeApply(fun, targs), args) => (Apply(fun, args), targs)
+ case t => (t, Nil)
}
-
@inline def hasNamedArg(as: List[Tree]) = as.collectFirst{case AssignOrNamedArg(lhs, rhs) =>}.nonEmpty
+ def desugaredApply = tree match {
+ case Select(`qual`, nme.apply) => true
+ case _ => false
+ }
// note: context.tree includes at most one Apply node
// thus, we can't use it to detect we're going to receive named args in expressions such as:
// qual.sel(a)(a2, arg2 = "a2")
val oper = outer match {
- case Apply(`tree`, as) =>
+ case Apply(q, as) if q == tree || desugaredApply =>
val oper =
if (hasNamedArg(as)) nme.applyDynamicNamed
else nme.applyDynamic
@@ -3846,6 +3874,14 @@ trait Typers extends Modes with Adaptations with Tags {
atPos(qual.pos)(Apply(tappSel, List(Literal(Constant(name.decode)))))
}
+ }
+
+ def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree = {
+ silent(typeTree) match {
+ case SilentResultValue(r) => r
+ case SilentTypeError(err) => DynamicRewriteError(tree, err)
+ }
+ }
}
@inline final def deindentTyping() = context.typingIndentLevel -= 2
@@ -4039,7 +4075,8 @@ trait Typers extends Modes with Adaptations with Tags {
}
else if(dyna.isDynamicallyUpdatable(lhs1)) {
val rhs1 = typed(rhs, EXPRmode | BYVALmode, WildcardType)
- typed1(Apply(lhs1, List(rhs1)), mode, pt)
+ val t = Apply(lhs1, List(rhs1))
+ dyna.wrapErrors(t, _.typed1(t, mode, pt))
}
else fail()
}
@@ -4066,7 +4103,7 @@ trait Typers extends Modes with Adaptations with Tags {
if ( opt.virtPatmat && !isPastTyper
&& thenp1.tpe.annotations.isEmpty && elsep1.tpe.annotations.isEmpty // annotated types need to be lubbed regardless (at least, continuations break if you bypass them like this)
&& thenTp =:= elseTp
- ) (thenp1.tpe, false) // use unpacked type
+ ) (thenp1.tpe.deconst, false) // use unpacked type. Important to deconst, as is done in ptOrLub, otherwise `if (???) 0 else 0` evaluates to 0 (SI-6331)
// TODO: skolemize (lub of packed types) when that no longer crashes on files/pos/t4070b.scala
else ptOrLub(thenp1.tpe :: elsep1.tpe :: Nil, pt)
@@ -4507,7 +4544,9 @@ trait Typers extends Modes with Adaptations with Tags {
* @return ...
*/
def typedSelect(tree: Tree, qual: Tree, name: Name): Tree = {
- def asDynamicCall = dyna.mkInvoke(context.tree, tree, qual, name) map (typed1(_, mode, pt))
+ def asDynamicCall = dyna.mkInvoke(context.tree, tree, qual, name) map { t =>
+ dyna.wrapErrors(t, (_.typed1(t, mode, pt)))
+ }
val sym = tree.symbol orElse member(qual, name) orElse {
// symbol not found? --> try to convert implicitly to a type that does have the required
@@ -4928,7 +4967,7 @@ trait Typers extends Modes with Adaptations with Tags {
//Console.println("Owner: " + context.enclClass.owner + " " + context.enclClass.owner.id)
val self = refinedType(parents1 map (_.tpe), context.enclClass.owner, decls, templ.pos)
newTyper(context.make(templ, self.typeSymbol, decls)).typedRefinement(templ)
- templ addAttachment CompoundTypeTreeOriginalAttachment(parents1, Nil) // stats are set elsewhere
+ templ updateAttachment CompoundTypeTreeOriginalAttachment(parents1, Nil) // stats are set elsewhere
tree setType self
}
}
@@ -5201,7 +5240,10 @@ trait Typers extends Modes with Adaptations with Tags {
def typedSingletonTypeTree(tree: SingletonTypeTree) = {
val ref1 = checkStable(
- typed(tree.ref, EXPRmode | QUALmode | (mode & TYPEPATmode), AnyRefClass.tpe))
+ context.withImplicitsDisabled(
+ typed(tree.ref, EXPRmode | QUALmode | (mode & TYPEPATmode), AnyRefClass.tpe)
+ )
+ )
tree setType ref1.tpe.resultType
}
@@ -5234,149 +5276,60 @@ trait Typers extends Modes with Adaptations with Tags {
// whatever type to tree; we just have to survive until a real error message is issued.
tree setType AnyClass.tpe
}
+ def typedFunction(fun: Function) = {
+ if (fun.symbol == NoSymbol)
+ fun.symbol = context.owner.newAnonymousFunctionValue(fun.pos)
+
+ typerWithLocalContext(context.makeNewScope(fun, fun.symbol))(_.typedFunction(fun, mode, pt))
+ }
// begin typed1
//if (settings.debug.value && tree.isDef) log("typing definition of "+sym);//DEBUG
-
tree match {
- case tree: Ident =>
- typedIdentOrWildcard(tree)
-
- case tree: Select =>
- typedSelectOrSuperCall(tree)
-
- case tree: Apply =>
- typedApply(tree)
-
- case tree: TypeTree =>
- typedTypeTree(tree)
-
- case tree: Literal =>
- typedLiteral(tree)
-
- case tree: This =>
- typedThis(tree)
-
- case tree: ValDef =>
- typedValDef(tree)
-
- case tree: DefDef =>
- // flag default getters for constructors. An actual flag would be nice. See SI-5543.
- //val flag = ddef.mods.hasDefaultFlag && ddef.mods.hasFlag(PRESUPER)
- defDefTyper(tree).typedDefDef(tree)
-
- case tree: Block =>
- typerWithLocalContext(context.makeNewScope(tree, context.owner)){
- _.typedBlock(tree, mode, pt)
- }
-
- case tree: If =>
- typedIf(tree)
-
- case tree: TypeApply =>
- typedTypeApply(tree)
-
- case tree: AppliedTypeTree =>
- typedAppliedTypeTree(tree)
-
- case tree: Bind =>
- typedBind(tree)
-
- case tree: Function =>
- if (tree.symbol == NoSymbol)
- tree.symbol = context.owner.newAnonymousFunctionValue(tree.pos)
- typerWithLocalContext(context.makeNewScope(tree, tree.symbol))(_.typedFunction(tree, mode, pt))
-
- case tree: Match =>
- typedVirtualizedMatch(tree)
-
- case tree: New =>
- typedNew(tree)
-
- case Assign(lhs, rhs) =>
- typedAssign(lhs, rhs)
-
- case AssignOrNamedArg(lhs, rhs) => // called by NamesDefaults in silent typecheck
- typedAssign(lhs, rhs)
-
- case tree: Super =>
- typedSuper(tree)
-
- case tree: TypeBoundsTree =>
- typedTypeBoundsTree(tree)
-
- case tree: Typed =>
- typedTyped(tree)
-
- case tree: ClassDef =>
- newTyper(context.makeNewScope(tree, sym)).typedClassDef(tree)
-
- case tree: ModuleDef =>
- newTyper(context.makeNewScope(tree, sym.moduleClass)).typedModuleDef(tree)
-
- case tree: TypeDef =>
- typedTypeDef(tree)
-
- case tree: LabelDef =>
- labelTyper(tree).typedLabelDef(tree)
-
- case tree: PackageDef =>
- typedPackageDef(tree)
-
- case tree: DocDef =>
- typedDocDef(tree)
-
- case tree: Annotated =>
- typedAnnotated(tree)
-
- case tree: SingletonTypeTree =>
- typedSingletonTypeTree(tree)
-
- case tree: SelectFromTypeTree =>
- typedSelectFromTypeTree(tree)
-
- case tree: CompoundTypeTree =>
- typedCompoundTypeTree(tree)
-
- case tree: ExistentialTypeTree =>
- typedExistentialTypeTree(tree)
-
- case tree: Return =>
- typedReturn(tree)
-
- case tree: Try =>
- typedTry(tree)
-
- case tree: Throw =>
- typedThrow(tree)
-
- case tree: Alternative =>
- typedAlternative(tree)
-
- case tree: Star =>
- typedStar(tree)
-
- case tree: UnApply =>
- typedUnApply(tree)
-
- case tree: ArrayValue =>
- typedArrayValue(tree)
-
- case tree: ApplyDynamic =>
- typedApplyDynamic(tree)
-
- case tree: ReferenceToBoxed =>
- typedReferenceToBoxed(tree)
-
- case tree: TypeTreeWithDeferredRefCheck =>
- tree // TODO: should we re-type the wrapped tree? then we need to change TypeTreeWithDeferredRefCheck's representation to include the wrapped tree explicitly (instead of in its closure)
-
- case tree: Import =>
- assert(forInteractive, "!forInteractive") // should not happen in normal circumstances.
- tree setType tree.symbol.tpe
-
- case _ =>
- abort("unexpected tree: " + tree.getClass + "\n" + tree)//debug
+ case tree: Ident => typedIdentOrWildcard(tree)
+ case tree: Select => typedSelectOrSuperCall(tree)
+ case tree: Apply => typedApply(tree)
+ case tree: TypeTree => typedTypeTree(tree)
+ case tree: Literal => typedLiteral(tree)
+ case tree: This => typedThis(tree)
+ case tree: ValDef => typedValDef(tree)
+ case tree: DefDef => defDefTyper(tree).typedDefDef(tree)
+ case tree: Block => typerWithLocalContext(context.makeNewScope(tree, context.owner))(_.typedBlock(tree, mode, pt))
+ case tree: If => typedIf(tree)
+ case tree: TypeApply => typedTypeApply(tree)
+ case tree: AppliedTypeTree => typedAppliedTypeTree(tree)
+ case tree: Bind => typedBind(tree)
+ case tree: Function => typedFunction(tree)
+ case tree: Match => typedVirtualizedMatch(tree)
+ case tree: New => typedNew(tree)
+ case tree: Assign => typedAssign(tree.lhs, tree.rhs)
+ case tree: AssignOrNamedArg => typedAssign(tree.lhs, tree.rhs) // called by NamesDefaults in silent typecheck
+ case tree: Super => typedSuper(tree)
+ case tree: TypeBoundsTree => typedTypeBoundsTree(tree)
+ case tree: Typed => typedTyped(tree)
+ case tree: ClassDef => newTyper(context.makeNewScope(tree, sym)).typedClassDef(tree)
+ case tree: ModuleDef => newTyper(context.makeNewScope(tree, sym.moduleClass)).typedModuleDef(tree)
+ case tree: TypeDef => typedTypeDef(tree)
+ case tree: LabelDef => labelTyper(tree).typedLabelDef(tree)
+ case tree: PackageDef => typedPackageDef(tree)
+ case tree: DocDef => typedDocDef(tree)
+ case tree: Annotated => typedAnnotated(tree)
+ case tree: SingletonTypeTree => typedSingletonTypeTree(tree)
+ case tree: SelectFromTypeTree => typedSelectFromTypeTree(tree)
+ case tree: CompoundTypeTree => typedCompoundTypeTree(tree)
+ case tree: ExistentialTypeTree => typedExistentialTypeTree(tree)
+ case tree: Return => typedReturn(tree)
+ case tree: Try => typedTry(tree)
+ case tree: Throw => typedThrow(tree)
+ case tree: Alternative => typedAlternative(tree)
+ case tree: Star => typedStar(tree)
+ case tree: UnApply => typedUnApply(tree)
+ case tree: ArrayValue => typedArrayValue(tree)
+ case tree: ApplyDynamic => typedApplyDynamic(tree)
+ case tree: ReferenceToBoxed => typedReferenceToBoxed(tree)
+ case tree: TypeTreeWithDeferredRefCheck => tree // TODO: retype the wrapped tree? TTWDRC would have to change to hold the wrapped tree (not a closure)
+ case tree: Import => assert(forInteractive, "!forInteractive") ; tree setType tree.symbol.tpe // should not happen in normal circumstances.
+ case _ => abort(s"unexpected tree: ${tree.getClass}\n$tree")
}
}
@@ -5519,7 +5472,7 @@ trait Typers extends Modes with Adaptations with Tags {
// as a compromise, context.enrichmentEnabled tells adaptToMember to go ahead and enrich,
// but arbitrary conversions (in adapt) are disabled
// TODO: can we achieve the pattern matching bit of the string interpolation SIP without this?
- context.withImplicitsDisabledAllowEnrichment(typed(tree, PATTERNmode, pt))
+ typingInPattern(context.withImplicitsDisabledAllowEnrichment(typed(tree, PATTERNmode, pt)))
}
/** Types a (fully parameterized) type tree */
@@ -5624,8 +5577,8 @@ trait Typers extends Modes with Adaptations with Tags {
}
object TypersStats {
- import reflect.internal.TypesStats._
- import reflect.internal.BaseTypeSeqsStats._
+ import scala.reflect.internal.TypesStats._
+ import scala.reflect.internal.BaseTypeSeqsStats._
val typedIdentCount = Statistics.newCounter("#typechecked identifiers")
val typedSelectCount = Statistics.newCounter("#typechecked selections")
val typedApplyCount = Statistics.newCounter("#typechecked applications")
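One of the Typers changes above, the @deprecatedInheritance check on parent types, in a minimal hypothetical sketch:

    @deprecatedInheritance("extend the replacement hierarchy instead", "2.10.0")
    class LegacyBase
    class StillExtendsIt extends LegacyBase  // warning: inheritance from class LegacyBase is deprecated

The SI-6331 fix in the same file is similar in spirit: deconst-ing the branch type keeps `if (cond) 0 else 0` typed as Int rather than the constant type 0, so the condition is still evaluated.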
diff --git a/src/compiler/scala/tools/nsc/util/Exceptional.scala b/src/compiler/scala/tools/nsc/util/Exceptional.scala
index 90d032518a..34344263e8 100644
--- a/src/compiler/scala/tools/nsc/util/Exceptional.scala
+++ b/src/compiler/scala/tools/nsc/util/Exceptional.scala
@@ -4,7 +4,7 @@ package util
import java.util.concurrent.ExecutionException
import java.lang.reflect.{ InvocationTargetException, UndeclaredThrowableException }
import scala.reflect.internal.util.StringOps._
-import language.implicitConversions
+import scala.language.implicitConversions
object Exceptional {
def unwrap(x: Throwable): Throwable = x match {
diff --git a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
index 25caae9ecb..9de3a2427f 100644
--- a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
+++ b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
@@ -14,7 +14,7 @@ import java.net.URL
import scala.reflect.runtime.ReflectionUtils.unwrapHandler
import ScalaClassLoader._
import scala.util.control.Exception.{ catching }
-import language.implicitConversions
+import scala.language.implicitConversions
import scala.reflect.{ ClassTag, classTag }
trait HasClassPath {
diff --git a/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala b/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala
index 8cdb96c586..3682b9fb54 100644
--- a/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala
+++ b/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala
@@ -6,13 +6,13 @@
package scala.tools.nsc
package util
-import reflect.internal.util.Statistics
+import scala.reflect.internal.util.Statistics
abstract class StatisticsInfo {
val global: Global
import global._
- import reflect.internal.TreesStats.nodeByType
+ import scala.reflect.internal.TreesStats.nodeByType
val phasesShown = List("parser", "typer", "patmat", "erasure", "cleanup")
@@ -35,4 +35,4 @@ abstract class StatisticsInfo {
for (q <- quants if q.showAt(phase.name)) inform(q.line)
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala
index 876fb18578..780e3eab88 100644
--- a/src/compiler/scala/tools/nsc/util/package.scala
+++ b/src/compiler/scala/tools/nsc/util/package.scala
@@ -9,7 +9,7 @@ import java.io.{ OutputStream, PrintStream, ByteArrayOutputStream, PrintWriter,
package object util {
- implicit def postfixOps = language.postfixOps // make all postfix ops in this package compile without warning
+ implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
// forwarder for old code that builds against 2.9 and 2.10
val Chars = scala.reflect.internal.Chars
diff --git a/src/compiler/scala/tools/reflect/FastTrack.scala b/src/compiler/scala/tools/reflect/FastTrack.scala
index 07c972899e..38e4e3c9f1 100644
--- a/src/compiler/scala/tools/reflect/FastTrack.scala
+++ b/src/compiler/scala/tools/reflect/FastTrack.scala
@@ -13,7 +13,7 @@ trait FastTrack {
import global._
import definitions._
- import language.implicitConversions
+ import scala.language.implicitConversions
private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } = new { val c: c0.type = c0 } with Taggers
private implicit def context2macroimplementations(c0: MacroContext): MacroImplementations { val c: c0.type } = new { val c: c0.type = c0 } with MacroImplementations
@@ -23,7 +23,7 @@ trait FastTrack {
def validate(c: MacroContext): Boolean = expander.isDefinedAt((c, c.expandee))
def run(c: MacroContext): Any = {
val result = expander((c, c.expandee))
- c.Expr[Nothing](result)(c.AbsTypeTag.Nothing)
+ c.Expr[Nothing](result)(c.WeakTypeTag.Nothing)
}
}
@@ -31,11 +31,11 @@ trait FastTrack {
var registry = Map[Symbol, FastTrackEntry]()
implicit class BindTo(sym: Symbol) { def bindTo(expander: FastTrackExpander): Unit = if (sym != NoSymbol) registry += sym -> FastTrackEntry(sym, expander) }
MacroInternal_materializeClassTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeClassTag(u, tt.tpe) }
- MacroInternal_materializeAbsTypeTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeTypeTag(u, EmptyTree, tt.tpe, concrete = false) }
+ MacroInternal_materializeWeakTypeTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeTypeTag(u, EmptyTree, tt.tpe, concrete = false) }
MacroInternal_materializeTypeTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeTypeTag(u, EmptyTree, tt.tpe, concrete = true) }
BaseUniverseReify bindTo { case (c, Apply(TypeApply(_, List(tt)), List(expr))) => c.materializeExpr(c.prefix.tree, EmptyTree, expr) }
ReflectRuntimeCurrentMirror bindTo { case (c, _) => scala.reflect.runtime.Macros.currentMirror(c).tree }
StringContext_f bindTo { case (c, app@Apply(Select(Apply(_, parts), _), args)) => c.macro_StringInterpolation_f(parts, args, app.pos) }
registry
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/reflect/FrontEnds.scala b/src/compiler/scala/tools/reflect/FrontEnds.scala
index d8f07fb2e5..d0c3c1c774 100644
--- a/src/compiler/scala/tools/reflect/FrontEnds.scala
+++ b/src/compiler/scala/tools/reflect/FrontEnds.scala
@@ -36,6 +36,16 @@ trait FrontEnds extends scala.reflect.api.FrontEnds {
def displayPrompt(): Unit =
frontEnd.interactive()
+
+ override def flush(): Unit = {
+ super.flush()
+ frontEnd.flush()
+ }
+
+ override def reset(): Unit = {
+ super.reset()
+ frontEnd.reset()
+ }
}
def wrapFrontEnd(frontEnd: FrontEnd): Reporter = new FrontEndToReporterProxy(frontEnd) {
diff --git a/src/compiler/scala/tools/reflect/ToolBox.scala b/src/compiler/scala/tools/reflect/ToolBox.scala
index 2505c1afb7..9e7d230a6a 100644
--- a/src/compiler/scala/tools/reflect/ToolBox.scala
+++ b/src/compiler/scala/tools/reflect/ToolBox.scala
@@ -12,7 +12,7 @@ trait ToolBox[U <: Universe] {
/** Underlying mirror of a ToolBox
*/
- val mirror: MirrorOf[u.type]
+ val mirror: u.Mirror
/** Front end of the toolbox.
*
@@ -80,18 +80,23 @@ trait ToolBox[U <: Universe] {
def resetLocalAttrs(tree: u.Tree): u.Tree
/** .. */
- def parseExpr(code: String): u.Tree
+ def parse(code: String): u.Tree
- /** Compiles and runs a tree using this ToolBox.
+ /** Compiles a tree using this ToolBox.
*
* If the tree has unresolved type variables (represented as instances of `FreeTypeSymbol` symbols),
* then they all have to be resolved first using `Tree.substituteTypes`, or an error occurs.
*
* This spawns the compiler at the Namer phase, and pipelines the tree through that compiler.
- * Currently `runExpr` does not accept trees that already typechecked, because typechecking isn't idempotent.
+ * Currently `compile` does not accept trees that are already typechecked, because typechecking isn't idempotent.
* For more info, take a look at https://issues.scala-lang.org/browse/SI-5464.
*/
- def runExpr(tree: u.Tree): Any
+ def compile(tree: u.Tree): () => Any
+
+ /** Compiles and runs a tree using this ToolBox.
+ * Is equivalent to `compile(tree)()`.
+ */
+ def eval(tree: u.Tree): Any
}
/** Represents an error during toolboxing
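A hedged usage sketch of the renamed ToolBox API above (parse/compile/eval replacing parseExpr/runExpr), assuming scala-compiler is on the classpath:

    import scala.reflect.runtime.{currentMirror => cm}
    import scala.tools.reflect.ToolBox

    object ToolBoxDemo extends App {
      val tb    = cm.mkToolBox()
      val tree  = tb.parse("2 * 21")  // formerly parseExpr
      val thunk = tb.compile(tree)    // compile once, get a () => Any back
      println(thunk())                // 42
      println(tb.eval(tree))          // shorthand for compile(tree)()
    }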
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index 8cc5a4e531..091224c88a 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -26,7 +26,9 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
private class ToolBoxImpl(val frontEnd: FrontEnd, val options: String) extends ToolBox[U] { toolBoxSelf =>
val u: factorySelf.u.type = factorySelf.u
- val mirror: u.Mirror = factorySelf.mirror
+
+ lazy val classLoader = new AbstractFileClassLoader(virtualDirectory, factorySelf.mirror.classLoader)
+ lazy val mirror: u.Mirror = u.runtimeMirror(classLoader)
class ToolBoxGlobal(settings: scala.tools.nsc.Settings, reporter: Reporter)
extends ReflectGlobal(settings, reporter, toolBoxSelf.classLoader) {
@@ -45,7 +47,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
newTermName("__wrapper$" + wrapCount + "$" + java.util.UUID.randomUUID.toString.replace("-", ""))
}
- def verifyExpr(expr: Tree): Unit = {
+ def verify(expr: Tree): Unit = {
// Previously toolboxes used to typecheck their inputs before compiling.
// Actually, the initial demo by Martin first typechecked the reified tree,
// then ran it, which typechecked it again, and only then launched the
@@ -68,9 +70,9 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
}
}
- def extractFreeTerms(expr0: Tree, wrapFreeTermRefs: Boolean): (Tree, collection.mutable.LinkedHashMap[FreeTermSymbol, TermName]) = {
+ def extractFreeTerms(expr0: Tree, wrapFreeTermRefs: Boolean): (Tree, scala.collection.mutable.LinkedHashMap[FreeTermSymbol, TermName]) = {
val freeTerms = expr0.freeTerms
- val freeTermNames = collection.mutable.LinkedHashMap[FreeTermSymbol, TermName]()
+ val freeTermNames = scala.collection.mutable.LinkedHashMap[FreeTermSymbol, TermName]()
freeTerms foreach (ft => {
var name = ft.name.toString
val namesakes = freeTerms takeWhile (_ != ft) filter (ft2 => ft != ft2 && ft.name == ft2.name)
@@ -95,7 +97,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
}
def transformDuringTyper(expr0: Tree, withImplicitViewsDisabled: Boolean, withMacrosDisabled: Boolean)(transform: (analyzer.Typer, Tree) => Tree): Tree = {
- verifyExpr(expr0)
+ verify(expr0)
// need to wrap the expr, because otherwise you won't be able to typecheck macros against something that contains free vars
var (expr, freeTerms) = extractFreeTerms(expr0, wrapFreeTermRefs = false)
@@ -138,7 +140,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
unwrapped
}
- def typeCheckExpr(expr: Tree, pt: Type, silent: Boolean, withImplicitViewsDisabled: Boolean, withMacrosDisabled: Boolean): Tree =
+ def typeCheck(expr: Tree, pt: Type, silent: Boolean, withImplicitViewsDisabled: Boolean, withMacrosDisabled: Boolean): Tree =
transformDuringTyper(expr, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)(
(currentTyper, expr) => {
trace("typing (implicit views = %s, macros = %s): ".format(!withImplicitViewsDisabled, !withMacrosDisabled))(showAttributed(expr, true, true, settings.Yshowsymkinds.value))
@@ -168,10 +170,12 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
}
})
- def compileExpr(expr: Tree): (Object, java.lang.reflect.Method) = {
- verifyExpr(expr)
+ def compile(expr: Tree): () => Any = {
+ val freeTerms = expr.freeTerms // need to calculate them here, because later on they will be erased
+ val thunks = freeTerms map (fte => () => fte.value) // need to be lazy in order not to distort evaluation order
+ verify(expr)
- def wrapExpr(expr0: Tree): Tree = {
+ def wrap(expr0: Tree): ModuleDef = {
val (expr, freeTerms) = extractFreeTerms(expr0, wrapFreeTermRefs = true)
val (obj, mclazz) = rootMirror.EmptyPackageClass.newModuleAndClassSymbol(
@@ -209,11 +213,11 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
var cleanedUp = resetLocalAttrs(moduledef)
trace("cleaned up: ")(showAttributed(cleanedUp, true, true, settings.Yshowsymkinds.value))
- cleanedUp
+ cleanedUp.asInstanceOf[ModuleDef]
}
- val mdef = wrapExpr(expr)
- val pdef = PackageDef(Ident(nme.EMPTY_PACKAGE_NAME), List(mdef))
+ val mdef = wrap(expr)
+ val pdef = PackageDef(Ident(mdef.name), List(mdef))
val unit = new CompilationUnit(NoSourceFile)
unit.body = pdef
@@ -229,12 +233,6 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val jmeth = jclazz.getDeclaredMethods.find(_.getName == wrapperMethodName).get
val jfield = jclazz.getDeclaredFields.find(_.getName == NameTransformer.MODULE_INSTANCE_NAME).get
val singleton = jfield.get(null)
- (singleton, jmeth)
- }
-
- def runExpr(expr: Tree): Any = {
- val freeTerms = expr.freeTerms // need to calculate them here, because later on they will be erased
- val thunks = freeTerms map (fte => () => fte.value) // need to be lazy in order not to distort evaluation order
// @odersky writes: Not sure we will be able to drop this. I forgot the reason why we dereference () functions,
// but there must have been one. So I propose to leave old version in comments to be resurrected if the problem resurfaces.
@@ -248,13 +246,14 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
// val applyMeth = result.getClass.getMethod("apply")
// applyMeth.invoke(result)
// }
- val (singleton, jmeth) = compileExpr(expr)
- val result = jmeth.invoke(singleton, thunks map (_.asInstanceOf[AnyRef]): _*)
- if (jmeth.getReturnType == java.lang.Void.TYPE) ()
- else result
+ () => {
+ val result = jmeth.invoke(singleton, thunks map (_.asInstanceOf[AnyRef]): _*)
+ if (jmeth.getReturnType == java.lang.Void.TYPE) ()
+ else result
+ }
}
- def parseExpr(code: String): Tree = {
+ def parse(code: String): Tree = {
val run = new Run
reporter.reset()
val wrappedCode = "object wrapper {" + EOL + code + EOL + "}"
@@ -327,7 +326,6 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
lazy val importer = compiler.mkImporter(u)
lazy val exporter = importer.reverse
- lazy val classLoader = new AbstractFileClassLoader(virtualDirectory, mirror.classLoader)
def typeCheck(tree: u.Tree, expectedType: u.Type, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): u.Tree = {
if (compiler.settings.verbose.value) println("importing "+tree+", expectedType = "+expectedType)
@@ -335,7 +333,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
var cexpectedType: compiler.Type = importer.importType(expectedType)
if (compiler.settings.verbose.value) println("typing "+ctree+", expectedType = "+expectedType)
- val ttree: compiler.Tree = compiler.typeCheckExpr(ctree, cexpectedType, silent = silent, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)
+ val ttree: compiler.Tree = compiler.typeCheck(ctree, cexpectedType, silent = silent, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)
val uttree = exporter.importTree(ttree)
uttree
}
@@ -378,20 +376,22 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
def showAttributed(tree: u.Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String =
compiler.showAttributed(importer.importTree(tree), printTypes, printIds, printKinds)
- def parseExpr(code: String): u.Tree = {
+ def parse(code: String): u.Tree = {
if (compiler.settings.verbose.value) println("parsing "+code)
- val ctree: compiler.Tree = compiler.parseExpr(code)
+ val ctree: compiler.Tree = compiler.parse(code)
val utree = exporter.importTree(ctree)
utree
}
- def runExpr(tree: u.Tree): Any = {
+ def compile(tree: u.Tree): () => Any = {
if (compiler.settings.verbose.value) println("importing "+tree)
var ctree: compiler.Tree = importer.importTree(tree)
- if (compiler.settings.verbose.value) println("running "+ctree)
- compiler.runExpr(ctree)
+ if (compiler.settings.verbose.value) println("compiling "+ctree)
+ compiler.compile(ctree)
}
+
+ def eval(tree: u.Tree): Any = compile(tree)()
}
}
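
For illustration, a minimal sketch of how the reworked toolbox API above can be driven once this change is in place, assuming the usual `mkToolBox` entry point from `scala.tools.reflect`; the object and value names are only examples:

import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox

object ToolBoxDemo extends App {
  val tb    = cm.mkToolBox()                 // toolbox bound to the runtime mirror
  val tree  = tb.parse("21 * 2")             // parse replaces the old parseExpr
  val thunk: () => Any = tb.compile(tree)    // compile now returns a reusable thunk
  println(thunk())                           // run it as often as needed => 42
  println(tb.eval(tree))                     // eval(tree) is compile(tree)()
}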
diff --git a/src/compiler/scala/tools/reflect/package.scala b/src/compiler/scala/tools/reflect/package.scala
index 3008930823..901071d91a 100644
--- a/src/compiler/scala/tools/reflect/package.scala
+++ b/src/compiler/scala/tools/reflect/package.scala
@@ -6,7 +6,7 @@
package scala.tools
import scala.reflect.api.JavaUniverse
-import language.implicitConversions
+import scala.language.implicitConversions
package object reflect extends FrontEnds {
// [todo: can we generalize this?
@@ -27,7 +27,7 @@ package reflect {
def eval: T = {
val factory = new ToolBoxFactory[JavaUniverse](expr.mirror.universe) { val mirror = expr.mirror.asInstanceOf[this.u.Mirror] }
val toolBox = factory.mkToolBox()
- toolBox.runExpr(expr.tree.asInstanceOf[toolBox.u.Tree]).asInstanceOf[T]
+ toolBox.eval(expr.tree.asInstanceOf[toolBox.u.Tree]).asInstanceOf[T]
}
}
}
diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala
index ce149a5aa0..70f71a222a 100644
--- a/src/compiler/scala/tools/util/Javap.scala
+++ b/src/compiler/scala/tools/util/Javap.scala
@@ -12,7 +12,7 @@ import scala.tools.nsc.util.ScalaClassLoader
import java.io.{ InputStream, PrintWriter, ByteArrayInputStream, FileNotFoundException }
import scala.tools.nsc.io.File
import Javap._
-import language.reflectiveCalls
+import scala.language.reflectiveCalls
trait Javap {
def loader: ScalaClassLoader
diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala
index 739878c282..f6dc92f96e 100644
--- a/src/compiler/scala/tools/util/PathResolver.scala
+++ b/src/compiler/scala/tools/util/PathResolver.scala
@@ -13,7 +13,7 @@ import nsc.util.{ ClassPath, JavaClassPath, ScalaClassLoader }
import nsc.io.{ File, Directory, Path, AbstractFile }
import ClassPath.{ JavaContext, DefaultJavaContext, join, split }
import PartialFunction.condOpt
-import language.postfixOps
+import scala.language.postfixOps
// Loosely based on the draft specification at:
// https://wiki.scala-lang.org/display/SW/Classpath
diff --git a/src/compiler/scala/tools/util/VerifyClass.scala b/src/compiler/scala/tools/util/VerifyClass.scala
index e0e089d0b2..d208a9f9c2 100644
--- a/src/compiler/scala/tools/util/VerifyClass.scala
+++ b/src/compiler/scala/tools/util/VerifyClass.scala
@@ -2,7 +2,7 @@ package scala.tools.util
import scala.tools.nsc.io._
import java.net.URLClassLoader
-import collection.JavaConverters._
+import scala.collection.JavaConverters._
object VerifyClass {
diff --git a/src/continuations/library/scala/util/continuations/ControlContext.scala b/src/continuations/library/scala/util/continuations/ControlContext.scala
index 910ca60eb0..37e3f45a4f 100644
--- a/src/continuations/library/scala/util/continuations/ControlContext.scala
+++ b/src/continuations/library/scala/util/continuations/ControlContext.scala
@@ -8,7 +8,7 @@
package scala.util.continuations
-import annotation.{ Annotation, StaticAnnotation, TypeConstraint }
+import scala.annotation.{ Annotation, StaticAnnotation, TypeConstraint }
/** This annotation is used to mark a parameter as part of a continuation
* context.
diff --git a/src/detach/library/scala/remoting/Channel.scala b/src/detach/library/scala/remoting/Channel.scala
index 54b8fb100e..18944a152d 100644
--- a/src/detach/library/scala/remoting/Channel.scala
+++ b/src/detach/library/scala/remoting/Channel.scala
@@ -116,7 +116,7 @@ class Channel protected (socket: Socket) {
* the expected type.
*/
@throws(classOf[ChannelException])
- def receive[T](implicit expected: reflect.ClassTag[T]): T = {
+ def receive[T](implicit expected: scala.reflect.ClassTag[T]): T = {
val found = in.readObject().asInstanceOf[reflect.ClassTag[_]]
info("receive: found="+found+", expected="+expected)
import scala.reflect.ClassTag
@@ -144,11 +144,11 @@ class Channel protected (socket: Socket) {
/** <code>?</code> method may throw either an
* <code>ClassNotFoundException</code> or an <code>IOException</code>.
*/
- def ?[T](implicit t: reflect.ClassTag[T]): T = receive[T](t)
+ def ?[T](implicit t: scala.reflect.ClassTag[T]): T = receive[T](t)
/** <code>send</code> method may throw an <code>IOException</code>.
*/
- def send[T](x: T)(implicit t: reflect.ClassTag[T]) {
+ def send[T](x: T)(implicit t: scala.reflect.ClassTag[T]) {
out writeObject t
x match {
case x: Unit => // nop
@@ -168,7 +168,7 @@ class Channel protected (socket: Socket) {
/** <code>!</code> method may throw an <code>IOException</code>.
*/
- def ![T](x: T)(implicit m: reflect.ClassTag[T]) { send(x)(m) }
+ def ![T](x: T)(implicit m: scala.reflect.ClassTag[T]) { send(x)(m) }
def close() {
try { socket.close() }
diff --git a/src/library/scala/App.scala b/src/library/scala/App.scala
index 85d2f9075e..a1e5e74e2f 100644
--- a/src/library/scala/App.scala
+++ b/src/library/scala/App.scala
@@ -22,6 +22,16 @@ import scala.collection.mutable.ListBuffer
*
* `args` returns the current command line arguments as an array.
*
+ * ==Caveats==
+ *
+ * '''''It should be noted that this trait is implemented using the [[DelayedInit]]
+ * functionality, which means that fields of the object will not have been initialized
+ * before the main method has been executed.'''''
+ *
+ * It should also be noted that the `main` method will not normally need to be overridden:
+ * the purpose is to turn the whole class body into the “main method”. You should only
+ * choose to override it if you know what you are doing.
+ *
* @author Martin Odersky
* @version 2.1, 15/02/2011
*/
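
A small example of the caveat documented above; `Echo` is just an illustrative name:

object Echo extends App {
  // Because App relies on DelayedInit, this whole body is the "main method":
  // `greeting` is only initialized when main(args) actually runs, not when
  // the object is first referenced.
  val greeting = "Hello, " + args.mkString(" ")
  println(greeting)
}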
diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala
index 7511b5309d..0b8550be37 100644
--- a/src/library/scala/Array.scala
+++ b/src/library/scala/Array.scala
@@ -11,7 +11,7 @@ package scala
import scala.collection.generic._
import scala.collection.{ mutable, immutable }
import mutable.{ ArrayBuilder, ArraySeq }
-import compat.Platform.arraycopy
+import scala.compat.Platform.arraycopy
import scala.reflect.ClassTag
import scala.runtime.ScalaRunTime.{ array_apply, array_update }
@@ -48,6 +48,16 @@ class FallbackArrayBuilding {
* @version 1.0
*/
object Array extends FallbackArrayBuilding {
+ val emptyBooleanArray = new Array[Boolean](0)
+ val emptyByteArray = new Array[Byte](0)
+ val emptyCharArray = new Array[Char](0)
+ val emptyDoubleArray = new Array[Double](0)
+ val emptyFloatArray = new Array[Float](0)
+ val emptyIntArray = new Array[Int](0)
+ val emptyLongArray = new Array[Long](0)
+ val emptyShortArray = new Array[Short](0)
+ val emptyObjectArray = new Array[Object](0)
+
implicit def canBuildFrom[T](implicit t: ClassTag[T]): CanBuildFrom[Array[_], T, Array[T]] =
new CanBuildFrom[Array[_], T, Array[T]] {
def apply(from: Array[_]) = ArrayBuilder.make[T]()(t)
@@ -511,5 +521,5 @@ final class Array[T](_length: Int) extends java.io.Serializable with java.lang.C
*
* @return A clone of the Array.
*/
- override def clone: Array[T] = throw new Error()
+ override def clone(): Array[T] = throw new Error()
}
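
A brief sketch of how the cached empty arrays introduced above can be used to avoid allocating fresh zero-length arrays; the helper below is purely illustrative:

object EmptyArrayDemo extends App {
  // Return a defensive copy, but share the cached empty array for the empty case.
  def defensiveCopy(xs: Array[Int]): Array[Int] =
    if (xs.isEmpty) Array.emptyIntArray
    else xs.clone()

  println(defensiveCopy(Array(1, 2, 3)).mkString(","))
  println(defensiveCopy(Array.emptyIntArray).length)    // 0, no new allocation
}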
diff --git a/src/library/scala/Boolean.scala b/src/library/scala/Boolean.scala
index 014928d986..440e546f19 100644
--- a/src/library/scala/Boolean.scala
+++ b/src/library/scala/Boolean.scala
@@ -10,7 +10,7 @@
package scala
-import language.implicitConversions
+import scala.language.implicitConversions
/** `Boolean` (equivalent to Java's `boolean` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Boolean` are not
diff --git a/src/library/scala/Byte.scala b/src/library/scala/Byte.scala
index 6f54f6cedf..df0d2c73b1 100644
--- a/src/library/scala/Byte.scala
+++ b/src/library/scala/Byte.scala
@@ -10,7 +10,7 @@
package scala
-import language.implicitConversions
+import scala.language.implicitConversions
/** `Byte`, a 8-bit signed integer (equivalent to Java's `byte` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Byte` are not
diff --git a/src/library/scala/Char.scala b/src/library/scala/Char.scala
index b681ae1693..1fa0c0d9e8 100644
--- a/src/library/scala/Char.scala
+++ b/src/library/scala/Char.scala
@@ -10,7 +10,7 @@
package scala
-import language.implicitConversions
+import scala.language.implicitConversions
/** `Char`, a 16-bit unsigned integer (equivalent to Java's `char` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Char` are not
diff --git a/src/library/scala/Double.scala b/src/library/scala/Double.scala
index 510de92a2a..f058d7c26b 100644
--- a/src/library/scala/Double.scala
+++ b/src/library/scala/Double.scala
@@ -10,7 +10,7 @@
package scala
-import language.implicitConversions
+import scala.language.implicitConversions
/** `Double`, a 64-bit IEEE-754 floating point number (equivalent to Java's `double` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Double` are not
diff --git a/src/library/scala/Dynamic.scala b/src/library/scala/Dynamic.scala
index faf834d310..3bcb2f1c90 100644
--- a/src/library/scala/Dynamic.scala
+++ b/src/library/scala/Dynamic.scala
@@ -9,11 +9,11 @@
package scala
/** A marker trait that enables dynamic invocations. Instances `x` of this
- * trait allow method invocations `x.meth(args)` for arbitrary method
- * names `meth` and argument lists `args` as well as field accesses
+ * trait allow method invocations `x.meth(args)` for arbitrary method
+ * names `meth` and argument lists `args` as well as field accesses
* `x.field` for arbitrary field names `field`.
*
- * If a call is not natively supported by `x` (i.e. if type checking
+ * If a call is not natively supported by `x` (i.e. if type checking
* fails), it is rewritten according to the following rules:
*
* {{{
@@ -23,12 +23,12 @@ package scala
* foo.field ~~> foo.selectDynamic("field")
* foo.varia = 10 ~~> foo.updateDynamic("varia")(10)
* foo.arr(10) = 13 ~~> foo.selectDynamic("arr").update(10, 13)
- * foo.arr(10) ~~> foo.applyDynamics("arr")(10)
+ * foo.arr(10) ~~> foo.applyDynamic("arr")(10)
* }}}
*
* As of Scala 2.10, defining direct or indirect subclasses of this trait
* is only possible if the language feature `dynamics` is enabled.
*/
-trait Dynamic
+trait Dynamic extends Any
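
A minimal sketch of the rewrite rules listed above, assuming the `dynamics` language feature is enabled; the `Props` class and its storage are illustrative only:

import scala.language.dynamics
import scala.collection.mutable

class Props extends Dynamic {
  private val data = mutable.Map.empty[String, Any]
  def selectDynamic(field: String): Any = data(field)                      // p.field
  def updateDynamic(field: String)(value: Any): Unit = data(field) = value // p.field = v
  def applyDynamic(method: String)(args: Any*): String =                   // p.method(args)
    method + args.mkString("(", ",", ")")
}

object PropsDemo extends App {
  val p = new Props
  p.varia = 10                 // ~~> p.updateDynamic("varia")(10)
  println(p.varia)             // ~~> p.selectDynamic("varia")      => 10
  println(p.greet("world"))    // ~~> p.applyDynamic("greet")("world")
}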
diff --git a/src/library/scala/Float.scala b/src/library/scala/Float.scala
index b9c116da0b..d942acec23 100644
--- a/src/library/scala/Float.scala
+++ b/src/library/scala/Float.scala
@@ -10,7 +10,7 @@
package scala
-import language.implicitConversions
+import scala.language.implicitConversions
/** `Float`, a 32-bit IEEE-754 floating point number (equivalent to Java's `float` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Float` are not
diff --git a/src/library/scala/Function.scala b/src/library/scala/Function.scala
index 270581a3aa..d470f4c966 100644
--- a/src/library/scala/Function.scala
+++ b/src/library/scala/Function.scala
@@ -28,11 +28,11 @@ object Function {
/** Turns a function `A => Option[B]` into a `PartialFunction[A, B]`.
*
- * TODO: check if the paragraph below is still correct
* '''Important note''': this transformation implies the original function
- * will be called 2 or more times on each logical invocation, because the
+ * may be called 2 or more times on each logical invocation, because the
* only way to supply an implementation of `isDefinedAt` is to call the
* function and examine the return value.
+ * See also [[scala.PartialFunction]], method `applyOrElse`.
*
* @param f a function `T => Option[R]`
* @return a partial function defined for those inputs where
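
For illustration, a sketch of the note above: with a guard-then-apply usage the original function is evaluated more than once per logical invocation, whereas `applyOrElse` needs a single evaluation (counter and names are illustrative):

object UnliftDemo extends App {
  var calls = 0
  val parse: String => Option[Int] =
    s => { calls += 1; scala.util.Try(s.toInt).toOption }
  val pf: PartialFunction[String, Int] = Function.unlift(parse)

  // isDefinedAt and apply each evaluate `parse` once: two evaluations in total.
  if (pf.isDefinedAt("42")) println(pf("42"))
  println("evaluations so far: " + calls)

  // applyOrElse goes through `parse` only once.
  println(pf.applyOrElse("17", (_: String) => -1))
  println("evaluations so far: " + calls)
}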
diff --git a/src/library/scala/Int.scala b/src/library/scala/Int.scala
index b2a4f93253..ae36413469 100644
--- a/src/library/scala/Int.scala
+++ b/src/library/scala/Int.scala
@@ -10,7 +10,7 @@
package scala
-import language.implicitConversions
+import scala.language.implicitConversions
/** `Int`, a 32-bit signed integer (equivalent to Java's `int` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Int` are not
diff --git a/src/library/scala/Long.scala b/src/library/scala/Long.scala
index 40932a65a7..4ee9383c2a 100644
--- a/src/library/scala/Long.scala
+++ b/src/library/scala/Long.scala
@@ -10,7 +10,7 @@
package scala
-import language.implicitConversions
+import scala.language.implicitConversions
/** `Long`, a 64-bit signed integer (equivalent to Java's `long` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Long` are not
diff --git a/src/library/scala/LowPriorityImplicits.scala b/src/library/scala/LowPriorityImplicits.scala
index 491cd417a3..7697a7367a 100644
--- a/src/library/scala/LowPriorityImplicits.scala
+++ b/src/library/scala/LowPriorityImplicits.scala
@@ -12,7 +12,7 @@ import scala.collection.{ mutable, immutable, generic }
import mutable.WrappedArray
import immutable.WrappedString
import generic.CanBuildFrom
-import language.implicitConversions
+import scala.language.implicitConversions
/** The `LowPriorityImplicits` class provides implicit values that
* are valid in all Scala compilation units without explicit qualification,
@@ -27,15 +27,20 @@ class LowPriorityImplicits {
* any potential conflicts. Conflicts do exist because the wrappers
* need to implement ScalaNumber in order to have a symmetric equals
* method, but that implies implementing java.lang.Number as well.
+ *
+ * Note - these are inlined because they are value classes, but
+ * the call to xxxWrapper is not eliminated even though it does nothing.
+ * Even inlined, every call site does a no-op retrieval of Predef's MODULE$
+ * because loading Predef might have side effects!
*/
- implicit def byteWrapper(x: Byte) = new runtime.RichByte(x)
- implicit def shortWrapper(x: Short) = new runtime.RichShort(x)
- implicit def intWrapper(x: Int) = new runtime.RichInt(x)
- implicit def charWrapper(c: Char) = new runtime.RichChar(c)
- implicit def longWrapper(x: Long) = new runtime.RichLong(x)
- implicit def floatWrapper(x: Float) = new runtime.RichFloat(x)
- implicit def doubleWrapper(x: Double) = new runtime.RichDouble(x)
- implicit def booleanWrapper(x: Boolean) = new runtime.RichBoolean(x)
+ @inline implicit def byteWrapper(x: Byte) = new runtime.RichByte(x)
+ @inline implicit def shortWrapper(x: Short) = new runtime.RichShort(x)
+ @inline implicit def intWrapper(x: Int) = new runtime.RichInt(x)
+ @inline implicit def charWrapper(c: Char) = new runtime.RichChar(c)
+ @inline implicit def longWrapper(x: Long) = new runtime.RichLong(x)
+ @inline implicit def floatWrapper(x: Float) = new runtime.RichFloat(x)
+ @inline implicit def doubleWrapper(x: Double) = new runtime.RichDouble(x)
+ @inline implicit def booleanWrapper(x: Boolean) = new runtime.RichBoolean(x)
// These eight implicits exist solely to exclude Null from the domain of
// the boxed types, so that e.g. "var x: Int = null" is a compile time
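
For context, these wrappers are what supply the enriched methods on primitives; a tiny illustration of the path such a call takes (the explicit form is shown only for clarity):

object RichIntDemo extends App {
  // Int itself has no `max`; the implicit intWrapper conversion to
  // scala.runtime.RichInt (now a value class, per the note above) supplies it.
  println(3 max 5)                // implicit: intWrapper(3).max(5)  => 5
  println(intWrapper(3).max(5))   // the same call written out explicitly
}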
diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala
index b7420f4447..945b0a0c3b 100644
--- a/src/library/scala/Option.scala
+++ b/src/library/scala/Option.scala
@@ -10,7 +10,7 @@ package scala
object Option {
- import language.implicitConversions
+ import scala.language.implicitConversions
/** An implicit conversion that converts an option to an iterable value
*/
diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala
index 7154b8da34..7c6e2d2e3e 100644
--- a/src/library/scala/PartialFunction.scala
+++ b/src/library/scala/PartialFunction.scala
@@ -67,7 +67,7 @@ trait PartialFunction[-A, +B] extends (A => B) { self =>
* of this partial function and `that`. The resulting partial function
* takes `x` to `this(x)` where `this` is defined, and to `that(x)` where it is not.
*/
- def orElse[A1 <: A, B1 >: B](that: PartialFunction[A1, B1]) : PartialFunction[A1, B1] =
+ def orElse[A1 <: A, B1 >: B](that: PartialFunction[A1, B1]): PartialFunction[A1, B1] =
new OrElse[A1, B1] (this, that)
//TODO: why not overload it with orElse(that: F1): F1?
@@ -78,10 +78,8 @@ trait PartialFunction[-A, +B] extends (A => B) { self =>
* @return a partial function with the same domain as this partial function, which maps
* arguments `x` to `k(this(x))`.
*/
- override def andThen[C](k: B => C) : PartialFunction[A, C] = new PartialFunction[A, C] {
- def isDefinedAt(x: A): Boolean = self isDefinedAt x
- def apply(x: A): C = k(self(x))
- }
+ override def andThen[C](k: B => C): PartialFunction[A, C] =
+ new AndThen[A, B, C] (this, k)
/** Turns this partial function into an plain function returning an `Option` result.
* @see Function.unlift
@@ -90,28 +88,54 @@ trait PartialFunction[-A, +B] extends (A => B) { self =>
*/
def lift: A => Option[B] = new Lifted(this)
- /**
- * TODO: comment
+ /** Applies this partial function to the given argument when it is contained in the function domain.
+ * Applies the fallback function where this partial function is not defined.
+ *
+ * Note that the expression `pf.applyOrElse(x, default)` is equivalent to
+ * {{{ if(pf isDefinedAt x) pf(x) else default(x) }}}
+ * except that `applyOrElse` can be implemented more efficiently.
+ * For all partial function literals the compiler generates an `applyOrElse` implementation which
+ * avoids double evaluation of pattern matchers and guards.
+ * This makes `applyOrElse` the basis for an efficient implementation of many operations and scenarios, such as:
+ *
+ * - combining partial functions into `orElse`/`andThen` chains does not lead to
+ * excessive `apply`/`isDefinedAt` evaluation
+ * - `lift` and `unlift` do not evaluate source functions twice on each invocation
+ * - `runWith` allows efficient imperative-style combining of partial functions
+ * with conditionally applied actions
+ *
+ * For non-literal partial function classes with a nontrivial `isDefinedAt` method
+ * it is recommended to override `applyOrElse` with a custom implementation that avoids
+ * double `isDefinedAt` evaluation. This may result in better performance
+ * and more predictable behavior with respect to side effects.
+ *
+ * @param x the function argument
+ * @param default the fallback function
+ * @return the result of this function or fallback function application.
* @since 2.10
*/
def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 =
if (isDefinedAt(x)) apply(x) else default(x)
- /**
- * TODO: comment
- * @since 2.10
- */
- def run[U](x: A)(action: B => U): Boolean =
- applyOrElse(x, fallbackToken) match {
- case FallbackToken => false
- case z => action(z); true
- }
-
- /**
- * TODO: comment
+ /** Composes this partial function with an action function which
+ * gets applied to results of this partial function.
+ * The action function is invoked only for its side effects; its result is ignored.
+ *
+ * Note that the expression `pf.runWith(action)(x)` is equivalent to
+ * {{{ if(pf isDefinedAt x) { action(pf(x)); true } else false }}}
+ * except that `runWith` is implemented via `applyOrElse` and is thus potentially more efficient.
+ * Using `runWith` avoids double evaluation of pattern matchers and guards for partial function literals.
+ * @see `applyOrElse`.
+ *
+ * @param action the action function
+ * @return a function which maps arguments `x` to `isDefinedAt(x)`. The resulting function
+ * runs `action(this(x))` where `this` is defined.
* @since 2.10
*/
- def runWith[U](action: B => U): A => Boolean = { x => run(x)(action) }
+ def runWith[U](action: B => U): A => Boolean = { x =>
+ val z = applyOrElse(x, checkFallback[B])
+ if (!fallbackOccurred(z)) { action(z); true } else false
+ }
}
/** A few handy operations which leverage the extra bit of information
@@ -137,11 +161,10 @@ object PartialFunction {
def apply(x: A): B = f1.applyOrElse(x, f2)
- override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 =
- f1.applyOrElse(x, fallbackToken) match {
- case FallbackToken => f2.applyOrElse(x, default)
- case z => z
- }
+ override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = {
+ val z = f1.applyOrElse(x, checkFallback[B])
+ if (!fallbackOccurred(z)) z else f2.applyOrElse(x, default)
+ }
override def orElse[A1 <: A, B1 >: B](that: PartialFunction[A1, B1]) =
new OrElse[A1, B1] (f1, f2 orElse that)
@@ -150,23 +173,61 @@ object PartialFunction {
new OrElse[A, C] (f1 andThen k, f2 andThen k)
}
- private[scala] lazy val FallbackToken: PartialFunction[Any, PartialFunction[Any, Nothing]] = { case _ => FallbackToken.asInstanceOf[PartialFunction[Any, Nothing]] }
- private[scala] final def fallbackToken[B] = FallbackToken.asInstanceOf[PartialFunction[Any, B]]
- //TODO: check generated code for PF literal here
+ /** Composite function produced by the `PartialFunction#andThen` method
+ */
+ private final class AndThen[-A, B, +C] (pf: PartialFunction[A, B], k: B => C) extends PartialFunction[A, C] {
+ def isDefinedAt(x: A) = pf.isDefinedAt(x)
+
+ def apply(x: A): C = k(pf(x))
+
+ override def applyOrElse[A1 <: A, C1 >: C](x: A1, default: A1 => C1): C1 = {
+ val z = pf.applyOrElse(x, checkFallback[B])
+ if (!fallbackOccurred(z)) k(z) else default(x)
+ }
+ }
+
+ /** To implement patterns like {{{ if(pf isDefinedAt x) f1(pf(x)) else f2(x) }}} efficiently
+ * the following trick is used:
+ *
+ * To avoid double evaluation of pattern matchers and guards, the `applyOrElse` method is used here
+ * instead of the `isDefinedAt`/`apply` pair.
+ *
+ * After a call to `applyOrElse` we need both the result it returned and the fact of whether
+ * the argument was contained in the function's domain. The only degree of freedom we have here
+ * to achieve this is tweaking the continuation argument (`default`) of the `applyOrElse` method.
+ * The obvious way is to throw an exception from the `default` function and catch it after
+ * calling `applyOrElse`, but that is somewhat inefficient.
+ *
+ * The only efficient way we know of is to have the `default` function return a unique marker object
+ * that can never be returned by any other (regular or partial) function. After calling `applyOrElse`,
+ * a single reference comparison is enough to tell whether `pf isDefinedAt x` holds.
+ *
+ * This correctly interacts with specialization, as the return type of `applyOrElse`
+ * (which is a parameterized upper bound) can never be specialized.
+ *
+ * Here `fallback_pf` serves as both the unique marker object and the special fallback function that returns it.
+ */
+ private[this] final val fallback_pf: PartialFunction[Any, Any] = { case _ => fallback_pf }
+ @inline private final def checkFallback[B] = fallback_pf.asInstanceOf[PartialFunction[Any, B]]
+ @inline private final def fallbackOccurred[B](x: B) = (fallback_pf eq x.asInstanceOf[AnyRef])
- private[scala] final class Lifted[-A, +B] (val pf: PartialFunction[A, B])
- extends runtime.AbstractFunction1[A, Option[B]] {
+ private final class Lifted[-A, +B] (val pf: PartialFunction[A, B])
+ extends scala.runtime.AbstractFunction1[A, Option[B]] {
- def apply(x: A): Option[B] = pf.applyOrElse(x, fallbackToken) match {
- case FallbackToken => None
- case z => Some(z)
+ def apply(x: A): Option[B] = {
+ val z = pf.applyOrElse(x, checkFallback[B])
+ if (!fallbackOccurred(z)) Some(z) else None
}
}
- private final class Unlifted[A, B] (f: A => Option[B]) extends runtime.AbstractPartialFunction[A, B] {
+ private final class Unlifted[A, B] (f: A => Option[B]) extends scala.runtime.AbstractPartialFunction[A, B] {
def isDefinedAt(x: A): Boolean = f(x).isDefined
- override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 =
- f(x) getOrElse default(x) //TODO: check generated code and inline getOrElse if needed
+
+ override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = {
+ val z = f(x)
+ if (!z.isEmpty) z.get else default(x)
+ }
+
override def lift = f
}
@@ -178,7 +239,6 @@ object PartialFunction {
/** Converts ordinary function to partial one
* @since 2.10
*/
- //TODO: check generated code for PF literal here
def apply[A, B](f: A => B): PartialFunction[A, B] = { case x => f(x) }
private[this] final val constFalse: Any => Boolean = { _ => false}
@@ -189,12 +249,11 @@ object PartialFunction {
override def orElse[A1, B1](that: PartialFunction[A1, B1]) = that
override def andThen[C](k: Nothing => C) = this
override val lift = (x: Any) => None
- override def run[U](x: Any)(action: Nothing => U) = false
override def runWith[U](action: Nothing => U) = constFalse
}
- /**
- * TODO: comment
+ /** The partial function with empty domain.
+ * Any attempt to invoke the empty partial function results in a [[scala.MatchError]] being thrown.
* @since 2.10
*/
def empty[A, B] : PartialFunction[A, B] = empty_pf
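
To make the `applyOrElse`/`runWith` contract documented above concrete, a short usage sketch (all names are illustrative):

object PartialFunctionDemo extends App {
  val even: PartialFunction[Int, String] = { case n if n % 2 == 0 => n + " is even" }
  val odd:  PartialFunction[Int, String] = { case n if n % 2 != 0 => n + " is odd"  }

  // applyOrElse: apply where defined, otherwise call the fallback -- in a single pass.
  println(even.applyOrElse(4, (n: Int) => n + " is not even"))   // 4 is even
  println(even.applyOrElse(5, (n: Int) => n + " is not even"))   // 5 is not even

  // runWith: perform a side effect where defined and report whether it ran.
  val printIfEven: Int => Boolean = even.runWith(s => println(s))
  println(printIfEven(6))    // prints "6 is even", then true
  println(printIfEven(7))    // prints false only

  // orElse/andThen chains reuse applyOrElse internally, avoiding double evaluation of the guards.
  val describe = even orElse odd andThen (_.toUpperCase)
  println(describe(9))       // 9 IS ODD
}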
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index f3d36473dd..4792de6bf5 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -12,9 +12,9 @@ import scala.collection.{ mutable, immutable, generic }
import immutable.StringOps
import mutable.ArrayOps
import generic.CanBuildFrom
-import annotation.{ elidable, implicitNotFound }
-import annotation.elidable.ASSERTION
-import language.{implicitConversions, existentials}
+import scala.annotation.{ elidable, implicitNotFound }
+import scala.annotation.elidable.ASSERTION
+import scala.language.{implicitConversions, existentials}
/** The `Predef` object provides definitions that are accessible in all Scala
* compilation units without explicit qualification.
@@ -301,7 +301,7 @@ object Predef extends LowPriorityImplicits {
implicit def exceptionWrapper(exc: Throwable) = new runtime.RichException(exc)
implicit def tuple2ToZippedOps[T1, T2](x: (T1, T2)) = new runtime.Tuple2Zipped.Ops(x)
implicit def tuple3ToZippedOps[T1, T2, T3](x: (T1, T2, T3)) = new runtime.Tuple3Zipped.Ops(x)
- implicit def seqToCharSequence(xs: collection.IndexedSeq[Char]): CharSequence = new runtime.SeqCharSequence(xs)
+ implicit def seqToCharSequence(xs: scala.collection.IndexedSeq[Char]): CharSequence = new runtime.SeqCharSequence(xs)
implicit def arrayToCharSequence(xs: Array[Char]): CharSequence = new runtime.ArrayCharSequence(xs, 0, xs.length)
implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = (xs match {
diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala
index 8c42c60d98..2c6838f6b3 100644
--- a/src/library/scala/Product.scala
+++ b/src/library/scala/Product.scala
@@ -35,7 +35,7 @@ trait Product extends Any with Equals {
/** An iterator over all the elements of this product.
* @return in the default implementation, an `Iterator[Any]`
*/
- def productIterator: Iterator[Any] = new collection.AbstractIterator[Any] {
+ def productIterator: Iterator[Any] = new scala.collection.AbstractIterator[Any] {
private var c: Int = 0
private val cmax = productArity
def hasNext = c < cmax
diff --git a/src/library/scala/SerialVersionUID.scala b/src/library/scala/SerialVersionUID.scala
index 0cb924c3d4..f59aa94bd7 100644
--- a/src/library/scala/SerialVersionUID.scala
+++ b/src/library/scala/SerialVersionUID.scala
@@ -12,4 +12,4 @@ package scala
* Annotation for specifying the `static SerialVersionUID` field
* of a serializable class.
*/
-class SerialVersionUID(uid: Long) extends annotation.StaticAnnotation
+class SerialVersionUID(uid: Long) extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/Short.scala b/src/library/scala/Short.scala
index 687b198a11..35c5fe3ff0 100644
--- a/src/library/scala/Short.scala
+++ b/src/library/scala/Short.scala
@@ -10,7 +10,7 @@
package scala
-import language.implicitConversions
+import scala.language.implicitConversions
/** `Short`, a 16-bit signed integer (equivalent to Java's `short` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Short` are not
diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala
index 723d95a499..1201b1accd 100644
--- a/src/library/scala/StringContext.scala
+++ b/src/library/scala/StringContext.scala
@@ -19,7 +19,7 @@ case class StringContext(parts: String*) {
import StringContext._
- /** Checks that the given arguments `args` number one less than the number
+ /** Checks that the number of given arguments `args` is one less than the number
* of `parts` supplied to the enclosing `StringContext`.
* @param `args` The arguments to be checked.
* @throws An `IllegalArgumentException` if this is not the case.
@@ -37,7 +37,7 @@ case class StringContext(parts: String*) {
* @throws An `IllegalArgumentException`
* if the number of `parts` in the enclosing `StringContext` does not exceed
* the number of arguments `arg` by exactly 1.
- * @throws A `StringContext.InvalidEscapeException` if if a `parts` string contains a backslash (`\`) character
+ * @throws A `StringContext.InvalidEscapeException` if a `parts` string contains a backslash (`\`) character
* that does not start a valid escape sequence.
*/
def s(args: Any*): String = standardInterpolator(treatEscapes, args)
@@ -51,7 +51,7 @@ case class StringContext(parts: String*) {
* @throws An `IllegalArgumentException`
* if the number of `parts` in the enclosing `StringContext` does not exceed
* the number of arguments `arg` by exactly 1.
- * @throws A `StringContext.InvalidEscapeException` if if a `parts` string contains a backslash (`\`) character
+ * @throws A `StringContext.InvalidEscapeException` if a `parts` string contains a backslash (`\`) character
* that does not start a valid escape sequence.
*/
def raw(args: Any*): String = standardInterpolator(identity, args)
@@ -96,13 +96,14 @@ case class StringContext(parts: String*) {
* string literally. This is achieved by replacing each such occurrence by the
* format specifier `%%`.
*/
- // The implementation is magically hardwired into `scala.tools.reflect.MacroImplementations.macro_StringInterpolation_f`
+ // The implementation is hardwired to `scala.tools.reflect.MacroImplementations.macro_StringInterpolation_f`,
+ // using the mechanism implemented in `scala.tools.reflect.FastTrack`.
def f(args: Any*): String = ??? // macro
}
object StringContext {
- /** An exception that is thrown if a string contains a backslash (`\`) character that
+ /** An exception that is thrown if a string contains a backslash (`\`) character
* that does not start a valid escape sequence.
* @param str The offending string
* @param idx The index of the offending backslash character in `str`.
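
A quick sketch of the three interpolators whose documentation is touched above:

object InterpolatorDemo extends App {
  val name  = "world"
  val ratio = 1.0 / 3.0
  println(s"Hello, $name!\n")        // s: substitutes arguments and processes escapes
  println(raw"Hello, $name!\n")      // raw: substitutes, but leaves \n as two characters
  println(f"ratio = $ratio%.2f")     // f: printf-style, type-checked at compile time (a macro)
}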
diff --git a/src/library/scala/Unit.scala b/src/library/scala/Unit.scala
index 4156071f29..dc67e60314 100644
--- a/src/library/scala/Unit.scala
+++ b/src/library/scala/Unit.scala
@@ -10,7 +10,7 @@
package scala
-import language.implicitConversions
+import scala.language.implicitConversions
/** `Unit` is a subtype of [[scala.AnyVal]]. There is only one value of type
diff --git a/src/library/scala/annotation/bridge.scala b/src/library/scala/annotation/bridge.scala
index a56129fb96..c3a7f47e62 100644
--- a/src/library/scala/annotation/bridge.scala
+++ b/src/library/scala/annotation/bridge.scala
@@ -11,4 +11,4 @@ package scala.annotation
/** If this annotation is present on a method, it will be treated as a bridge method.
*/
@deprecated("Reconsider whether using this annotation will accomplish anything", "2.10.0")
-private[scala] class bridge extends annotation.StaticAnnotation
+private[scala] class bridge extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/cloneable.scala b/src/library/scala/annotation/cloneable.scala
index aa45e8325f..dc2031ba8d 100644
--- a/src/library/scala/annotation/cloneable.scala
+++ b/src/library/scala/annotation/cloneable.scala
@@ -12,4 +12,4 @@ package scala.annotation
* An annotation that designates the class to which it is applied as cloneable
*/
@deprecated("instead of `@cloneable class C`, use `class C extends Cloneable`", "2.10.0")
-class cloneable extends annotation.StaticAnnotation
+class cloneable extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/elidable.scala b/src/library/scala/annotation/elidable.scala
index 18be2450f5..0b4f5ac4b2 100644
--- a/src/library/scala/annotation/elidable.scala
+++ b/src/library/scala/annotation/elidable.scala
@@ -22,7 +22,7 @@ import java.util.logging.Level
* @elidable(123) // annotation priority
* scalac -Xelide-below 456 // command line priority
* }}}
- *
+ *
* The method call will be replaced with an expression which depends on
* the type of the elided expression. In decreasing order of precedence:
*
@@ -33,10 +33,10 @@ import java.util.logging.Level
* T >: Null null
* T >: Nothing Predef.???
* }}}
- *
+ *
* Complete example:
{{{
- import annotation._, elidable._
+ import scala.annotation._, elidable._
object Test extends App {
def expensiveComputation(): Int = { Thread.sleep(1000) ; 172 }
@@ -62,7 +62,7 @@ import java.util.logging.Level
* @author Paul Phillips
* @since 2.8
*/
-final class elidable(final val level: Int) extends annotation.StaticAnnotation {}
+final class elidable(final val level: Int) extends scala.annotation.StaticAnnotation {}
/** This useless appearing code was necessary to allow people to use
* named constants for the elidable annotation. This is what it takes
diff --git a/src/library/scala/annotation/implicitNotFound.scala b/src/library/scala/annotation/implicitNotFound.scala
index 0c6a5d610f..993e99d382 100644
--- a/src/library/scala/annotation/implicitNotFound.scala
+++ b/src/library/scala/annotation/implicitNotFound.scala
@@ -15,4 +15,4 @@ package scala.annotation
* @author Adriaan Moors
* @since 2.8.1
*/
-final class implicitNotFound(msg: String) extends annotation.StaticAnnotation {}
\ No newline at end of file
+final class implicitNotFound(msg: String) extends scala.annotation.StaticAnnotation {}
diff --git a/src/library/scala/annotation/meta/beanGetter.scala b/src/library/scala/annotation/meta/beanGetter.scala
index 040a3f415a..48eccf9337 100644
--- a/src/library/scala/annotation/meta/beanGetter.scala
+++ b/src/library/scala/annotation/meta/beanGetter.scala
@@ -10,4 +10,4 @@ package scala.annotation.meta
/**
* Consult the documentation in package [[scala.annotation.meta]].
*/
-final class beanGetter extends annotation.StaticAnnotation
+final class beanGetter extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/beanSetter.scala b/src/library/scala/annotation/meta/beanSetter.scala
index 45ea063169..c9f68449fc 100644
--- a/src/library/scala/annotation/meta/beanSetter.scala
+++ b/src/library/scala/annotation/meta/beanSetter.scala
@@ -10,4 +10,4 @@ package scala.annotation.meta
/**
* Consult the documentation in package [[scala.annotation.meta]].
*/
-final class beanSetter extends annotation.StaticAnnotation
+final class beanSetter extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/companionClass.scala b/src/library/scala/annotation/meta/companionClass.scala
index 8e53f6caf9..d165f37bad 100644
--- a/src/library/scala/annotation/meta/companionClass.scala
+++ b/src/library/scala/annotation/meta/companionClass.scala
@@ -12,6 +12,6 @@ package scala.annotation.meta
* conversion method for it. Annotations `@companionClass` and `@companionMethod`
* control where an annotation on the implicit class will go. By default, annotations
* on an implicit class end up only on the class.
- *
+ *
*/
-final class companionClass extends annotation.StaticAnnotation
+final class companionClass extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/companionMethod.scala b/src/library/scala/annotation/meta/companionMethod.scala
index 379c4f3385..c069b47f04 100644
--- a/src/library/scala/annotation/meta/companionMethod.scala
+++ b/src/library/scala/annotation/meta/companionMethod.scala
@@ -12,6 +12,6 @@ package scala.annotation.meta
* conversion method for it. Annotations `@companionClass` and `@companionMethod`
* control where an annotation on the implicit class will go. By default, annotations
* on an implicit class end up only on the class.
- *
+ *
*/
-final class companionMethod extends annotation.StaticAnnotation
+final class companionMethod extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/companionObject.scala b/src/library/scala/annotation/meta/companionObject.scala
index d329df5c42..5bd58f6365 100644
--- a/src/library/scala/annotation/meta/companionObject.scala
+++ b/src/library/scala/annotation/meta/companionObject.scala
@@ -11,4 +11,4 @@ package scala.annotation.meta
* Currently unused; intended as an annotation target for classes such as case classes
* that automatically generate a companion object
*/
-final class companionObject extends annotation.StaticAnnotation
+final class companionObject extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/field.scala b/src/library/scala/annotation/meta/field.scala
index 78f4a98544..96ed13abc4 100644
--- a/src/library/scala/annotation/meta/field.scala
+++ b/src/library/scala/annotation/meta/field.scala
@@ -10,4 +10,4 @@ package scala.annotation.meta
/**
* Consult the documentation in package [[scala.annotation.meta]].
*/
-final class field extends annotation.StaticAnnotation
+final class field extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/getter.scala b/src/library/scala/annotation/meta/getter.scala
index 07e4512f00..0a28a5bb52 100644
--- a/src/library/scala/annotation/meta/getter.scala
+++ b/src/library/scala/annotation/meta/getter.scala
@@ -10,4 +10,4 @@ package scala.annotation.meta
/**
* Consult the documentation in package [[scala.annotation.meta]].
*/
-final class getter extends annotation.StaticAnnotation
+final class getter extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/languageFeature.scala b/src/library/scala/annotation/meta/languageFeature.scala
index 23acc01b51..2e0ddb91cf 100644
--- a/src/library/scala/annotation/meta/languageFeature.scala
+++ b/src/library/scala/annotation/meta/languageFeature.scala
@@ -10,4 +10,4 @@ package scala.annotation.meta
/**
* An annotation giving particulars for a language feature in object `scala.language`.
*/
-final class languageFeature(feature: String, enableRequired: Boolean) extends annotation.StaticAnnotation
+final class languageFeature(feature: String, enableRequired: Boolean) extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/param.scala b/src/library/scala/annotation/meta/param.scala
index d9ebcc76d3..ef535d79c6 100644
--- a/src/library/scala/annotation/meta/param.scala
+++ b/src/library/scala/annotation/meta/param.scala
@@ -10,4 +10,4 @@ package scala.annotation.meta
/**
* Consult the documentation in package [[scala.annotation.meta]].
*/
-final class param extends annotation.StaticAnnotation
+final class param extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/setter.scala b/src/library/scala/annotation/meta/setter.scala
index c27cee2985..87ee2e28f4 100644
--- a/src/library/scala/annotation/meta/setter.scala
+++ b/src/library/scala/annotation/meta/setter.scala
@@ -10,4 +10,4 @@ package scala.annotation.meta
/**
* Consult the documentation in package [[scala.annotation.meta]].
*/
-final class setter extends annotation.StaticAnnotation
+final class setter extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/migration.scala b/src/library/scala/annotation/migration.scala
index 8ab12a7c8e..f60c827620 100644
--- a/src/library/scala/annotation/migration.scala
+++ b/src/library/scala/annotation/migration.scala
@@ -24,7 +24,7 @@ package scala.annotation
*
* @since 2.8
*/
- private[scala] final class migration(message: String, changedIn: String) extends annotation.StaticAnnotation {
+ private[scala] final class migration(message: String, changedIn: String) extends scala.annotation.StaticAnnotation {
@deprecated("Use the constructor taking two Strings instead.", "2.10")
def this(majorVersion: Int, minorVersion: Int, message: String) = this(message, majorVersion + "." + minorVersion)
- }
\ No newline at end of file
+ }
diff --git a/src/library/scala/annotation/serializable.scala b/src/library/scala/annotation/serializable.scala
index 5a0d1261d6..e300ae9010 100644
--- a/src/library/scala/annotation/serializable.scala
+++ b/src/library/scala/annotation/serializable.scala
@@ -12,4 +12,4 @@ package scala.annotation
* An annotation that designates the class to which it is applied as serializable
*/
@deprecated("instead of `@serializable class C`, use `class C extends Serializable`", "2.9.0")
-class serializable extends annotation.StaticAnnotation
+class serializable extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/static.scala b/src/library/scala/annotation/static.scala
deleted file mode 100644
index f2955c756c..0000000000
--- a/src/library/scala/annotation/static.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.annotation
-
-/**
- * An annotation that marks a member in the companion object as static
- * and ensures that the compiler generates static fields/methods for it.
- * This is important for Java interoperability and performance reasons.
- *
- * @since 2.10
- */
-final class static extends StaticAnnotation {
- // TODO document exact semantics above!
-}
diff --git a/src/library/scala/annotation/strictfp.scala b/src/library/scala/annotation/strictfp.scala
index e4efa6e663..4c33ea9678 100644
--- a/src/library/scala/annotation/strictfp.scala
+++ b/src/library/scala/annotation/strictfp.scala
@@ -15,4 +15,4 @@ package scala.annotation
* @version 2.9
* @since 2.9
*/
-class strictfp extends annotation.StaticAnnotation
+class strictfp extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/switch.scala b/src/library/scala/annotation/switch.scala
index ee068f50d4..a867783455 100644
--- a/src/library/scala/annotation/switch.scala
+++ b/src/library/scala/annotation/switch.scala
@@ -26,4 +26,4 @@ package scala.annotation
* @author Paul Phillips
* @since 2.8
*/
-final class switch extends annotation.StaticAnnotation
+final class switch extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/tailrec.scala b/src/library/scala/annotation/tailrec.scala
index 14775535e8..020f0c4325 100644
--- a/src/library/scala/annotation/tailrec.scala
+++ b/src/library/scala/annotation/tailrec.scala
@@ -16,4 +16,4 @@ package scala.annotation
*
* @since 2.8
*/
-final class tailrec extends annotation.StaticAnnotation
+final class tailrec extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/unchecked/uncheckedStable.scala b/src/library/scala/annotation/unchecked/uncheckedStable.scala
index 13b500fe44..8162a3ab11 100644
--- a/src/library/scala/annotation/unchecked/uncheckedStable.scala
+++ b/src/library/scala/annotation/unchecked/uncheckedStable.scala
@@ -12,4 +12,4 @@ package scala.annotation.unchecked
*
* @since 2.7
*/
-final class uncheckedStable extends annotation.StaticAnnotation {}
+final class uncheckedStable extends scala.annotation.StaticAnnotation {}
diff --git a/src/library/scala/annotation/unchecked/uncheckedVariance.scala b/src/library/scala/annotation/unchecked/uncheckedVariance.scala
index 51433be79f..61a0ebc6b8 100644
--- a/src/library/scala/annotation/unchecked/uncheckedVariance.scala
+++ b/src/library/scala/annotation/unchecked/uncheckedVariance.scala
@@ -12,4 +12,4 @@ package scala.annotation.unchecked
*
* @since 2.7
*/
-final class uncheckedVariance extends annotation.StaticAnnotation {}
+final class uncheckedVariance extends scala.annotation.StaticAnnotation {}
diff --git a/src/library/scala/annotation/unspecialized.scala b/src/library/scala/annotation/unspecialized.scala
index 28d9aa169c..717ca1597d 100644
--- a/src/library/scala/annotation/unspecialized.scala
+++ b/src/library/scala/annotation/unspecialized.scala
@@ -14,4 +14,4 @@ package scala.annotation
*
* @since 2.10
*/
-class unspecialized extends annotation.StaticAnnotation
+class unspecialized extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/varargs.scala b/src/library/scala/annotation/varargs.scala
index 1d7a2f7183..b44f8c505e 100644
--- a/src/library/scala/annotation/varargs.scala
+++ b/src/library/scala/annotation/varargs.scala
@@ -14,4 +14,4 @@ package scala.annotation
*
* @since 2.9
*/
-final class varargs extends annotation.StaticAnnotation
+final class varargs extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/beans/BeanDescription.scala b/src/library/scala/beans/BeanDescription.scala
index d6c9b0c736..5e1d3b873e 100644
--- a/src/library/scala/beans/BeanDescription.scala
+++ b/src/library/scala/beans/BeanDescription.scala
@@ -15,5 +15,5 @@ package scala.beans
*
* @author Ross Judson (rjudson@managedobjects.com)
*/
-class BeanDescription(val description: String) extends annotation.Annotation
+class BeanDescription(val description: String) extends scala.annotation.Annotation
diff --git a/src/library/scala/beans/BeanDisplayName.scala b/src/library/scala/beans/BeanDisplayName.scala
index fbbfa08ffc..2c862e3700 100644
--- a/src/library/scala/beans/BeanDisplayName.scala
+++ b/src/library/scala/beans/BeanDisplayName.scala
@@ -14,5 +14,5 @@ package scala.beans
*
* @author Ross Judson (rjudson@managedobjects.com)
*/
-class BeanDisplayName(val name: String) extends annotation.Annotation
+class BeanDisplayName(val name: String) extends scala.annotation.Annotation
diff --git a/src/library/scala/beans/BeanInfo.scala b/src/library/scala/beans/BeanInfo.scala
index 1a1d8defa4..23a55edfc5 100644
--- a/src/library/scala/beans/BeanInfo.scala
+++ b/src/library/scala/beans/BeanInfo.scala
@@ -17,4 +17,4 @@ package scala.beans
*
* @author Ross Judson (rjudson@managedobjects.com)
*/
-class BeanInfo extends annotation.Annotation
+class BeanInfo extends scala.annotation.Annotation
diff --git a/src/library/scala/beans/BeanInfoSkip.scala b/src/library/scala/beans/BeanInfoSkip.scala
index 23adf74924..f08dde99d9 100644
--- a/src/library/scala/beans/BeanInfoSkip.scala
+++ b/src/library/scala/beans/BeanInfoSkip.scala
@@ -15,4 +15,4 @@ package scala.beans
*
* @author Ross Judson (rjudson@managedobjects.com)
*/
-class BeanInfoSkip extends annotation.Annotation
+class BeanInfoSkip extends scala.annotation.Annotation
diff --git a/src/library/scala/beans/BeanProperty.scala b/src/library/scala/beans/BeanProperty.scala
index 4a2fb716c7..ab63e92c6f 100644
--- a/src/library/scala/beans/BeanProperty.scala
+++ b/src/library/scala/beans/BeanProperty.scala
@@ -23,4 +23,4 @@ package scala.beans
* use the `scala.beans.BooleanBeanProperty` annotation instead.
*/
@scala.annotation.meta.field
-class BeanProperty extends annotation.StaticAnnotation
+class BeanProperty extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/beans/BooleanBeanProperty.scala b/src/library/scala/beans/BooleanBeanProperty.scala
index 1c85a88c84..972d8fb77e 100644
--- a/src/library/scala/beans/BooleanBeanProperty.scala
+++ b/src/library/scala/beans/BooleanBeanProperty.scala
@@ -13,4 +13,4 @@ package scala.beans
* named `isFieldName` instead of `getFieldName`.
*/
@scala.annotation.meta.field
-class BooleanBeanProperty extends annotation.StaticAnnotation
+class BooleanBeanProperty extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/collection/CustomParallelizable.scala b/src/library/scala/collection/CustomParallelizable.scala
index dc634c67d3..a56cb5da59 100644
--- a/src/library/scala/collection/CustomParallelizable.scala
+++ b/src/library/scala/collection/CustomParallelizable.scala
@@ -10,7 +10,7 @@ package scala.collection
import parallel.Combiner
-trait CustomParallelizable[+A, +ParRepr <: Parallel] extends Parallelizable[A, ParRepr] {
+trait CustomParallelizable[+A, +ParRepr <: Parallel] extends Any with Parallelizable[A, ParRepr] {
override def par: ParRepr
override protected[this] def parCombiner: Combiner[A, ParRepr] = throw new UnsupportedOperationException("")
}
diff --git a/src/library/scala/collection/GenIterableViewLike.scala b/src/library/scala/collection/GenIterableViewLike.scala
index 9e3927eaf4..142561df20 100644
--- a/src/library/scala/collection/GenIterableViewLike.scala
+++ b/src/library/scala/collection/GenIterableViewLike.scala
@@ -25,6 +25,7 @@ self =>
def iterator: Iterator[B]
override def foreach[U](f: B => U): Unit = iterator foreach f
override def toString = viewToString
+ override def isEmpty = !iterator.hasNext
}
trait EmptyView extends Transformed[Nothing] with super.EmptyView {
diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala
index b6c90d4d2a..3ea45e3810 100644
--- a/src/library/scala/collection/GenMapLike.scala
+++ b/src/library/scala/collection/GenMapLike.scala
@@ -31,7 +31,7 @@ trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals
// This hash code must be symmetric in the contents but ought not
// collide trivially.
- override def hashCode() = util.hashing.MurmurHash3.mapHash(seq)
+ override def hashCode() = scala.util.hashing.MurmurHash3.mapHash(seq)
/** Returns the value associated with a key, or a default value if the key is not contained in the map.
* @param key the key.
diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala
index a77cb05960..6380e9380a 100644
--- a/src/library/scala/collection/GenSeqLike.scala
+++ b/src/library/scala/collection/GenSeqLike.scala
@@ -116,7 +116,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
- * @return the index of the first element of this $coll that is equal (wrt `==`)
+ * @return the index of the first element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def indexOf(elem: A): Int
@@ -132,12 +132,12 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
* @param from the start index
- * @return the index `>= from` of the first element of this $coll that is equal (wrt `==`)
+ * @return the index `>= from` of the first element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def indexOf(elem: A, from: Int): Int
* @inheritdoc
- *
+ *
* $mayNotTerminateInf
*
*/
@@ -147,7 +147,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
- * @return the index of the last element of this $coll that is equal (wrt `==`)
+ * @return the index of the last element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def lastIndexOf(elem: A): Int
@@ -163,7 +163,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* @param elem the element value to search for.
* @param end the end index.
* @tparam B the type of the element `elem`.
- * @return the index `<= end` of the last element of this $coll that is equal (wrt `==`)
+ * @return the index `<= end` of the last element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def lastIndexOf(elem: A, end: Int): Int
@@ -465,7 +465,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
/** Hashcodes for $Coll produce a value from the hashcodes of all the
* elements of the $coll.
*/
- override def hashCode() = util.hashing.MurmurHash3.seqHash(seq)
+ override def hashCode() = scala.util.hashing.MurmurHash3.seqHash(seq)
/** The equals method for arbitrary sequences. Compares this sequence to
* some other object.
diff --git a/src/library/scala/collection/GenSetLike.scala b/src/library/scala/collection/GenSetLike.scala
index 18eb31da03..ef5f14ed55 100644
--- a/src/library/scala/collection/GenSetLike.scala
+++ b/src/library/scala/collection/GenSetLike.scala
@@ -127,5 +127,5 @@ extends GenIterableLike[A, Repr]
// Calling map on a set drops duplicates: any hashcode collisions would
// then be dropped before they can be added.
// Hash should be symmetric in set entries, but without trivial collisions.
- override def hashCode() = util.hashing.MurmurHash3.setHash(seq)
+ override def hashCode() = scala.util.hashing.MurmurHash3.setHash(seq)
}
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index 9b04256c8d..987f124f55 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -10,7 +10,7 @@ package scala.collection
import generic._
-import annotation.migration
+import scala.annotation.migration
/** A template trait for all traversable collections upon which operations
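The recurring change in these library hunks replaces relative imports such as `import annotation.migration` with fully qualified ones (`import scala.annotation.migration`) and splits `package scala.collection` into chained `package scala` / `package collection` clauses. A minimal, hypothetical sketch of the shadowing hazard the fully qualified form avoids (the package and class names below are invented for illustration and do not appear in the diff):

    // Hypothetical packages; illustration only.
    package myapp {
      package annotation { class tailrec extends scala.annotation.StaticAnnotation }
      package collection {
        object UsesRelativeImport {
          // Members of the enclosing package `myapp`, declared in this same file, take
          // precedence over the implicit wildcard `scala._` import, so the relative name
          // `annotation` now means `myapp.annotation`.
          import annotation.tailrec                          // resolves to myapp.annotation.tailrec
          import scala.annotation.{ tailrec => stdTailrec }  // fully qualified: cannot be shadowed
        }
      }
    }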
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index 4e0f71ee8b..a872bc0948 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -11,7 +11,7 @@ package scala.collection
import scala.reflect.ClassTag
import scala.collection.generic.CanBuildFrom
import scala.annotation.unchecked.{ uncheckedVariance => uV }
-import language.higherKinds
+import scala.language.higherKinds
/** A template trait for all traversable-once objects which may be
* traversed in parallel.
@@ -507,7 +507,7 @@ trait GenTraversableOnce[+A] extends Any {
* $willNotTerminateInf
* @return a buffer containing all elements of this $coll.
*/
- def toBuffer[A1 >: A]: collection.mutable.Buffer[A1]
+ def toBuffer[A1 >: A]: scala.collection.mutable.Buffer[A1]
/** Converts this $coll to an unspecified Traversable. Will return
* the same collection if this instance is already Traversable.
@@ -565,7 +565,7 @@ trait GenTraversableOnce[+A] extends Any {
/** Converts this $coll into another by copying all elements.
* @tparam Col The collection type to build.
* @return a new collection containing all elements of this $coll.
- *
+ *
* @usecase def to[Col[_]]: Col[A]
* @inheritdoc
* $willNotTerminateInf
diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala
index f79a9d2c66..3858d60563 100644
--- a/src/library/scala/collection/IndexedSeqLike.scala
+++ b/src/library/scala/collection/IndexedSeqLike.scala
@@ -41,7 +41,7 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] {
self =>
def seq: IndexedSeq[A]
- override def hashCode() = util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "indexedSeqHash" ?
+ override def hashCode() = scala.util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "indexedSeqHash" ?
override protected[this] def thisCollection: IndexedSeq[A] = this.asInstanceOf[IndexedSeq[A]]
override protected[this] def toCollection(repr: Repr): IndexedSeq[A] = repr.asInstanceOf[IndexedSeq[A]]
diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala
index 9d03a11db9..b471c304ab 100755
--- a/src/library/scala/collection/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/IndexedSeqOptimized.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
import generic._
import mutable.ArrayBuffer
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index ac6d754f9e..ead5633e00 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-package scala.collection
-
+package scala
+package collection
import generic._
import immutable.{ List, Stream }
-import annotation.unchecked.uncheckedVariance
+import scala.annotation.unchecked.uncheckedVariance
/** A template trait for iterable collections of type `Iterable[A]`.
* $iterableInfo
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index e0c8b21d09..d9ccb3f011 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -11,7 +11,7 @@ package scala.collection
import generic._
import TraversableView.NoBuilder
import immutable.Stream
-import language.implicitConversions
+import scala.language.implicitConversions
/** A template trait for non-strict views of iterable collections.
* $iterableViewInfo
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 5f369de3b7..e12b8d231c 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -6,10 +6,11 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import mutable.ArrayBuffer
-import annotation.migration
+import scala.annotation.migration
import immutable.Stream
import scala.collection.generic.CanBuildFrom
import scala.annotation.unchecked.{ uncheckedVariance => uV }
@@ -393,7 +394,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
def next() = if (hasNext) { hdDefined = false; hd } else empty.next()
}
-
+
/** Tests whether every element of this iterator relates to the
* corresponding element of another collection by satisfying a test predicate.
*
@@ -758,7 +759,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
*
* @param elem the element to test.
* @return `true` if this iterator produces some value that is
- * is equal (wrt `==`) to `elem`, `false` otherwise.
+ * is equal (as determined by `==`) to `elem`, `false` otherwise.
* @note Reuse: $consumesIterator
*/
def contains(elem: Any): Boolean = exists(_ == elem)
@@ -1140,7 +1141,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
def toStream: Stream[A] =
if (self.hasNext) Stream.cons(self.next, self.toStream)
else Stream.empty[A]
-
+
/** Converts this iterator to a string.
*
diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala
index bfe27ef94a..b873ae964d 100644
--- a/src/library/scala/collection/LinearSeqLike.scala
+++ b/src/library/scala/collection/LinearSeqLike.scala
@@ -13,7 +13,7 @@ import generic._
import mutable.ListBuffer
import immutable.List
import scala.util.control.Breaks._
-import annotation.tailrec
+import scala.annotation.tailrec
/** A template trait for linear sequences of type `LinearSeq[A]`.
*
@@ -50,7 +50,7 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr
def seq: LinearSeq[A]
- override def hashCode() = util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "linearSeqHash" ?
+ override def hashCode() = scala.util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "linearSeqHash" ?
override /*IterableLike*/
def iterator: Iterator[A] = new AbstractIterator[A] {
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index ed2a877631..3877f07089 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -11,7 +11,7 @@ package scala.collection
import generic._
import mutable.{ Builder, MapBuilder }
-import annotation.{migration, bridge}
+import scala.annotation.{migration, bridge}
import parallel.ParMap
/** A template trait for maps, which associate keys with values.
@@ -226,21 +226,21 @@ self =>
*/
def default(key: A): B =
throw new NoSuchElementException("key not found: " + key)
-
+
protected class FilteredKeys(p: A => Boolean) extends AbstractMap[A, B] with DefaultMap[A, B] {
override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv)
def iterator = self.iterator.filter(kv => p(kv._1))
override def contains(key: A) = self.contains(key) && p(key)
def get(key: A) = if (!p(key)) None else self.get(key)
}
-
+
/** Filters this map by retaining only keys satisfying a predicate.
* @param p the predicate used to test keys
* @return an immutable map consisting only of those key value pairs of this map where the key satisfies
* the predicate `p`. The resulting map wraps the original map without copying any elements.
*/
def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p)
-
+
protected class MappedValues[C](f: B => C) extends AbstractMap[A, C] with DefaultMap[A, C] {
override def foreach[D](g: ((A, C)) => D): Unit = for ((k, v) <- self) g((k, f(v)))
def iterator = for ((k, v) <- self.iterator) yield (k, f(v))
@@ -248,7 +248,7 @@ self =>
override def contains(key: A) = self.contains(key)
def get(key: A) = self.get(key).map(f)
}
-
+
/** Transforms this map by applying a function to every retrieved value.
* @param f the function used to transform values of this map.
* @return a map view which maps every key of this map
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index 2fc3df0bd2..a3ff812024 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -6,13 +6,14 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import mutable.{ ListBuffer, ArraySeq }
import immutable.{ List, Range }
import generic._
import parallel.ParSeq
-import scala.math.Ordering
+import scala.math.{ min, max, Ordering }
/** A template trait for sequences of type `Seq[A]`
* $seqInfo
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index 04ec4af830..b359044f0d 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -11,7 +11,7 @@ package scala.collection
import generic._
import mutable.{ Builder, SetBuilder }
-import annotation.{migration, bridge}
+import scala.annotation.{migration, bridge}
import parallel.ParSet
/** A template trait for sets.
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index b2051bf209..f5e479a514 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -10,10 +10,10 @@ package scala.collection
import generic._
import mutable.{ Builder }
-import annotation.{tailrec, migration, bridge}
-import annotation.unchecked.{ uncheckedVariance => uV }
+import scala.annotation.{tailrec, migration, bridge}
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
import parallel.ParIterable
-import language.higherKinds
+import scala.language.higherKinds
/** A template trait for traversable collections of type `Traversable[A]`.
*
@@ -501,7 +501,7 @@ trait TraversableLike[+A, +Repr] extends Any
else sliceWithKnownDelta(n, Int.MaxValue, -n)
def slice(from: Int, until: Int): Repr =
- sliceWithKnownBound(math.max(from, 0), until)
+ sliceWithKnownBound(scala.math.max(from, 0), until)
// Precondition: from >= 0, until > 0, builder already configured for building.
private[this] def sliceInternal(from: Int, until: Int, b: Builder[A, Repr]): Repr = {
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index fb73805cc5..d77d278fca 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -10,9 +10,9 @@ package scala.collection
import mutable.{ Buffer, Builder, ListBuffer, ArrayBuffer }
import generic.CanBuildFrom
-import annotation.unchecked.{ uncheckedVariance => uV }
-import language.{implicitConversions, higherKinds}
-import reflect.ClassTag
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
+import scala.language.{implicitConversions, higherKinds}
+import scala.reflect.ClassTag
/** A template trait for collections which can be traversed either once only
* or one or more times.
@@ -374,12 +374,12 @@ object TraversableOnce {
implicit def alternateImplicit[A](trav: TraversableOnce[A]) = new ForceImplicitAmbiguity
implicit def flattenTraversableOnce[A, CC[_]](travs: TraversableOnce[CC[A]])(implicit ev: CC[A] => TraversableOnce[A]) =
new FlattenOps[A](travs map ev)
-
+
/* Functionality reused in Iterator.CanBuildFrom */
private[collection] abstract class BufferedCanBuildFrom[A, Coll[X] <: TraversableOnce[X]] extends generic.CanBuildFrom[Coll[_], A, Coll[A]] {
def bufferToColl[B](buff: ArrayBuffer[B]): Coll[B]
def traversableToColl[B](t: GenTraversable[B]): Coll[B]
-
+
def newIterator: Builder[A, Coll[A]] = new ArrayBuffer[A] mapResult bufferToColl
/** Creates a new builder on request of a collection.
@@ -398,7 +398,7 @@ object TraversableOnce {
*/
def apply() = newIterator
}
-
+
/** With the advent of `TraversableOnce`, it can be useful to have a builder which
* operates on `Iterator`s so they can be treated uniformly along with the collections.
* See `scala.util.Random.shuffle` or `scala.concurrent.Future.sequence` for an example.
@@ -407,10 +407,10 @@ object TraversableOnce {
def bufferToColl[B](buff: ArrayBuffer[B]) = buff.iterator
def traversableToColl[B](t: GenTraversable[B]) = t.seq
}
-
+
/** Evidence for building collections from `TraversableOnce` collections */
implicit def OnceCanBuildFrom[A] = new OnceCanBuildFrom[A]
-
+
class FlattenOps[A](travs: TraversableOnce[TraversableOnce[A]]) {
def flatten: Iterator[A] = new AbstractIterator[A] {
val its = travs.toIterator
diff --git a/src/library/scala/collection/TraversableProxyLike.scala b/src/library/scala/collection/TraversableProxyLike.scala
index 74b30e0faf..b7be87b125 100644
--- a/src/library/scala/collection/TraversableProxyLike.scala
+++ b/src/library/scala/collection/TraversableProxyLike.scala
@@ -12,7 +12,7 @@ package scala.collection
import generic._
import mutable.{Buffer, StringBuilder}
-import reflect.ClassTag
+import scala.reflect.ClassTag
// Methods could be printed by cat TraversableLike.scala | egrep '^ (override )?def'
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index 7fbcf1374b..5ee32e90b2 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -11,8 +11,8 @@ package scala.collection
import generic._
import mutable.{ Builder, ArrayBuffer }
import TraversableView.NoBuilder
-import annotation.migration
-import language.implicitConversions
+import scala.annotation.migration
+import scala.language.implicitConversions
trait ViewMkString[+A] {
self: Traversable[A] =>
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
index 3d0aa6fd07..070497c19e 100644
--- a/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -9,17 +9,14 @@
package scala.collection
package concurrent
-
-
import java.util.concurrent.atomic._
-import collection.immutable.{ ListMap => ImmutableListMap }
-import collection.parallel.mutable.ParTrieMap
-import util.hashing.Hashing
+import scala.collection.immutable.{ ListMap => ImmutableListMap }
+import scala.collection.parallel.mutable.ParTrieMap
+import scala.util.hashing.Hashing
+import scala.util.control.ControlThrowable
import generic._
-import annotation.tailrec
-import annotation.switch
-
-
+import scala.annotation.tailrec
+import scala.annotation.switch
private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends INodeBase[K, V](g) {
import INodeBase._
@@ -647,22 +644,22 @@ extends scala.collection.concurrent.Map[K, V]
def hashing = hashingobj
def equality = equalityobj
@volatile var root = r
-
+
def this(hashf: Hashing[K], ef: Equiv[K]) = this(
INode.newRootNode,
AtomicReferenceFieldUpdater.newUpdater(classOf[TrieMap[K, V]], classOf[AnyRef], "root"),
hashf,
ef
)
-
+
def this() = this(Hashing.default, Equiv.universal)
-
+
/* internal methods */
private def writeObject(out: java.io.ObjectOutputStream) {
out.writeObject(hashf)
out.writeObject(ef)
-
+
val it = iterator
while (it.hasNext) {
val (k, v) = it.next()
@@ -678,7 +675,7 @@ extends scala.collection.concurrent.Map[K, V]
hashingobj = in.readObject().asInstanceOf[Hashing[K]]
equalityobj = in.readObject().asInstanceOf[Equiv[K]]
-
+
var obj: AnyRef = null
do {
obj = in.readObject()
@@ -816,7 +813,7 @@ extends scala.collection.concurrent.Map[K, V]
*
* This method is used by other methods such as `size` and `iterator`.
*/
- @tailrec final def readOnlySnapshot(): collection.Map[K, V] = {
+ @tailrec final def readOnlySnapshot(): scala.collection.Map[K, V] = {
val r = RDCSS_READ_ROOT()
val expmain = r.gcasRead(this)
if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new TrieMap(r, null, hashing, equality)
@@ -827,10 +824,10 @@ extends scala.collection.concurrent.Map[K, V]
val r = RDCSS_READ_ROOT()
if (!RDCSS_ROOT(r, r.gcasRead(this), INode.newRootNode[K, V])) clear()
}
-
+
@inline
def computeHash(k: K) = hashingobj.hash(k)
-
+
final def lookup(k: K): V = {
val hc = computeHash(k)
lookuphc(k, hc).asInstanceOf[V]
@@ -917,11 +914,11 @@ object TrieMap extends MutableMapFactory[TrieMap] {
implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), TrieMap[K, V]] = new MapCanBuildFrom[K, V]
def empty[K, V]: TrieMap[K, V] = new TrieMap[K, V]
-
+
class MangledHashing[K] extends Hashing[K] {
- def hash(k: K) = util.hashing.byteswap32(k.##)
+ def hash(k: K) = scala.util.hashing.byteswap32(k.##)
}
-
+
}
@@ -1058,7 +1055,7 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct:
}
-private[concurrent] object RestartException extends util.control.ControlThrowable
+private[concurrent] object RestartException extends ControlThrowable
/** Only used for ctrie serialization. */
@@ -1067,7 +1064,7 @@ private[concurrent] case object TrieMapSerializationEnd
private[concurrent] object Debug {
- import collection._
+ import scala.collection._
lazy val logbuffer = new java.util.concurrent.ConcurrentLinkedQueue[AnyRef]
@@ -1083,13 +1080,3 @@ private[concurrent] object Debug {
}
}
-
-
-
-
-
-
-
-
-
-
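In the TrieMap hunks above, the inline `util.control.ControlThrowable` parent of `RestartException` becomes a plain `ControlThrowable` backed by the new fully qualified import. As background, here is a standalone sketch of why control-flow throwables carry this marker trait (the `Restart` object and `attempt` helper are illustrative, not part of the library):

    import scala.util.control.{ ControlThrowable, NonFatal }

    object Restart extends ControlThrowable

    // Generic recovery deliberately ignores ControlThrowable: NonFatal does not match it,
    // so a control-flow throw like Restart propagates instead of being swallowed here.
    def attempt(body: => Unit): Unit =
      try body
      catch { case NonFatal(e) => println(s"recovered from $e") }

    // attempt { throw Restart }   // would escape the NonFatal handler by design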
diff --git a/src/library/scala/collection/convert/DecorateAsJava.scala b/src/library/scala/collection/convert/DecorateAsJava.scala
index bde13f2830..e14769739d 100644
--- a/src/library/scala/collection/convert/DecorateAsJava.scala
+++ b/src/library/scala/collection/convert/DecorateAsJava.scala
@@ -12,7 +12,7 @@ package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import Decorators._
import WrapAsJava._
-import language.implicitConversions
+import scala.language.implicitConversions
/** A collection of decorators that allow to convert between
diff --git a/src/library/scala/collection/convert/DecorateAsScala.scala b/src/library/scala/collection/convert/DecorateAsScala.scala
index 539584b148..4ee7e2d1c7 100644
--- a/src/library/scala/collection/convert/DecorateAsScala.scala
+++ b/src/library/scala/collection/convert/DecorateAsScala.scala
@@ -12,7 +12,7 @@ package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import Decorators._
import WrapAsScala._
-import language.implicitConversions
+import scala.language.implicitConversions
trait DecorateAsScala {
/**
diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala
index fcfe402a68..c79c651e96 100644
--- a/src/library/scala/collection/convert/WrapAsJava.scala
+++ b/src/library/scala/collection/convert/WrapAsJava.scala
@@ -11,7 +11,7 @@ package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import Wrappers._
-import language.implicitConversions
+import scala.language.implicitConversions
trait WrapAsJava {
/**
@@ -253,7 +253,7 @@ trait WrapAsJava {
case JConcurrentMapDeprecatedWrapper(wrapped) => wrapped
case _ => new ConcurrentMapDeprecatedWrapper(m)
}
-
+
/**
* Implicitly converts a Scala mutable `concurrent.Map` to a Java
* `ConcurrentMap`.
diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala
index c2994a0986..6ef4243d0d 100644
--- a/src/library/scala/collection/convert/WrapAsScala.scala
+++ b/src/library/scala/collection/convert/WrapAsScala.scala
@@ -11,7 +11,7 @@ package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import Wrappers._
-import language.implicitConversions
+import scala.language.implicitConversions
trait LowPriorityWrapAsScala {
this: WrapAsScala =>
diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala
index 75707b69b0..a459aa15be 100644
--- a/src/library/scala/collection/convert/Wrappers.scala
+++ b/src/library/scala/collection/convert/Wrappers.scala
@@ -96,6 +96,9 @@ private[collection] trait Wrappers {
def remove(i: Int) = underlying.remove(i)
def clear() = underlying.clear()
def result = this
+ // Note: Clone cannot just call underlying.clone because in Java, only specific collections
+ // expose clone methods. Generically, they're protected.
+ override def clone(): JListWrapper[A] = JListWrapper(new ju.ArrayList[A](underlying))
}
class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] {
@@ -149,6 +152,10 @@ private[collection] trait Wrappers {
override def clear() = underlying.clear()
override def empty = JSetWrapper(new ju.HashSet[A])
+ // Note: Clone cannot just call underlying.clone because in Java, only specific collections
+ // expose clone methods. Generically, they're protected.
+ override def clone() =
+ new JSetWrapper[A](new ju.LinkedHashSet[A](underlying))
}
class MapWrapper[A, B](underlying: Map[A, B]) extends ju.AbstractMap[A, B] { self =>
@@ -171,12 +178,12 @@ private[collection] trait Wrappers {
var prev : Option[A] = None
def hasNext = ui.hasNext
-
+
def next() = {
val (k, v) = ui.next
prev = Some(k)
new ju.Map.Entry[A, B] {
- import util.hashing.byteswap32
+ import scala.util.hashing.byteswap32
def getKey = k
def getValue = v
def setValue(v1 : B) = self.put(k, v1)
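The `clone()` overrides added to `JListWrapper` and `JSetWrapper` above copy into a fresh `java.util.ArrayList` / `LinkedHashSet` because, as the new comments note, the `java.util.Collection` interface exposes no public `clone()` (the method is protected on `Object`). A minimal standalone sketch of the same copy-constructor idea (the helper names are illustrative):

    import java.{ util => ju }

    // Copying via the concrete collection's copy constructor, since the Collection
    // interface itself offers no public clone().
    def copyList[A](underlying: ju.List[A]): ju.List[A] = new ju.ArrayList[A](underlying)
    def copySet[A](underlying: ju.Set[A]): ju.Set[A]    = new ju.LinkedHashSet[A](underlying)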
diff --git a/src/library/scala/collection/generic/ClassTagTraversableFactory.scala b/src/library/scala/collection/generic/ClassTagTraversableFactory.scala
index 95835d3e90..c9c75a5f23 100644
--- a/src/library/scala/collection/generic/ClassTagTraversableFactory.scala
+++ b/src/library/scala/collection/generic/ClassTagTraversableFactory.scala
@@ -9,8 +9,8 @@
package scala.collection
package generic
-import language.higherKinds
-import reflect.ClassTag
+import scala.language.higherKinds
+import scala.reflect.ClassTag
/** A template for companion objects of `ClassTagTraversable` and
* subclasses thereof.
diff --git a/src/library/scala/collection/generic/FilterMonadic.scala b/src/library/scala/collection/generic/FilterMonadic.scala
index d79112d616..cebb4e69d3 100755
--- a/src/library/scala/collection/generic/FilterMonadic.scala
+++ b/src/library/scala/collection/generic/FilterMonadic.scala
@@ -14,7 +14,7 @@ package scala.collection.generic
*/
trait FilterMonadic[+A, +Repr] extends Any {
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
- def flatMap[B, That](f: A => collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
+ def flatMap[B, That](f: A => scala.collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
def foreach[U](f: A => U): Unit
def withFilter(p: A => Boolean): FilterMonadic[A, Repr]
}
diff --git a/src/library/scala/collection/generic/GenMapFactory.scala b/src/library/scala/collection/generic/GenMapFactory.scala
index 31fe4e100d..6ce99646e8 100644
--- a/src/library/scala/collection/generic/GenMapFactory.scala
+++ b/src/library/scala/collection/generic/GenMapFactory.scala
@@ -10,7 +10,7 @@ package scala.collection
package generic
import mutable.{Builder, MapBuilder}
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of `Map` and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/GenSeqFactory.scala b/src/library/scala/collection/generic/GenSeqFactory.scala
index 19eeba9b1d..bb352f707c 100644
--- a/src/library/scala/collection/generic/GenSeqFactory.scala
+++ b/src/library/scala/collection/generic/GenSeqFactory.scala
@@ -11,7 +11,7 @@
package scala.collection
package generic
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of Seq and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/GenSetFactory.scala b/src/library/scala/collection/generic/GenSetFactory.scala
index 4f812b337c..526927ce26 100644
--- a/src/library/scala/collection/generic/GenSetFactory.scala
+++ b/src/library/scala/collection/generic/GenSetFactory.scala
@@ -12,7 +12,7 @@ package scala.collection
package generic
import mutable.Builder
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of `Set` and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/GenTraversableFactory.scala b/src/library/scala/collection/generic/GenTraversableFactory.scala
index 2aaf93de05..6b347db7a0 100644
--- a/src/library/scala/collection/generic/GenTraversableFactory.scala
+++ b/src/library/scala/collection/generic/GenTraversableFactory.scala
@@ -10,7 +10,7 @@
package scala.collection
package generic
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of `Traversable` and subclasses thereof.
* This class provides a set of operations to create `$Coll` objects.
diff --git a/src/library/scala/collection/generic/GenericClassTagCompanion.scala b/src/library/scala/collection/generic/GenericClassTagCompanion.scala
index 8cce592627..fd5a3bae4c 100644
--- a/src/library/scala/collection/generic/GenericClassTagCompanion.scala
+++ b/src/library/scala/collection/generic/GenericClassTagCompanion.scala
@@ -10,8 +10,8 @@ package scala.collection
package generic
import mutable.Builder
-import language.higherKinds
-import reflect.ClassTag
+import scala.language.higherKinds
+import scala.reflect.ClassTag
/** This class represents companions of classes which require ClassTags
* for their element types.
diff --git a/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala b/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala
index d368d0007b..d5d6c53c1e 100644
--- a/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala
@@ -10,9 +10,9 @@ package scala.collection
package generic
import mutable.Builder
-import annotation.unchecked.uncheckedVariance
-import language.higherKinds
-import reflect.ClassTag
+import scala.annotation.unchecked.uncheckedVariance
+import scala.language.higherKinds
+import scala.reflect.ClassTag
/** This trait represents collections classes which require class
* tags for their element types.
diff --git a/src/library/scala/collection/generic/GenericCompanion.scala b/src/library/scala/collection/generic/GenericCompanion.scala
index 1844542315..d4e77f68f5 100644
--- a/src/library/scala/collection/generic/GenericCompanion.scala
+++ b/src/library/scala/collection/generic/GenericCompanion.scala
@@ -10,7 +10,7 @@ package scala.collection
package generic
import mutable.Builder
-import language.higherKinds
+import scala.language.higherKinds
/** A template class for companion objects of "regular" collection classes
* represent an unconstrained higher-kinded type. Typically
diff --git a/src/library/scala/collection/generic/GenericOrderedCompanion.scala b/src/library/scala/collection/generic/GenericOrderedCompanion.scala
index 290dc435c8..ba432f012a 100644
--- a/src/library/scala/collection/generic/GenericOrderedCompanion.scala
+++ b/src/library/scala/collection/generic/GenericOrderedCompanion.scala
@@ -10,7 +10,7 @@ package scala.collection
package generic
import mutable.Builder
-import language.higherKinds
+import scala.language.higherKinds
/** This class represents companions of classes which require the ordered trait
* for their element types.
diff --git a/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala b/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala
index 6e04420315..b041670161 100644
--- a/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala
@@ -12,8 +12,8 @@ package scala.collection
package generic
import mutable.Builder
-import annotation.unchecked.uncheckedVariance
-import language.higherKinds
+import scala.annotation.unchecked.uncheckedVariance
+import scala.language.higherKinds
/** This trait represents collections classes which require
* ordered element types.
diff --git a/src/library/scala/collection/generic/GenericParCompanion.scala b/src/library/scala/collection/generic/GenericParCompanion.scala
index 484da5c6d9..aea7d8f25a 100644
--- a/src/library/scala/collection/generic/GenericParCompanion.scala
+++ b/src/library/scala/collection/generic/GenericParCompanion.scala
@@ -11,7 +11,7 @@ package scala.collection.generic
import scala.collection.parallel.Combiner
import scala.collection.parallel.ParIterable
import scala.collection.parallel.ParMap
-import language.higherKinds
+import scala.language.higherKinds
/** A template class for companion objects of parallel collection classes.
* They should be mixed in together with `GenericCompanion` type.
diff --git a/src/library/scala/collection/generic/GenericParTemplate.scala b/src/library/scala/collection/generic/GenericParTemplate.scala
index fc1c3f5eaa..3dfdc98133 100644
--- a/src/library/scala/collection/generic/GenericParTemplate.scala
+++ b/src/library/scala/collection/generic/GenericParTemplate.scala
@@ -13,8 +13,8 @@ import scala.collection.parallel.ParIterable
import scala.collection.parallel.ParMap
import scala.collection.parallel.TaskSupport
-import annotation.unchecked.uncheckedVariance
-import language.higherKinds
+import scala.annotation.unchecked.uncheckedVariance
+import scala.language.higherKinds
/** A template trait for collections having a companion.
*
@@ -29,7 +29,7 @@ extends GenericTraversableTemplate[A, CC]
{
def companion: GenericCompanion[CC] with GenericParCompanion[CC]
- protected[this] override def newBuilder: collection.mutable.Builder[A, CC[A]] = newCombiner
+ protected[this] override def newBuilder: scala.collection.mutable.Builder[A, CC[A]] = newCombiner
protected[this] override def newCombiner: Combiner[A, CC[A]] = {
val cb = companion.newCombiner[A]
diff --git a/src/library/scala/collection/generic/GenericSeqCompanion.scala b/src/library/scala/collection/generic/GenericSeqCompanion.scala
index 90063c1ca2..63fca78a98 100644
--- a/src/library/scala/collection/generic/GenericSeqCompanion.scala
+++ b/src/library/scala/collection/generic/GenericSeqCompanion.scala
@@ -10,7 +10,7 @@
package scala.collection
package generic
-import language.higherKinds
+import scala.language.higherKinds
trait GenericSeqCompanion[CC[X] <: Traversable[X]]
- extends GenericCompanion[CC]
\ No newline at end of file
+ extends GenericCompanion[CC]
diff --git a/src/library/scala/collection/generic/GenericSetTemplate.scala b/src/library/scala/collection/generic/GenericSetTemplate.scala
index 221bcfb379..cf7259100d 100644
--- a/src/library/scala/collection/generic/GenericSetTemplate.scala
+++ b/src/library/scala/collection/generic/GenericSetTemplate.scala
@@ -8,7 +8,7 @@
package scala.collection
package generic
-import language.higherKinds
+import scala.language.higherKinds
/**
* @since 2.8
*/
diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
index 7cb0e812d8..62e7061237 100644
--- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
@@ -12,9 +12,9 @@ package scala.collection
package generic
import mutable.Builder
-import annotation.migration
-import annotation.unchecked.uncheckedVariance
-import language.higherKinds
+import scala.annotation.migration
+import scala.annotation.unchecked.uncheckedVariance
+import scala.language.higherKinds
/** A template class for companion objects of ``regular`` collection classes
* that represent an unconstrained higher-kinded type.
@@ -128,7 +128,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
* @usecase def flatten[B]: $Coll[B]
*
* @inheritdoc
- *
+ *
* The resulting collection's type will be guided by the
* static type of $coll. For example:
*
@@ -138,7 +138,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
*
* val ys = Set(List(1, 2, 3), List(3, 2, 1))
* // ys == Set(1, 2, 3)
- * }}}
+ * }}}
*/
def flatten[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]): CC[B] = {
val b = genericBuilder[B]
diff --git a/src/library/scala/collection/generic/ImmutableMapFactory.scala b/src/library/scala/collection/generic/ImmutableMapFactory.scala
index d893188e92..9448222568 100644
--- a/src/library/scala/collection/generic/ImmutableMapFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableMapFactory.scala
@@ -10,7 +10,7 @@
package scala.collection
package generic
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of `immutable.Map` and subclasses thereof.
* @author Martin Odersky
diff --git a/src/library/scala/collection/generic/ImmutableSetFactory.scala b/src/library/scala/collection/generic/ImmutableSetFactory.scala
index 7bd5bf2ef8..b6dc85470f 100644
--- a/src/library/scala/collection/generic/ImmutableSetFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSetFactory.scala
@@ -10,7 +10,7 @@ package scala.collection
package generic
import mutable.{ Builder, SetBuilder }
-import language.higherKinds
+import scala.language.higherKinds
abstract class ImmutableSetFactory[CC[X] <: immutable.Set[X] with SetLike[X, CC[X]]]
extends SetFactory[CC] {
diff --git a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala b/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
index f415a52b4d..19c52b77ed 100644
--- a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
@@ -11,7 +11,7 @@
package scala.collection
package generic
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of `SortedMap` and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala b/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
index 1317bb4796..64f35c35c4 100644
--- a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
@@ -11,7 +11,7 @@
package scala.collection
package generic
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of `SortedSet` and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/IsTraversableLike.scala b/src/library/scala/collection/generic/IsTraversableLike.scala
index 7288322903..efa9178740 100644
--- a/src/library/scala/collection/generic/IsTraversableLike.scala
+++ b/src/library/scala/collection/generic/IsTraversableLike.scala
@@ -42,7 +42,7 @@ trait IsTraversableLike[Repr] {
}
object IsTraversableLike {
- import language.higherKinds
+ import scala.language.higherKinds
implicit val stringRepr: IsTraversableLike[String] { type A = Char } =
new IsTraversableLike[String] {
@@ -50,7 +50,7 @@ object IsTraversableLike {
val conversion = implicitly[String => GenTraversableLike[Char, String]]
}
- implicit def genTraversableLikeRepr[C[_], A0](implicit conv: C[A0] => GenTraversableLike[A0,C[A0]]): IsTraversableLike[C[A0]] { type A = A0 } =
+ implicit def genTraversableLikeRepr[C[_], A0](implicit conv: C[A0] => GenTraversableLike[A0,C[A0]]): IsTraversableLike[C[A0]] { type A = A0 } =
new IsTraversableLike[C[A0]] {
type A = A0
val conversion = conv
diff --git a/src/library/scala/collection/generic/IsTraversableOnce.scala b/src/library/scala/collection/generic/IsTraversableOnce.scala
index b336553231..49675b4d5e 100644
--- a/src/library/scala/collection/generic/IsTraversableOnce.scala
+++ b/src/library/scala/collection/generic/IsTraversableOnce.scala
@@ -45,7 +45,7 @@ trait IsTraversableOnce[Repr] {
}
object IsTraversableOnce {
- import language.higherKinds
+ import scala.language.higherKinds
implicit val stringRepr: IsTraversableOnce[String] { type A = Char } =
new IsTraversableOnce[String] {
@@ -53,7 +53,7 @@ object IsTraversableOnce {
val conversion = implicitly[String => GenTraversableOnce[Char]]
}
- implicit def genTraversableLikeRepr[C[_], A0](implicit conv: C[A0] => GenTraversableOnce[A0]): IsTraversableOnce[C[A0]] { type A = A0 } =
+ implicit def genTraversableLikeRepr[C[_], A0](implicit conv: C[A0] => GenTraversableOnce[A0]): IsTraversableOnce[C[A0]] { type A = A0 } =
new IsTraversableOnce[C[A0]] {
type A = A0
val conversion = conv
diff --git a/src/library/scala/collection/generic/IterableForwarder.scala b/src/library/scala/collection/generic/IterableForwarder.scala
index 89b67a6c18..d1ba252ba7 100644
--- a/src/library/scala/collection/generic/IterableForwarder.scala
+++ b/src/library/scala/collection/generic/IterableForwarder.scala
@@ -11,7 +11,7 @@
package scala.collection.generic
import scala.collection._
-import collection.mutable.Buffer
+import scala.collection.mutable.Buffer
/** This trait implements a forwarder for iterable objects. It forwards
* all calls to a different iterable object, except for
diff --git a/src/library/scala/collection/generic/MapFactory.scala b/src/library/scala/collection/generic/MapFactory.scala
index ce44ae9bf4..cbf5e06202 100644
--- a/src/library/scala/collection/generic/MapFactory.scala
+++ b/src/library/scala/collection/generic/MapFactory.scala
@@ -11,7 +11,7 @@ package generic
import mutable.{Builder, MapBuilder}
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of `Map` and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/MutableMapFactory.scala b/src/library/scala/collection/generic/MutableMapFactory.scala
index 8b38b4ddd5..3b3d6d1946 100644
--- a/src/library/scala/collection/generic/MutableMapFactory.scala
+++ b/src/library/scala/collection/generic/MutableMapFactory.scala
@@ -12,7 +12,7 @@ package scala.collection
package generic
import mutable.Builder
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of `mutable.Map` and subclasses thereof.
* @author Martin Odersky
diff --git a/src/library/scala/collection/generic/MutableSetFactory.scala b/src/library/scala/collection/generic/MutableSetFactory.scala
index f130489814..516cbd722d 100644
--- a/src/library/scala/collection/generic/MutableSetFactory.scala
+++ b/src/library/scala/collection/generic/MutableSetFactory.scala
@@ -10,7 +10,7 @@ package scala.collection
package generic
import mutable.{ Builder, GrowingBuilder }
-import language.higherKinds
+import scala.language.higherKinds
abstract class MutableSetFactory[CC[X] <: mutable.Set[X] with mutable.SetLike[X, CC[X]]]
extends SetFactory[CC] {
diff --git a/src/library/scala/collection/generic/MutableSortedSetFactory.scala b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
index 0e90ed999c..e5a69779f3 100644
--- a/src/library/scala/collection/generic/MutableSortedSetFactory.scala
+++ b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
@@ -10,7 +10,7 @@ package scala.collection
package generic
import scala.collection.mutable.{ Builder, GrowingBuilder }
-import language.higherKinds
+import scala.language.higherKinds
/**
* @define Coll `mutable.SortedSet`
diff --git a/src/library/scala/collection/generic/OrderedTraversableFactory.scala b/src/library/scala/collection/generic/OrderedTraversableFactory.scala
index 92f166ae08..b3d096ccd2 100644
--- a/src/library/scala/collection/generic/OrderedTraversableFactory.scala
+++ b/src/library/scala/collection/generic/OrderedTraversableFactory.scala
@@ -10,7 +10,7 @@
package scala.collection
package generic
-import language.higherKinds
+import scala.language.higherKinds
abstract class OrderedTraversableFactory[CC[X] <: Traversable[X] with GenericOrderedTraversableTemplate[X, CC]]
extends GenericOrderedCompanion[CC] {
diff --git a/src/library/scala/collection/generic/ParFactory.scala b/src/library/scala/collection/generic/ParFactory.scala
index 41dca8fbe9..6b59b6671c 100644
--- a/src/library/scala/collection/generic/ParFactory.scala
+++ b/src/library/scala/collection/generic/ParFactory.scala
@@ -10,7 +10,7 @@ package scala.collection.generic
import scala.collection.parallel.ParIterable
import scala.collection.parallel.Combiner
-import language.higherKinds
+import scala.language.higherKinds
/** A template class for companion objects of `ParIterable` and subclasses
* thereof. This class extends `TraversableFactory` and provides a set of
@@ -24,7 +24,7 @@ abstract class ParFactory[CC[X] <: ParIterable[X] with GenericParTemplate[X, CC]
extends GenTraversableFactory[CC]
with GenericParCompanion[CC] {
- //type EPC[T, C] = collection.parallel.EnvironmentPassingCombiner[T, C]
+ //type EPC[T, C] = scala.collection.parallel.EnvironmentPassingCombiner[T, C]
/** A generic implementation of the `CanCombineFrom` trait, which forwards
* all calls to `apply(from)` to the `genericParBuilder` method of the $coll
diff --git a/src/library/scala/collection/generic/ParMapFactory.scala b/src/library/scala/collection/generic/ParMapFactory.scala
index 5aedf67924..fdf23581f7 100644
--- a/src/library/scala/collection/generic/ParMapFactory.scala
+++ b/src/library/scala/collection/generic/ParMapFactory.scala
@@ -12,7 +12,7 @@ import scala.collection.parallel.ParMap
import scala.collection.parallel.ParMapLike
import scala.collection.parallel.Combiner
import scala.collection.mutable.Builder
-import language.higherKinds
+import scala.language.higherKinds
/** A template class for companion objects of `ParMap` and subclasses thereof.
* This class extends `TraversableFactory` and provides a set of operations
diff --git a/src/library/scala/collection/generic/ParSetFactory.scala b/src/library/scala/collection/generic/ParSetFactory.scala
index 30a36a734a..e6db6f4721 100644
--- a/src/library/scala/collection/generic/ParSetFactory.scala
+++ b/src/library/scala/collection/generic/ParSetFactory.scala
@@ -8,11 +8,11 @@
package scala.collection.generic
-import collection.mutable.Builder
-import collection.parallel.Combiner
-import collection.parallel.ParSet
-import collection.parallel.ParSetLike
-import language.higherKinds
+import scala.collection.mutable.Builder
+import scala.collection.parallel.Combiner
+import scala.collection.parallel.ParSet
+import scala.collection.parallel.ParSetLike
+import scala.language.higherKinds
/**
* @author Aleksandar Prokopec
diff --git a/src/library/scala/collection/generic/SeqFactory.scala b/src/library/scala/collection/generic/SeqFactory.scala
index 3f61de6ceb..e943b93ef0 100644
--- a/src/library/scala/collection/generic/SeqFactory.scala
+++ b/src/library/scala/collection/generic/SeqFactory.scala
@@ -10,7 +10,7 @@
package scala.collection
package generic
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of Seq and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/SetFactory.scala b/src/library/scala/collection/generic/SetFactory.scala
index 646e99dd1e..f386596c26 100644
--- a/src/library/scala/collection/generic/SetFactory.scala
+++ b/src/library/scala/collection/generic/SetFactory.scala
@@ -12,7 +12,7 @@ package scala.collection
package generic
import mutable.Builder
-import language.higherKinds
+import scala.language.higherKinds
abstract class SetFactory[CC[X] <: Set[X] with SetLike[X, CC[X]]]
- extends GenSetFactory[CC] with GenericSeqCompanion[CC]
\ No newline at end of file
+ extends GenSetFactory[CC] with GenericSeqCompanion[CC]
diff --git a/src/library/scala/collection/generic/SliceInterval.scala b/src/library/scala/collection/generic/SliceInterval.scala
index 56033ca8d8..af56d06d60 100644
--- a/src/library/scala/collection/generic/SliceInterval.scala
+++ b/src/library/scala/collection/generic/SliceInterval.scala
@@ -32,7 +32,7 @@ private[collection] class SliceInterval private (val from: Int, val until: Int)
*/
def recalculate(_from: Int, _until: Int): SliceInterval = {
val lo = _from max 0
- val elems = math.min(_until - lo, width)
+ val elems = scala.math.min(_until - lo, width)
val start = from + lo
if (elems <= 0) new SliceInterval(from, from)
diff --git a/src/library/scala/collection/generic/SortedMapFactory.scala b/src/library/scala/collection/generic/SortedMapFactory.scala
index f038c8b09b..2781cbcc15 100644
--- a/src/library/scala/collection/generic/SortedMapFactory.scala
+++ b/src/library/scala/collection/generic/SortedMapFactory.scala
@@ -12,7 +12,7 @@ package scala.collection
package generic
import mutable.{Builder, MapBuilder}
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of mutable.Map and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/SortedSetFactory.scala b/src/library/scala/collection/generic/SortedSetFactory.scala
index bb261803a9..4abccd3827 100644
--- a/src/library/scala/collection/generic/SortedSetFactory.scala
+++ b/src/library/scala/collection/generic/SortedSetFactory.scala
@@ -12,7 +12,7 @@ package scala.collection
package generic
import mutable.{Builder, SetBuilder}
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of Set and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/TraversableFactory.scala b/src/library/scala/collection/generic/TraversableFactory.scala
index 254a6a224f..a09b92a75b 100644
--- a/src/library/scala/collection/generic/TraversableFactory.scala
+++ b/src/library/scala/collection/generic/TraversableFactory.scala
@@ -10,7 +10,7 @@
package scala.collection
package generic
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of `Traversable` and subclasses thereof.
* This class provides a set of operations to create `$Coll` objects.
diff --git a/src/library/scala/collection/generic/TraversableForwarder.scala b/src/library/scala/collection/generic/TraversableForwarder.scala
index 5c55c27983..62c1dc095b 100644
--- a/src/library/scala/collection/generic/TraversableForwarder.scala
+++ b/src/library/scala/collection/generic/TraversableForwarder.scala
@@ -11,7 +11,7 @@ package scala.collection.generic
import scala.collection._
import mutable.{ Buffer, StringBuilder }
import immutable.{ List, Stream }
-import reflect.ClassTag
+import scala.reflect.ClassTag
/** This trait implements a forwarder for traversable objects. It forwards
* all calls to a different traversable, except for:
diff --git a/src/library/scala/collection/generic/package.scala b/src/library/scala/collection/generic/package.scala
index 6eecb5e3ff..dd47b7ace6 100644
--- a/src/library/scala/collection/generic/package.scala
+++ b/src/library/scala/collection/generic/package.scala
@@ -1,7 +1,7 @@
package scala.collection
import generic.CanBuildFrom
-import language.higherKinds
+import scala.language.higherKinds
package object generic {
type CanBuild[-Elem, +To] = CanBuildFrom[Nothing, Elem, To]
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index 0b297aeb45..a6e750e7ee 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -6,11 +6,12 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import generic._
-import annotation.unchecked.{ uncheckedVariance => uV }
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
import parallel.immutable.ParHashMap
/** This class implements immutable maps using a hash trie.
@@ -72,7 +73,7 @@ class HashMap[A, +B] extends AbstractMap[A, B]
}
private[collection] def computeHash(key: A) = improve(elemHashCode(key))
-
+
import HashMap.{Merger, MergeFunction, liftMerger}
private[collection] def get0(key: A, hash: Int, level: Int): Option[B] = None
@@ -88,22 +89,22 @@ class HashMap[A, +B] extends AbstractMap[A, B]
@deprecated("Use the `merged` method instead.", "2.10.0")
def merge[B1 >: B](that: HashMap[A, B1], mergef: MergeFunction[A, B1] = null): HashMap[A, B1] = merge0(that, 0, liftMerger(mergef))
-
+
/** Creates a new map which is the merge of this and the argument hash map.
- *
+ *
* Uses the specified collision resolution function if two keys are the same.
* The collision resolution function will always take the first argument from
* `this` hash map and the second from `that`.
- *
+ *
* The `merged` method is on average more performant than doing a traversal and reconstructing a
* new immutable hash map from scratch, or `++`.
- *
+ *
* @tparam B1 the value type of the other hash map
* @param that the other hash map
* @param mergef the merge function or null if the first key-value pair is to be picked
*/
def merged[B1 >: B](that: HashMap[A, B1])(mergef: MergeFunction[A, B1]): HashMap[A, B1] = merge0(that, 0, liftMerger(mergef))
-
+
protected def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[A, B1]): HashMap[A, B1] = that
override def par = ParHashMap.fromTrie(this)
@@ -118,7 +119,7 @@ class HashMap[A, +B] extends AbstractMap[A, B]
* @since 2.3
*/
object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
-
+
private abstract class Merger[A, B] {
def apply(kv1: (A, B), kv2: (A, B)): (A, B)
def invert: Merger[A, B]
@@ -139,13 +140,36 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
def invert: Merger[A1, B1] = self
}
}
-
+
/** $mapCanBuildFromInfo */
implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), HashMap[A, B]] = new MapCanBuildFrom[A, B]
def empty[A, B]: HashMap[A, B] = EmptyHashMap.asInstanceOf[HashMap[A, B]]
private object EmptyHashMap extends HashMap[Any, Nothing] { }
+ // utility method to create a HashTrieMap from two leaf HashMaps (HashMap1 or HashMapCollision1) with non-colliding hash code)
+ private def makeHashTrieMap[A, B](hash0:Int, elem0:HashMap[A, B], hash1:Int, elem1:HashMap[A, B], level:Int, size:Int) : HashTrieMap[A, B] = {
+ val index0 = (hash0 >>> level) & 0x1f
+ val index1 = (hash1 >>> level) & 0x1f
+ if(index0 != index1) {
+ val bitmap = (1 << index0) | (1 << index1)
+ val elems = new Array[HashMap[A,B]](2)
+ if(index0 < index1) {
+ elems(0) = elem0
+ elems(1) = elem1
+ } else {
+ elems(0) = elem1
+ elems(1) = elem0
+ }
+ new HashTrieMap[A, B](bitmap, elems, size)
+ } else {
+ val elems = new Array[HashMap[A,B]](1)
+ val bitmap = (1 << index0)
+ elems(0) = makeHashTrieMap(hash0, elem0, hash1, elem1, level + 5, size)
+ new HashTrieMap[A, B](bitmap, elems, size)
+ }
+ }
+
// TODO: add HashMap2, HashMap3, ...
class HashMap1[A,+B](private[collection] val key: A, private[collection] val hash: Int, private[collection] val value: (B @uV), private[collection] var kv: (A,B @uV)) extends HashMap[A,B] {
@@ -183,30 +207,10 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
new HashMap1(nkv._1, hash, nkv._2, nkv)
}
} else {
- var thatindex = (hash >>> level) & 0x1f
- var thisindex = (this.hash >>> level) & 0x1f
if (hash != this.hash) {
// they have different hashes, but may collide at this level - find a level at which they don't
- var lvl = level
- var top: HashTrieMap[A, B1] = null
- var prev: HashTrieMap[A, B1] = null
- while (thisindex == thatindex) {
- val newlevel = new HashTrieMap[A, B1](1 << thisindex, new Array[HashMap[A, B1]](1), 2)
- if (prev ne null) prev.elems(0) = newlevel else top = newlevel
- prev = newlevel
- lvl += 5
- thatindex = (hash >>> lvl) & 0x1f
- thisindex = (this.hash >>> lvl) & 0x1f
- }
- val bottelems = new Array[HashMap[A,B1]](2)
- val ind = if (thisindex < thatindex) 1 else 0
- bottelems(1 - ind) = this
- bottelems(ind) = new HashMap1[A, B1](key, hash, value, kv)
- val bottom = new HashTrieMap[A,B1]((1 << thisindex) | (1 << thatindex), bottelems, 2)
- if (prev ne null) {
- prev.elems(0) = bottom
- top
- } else bottom
+ val that = new HashMap1[A, B1](key, hash, value, kv)
+ makeHashTrieMap[A,B1](this.hash, this, hash, that, level, 2)
} else {
// 32-bit hash collision (rare, but not impossible)
new HashMapCollision1(hash, ListMap.empty.updated(this.key,this.value).updated(key,value))
@@ -221,12 +225,13 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
// this method may be called multiple times in a multithreaded environment, but that's ok
private[HashMap] def ensurePair: (A,B) = if (kv ne null) kv else { kv = (key, value); kv }
protected override def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[A, B1]): HashMap[A, B1] = {
- that.updated0(key, hash, level, value, kv, if (merger ne null) merger.invert else null)
+ that.updated0(key, hash, level, value, kv, merger.invert)
}
}
private[collection] class HashMapCollision1[A, +B](private[collection] val hash: Int, val kvs: ListMap[A, B @uV])
extends HashMap[A, B @uV] {
+ // assert(kvs.size > 1)
override def size = kvs.size
@@ -238,20 +243,20 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
if ((merger eq null) || !kvs.contains(key)) new HashMapCollision1(hash, kvs.updated(key, value))
else new HashMapCollision1(hash, kvs + merger((key, kvs(key)), kv))
} else {
- var m: HashMap[A,B1] = new HashTrieMap[A,B1](0,new Array[HashMap[A,B1]](0),0)
- // might be able to save some ops here, but it doesn't seem to be worth it
- for ((k,v) <- kvs)
- m = m.updated0(k, this.hash, level, v, null, merger)
- m.updated0(key, hash, level, value, kv, merger)
+ val that = new HashMap1(key, hash, value, kv)
+ makeHashTrieMap(this.hash, this, hash, that, level, size + 1)
}
override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] =
if (hash == this.hash) {
val kvs1 = kvs - key
- if (!kvs1.isEmpty)
- new HashMapCollision1(hash, kvs1)
- else
+ if (kvs1.isEmpty)
HashMap.empty[A,B]
+ else if(kvs1.tail.isEmpty) {
+ val kv = kvs1.head
+ new HashMap1[A,B](kv._1,hash,kv._2,kv)
+ } else
+ new HashMapCollision1(hash, kvs1)
} else this
override def iterator: Iterator[(A,B)] = kvs.iterator
@@ -275,6 +280,9 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
private[collection] val size0: Int
) extends HashMap[A, B @uV] {
+ // assert(Integer.bitCount(bitmap) == elems.length)
+ // assert(elems.length > 1 || (elems.length == 1 && elems(0).isInstanceOf[HashTrieMap[_,_]]))
+
/*
def this (level: Int, m1: HashMap1[A,B], m2: HashMap1[A,B]) = {
this(((m1.hash >>> level) & 0x1f) | ((m2.hash >>> level) & 0x1f), {
@@ -347,9 +355,14 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
Array.copy(elems, 0, elemsNew, 0, offset)
Array.copy(elems, offset + 1, elemsNew, offset, elems.length - offset - 1)
val sizeNew = size - sub.size
- new HashTrieMap(bitmapNew, elemsNew, sizeNew)
+ if (elemsNew.length == 1 && !elemsNew(0).isInstanceOf[HashTrieMap[_,_]])
+ elemsNew(0)
+ else
+ new HashTrieMap(bitmapNew, elemsNew, sizeNew)
} else
HashMap.empty[A,B]
+ } else if(elems.length == 1 && !subNew.isInstanceOf[HashTrieMap[_,_]]) {
+ subNew
} else {
val elemsNew = new Array[HashMap[A,B]](elems.length)
Array.copy(elems, 0, elemsNew, 0, elems.length)
@@ -480,7 +493,7 @@ time { mNew.iterator.foreach( p => ()) }
}
new HashTrieMap[A, B1](this.bitmap | that.bitmap, merged, totalelems)
- case hm: HashMapCollision1[_, _] => that.merge0(this, level, if (merger ne null) merger.invert else null)
+ case hm: HashMapCollision1[_, _] => that.merge0(this, level, merger.invert)
case hm: HashMap[_, _] => this
case _ => sys.error("section supposed to be unreachable.")
}
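The new `makeHashTrieMap` helper above (and the single-entry collapse in `HashMapCollision1.removed0`) builds the trie directly instead of threading updates through an empty `HashTrieMap`. It consumes the 32-bit hash five bits per level and pushes the two leaves down until their indices differ. A small worked example of that arithmetic (hash values chosen purely for illustration):

    val hash0  = 0x0000ABC1
    val hash1  = 0x0000ABC5
    val level  = 0
    val index0 = (hash0 >>> level) & 0x1f        // = 1
    val index1 = (hash1 >>> level) & 0x1f        // = 5
    // The indices differ at level 0, so both leaves share one HashTrieMap whose bitmap
    // sets bits 1 and 5: (1 << 1) | (1 << 5) == 0x22. If they were equal, the method
    // would recurse with level + 5 and wrap the result in a single-child node.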
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index ef0173337c..2ebeb044fc 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -8,12 +8,13 @@
-package scala.collection
+package scala
+package collection
package immutable
-import annotation.unchecked.{ uncheckedVariance => uV }
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
import generic._
-import collection.parallel.immutable.ParHashSet
+import scala.collection.parallel.immutable.ParHashSet
/** This class implements immutable sets using a hash trie.
*
@@ -102,6 +103,30 @@ object HashSet extends ImmutableSetFactory[HashSet] {
private object EmptyHashSet extends HashSet[Any] { }
+ // utility method to create a HashTrieSet from two leaf HashSets (HashSet1 or HashSetCollision1) with non-colliding hash code)
+ private def makeHashTrieSet[A](hash0:Int, elem0:HashSet[A], hash1:Int, elem1:HashSet[A], level:Int) : HashTrieSet[A] = {
+ val index0 = (hash0 >>> level) & 0x1f
+ val index1 = (hash1 >>> level) & 0x1f
+ if(index0 != index1) {
+ val bitmap = (1 << index0) | (1 << index1)
+ val elems = new Array[HashSet[A]](2)
+ if(index0 < index1) {
+ elems(0) = elem0
+ elems(1) = elem1
+ } else {
+ elems(0) = elem1
+ elems(1) = elem0
+ }
+ new HashTrieSet[A](bitmap, elems, elem0.size + elem1.size)
+ } else {
+ val elems = new Array[HashSet[A]](1)
+ val bitmap = (1 << index0)
+ val child = makeHashTrieSet(hash0, elem0, hash1, elem1, level + 5)
+ elems(0) = child
+ new HashTrieSet[A](bitmap, elems, child.size)
+ }
+ }
+
// TODO: add HashSet2, HashSet3, ...
class HashSet1[A](private[HashSet] val key: A, private[HashSet] val hash: Int) extends HashSet[A] {
@@ -114,9 +139,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
if (hash == this.hash && key == this.key) this
else {
if (hash != this.hash) {
- //new HashTrieSet[A](level+5, this, new HashSet1(key, hash))
- val m = new HashTrieSet[A](0,new Array[HashSet[A]](0),0) // TODO: could save array alloc
- m.updated0(this.key, this.hash, level).updated0(key, hash, level)
+ makeHashTrieSet(this.hash, this, hash, new HashSet1(key, hash), level)
} else {
// 32-bit hash collision (rare, but not impossible)
new HashSetCollision1(hash, ListSet.empty + this.key + key)
@@ -140,13 +163,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
override def updated0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash) new HashSetCollision1(hash, ks + key)
- else {
- var m: HashSet[A] = new HashTrieSet[A](0,new Array[HashSet[A]](0),0)
- // might be able to save some ops here, but it doesn't seem to be worth it
- for (k <- ks)
- m = m.updated0(k, this.hash, level)
- m.updated0(key, hash, level)
- }
+ else makeHashTrieSet(this.hash, this, hash, new HashSet1(key, hash), level)
override def removed0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash) {
@@ -181,6 +198,9 @@ object HashSet extends ImmutableSetFactory[HashSet] {
class HashTrieSet[A](private val bitmap: Int, private[collection] val elems: Array[HashSet[A]], private val size0: Int)
extends HashSet[A] {
+ assert(Integer.bitCount(bitmap) == elems.length)
+ // assertion has to remain disabled until SI-6197 is solved
+ // assert(elems.length > 1 || (elems.length == 1 && elems(0).isInstanceOf[HashTrieSet[_]]))
override def size = size0
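// Quick behavioural check (illustration only, using just the public API) that
// the removal changes above keep the set's contents correct while the trie is
// collapsed back towards single leaves.
import scala.collection.immutable.HashSet
object HashSetRemovalCheck extends App {
  val full   = HashSet(1 to 64: _*)
  val shrunk = (2 to 64).foldLeft(full)(_ - _)   // remove all but one element
  assert(shrunk == HashSet(1) && shrunk.size == 1)
}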
diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala
index e895c94599..d0f6b4b3ac 100644
--- a/src/library/scala/collection/immutable/IntMap.scala
+++ b/src/library/scala/collection/immutable/IntMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import scala.collection.generic.{ CanBuildFrom, BitOperations }
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 87b58005cf..7a489bb100 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -13,7 +13,7 @@ package immutable
import generic._
import mutable.{Builder, ListBuffer}
-import annotation.tailrec
+import scala.annotation.tailrec
import java.io._
/** A class for immutable linked lists representing ordered collections
@@ -229,7 +229,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
* }}}
*/
override def slice(from: Int, until: Int): List[A] = {
- val lo = math.max(from, 0)
+ val lo = scala.math.max(from, 0)
if (until <= lo || isEmpty) Nil
else this drop lo take (until - lo)
}
@@ -330,7 +330,7 @@ case object Nil extends List[Nothing] {
throw new UnsupportedOperationException("tail of empty list")
// Removal of equals method here might lead to an infinite recursion similar to IntMap.equals.
override def equals(that: Any) = that match {
- case that1: collection.GenSeq[_] => that1.isEmpty
+ case that1: scala.collection.GenSeq[_] => that1.isEmpty
case _ => false
}
}
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index 091443f909..13282101b3 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -12,7 +12,7 @@ package scala.collection
package immutable
import generic._
-import annotation.{tailrec, bridge}
+import scala.annotation.{tailrec, bridge}
/** $factoryInfo
* @since 1
@@ -121,12 +121,12 @@ extends AbstractMap[A, B]
def hasNext = !self.isEmpty
def next(): (A,B) =
if (!hasNext) throw new NoSuchElementException("next on empty iterator")
- else { val res = (self.key, self.value); self = self.next; res }
+ else { val res = (self.key, self.value); self = self.tail; res }
}.toList.reverseIterator
protected def key: A = throw new NoSuchElementException("empty map")
protected def value: B = throw new NoSuchElementException("empty map")
- protected def next: ListMap[A, B] = throw new NoSuchElementException("empty map")
+ override def tail: ListMap[A, B] = throw new NoSuchElementException("empty map")
/** This class represents an entry in the `ListMap`.
*/
@@ -140,7 +140,7 @@ extends AbstractMap[A, B]
override def size: Int = size0(this, 0)
// to allow tail recursion and prevent stack overflows
- @tailrec private def size0(cur: ListMap[A, B1], acc: Int): Int = if (cur.isEmpty) acc else size0(cur.next, acc + 1)
+ @tailrec private def size0(cur: ListMap[A, B1], acc: Int): Int = if (cur.isEmpty) acc else size0(cur.tail, acc + 1)
/** Is this an empty map?
*
@@ -157,7 +157,7 @@ extends AbstractMap[A, B]
*/
override def apply(k: A): B1 = apply0(this, k)
- @tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 = if (k == cur.key) cur.value else apply0(cur.next, k)
+ @tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 = if (k == cur.key) cur.value else apply0(cur.tail, k)
/** Checks if this map maps `key` to a value and return the
* value if it exists.
@@ -169,7 +169,7 @@ extends AbstractMap[A, B]
@tailrec private def get0(cur: ListMap[A, B1], k: A): Option[B1] =
if (k == cur.key) Some(cur.value)
- else if (cur.next.nonEmpty) get0(cur.next, k) else None
+ else if (cur.tail.nonEmpty) get0(cur.tail, k) else None
/** This method allows one to create a new map with an additional mapping
* from `key` to `value`. If the map contains already a mapping for `key`,
@@ -198,7 +198,7 @@ extends AbstractMap[A, B]
var lst: List[(A, B1)] = Nil
while (cur.nonEmpty) {
if (k != cur.key) lst ::= ((cur.key, cur.value))
- cur = cur.next
+ cur = cur.tail
}
var acc = ListMap[A, B1]()
while (lst != Nil) {
@@ -211,6 +211,6 @@ extends AbstractMap[A, B]
}
- override protected def next: ListMap[A, B1] = ListMap.this
+ override def tail: ListMap[A, B1] = ListMap.this
}
}
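// Usage sketch for the `next` -> `tail` rename above: `tail` is now the public
// way to peel one binding off a ListMap. Names below are illustrative only.
import scala.collection.immutable.ListMap
object ListMapTailDemo extends App {
  var m: ListMap[Int, String] = ListMap(1 -> "a", 2 -> "b", 3 -> "c")
  var steps = 0
  while (m.nonEmpty) { m = m.tail; steps += 1 }   // each tail drops one entry
  assert(steps == 3)
}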
diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala
index 4dd0d62fc0..6dcdee8938 100644
--- a/src/library/scala/collection/immutable/ListSet.scala
+++ b/src/library/scala/collection/immutable/ListSet.scala
@@ -10,7 +10,7 @@ package scala.collection
package immutable
import generic._
-import annotation.{tailrec, bridge}
+import scala.annotation.{tailrec, bridge}
import mutable.{ ListBuffer, Builder }
/** $factoryInfo
diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala
index 002027b162..4899b45d5f 100644
--- a/src/library/scala/collection/immutable/LongMap.scala
+++ b/src/library/scala/collection/immutable/LongMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import scala.collection.generic.{ CanBuildFrom, BitOperations }
@@ -298,7 +299,7 @@ extends AbstractMap[Long, T]
if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this)
else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
else LongMap.Bin(prefix, mask, left, right.updateWith(key, value, f))
- case LongMap.Tip(key2, value2) =>
+ case LongMap.Tip(key2, value2) =>
if (key == key2) LongMap.Tip(key, f(value2, value))
else join(key, LongMap.Tip(key, value), key2, this)
case LongMap.Nil => LongMap.Tip(key, value)
diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala
index e73da01ac4..17951e73fd 100644
--- a/src/library/scala/collection/immutable/Map.scala
+++ b/src/library/scala/collection/immutable/Map.scala
@@ -76,7 +76,7 @@ object Map extends ImmutableMapFactory[Map] {
def empty[A, B]: Map[A, B] = EmptyMap.asInstanceOf[Map[A, B]]
- class WithDefault[A, +B](underlying: Map[A, B], d: A => B) extends collection.Map.WithDefault[A, B](underlying, d) with Map[A, B] {
+ class WithDefault[A, +B](underlying: Map[A, B], d: A => B) extends scala.collection.Map.WithDefault[A, B](underlying, d) with Map[A, B] {
override def empty = new WithDefault(underlying.empty, d)
override def updated[B1 >: B](key: A, value: B1): WithDefault[A, B1] = new WithDefault[A, B1](underlying.updated[B1](key, value), d)
override def + [B1 >: B](kv: (A, B1)): WithDefault[A, B1] = updated(kv._1, kv._2)
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index 7d373b7b39..3b4bfdc593 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -13,7 +13,7 @@ package immutable
import java.io._
import scala.util.matching.Regex
-import reflect.ClassTag
+import scala.reflect.ClassTag
/** The `PagedSeq` object defines a lazy implementations of
* a random access sequence.
@@ -99,7 +99,7 @@ object PagedSeq {
/** Constructs a paged character sequence from a scala.io.Source value
*/
- def fromSource(source: io.Source) =
+ def fromSource(source: scala.io.Source) =
fromLines(source.getLines())
}
diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala
index e980dda847..8d82f4932f 100644
--- a/src/library/scala/collection/immutable/Queue.scala
+++ b/src/library/scala/collection/immutable/Queue.scala
@@ -11,7 +11,7 @@ package immutable
import generic._
import mutable.{ Builder, ListBuffer }
-import annotation.tailrec
+import scala.annotation.tailrec
/** `Queue` objects implement data structures that allow to
* insert and retrieve elements in a first-in-first-out (FIFO) manner.
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index 7607837491..a2875ec3fb 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -43,9 +43,9 @@ import scala.collection.parallel.immutable.ParRange
*/
@SerialVersionUID(7618862778670199309L)
class Range(val start: Int, val end: Int, val step: Int)
-extends collection.AbstractSeq[Int]
+extends scala.collection.AbstractSeq[Int]
with IndexedSeq[Int]
- with collection.CustomParallelizable[Int, ParRange]
+ with scala.collection.CustomParallelizable[Int, ParRange]
with Serializable
{
override def par = new ParRange(this)
@@ -78,19 +78,19 @@ extends collection.AbstractSeq[Int]
final val terminalElement = start + numRangeElements * step
override def last = if (isEmpty) Nil.last else lastElement
-
+
override def min[A1 >: Int](implicit ord: Ordering[A1]): Int =
if (ord eq Ordering.Int) {
if (step > 0) start
else last
} else super.min(ord)
-
- override def max[A1 >: Int](implicit ord: Ordering[A1]): Int =
+
+ override def max[A1 >: Int](implicit ord: Ordering[A1]): Int =
if (ord eq Ordering.Int) {
if (step > 0) last
else start
} else super.max(ord)
-
+
protected def copy(start: Int, end: Int, step: Int): Range = new Range(start, end, step)
/** Create a new range with the `start` and `end` values of this range and
diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala
index 83eeaa45ee..a3ab27f814 100644
--- a/src/library/scala/collection/immutable/RedBlack.scala
+++ b/src/library/scala/collection/immutable/RedBlack.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
/** Old base class that was used by previous implementations of `TreeMaps` and `TreeSets`.
diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala
index 6e3b77661b..bb489dd80a 100644
--- a/src/library/scala/collection/immutable/RedBlackTree.scala
+++ b/src/library/scala/collection/immutable/RedBlackTree.scala
@@ -8,11 +8,12 @@
-package scala.collection
+package scala
+package collection
package immutable
-import annotation.tailrec
-import annotation.meta.getter
+import scala.annotation.tailrec
+import scala.annotation.meta.getter
/** An object containing the RedBlack tree implementation used by for `TreeMaps` and `TreeSets`.
*
diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala
index f147b673f7..c0a1e0fec9 100644
--- a/src/library/scala/collection/immutable/SortedMap.scala
+++ b/src/library/scala/collection/immutable/SortedMap.scala
@@ -13,7 +13,7 @@ package immutable
import generic._
import mutable.Builder
-import annotation.unchecked.uncheckedVariance
+import scala.annotation.unchecked.uncheckedVariance
/** A map whose keys are sorted.
*
@@ -78,17 +78,17 @@ self =>
*/
override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] =
((repr: SortedMap[A, B1]) /: xs.seq) (_ + _)
-
+
override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] {
implicit def ordering: Ordering[A] = self.ordering
override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p)
}
-
+
override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] {
implicit def ordering: Ordering[A] = self.ordering
override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f)
}
-
+
}
/** $factoryInfo
@@ -99,8 +99,8 @@ object SortedMap extends ImmutableSortedMapFactory[SortedMap] {
/** $sortedMapCanBuildFromInfo */
implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] = new SortedMapCanBuildFrom[A, B]
def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = TreeMap.empty[A, B]
-
- private[collection] trait Default[A, +B] extends SortedMap[A, B] with collection.SortedMap.Default[A, B] {
+
+ private[collection] trait Default[A, +B] extends SortedMap[A, B] with scala.collection.SortedMap.Default[A, B] {
self =>
override def +[B1 >: B](kv: (A, B1)): SortedMap[A, B1] = {
val b = SortedMap.newBuilder[A, B1]
@@ -108,7 +108,7 @@ object SortedMap extends ImmutableSortedMapFactory[SortedMap] {
b += ((kv._1, kv._2))
b.result
}
-
+
override def - (key: A): SortedMap[A, B] = {
val b = newBuilder
for (kv <- this; if kv._1 != key) b += kv
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 97707d4f7c..bac90341ec 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -13,7 +13,7 @@ import generic._
import mutable.{Builder, StringBuilder, LazyBuilder, ListBuffer}
import scala.annotation.tailrec
import Stream.cons
-import language.implicitConversions
+import scala.language.implicitConversions
/** The class `Stream` implements lazy lists where elements
* are only evaluated when they are needed. Here is an example:
@@ -631,7 +631,7 @@ self =>
* // (5,6)
* }}}
*/
- override final def zip[A1 >: A, B, That](that: collection.GenIterable[B])(implicit bf: CanBuildFrom[Stream[A], (A1, B), That]): That =
+ override final def zip[A1 >: A, B, That](that: scala.collection.GenIterable[B])(implicit bf: CanBuildFrom[Stream[A], (A1, B), That]): That =
// we assume there is no other builder factory on streams and therefore know that That = Stream[(A1, B)]
if (isStreamBuilder(bf)) asThat(
if (this.isEmpty || that.isEmpty) Stream.Empty
diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala
index 3fd92aaff9..236308da2e 100644
--- a/src/library/scala/collection/immutable/StreamViewLike.scala
+++ b/src/library/scala/collection/immutable/StreamViewLike.scala
@@ -50,20 +50,20 @@ extends SeqView[A, Coll]
trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B]
/** boilerplate */
- protected override def newForced[B](xs: => collection.GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
- protected override def newAppended[B >: A](that: collection.GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected override def newForced[B](xs: => scala.collection.GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
+ protected override def newAppended[B >: A](that: scala.collection.GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
- protected override def newFlatMapped[B](f: A => collection.GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
+ protected override def newFlatMapped[B](f: A => scala.collection.GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with AbstractTransformed[A] with Sliced
protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with DroppedWhile
protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with TakenWhile
- protected override def newZipped[B](that: collection.GenIterable[B]): Transformed[(A, B)] = new { val other = that } with AbstractTransformed[(A, B)] with Zipped[B]
- protected override def newZippedAll[A1 >: A, B](that: collection.GenIterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = {
+ protected override def newZipped[B](that: scala.collection.GenIterable[B]): Transformed[(A, B)] = new { val other = that } with AbstractTransformed[(A, B)] with Zipped[B]
+ protected override def newZippedAll[A1 >: A, B](that: scala.collection.GenIterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = {
new { val other = that; val thisElem = _thisElem; val thatElem = _thatElem } with AbstractTransformed[(A1, B)] with ZippedAll[A1, B]
}
protected override def newReversed: Transformed[A] = new Reversed { }
- protected override def newPatched[B >: A](_from: Int, _patch: collection.GenSeq[B], _replaced: Int): Transformed[B] = {
+ protected override def newPatched[B >: A](_from: Int, _patch: scala.collection.GenSeq[B], _replaced: Int): Transformed[B] = {
new { val from = _from; val patch = _patch; val replaced = _replaced } with AbstractTransformed[B] with Patched[B]
}
protected override def newPrepended[B >: A](elem: B): Transformed[B] = new { protected[this] val fst = elem } with AbstractTransformed[B] with Prepended[B]
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index af7662d2e0..4d28bf9518 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -13,7 +13,7 @@ import generic._
import mutable.Builder
import scala.util.matching.Regex
import scala.math.ScalaNumber
-import reflect.ClassTag
+import scala.reflect.ClassTag
/** A companion object for the `StringLike` containing some constants.
* @since 2.8
@@ -41,7 +41,7 @@ import StringLike._
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-trait StringLike[+Repr] extends Any with collection.IndexedSeqOptimized[Char, Repr] with Ordered[String] {
+trait StringLike[+Repr] extends Any with scala.collection.IndexedSeqOptimized[Char, Repr] with Ordered[String] {
self =>
/** Creates a string builder buffer as builder for this class */
diff --git a/src/library/scala/collection/immutable/TrieIterator.scala b/src/library/scala/collection/immutable/TrieIterator.scala
index ead1a8c744..e8e904f1f9 100644
--- a/src/library/scala/collection/immutable/TrieIterator.scala
+++ b/src/library/scala/collection/immutable/TrieIterator.scala
@@ -11,7 +11,7 @@ package immutable
import HashMap.{ HashTrieMap, HashMapCollision1, HashMap1 }
import HashSet.{ HashTrieSet, HashSetCollision1, HashSet1 }
-import annotation.unchecked.{ uncheckedVariance => uV }
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
import scala.annotation.tailrec
/** Abandons any pretense of type safety for speed. You can't say I
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index 4dfe147a65..98b5aa6d9f 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -6,11 +6,12 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import scala.annotation.unchecked.uncheckedVariance
-import compat.Platform
+import scala.compat.Platform
import scala.collection.generic._
import scala.collection.mutable.Builder
import scala.collection.parallel.immutable.ParVector
@@ -21,9 +22,9 @@ object Vector extends SeqFactory[Vector] {
private[collection] class VectorReusableCBF extends GenericCanBuildFrom[Nothing] {
override def apply() = newBuilder[Nothing]
}
-
+
private val VectorReusableCBF: GenericCanBuildFrom[Nothing] = new VectorReusableCBF
-
+
@inline implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Vector[A]] =
VectorReusableCBF.asInstanceOf[CanBuildFrom[Coll, A, Vector[A]]]
def newBuilder[A]: Builder[A, Vector[A]] = new VectorBuilder[A]
diff --git a/src/library/scala/collection/immutable/package.scala b/src/library/scala/collection/immutable/package.scala
index eec5f04fff..647fc04310 100644
--- a/src/library/scala/collection/immutable/package.scala
+++ b/src/library/scala/collection/immutable/package.scala
@@ -69,9 +69,9 @@ package immutable {
private def locationAfterN(n: Int) = (
if (n > 0) {
if (step > 0)
- math.min(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
+ scala.math.min(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
else
- math.max(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
+ scala.math.max(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
}
else start
)
diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala
index 9aea25f330..ad52daaad4 100644
--- a/src/library/scala/collection/mutable/AVLTree.scala
+++ b/src/library/scala/collection/mutable/AVLTree.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala
index 3034fc2bce..90b7ca03de 100644
--- a/src/library/scala/collection/mutable/ArrayBuffer.scala
+++ b/src/library/scala/collection/mutable/ArrayBuffer.scala
@@ -66,7 +66,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
override def sizeHint(len: Int) {
if (len > size && len >= 1) {
val newarray = new Array[AnyRef](len)
- compat.Platform.arraycopy(array, 0, newarray, 0, size0)
+ scala.compat.Platform.arraycopy(array, 0, newarray, 0, size0)
array = newarray
}
}
@@ -93,7 +93,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
* @return the updated buffer.
*/
override def ++=(xs: TraversableOnce[A]): this.type = xs match {
- case v: collection.IndexedSeqLike[_, _] =>
+ case v: scala.collection.IndexedSeqLike[_, _] =>
val n = v.length
ensureSize(size0 + n)
v.copyToArray(array.asInstanceOf[scala.Array[Any]], size0, n)
@@ -169,12 +169,6 @@ class ArrayBuffer[A](override protected val initialSize: Int)
result
}
- /** Return a clone of this buffer.
- *
- * @return an `ArrayBuffer` with the same elements.
- */
- override def clone(): ArrayBuffer[A] = new ArrayBuffer[A] ++= this
-
def result: ArrayBuffer[A] = this
/** Defines the prefix of the string representation.
diff --git a/src/library/scala/collection/mutable/ArrayLike.scala b/src/library/scala/collection/mutable/ArrayLike.scala
index 04601845c4..172993c5c3 100644
--- a/src/library/scala/collection/mutable/ArrayLike.scala
+++ b/src/library/scala/collection/mutable/ArrayLike.scala
@@ -22,7 +22,7 @@ import generic._
* @version 2.8
* @since 2.8
*/
-trait ArrayLike[A, +Repr] extends IndexedSeqOptimized[A, Repr] { self =>
+trait ArrayLike[A, +Repr] extends Any with IndexedSeqOptimized[A, Repr] { self =>
/** Creates a possible nested `IndexedSeq` which consists of all the elements
* of this array. If the elements are arrays themselves, the `deep` transformation
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 21c2aaaec7..397f5bbefa 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -6,18 +6,15 @@
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
package mutable
-import compat.Platform.arraycopy
+import scala.compat.Platform.arraycopy
import scala.reflect.ClassTag
import scala.runtime.ScalaRunTime._
-
import parallel.mutable.ParArray
-
/** This class serves as a wrapper for `Array`s with all the operations found in
* indexed sequences. Where needed, instances of arrays are implicitly converted
* into this class.
@@ -36,7 +33,7 @@ import parallel.mutable.ParArray
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParallelizable[T, ParArray[T]] {
+trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParallelizable[T, ParArray[T]] {
private def elementClass: Class[_] =
arrayElementClass(repr.getClass)
@@ -64,9 +61,9 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParalleliza
* @param asTrav A function that converts elements of this array to rows - arrays of type `U`.
* @return An array obtained by concatenating rows of this array.
*/
- def flatten[U](implicit asTrav: T => collection.Traversable[U], m: ClassTag[U]): Array[U] = {
+ def flatten[U](implicit asTrav: T => scala.collection.Traversable[U], m: ClassTag[U]): Array[U] = {
val b = Array.newBuilder[U]
- b.sizeHint(map{case is: collection.IndexedSeq[_] => is.size case _ => 0}.sum)
+ b.sizeHint(map{case is: scala.collection.IndexedSeq[_] => is.size case _ => 0}.sum)
for (xs <- this)
b ++= asTrav(xs)
b.result
@@ -105,7 +102,7 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParalleliza
object ArrayOps {
/** A class of `ArrayOps` for arrays containing reference types. */
- class ofRef[T <: AnyRef](override val repr: Array[T]) extends ArrayOps[T] with ArrayLike[T, Array[T]] {
+ final class ofRef[T <: AnyRef](override val repr: Array[T]) extends AnyVal with ArrayOps[T] with ArrayLike[T, Array[T]] {
override protected[this] def thisCollection: WrappedArray[T] = new WrappedArray.ofRef[T](repr)
override protected[this] def toCollection(repr: Array[T]): WrappedArray[T] = new WrappedArray.ofRef[T](repr)
@@ -117,7 +114,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `byte`s. */
- class ofByte(override val repr: Array[Byte]) extends ArrayOps[Byte] with ArrayLike[Byte, Array[Byte]] {
+  final class ofByte(override val repr: Array[Byte]) extends AnyVal with ArrayOps[Byte] with ArrayLike[Byte, Array[Byte]] {
override protected[this] def thisCollection: WrappedArray[Byte] = new WrappedArray.ofByte(repr)
override protected[this] def toCollection(repr: Array[Byte]): WrappedArray[Byte] = new WrappedArray.ofByte(repr)
@@ -129,7 +126,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `short`s. */
- class ofShort(override val repr: Array[Short]) extends ArrayOps[Short] with ArrayLike[Short, Array[Short]] {
+  final class ofShort(override val repr: Array[Short]) extends AnyVal with ArrayOps[Short] with ArrayLike[Short, Array[Short]] {
override protected[this] def thisCollection: WrappedArray[Short] = new WrappedArray.ofShort(repr)
override protected[this] def toCollection(repr: Array[Short]): WrappedArray[Short] = new WrappedArray.ofShort(repr)
@@ -141,7 +138,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `char`s. */
- class ofChar(override val repr: Array[Char]) extends ArrayOps[Char] with ArrayLike[Char, Array[Char]] {
+  final class ofChar(override val repr: Array[Char]) extends AnyVal with ArrayOps[Char] with ArrayLike[Char, Array[Char]] {
override protected[this] def thisCollection: WrappedArray[Char] = new WrappedArray.ofChar(repr)
override protected[this] def toCollection(repr: Array[Char]): WrappedArray[Char] = new WrappedArray.ofChar(repr)
@@ -153,7 +150,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `int`s. */
- class ofInt(override val repr: Array[Int]) extends ArrayOps[Int] with ArrayLike[Int, Array[Int]] {
+  final class ofInt(override val repr: Array[Int]) extends AnyVal with ArrayOps[Int] with ArrayLike[Int, Array[Int]] {
override protected[this] def thisCollection: WrappedArray[Int] = new WrappedArray.ofInt(repr)
override protected[this] def toCollection(repr: Array[Int]): WrappedArray[Int] = new WrappedArray.ofInt(repr)
@@ -165,7 +162,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `long`s. */
- class ofLong(override val repr: Array[Long]) extends ArrayOps[Long] with ArrayLike[Long, Array[Long]] {
+  final class ofLong(override val repr: Array[Long]) extends AnyVal with ArrayOps[Long] with ArrayLike[Long, Array[Long]] {
override protected[this] def thisCollection: WrappedArray[Long] = new WrappedArray.ofLong(repr)
override protected[this] def toCollection(repr: Array[Long]): WrappedArray[Long] = new WrappedArray.ofLong(repr)
@@ -177,7 +174,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `float`s. */
- class ofFloat(override val repr: Array[Float]) extends ArrayOps[Float] with ArrayLike[Float, Array[Float]] {
+  final class ofFloat(override val repr: Array[Float]) extends AnyVal with ArrayOps[Float] with ArrayLike[Float, Array[Float]] {
override protected[this] def thisCollection: WrappedArray[Float] = new WrappedArray.ofFloat(repr)
override protected[this] def toCollection(repr: Array[Float]): WrappedArray[Float] = new WrappedArray.ofFloat(repr)
@@ -189,7 +186,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `double`s. */
- class ofDouble(override val repr: Array[Double]) extends ArrayOps[Double] with ArrayLike[Double, Array[Double]] {
+  final class ofDouble(override val repr: Array[Double]) extends AnyVal with ArrayOps[Double] with ArrayLike[Double, Array[Double]] {
override protected[this] def thisCollection: WrappedArray[Double] = new WrappedArray.ofDouble(repr)
override protected[this] def toCollection(repr: Array[Double]): WrappedArray[Double] = new WrappedArray.ofDouble(repr)
@@ -201,7 +198,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `boolean`s. */
- class ofBoolean(override val repr: Array[Boolean]) extends ArrayOps[Boolean] with ArrayLike[Boolean, Array[Boolean]] {
+  final class ofBoolean(override val repr: Array[Boolean]) extends AnyVal with ArrayOps[Boolean] with ArrayLike[Boolean, Array[Boolean]] {
override protected[this] def thisCollection: WrappedArray[Boolean] = new WrappedArray.ofBoolean(repr)
override protected[this] def toCollection(repr: Array[Boolean]): WrappedArray[Boolean] = new WrappedArray.ofBoolean(repr)
@@ -213,7 +210,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays of `Unit` types. */
- class ofUnit(override val repr: Array[Unit]) extends ArrayOps[Unit] with ArrayLike[Unit, Array[Unit]] {
+  final class ofUnit(override val repr: Array[Unit]) extends AnyVal with ArrayOps[Unit] with ArrayLike[Unit, Array[Unit]] {
override protected[this] def thisCollection: WrappedArray[Unit] = new WrappedArray.ofUnit(repr)
override protected[this] def toCollection(repr: Array[Unit]): WrappedArray[Unit] = new WrappedArray.ofUnit(repr)
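// Usage sketch for the AnyVal wrappers above: array code is written exactly as
// before, since the ofXxx classes are applied implicitly; making them value
// classes only removes the wrapper allocation. Illustration only.
object ArrayOpsDemo extends App {
  val xs: Array[Int] = Array(1, 2, 3, 4)
  val doubled = xs.map(_ * 2)             // goes through ArrayOps.ofInt
  assert(doubled.sum == 20)
  val nested: Array[Array[Int]] = Array(Array(1, 2), Array(3))
  assert(nested.flatten.length == 3)      // flatten defined on ArrayOps
}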
diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala
index d0eaee348b..60baf7b35b 100644
--- a/src/library/scala/collection/mutable/ArraySeq.scala
+++ b/src/library/scala/collection/mutable/ArraySeq.scala
@@ -89,6 +89,13 @@ extends AbstractSeq[A]
Array.copy(array, 0, xs, start, len1)
}
+ override def clone(): ArraySeq[A] = {
+ val cloned = array.clone.asInstanceOf[Array[AnyRef]]
+ new ArraySeq[A](length) {
+ override val array = cloned
+ }
+ }
+
}
/** $factoryInfo
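// Behavioural sketch for the clone() override added above: the clone is backed
// by a copy of the array, so mutating it no longer affects the original.
import scala.collection.mutable.ArraySeq
object ArraySeqCloneDemo extends App {
  val a = ArraySeq(1, 2, 3)
  val b = a.clone()
  b(0) = 99
  assert(a(0) == 1 && b(0) == 99)
}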
diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala
index 8f834d265b..277d48c545 100644
--- a/src/library/scala/collection/mutable/ArrayStack.scala
+++ b/src/library/scala/collection/mutable/ArrayStack.scala
@@ -6,11 +6,12 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
-import reflect.ClassTag
+import scala.reflect.ClassTag
/** Factory object for the `ArrayStack` class.
*
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index 3274fe6194..91983ba0d2 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -13,7 +13,7 @@ package mutable
import generic._
import script._
-import annotation.{migration, bridge}
+import scala.annotation.{migration, bridge}
/** A template trait for buffers of type `Buffer[A]`.
*
@@ -93,7 +93,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
* @throws IndexOutOfBoundsException if the index `n` is not in the valid range
* `0 <= n <= length`.
*/
- def insertAll(n: Int, elems: collection.Traversable[A])
+ def insertAll(n: Int, elems: scala.collection.Traversable[A])
/** Removes the element at a given index from this buffer.
*
@@ -252,4 +252,14 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
*/
@migration("`--` creates a new buffer. Use `--=` to remove an element from this buffer and return that buffer itself.", "2.8.0")
override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq
+
+ /** Return a clone of this buffer.
+ *
+ * @return a `Buffer` with the same elements.
+ */
+ override def clone(): This = {
+ val bf = newBuilder
+ bf ++= this
+ bf.result.asInstanceOf[This]
+ }
}
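// Usage sketch for the generic clone() above (also what `--` relies on): the
// copy is built with newBuilder, so it is independent of the original buffer.
import scala.collection.mutable.ArrayBuffer
object BufferCloneDemo extends App {
  val buf  = ArrayBuffer(1, 2, 3)
  val copy = buf.clone()
  copy += 4
  assert(buf.size == 3 && copy.size == 4)
}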
diff --git a/src/library/scala/collection/mutable/Builder.scala b/src/library/scala/collection/mutable/Builder.scala
index b6887df61e..6dec6b221e 100644
--- a/src/library/scala/collection/mutable/Builder.scala
+++ b/src/library/scala/collection/mutable/Builder.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/Cloneable.scala b/src/library/scala/collection/mutable/Cloneable.scala
index 6daac3094a..a3c1b7213b 100644
--- a/src/library/scala/collection/mutable/Cloneable.scala
+++ b/src/library/scala/collection/mutable/Cloneable.scala
@@ -18,5 +18,5 @@ package mutable
* @tparam A Type of the elements contained in the collection, covariant and with reference types as upperbound.
*/
trait Cloneable[+A <: AnyRef] extends scala.Cloneable {
- override def clone: A = super.clone().asInstanceOf[A]
+ override def clone(): A = super.clone().asInstanceOf[A]
}
diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala
index cba4e9725e..b7c5f07502 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedList.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala
@@ -63,6 +63,13 @@ class DoubleLinkedList[A]() extends AbstractSeq[A]
}
override def companion: GenericCompanion[DoubleLinkedList] = DoubleLinkedList
+
+ // Accurately clone this collection. See SI-6296
+ override def clone(): DoubleLinkedList[A] = {
+ val builder = newBuilder
+ builder ++= this
+ builder.result
+ }
}
/** $factoryInfo
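// Sketch of what the SI-6296 fix above buys: the clone is rebuilt through the
// builder, so updates through the clone no longer leak into the original list.
import scala.collection.mutable.DoubleLinkedList
object DoubleLinkedListCloneDemo extends App {
  val xs = DoubleLinkedList(1, 2, 3)
  val ys = xs.clone()
  ys(0) = 99
  assert(xs(0) == 1 && ys(0) == 99)
}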
diff --git a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
index ebccacf976..feff48cca3 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
@@ -11,7 +11,7 @@
package scala.collection
package mutable
-import annotation.migration
+import scala.annotation.migration
/** This extensible class may be used as a basis for implementing double
* linked lists. Type variable `A` refers to the element type
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index f6d4cc31b6..74f576b0f7 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -44,7 +44,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
*/
@transient protected var sizemap: Array[Int] = null
- @transient var seedvalue: Int = tableSizeSeed
+ @transient protected var seedvalue: Int = tableSizeSeed
import HashTable.powerOfTwo
@@ -109,7 +109,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
}
/** Finds an entry in the hash table if such an element exists. */
- def findEntry(elem: A): Option[A] = {
+ protected def findEntry(elem: A): Option[A] = {
var h = index(elemHashCode(elem))
var entry = table(h)
while (null != entry && entry != elem) {
@@ -120,7 +120,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
}
/** Checks whether an element is contained in the hash table. */
- def containsEntry(elem: A): Boolean = {
+ protected def containsEntry(elem: A): Boolean = {
var h = index(elemHashCode(elem))
var entry = table(h)
while (null != entry && entry != elem) {
@@ -133,7 +133,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
/** Add entry if not yet in table.
* @return Returns `true` if a new entry was added, `false` otherwise.
*/
- def addEntry(elem: A) : Boolean = {
+ protected def addEntry(elem: A) : Boolean = {
var h = index(elemHashCode(elem))
var entry = table(h)
while (null != entry) {
@@ -150,7 +150,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
}
/** Removes an entry from the hash table, returning an option value with the element, or `None` if it didn't exist. */
- def removeEntry(elem: A) : Option[A] = {
+ protected def removeEntry(elem: A) : Option[A] = {
if (tableDebug) checkConsistent()
def precedes(i: Int, j: Int) = {
val d = table.length >> 1
@@ -185,7 +185,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
None
}
- def iterator: Iterator[A] = new AbstractIterator[A] {
+ protected def iterator: Iterator[A] = new AbstractIterator[A] {
private var i = 0
def hasNext: Boolean = {
while (i < table.length && (null == table(i))) i += 1
@@ -356,8 +356,8 @@ private[collection] object FlatHashTable {
*
* See SI-5293.
*/
- final def seedGenerator = new ThreadLocal[util.Random] {
- override def initialValue = new util.Random
+ final def seedGenerator = new ThreadLocal[scala.util.Random] {
+ override def initialValue = new scala.util.Random
}
/** The load factor for the hash table; must be < 500 (0.5)
@@ -365,7 +365,7 @@ private[collection] object FlatHashTable {
def defaultLoadFactor: Int = 450
final def loadFactorDenum = 1000
- def sizeForThreshold(size: Int, _loadFactor: Int) = math.max(32, (size.toLong * loadFactorDenum / _loadFactor).toInt)
+ def sizeForThreshold(size: Int, _loadFactor: Int) = scala.math.max(32, (size.toLong * loadFactorDenum / _loadFactor).toInt)
def newThreshold(_loadFactor: Int, size: Int) = {
val lf = _loadFactor
@@ -397,7 +397,7 @@ private[collection] object FlatHashTable {
//h = h + (h << 4)
//h ^ (h >>> 10)
- val improved = util.hashing.byteswap32(hcode)
+ val improved = scala.util.hashing.byteswap32(hcode)
// for the remainder, see SI-5293
// to ensure that different bits are used for different hash tables, we have to rotate based on the seed
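// Standalone sketch of the seed-based scrambling referred to above (SI-5293):
// byteswap32 mixes the hash code and a per-table rotation makes different
// tables use different bit positions. Illustration only, not the library code.
import scala.util.hashing.byteswap32
object SeededHashDemo extends App {
  def improve(hcode: Int, seed: Int): Int = {
    val i = byteswap32(hcode)
    val rotation = seed % 32
    (i >>> rotation) | (i << (32 - rotation))
  }
  println(f"seed=1:  ${improve(12345, 1)}%08x")
  println(f"seed=17: ${improve(12345, 17)}%08x")
}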
diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala
index bf640cdb90..be85df3c28 100644
--- a/src/library/scala/collection/mutable/HashMap.scala
+++ b/src/library/scala/collection/mutable/HashMap.scala
@@ -49,7 +49,7 @@ extends AbstractMap[A, B]
type Entry = DefaultEntry[A, B]
override def empty: HashMap[A, B] = HashMap.empty[A, B]
- override def clear() = clearTable()
+ override def clear() { clearTable() }
override def size: Int = tableSize
def this() = this(null)
@@ -57,22 +57,23 @@ extends AbstractMap[A, B]
override def par = new ParHashMap[A, B](hashTableContents)
// contains and apply overridden to avoid option allocations.
- override def contains(key: A) = findEntry(key) != null
+ override def contains(key: A): Boolean = findEntry(key) != null
+
override def apply(key: A): B = {
val result = findEntry(key)
- if (result == null) default(key)
+ if (result eq null) default(key)
else result.value
}
def get(key: A): Option[B] = {
val e = findEntry(key)
- if (e == null) None
+ if (e eq null) None
else Some(e.value)
}
override def put(key: A, value: B): Option[B] = {
- val e = findEntry(key)
- if (e == null) { addEntry(new Entry(key, value)); None }
+ val e = findOrAddEntry(key, value)
+ if (e eq null) None
else { val v = e.value; e.value = value; Some(v) }
}
@@ -85,9 +86,8 @@ extends AbstractMap[A, B]
}
def += (kv: (A, B)): this.type = {
- val e = findEntry(kv._1)
- if (e == null) addEntry(new Entry(kv._1, kv._2))
- else e.value = kv._2
+ val e = findOrAddEntry(kv._1, kv._2)
+ if (e ne null) e.value = kv._2
this
}
@@ -98,12 +98,12 @@ extends AbstractMap[A, B]
override def foreach[C](f: ((A, B)) => C): Unit = foreachEntry(e => f(e.key, e.value))
/* Override to avoid tuple allocation in foreach */
- override def keySet: collection.Set[A] = new DefaultKeySet {
+ override def keySet: scala.collection.Set[A] = new DefaultKeySet {
override def foreach[C](f: A => C) = foreachEntry(e => f(e.key))
}
/* Override to avoid tuple allocation in foreach */
- override def values: collection.Iterable[B] = new DefaultValuesIterable {
+ override def values: scala.collection.Iterable[B] = new DefaultValuesIterable {
override def foreach[C](f: B => C) = foreachEntry(e => f(e.value))
}
@@ -127,12 +127,19 @@ extends AbstractMap[A, B]
if (!isSizeMapDefined) sizeMapInitAndRebuild
} else sizeMapDisable
+ protected def createNewEntry[B1](key: A, value: B1): Entry = {
+ new Entry(key, value.asInstanceOf[B])
+ }
+
private def writeObject(out: java.io.ObjectOutputStream) {
- serializeTo(out, _.value)
+ serializeTo(out, { entry =>
+ out.writeObject(entry.key)
+ out.writeObject(entry.value)
+ })
}
private def readObject(in: java.io.ObjectInputStream) {
- init[B](in, new Entry(_, _))
+ init(in, createNewEntry(in.readObject().asInstanceOf[A], in.readObject()))
}
}
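// Round-trip sketch for the entry-based serialization above: writeObject now
// emits key and value per entry, and readObject rebuilds entries through
// createNewEntry. Illustration only.
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}
import scala.collection.mutable
object HashMapSerializationDemo extends App {
  val m = mutable.HashMap("a" -> 1, "b" -> 2)
  val bytes = new ByteArrayOutputStream()
  val out = new ObjectOutputStream(bytes)
  out.writeObject(m)
  out.close()
  val in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray))
  val read = in.readObject().asInstanceOf[mutable.HashMap[String, Int]]
  assert(read == m)
}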
diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala
index e040d1e421..a5b636c83d 100644
--- a/src/library/scala/collection/mutable/HashSet.scala
+++ b/src/library/scala/collection/mutable/HashSet.scala
@@ -12,7 +12,7 @@ package scala.collection
package mutable
import generic._
-import collection.parallel.mutable.ParHashSet
+import scala.collection.parallel.mutable.ParHashSet
/** This class implements mutable sets using a hashtable.
*
@@ -53,7 +53,7 @@ extends AbstractSet[A]
override def companion: GenericCompanion[HashSet] = HashSet
- override def size = tableSize
+ override def size: Int = tableSize
def contains(elem: A): Boolean = containsEntry(elem)
@@ -67,7 +67,9 @@ extends AbstractSet[A]
override def remove(elem: A): Boolean = removeEntry(elem).isDefined
- override def clear() = clearTable()
+ override def clear() { clearTable() }
+
+ override def iterator: Iterator[A] = super[FlatHashTable].iterator
override def foreach[U](f: A => U) {
var i = 0
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index 97e794f06e..eb6717393b 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -32,6 +32,9 @@ package mutable
* @tparam A type of the elements contained in this hash table.
*/
trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashUtils[A] {
+ // Replacing the Entry type parameter with an abstract type member here would allow us to avoid
+ // exposing implementation-specific entry classes such as `DefaultEntry` or `LinkedEntry` publicly.
+ // However, it is probably too late now for such a breaking change.
import HashTable._
@transient protected var _loadFactor = defaultLoadFactor
@@ -52,7 +55,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
*/
@transient protected var sizemap: Array[Int] = null
- @transient var seedvalue: Int = tableSizeSeed
+ @transient protected var seedvalue: Int = tableSizeSeed
protected def tableSizeSeed = Integer.bitCount(table.length - 1)
@@ -75,11 +78,10 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
}
/**
- * Initializes the collection from the input stream. `f` will be called for each key/value pair
- * read from the input stream in the order determined by the stream. This is useful for
- * structures where iteration order is important (e.g. LinkedHashMap).
+ * Initializes the collection from the input stream. `readEntry` will be called for each
+ * entry to be read from the input stream.
*/
- private[collection] def init[B](in: java.io.ObjectInputStream, f: (A, B) => Entry) {
+ private[collection] def init(in: java.io.ObjectInputStream, readEntry: => Entry) {
in.defaultReadObject
_loadFactor = in.readInt()
@@ -100,35 +102,34 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
var index = 0
while (index < size) {
- addEntry(f(in.readObject().asInstanceOf[A], in.readObject().asInstanceOf[B]))
+ addEntry(readEntry)
index += 1
}
}
/**
* Serializes the collection to the output stream by saving the load factor, collection
- * size, collection keys and collection values. `value` is responsible for providing a value
- * from an entry.
+ * size and collection entries. `writeEntry` is responsible for writing an entry to the stream.
*
- * `foreach` determines the order in which the key/value pairs are saved to the stream. To
+ * `foreachEntry` determines the order in which the key/value pairs are saved to the stream. To
* deserialize, `init` should be used.
*/
- private[collection] def serializeTo[B](out: java.io.ObjectOutputStream, value: Entry => B) {
+ private[collection] def serializeTo(out: java.io.ObjectOutputStream, writeEntry: Entry => Unit) {
out.defaultWriteObject
out.writeInt(_loadFactor)
out.writeInt(tableSize)
out.writeInt(seedvalue)
out.writeBoolean(isSizeMapDefined)
- foreachEntry { entry =>
- out.writeObject(entry.key)
- out.writeObject(value(entry))
- }
+
+ foreachEntry(writeEntry)
}
/** Find entry with given key in table, null if not found.
*/
- protected def findEntry(key: A): Entry = {
- val h = index(elemHashCode(key))
+ protected def findEntry(key: A): Entry =
+ findEntry0(key, index(elemHashCode(key)))
+
+ private[this] def findEntry0(key: A, h: Int): Entry = {
var e = table(h).asInstanceOf[Entry]
while (e != null && !elemEquals(e.key, key)) e = e.next
e
@@ -138,7 +139,10 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
* pre: no entry with same key exists
*/
protected def addEntry(e: Entry) {
- val h = index(elemHashCode(e.key))
+ addEntry0(e, index(elemHashCode(e.key)))
+ }
+
+ private[this] def addEntry0(e: Entry, h: Int) {
e.next = table(h).asInstanceOf[Entry]
table(h) = e
tableSize = tableSize + 1
@@ -147,6 +151,24 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
resize(2 * table.length)
}
+ /** Find entry with given key in table, or add a new one if not found.
+ * May be somewhat faster than a `findEntry`/`addEntry` pair as it
+ * computes the entry's hash index only once.
+ * Returns the entry found in the table, or null if a new one was added.
+ * New entries are created by calling the `createNewEntry` method.
+ */
+ protected def findOrAddEntry[B](key: A, value: B): Entry = {
+ val h = index(elemHashCode(key))
+ val e = findEntry0(key, h)
+ if (e ne null) e else { addEntry0(createNewEntry(key, value), h); null }
+ }
+
+ /** Creates a new entry to be immediately inserted into the hashtable.
+ * This method is guaranteed to be called at most once, and only when the entry
+ * will actually be added. In other words, an implementation may be side-effecting.
+ */
+ protected def createNewEntry[B](key: A, value: B): Entry
+
/** Remove entry from table if present.
*/
protected def removeEntry(key: A) : Entry = {
@@ -195,7 +217,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
}
/** Avoid iterator for a 2x faster traversal. */
- protected def foreachEntry[C](f: Entry => C) {
+ protected def foreachEntry[U](f: Entry => U) {
val iterTable = table
var idx = lastPopulatedIndex
var es = iterTable(idx)
@@ -401,7 +423,7 @@ private[collection] object HashTable {
*
* For performance reasons, we avoid this improvement.
* */
- val i = util.hashing.byteswap32(hcode)
+ val i = scala.util.hashing.byteswap32(hcode)
/* Jenkins hash
* for range 0-10000, output has the msb set to zero */
@@ -452,7 +474,7 @@ private[collection] object HashTable {
val seedvalue: Int,
val sizemap: Array[Int]
) {
- import collection.DebugUtils._
+ import scala.collection.DebugUtils._
private[collection] def debugInformation = buildString {
append =>
append("Hash table contents")
diff --git a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
index 3232179dbb..1a3b7119a9 100644
--- a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
@@ -11,7 +11,7 @@
package scala.collection
package mutable
-import annotation.migration
+import scala.annotation.migration
/** This class can be used as an adaptor to create mutable maps from
* immutable map implementations. Only method `empty` has
@@ -42,17 +42,17 @@ extends AbstractMap[A, B]
override def isDefinedAt(key: A) = imap.isDefinedAt(key)
- override def keySet: collection.Set[A] = imap.keySet
+ override def keySet: scala.collection.Set[A] = imap.keySet
override def keysIterator: Iterator[A] = imap.keysIterator
@migration("`keys` returns Iterable[A] rather than Iterator[A].", "2.8.0")
- override def keys: collection.Iterable[A] = imap.keys
+ override def keys: scala.collection.Iterable[A] = imap.keys
override def valuesIterator: Iterator[B] = imap.valuesIterator
@migration("`values` returns Iterable[B] rather than Iterator[B].", "2.8.0")
- override def values: collection.Iterable[B] = imap.values
+ override def values: scala.collection.Iterable[B] = imap.values
def iterator: Iterator[(A, B)] = imap.iterator
diff --git a/src/library/scala/collection/mutable/IndexedSeqLike.scala b/src/library/scala/collection/mutable/IndexedSeqLike.scala
index 5d4b4de7b2..b3fe95ef27 100644
--- a/src/library/scala/collection/mutable/IndexedSeqLike.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqLike.scala
@@ -36,7 +36,7 @@ import generic._
* @define willNotTerminateInf
* @define mayNotTerminateInf
*/
-trait IndexedSeqLike[A, +Repr] extends scala.collection.IndexedSeqLike[A, Repr] { self =>
+trait IndexedSeqLike[A, +Repr] extends Any with scala.collection.IndexedSeqLike[A, Repr] { self =>
override protected[this] def thisCollection: IndexedSeq[A] = this.asInstanceOf[IndexedSeq[A]]
override protected[this] def toCollection(repr: Repr): IndexedSeq[A] = repr.asInstanceOf[IndexedSeq[A]]
diff --git a/src/library/scala/collection/mutable/IndexedSeqOptimized.scala b/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
index 819d06476a..506d2d6736 100755
--- a/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
@@ -17,4 +17,4 @@ import generic._
*
* @since 2.8
*/
-trait IndexedSeqOptimized[A, +Repr] extends IndexedSeqLike[A, Repr] with scala.collection.IndexedSeqOptimized[A, Repr]
+trait IndexedSeqOptimized[A, +Repr] extends Any with IndexedSeqLike[A, Repr] with scala.collection.IndexedSeqOptimized[A, Repr]
diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala
index a0de2ec8ad..ab3d0ec312 100644
--- a/src/library/scala/collection/mutable/IndexedSeqView.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqView.scala
@@ -14,7 +14,7 @@ package mutable
import generic._
import TraversableView.NoBuilder
-import language.implicitConversions
+import scala.language.implicitConversions
/** A non-strict view of a mutable `IndexedSeq`.
* $viewInfo
diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala
index 5643e070f8..5028884a8e 100644
--- a/src/library/scala/collection/mutable/LinkedHashMap.scala
+++ b/src/library/scala/collection/mutable/LinkedHashMap.scala
@@ -67,23 +67,9 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
}
override def put(key: A, value: B): Option[B] = {
- val e = findEntry(key)
- if (e == null) {
- val e = new Entry(key, value)
- addEntry(e)
- updateLinkedEntries(e)
- None
- } else {
- val v = e.value
- e.value = value
- Some(v)
- }
- }
-
- private def updateLinkedEntries(e: Entry) {
- if (firstEntry == null) firstEntry = e
- else { lastEntry.later = e; e.earlier = lastEntry }
- lastEntry = e
+ val e = findOrAddEntry(key, value)
+ if (e eq null) None
+ else { val v = e.value; e.value = value; Some(v) }
}
override def remove(key: A): Option[B] = {
@@ -143,7 +129,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
else Iterator.empty.next
}
- override def foreach[U](f: ((A, B)) => U) = {
+ override def foreach[U](f: ((A, B)) => U) {
var cur = firstEntry
while (cur ne null) {
f((cur.key, cur.value))
@@ -151,7 +137,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
}
}
- protected override def foreachEntry[C](f: Entry => C) {
+ protected override def foreachEntry[U](f: Entry => U) {
var cur = firstEntry
while (cur ne null) {
f(cur)
@@ -159,22 +145,29 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
}
}
+ protected def createNewEntry[B1](key: A, value: B1): Entry = {
+ val e = new Entry(key, value.asInstanceOf[B])
+ if (firstEntry eq null) firstEntry = e
+ else { lastEntry.later = e; e.earlier = lastEntry }
+ lastEntry = e
+ e
+ }
+
override def clear() {
clearTable()
firstEntry = null
}
private def writeObject(out: java.io.ObjectOutputStream) {
- serializeTo(out, _.value)
+ serializeTo(out, { entry =>
+ out.writeObject(entry.key)
+ out.writeObject(entry.value)
+ })
}
private def readObject(in: java.io.ObjectInputStream) {
firstEntry = null
lastEntry = null
- init[B](in, { (key, value) =>
- val entry = new Entry(key, value)
- updateLinkedEntries(entry)
- entry
- })
+ init(in, createNewEntry(in.readObject().asInstanceOf[A], in.readObject()))
}
}
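// Behavioural sketch: with put/+= going through findOrAddEntry and
// createNewEntry, insertion order is preserved and updating an existing key
// keeps its original position. Illustration only.
import scala.collection.mutable.LinkedHashMap
object LinkedHashMapOrderDemo extends App {
  val m = LinkedHashMap.empty[String, Int]
  m.put("b", 2); m.put("a", 1); m.put("c", 3)
  m.put("a", 10)                                 // update, position unchanged
  assert(m.keys.toList == List("b", "a", "c"))
  assert(m("a") == 10)
}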
diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala
index 3f789f9fa2..88bad5ff9b 100644
--- a/src/library/scala/collection/mutable/LinkedHashSet.scala
+++ b/src/library/scala/collection/mutable/LinkedHashSet.scala
@@ -19,6 +19,7 @@ import generic._
*
* @author Matthias Zenger
* @author Martin Odersky
+ * @author Pavel Pavlov
* @version 2.0, 31/12/2006
* @since 1
*
@@ -43,46 +44,82 @@ class LinkedHashSet[A] extends AbstractSet[A]
with Set[A]
with GenericSetTemplate[A, LinkedHashSet]
with SetLike[A, LinkedHashSet[A]]
- with FlatHashTable[A]
+ with HashTable[A, LinkedHashSet.Entry[A]]
with Serializable
{
override def companion: GenericCompanion[LinkedHashSet] = LinkedHashSet
- @transient private[this] var ordered = new ListBuffer[A]
+ type Entry = LinkedHashSet.Entry[A]
- override def size = tableSize
+ @transient protected var firstEntry: Entry = null
+ @transient protected var lastEntry: Entry = null
- def contains(elem: A): Boolean = containsEntry(elem)
+ override def size: Int = tableSize
+
+ def contains(elem: A): Boolean = findEntry(elem) ne null
def += (elem: A): this.type = { add(elem); this }
def -= (elem: A): this.type = { remove(elem); this }
- override def add(elem: A): Boolean =
- if (addEntry(elem)) { ordered += elem; true }
- else false
+ override def add(elem: A): Boolean = findOrAddEntry(elem, null) eq null
+
+ override def remove(elem: A): Boolean = {
+ val e = removeEntry(elem)
+ if (e eq null) false
+ else {
+ if (e.earlier eq null) firstEntry = e.later
+ else e.earlier.later = e.later
+ if (e.later eq null) lastEntry = e.earlier
+ else e.later.earlier = e.earlier
+ true
+ }
+ }
- override def remove(elem: A): Boolean =
- removeEntry(elem) match {
- case None => false
- case _ => ordered -= elem; true
+ def iterator: Iterator[A] = new AbstractIterator[A] {
+ private var cur = firstEntry
+ def hasNext = cur ne null
+ def next =
+ if (hasNext) { val res = cur.key; cur = cur.later; res }
+ else Iterator.empty.next
+ }
+
+ override def foreach[U](f: A => U) {
+ var cur = firstEntry
+ while (cur ne null) {
+ f(cur.key)
+ cur = cur.later
}
+ }
- override def clear() {
- ordered.clear()
- clearTable()
+ protected override def foreachEntry[U](f: Entry => U) {
+ var cur = firstEntry
+ while (cur ne null) {
+ f(cur)
+ cur = cur.later
+ }
}
- override def iterator: Iterator[A] = ordered.iterator
+ protected def createNewEntry[B](key: A, dummy: B): Entry = {
+ val e = new Entry(key)
+ if (firstEntry eq null) firstEntry = e
+ else { lastEntry.later = e; e.earlier = lastEntry }
+ lastEntry = e
+ e
+ }
- override def foreach[U](f: A => U) = ordered foreach f
+ override def clear() {
+ clearTable()
+ firstEntry = null
+ }
- private def writeObject(s: java.io.ObjectOutputStream) {
- serializeTo(s)
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ serializeTo(out, { e => out.writeObject(e.key) })
}
private def readObject(in: java.io.ObjectInputStream) {
- ordered = new ListBuffer[A]
- init(in, ordered += _)
+ firstEntry = null
+ lastEntry = null
+ init(in, createNewEntry(in.readObject().asInstanceOf[A], null))
}
}
@@ -93,5 +130,13 @@ class LinkedHashSet[A] extends AbstractSet[A]
object LinkedHashSet extends MutableSetFactory[LinkedHashSet] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinkedHashSet[A]] = setCanBuildFrom[A]
override def empty[A]: LinkedHashSet[A] = new LinkedHashSet[A]
+
+ /** Class for the linked hash set entry, used internally.
+ * @since 2.10
+ */
+ private[scala] final class Entry[A](val key: A) extends HashEntry[A, Entry[A]] with Serializable {
+ var earlier: Entry[A] = null
+ var later: Entry[A] = null
+ }
}
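
The LinkedHashSet rewrite replaces the FlatHashTable plus ListBuffer pair with a HashTable of doubly linked Entry nodes (earlier/later), so insertion order survives removals without maintaining a second collection. A small usage check, using only public standard-library API, of the behaviour the linked entries are meant to preserve:

    import scala.collection.mutable.LinkedHashSet

    object LinkedHashSetOrderDemo extends App {
      val s = LinkedHashSet("b", "a", "c")
      s += "d"                    // appended at the tail of the entry chain
      s -= "a"                    // removal unlinks the node and re-links its neighbours
      println(s.mkString(", "))   // b, c, d  -- iteration still follows insertion order
    }
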
diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala
index 07a8501ca4..307836907c 100644
--- a/src/library/scala/collection/mutable/LinkedListLike.scala
+++ b/src/library/scala/collection/mutable/LinkedListLike.scala
@@ -12,7 +12,7 @@ package scala.collection
package mutable
import generic._
-import annotation.tailrec
+import scala.annotation.tailrec
/** This extensible class may be used as a basis for implementing linked
* list. Type variable `A` refers to the element type of the
@@ -180,4 +180,14 @@ trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends Seq
these = these.next
}
}
+
+ /** Return a clone of this list.
+ *
+ * @return a `LinkedList` with the same elements.
+ */
+ override def clone(): This = {
+ val bf = newBuilder
+ bf ++= this
+ bf.result
+ }
}
diff --git a/src/library/scala/collection/mutable/Map.scala b/src/library/scala/collection/mutable/Map.scala
index 207b3f3324..8ae3f20cc8 100644
--- a/src/library/scala/collection/mutable/Map.scala
+++ b/src/library/scala/collection/mutable/Map.scala
@@ -71,7 +71,7 @@ object Map extends MutableMapFactory[Map] {
def empty[A, B]: Map[A, B] = new HashMap[A, B]
- class WithDefault[A, B](underlying: Map[A, B], d: A => B) extends collection.Map.WithDefault(underlying, d) with Map[A, B] {
+ class WithDefault[A, B](underlying: Map[A, B], d: A => B) extends scala.collection.Map.WithDefault(underlying, d) with Map[A, B] {
override def += (kv: (A, B)) = {underlying += kv; this}
def -= (key: A) = {underlying -= key; this}
override def empty = new WithDefault(underlying.empty, d)
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index 3046207533..56be5adcca 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -11,7 +11,7 @@ package scala.collection
package mutable
import generic._
-import annotation.{migration, bridge}
+import scala.annotation.{migration, bridge}
import parallel.mutable.ParMap
/** A template trait for mutable maps.
diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala
index c9e44ac165..6fa1f4872a 100644
--- a/src/library/scala/collection/mutable/MutableList.scala
+++ b/src/library/scala/collection/mutable/MutableList.scala
@@ -140,6 +140,13 @@ extends AbstractSeq[A]
}
def result = this
+
+ override def clone(): MutableList[A] = {
+ val bf = newBuilder
+ bf ++= seq
+ bf.result
+ }
+
}
diff --git a/src/library/scala/collection/mutable/ObservableBuffer.scala b/src/library/scala/collection/mutable/ObservableBuffer.scala
index 6b5079e402..aaf26327b2 100644
--- a/src/library/scala/collection/mutable/ObservableBuffer.scala
+++ b/src/library/scala/collection/mutable/ObservableBuffer.scala
@@ -70,8 +70,8 @@ trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoa
def undo() { throw new UnsupportedOperationException("cannot undo") }
})
}
-
- abstract override def insertAll(n: Int, elems: collection.Traversable[A]) {
+
+ abstract override def insertAll(n: Int, elems: scala.collection.Traversable[A]) {
super.insertAll(n, elems)
var curr = n - 1
val msg = elems.foldLeft(new Script[A]() with Undoable {
@@ -83,5 +83,5 @@ trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoa
}
publish(msg)
}
-
+
}
diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala
index 2634deb819..11055f8986 100644
--- a/src/library/scala/collection/mutable/OpenHashMap.scala
+++ b/src/library/scala/collection/mutable/OpenHashMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/**
@@ -195,7 +196,7 @@ extends AbstractMap[Key, Value]
}
}
- override def clone = {
+ override def clone() = {
val it = new OpenHashMap[Key, Value]
foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get));
it
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index e37cbdc712..abd8c1cdff 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -166,7 +166,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* Note: The order of elements returned is undefined.
* If you want to traverse the elements in priority queue
* order, use `clone().dequeueAll.iterator`.
- *
+ *
* @return an iterator over all the elements.
*/
override def iterator: Iterator[A] = new AbstractIterator[A] {
@@ -193,7 +193,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* @return A reversed priority queue.
*/
def reverse = {
- val revq = new PriorityQueue[A]()(new math.Ordering[A] {
+ val revq = new PriorityQueue[A]()(new scala.math.Ordering[A] {
def compare(x: A, y: A) = ord.compare(y, x)
})
for (i <- 1 until resarr.length) revq += resarr(i)
@@ -204,7 +204,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* than that returned by the method `iterator`.
*
* Note: The order of elements returned is undefined.
- *
+ *
* @return an iterator over all elements sorted in descending order.
*/
def reverseIterator: Iterator[A] = new AbstractIterator[A] {
@@ -236,11 +236,11 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* @return the string representation of this queue.
*/
override def toString() = toList.mkString("PriorityQueue(", ", ", ")")
-
+
/** Converts this $coll to a list.
*
* Note: the order of elements is undefined.
- *
+ *
* @return a list containing all elements of this $coll.
*/
override def toList = this.iterator.toList
diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala
index 21c3a84699..fc7e76125e 100644
--- a/src/library/scala/collection/mutable/Queue.scala
+++ b/src/library/scala/collection/mutable/Queue.scala
@@ -177,6 +177,12 @@ extends MutableList[A]
tl.len = len - 1
tl
}
+
+ override def clone(): Queue[A] = {
+ val bf = newBuilder
+ bf ++= seq
+ bf.result
+ }
}
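
LinkedListLike, MutableList and Queue all gain the same builder-based clone(): the elements are copied into a fresh newBuilder and its result is returned, so the clone shares no mutable nodes with the original. A small usage example (public API only) of the independence this buys:

    import scala.collection.mutable.MutableList

    object BuilderCloneDemo extends App {
      val xs = MutableList(1, 2, 3)
      val ys = xs.clone()   // rebuilt via newBuilder ++= seq
      ys += 4
      println(xs)           // MutableList(1, 2, 3)    -- the original is untouched
      println(ys)           // MutableList(1, 2, 3, 4)
    }
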
diff --git a/src/library/scala/collection/mutable/ResizableArray.scala b/src/library/scala/collection/mutable/ResizableArray.scala
index c7d10573c9..d29ee67580 100644
--- a/src/library/scala/collection/mutable/ResizableArray.scala
+++ b/src/library/scala/collection/mutable/ResizableArray.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
@@ -96,7 +97,7 @@ trait ResizableArray[A] extends IndexedSeq[A]
newsize = newsize * 2
val newar: Array[AnyRef] = new Array(newsize)
- compat.Platform.arraycopy(array, 0, newar, 0, size0)
+ scala.compat.Platform.arraycopy(array, 0, newar, 0, size0)
array = newar
}
}
@@ -112,7 +113,7 @@ trait ResizableArray[A] extends IndexedSeq[A]
/** Move parts of the array.
*/
protected def copy(m: Int, n: Int, len: Int) {
- compat.Platform.arraycopy(array, m, array, n, len)
+ scala.compat.Platform.arraycopy(array, m, array, n, len)
}
}
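
Most of the remaining hunks in this commit follow one pattern: relative references such as collection.X, annotation.X, math.X and compat.Platform are spelled out from the root as scala.collection.X and so on. The contrived sketch below (every name in it, including mylib, is hypothetical) illustrates the kind of shadowing that relative package references are exposed to and that the fully qualified form avoids:

    package mylib {
      package collection {           // a sibling package that happens to share the name
        class Thing
      }
      package user {
        object Demo {
          // A bare `collection` here resolves to mylib.collection (a member of the
          // enclosing package in this compilation unit), not to scala.collection:
          val t: collection.Thing = new collection.Thing
          // The fully qualified form is unambiguous regardless of what surrounds it:
          val m = scala.collection.mutable.ListBuffer(1, 2, 3)

          def main(args: Array[String]): Unit = println((t.getClass, m))
        }
      }
    }
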
diff --git a/src/library/scala/collection/mutable/SetBuilder.scala b/src/library/scala/collection/mutable/SetBuilder.scala
index 582ca898c7..0a95a18392 100644
--- a/src/library/scala/collection/mutable/SetBuilder.scala
+++ b/src/library/scala/collection/mutable/SetBuilder.scala
@@ -19,7 +19,7 @@ import generic._
* @param empty The empty element of the collection.
* @since 2.8
*/
-class SetBuilder[A, Coll <: collection.Set[A] with collection.SetLike[A, Coll]](empty: Coll) extends Builder[A, Coll] {
+class SetBuilder[A, Coll <: scala.collection.Set[A] with scala.collection.SetLike[A, Coll]](empty: Coll) extends Builder[A, Coll] {
protected var elems: Coll = empty
def +=(x: A): this.type = { elems = elems + x; this }
def clear() { elems = empty }
diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala
index 37313c8ca3..38342d4454 100644
--- a/src/library/scala/collection/mutable/SetLike.scala
+++ b/src/library/scala/collection/mutable/SetLike.scala
@@ -11,7 +11,7 @@ package mutable
import generic._
import script._
-import annotation.{ migration, bridge }
+import scala.annotation.{ migration, bridge }
import parallel.mutable.ParSet
/** A template trait for mutable sets of type `mutable.Set[A]`.
diff --git a/src/library/scala/collection/mutable/SortedSet.scala b/src/library/scala/collection/mutable/SortedSet.scala
index 78d12f3d64..809f584f4d 100644
--- a/src/library/scala/collection/mutable/SortedSet.scala
+++ b/src/library/scala/collection/mutable/SortedSet.scala
@@ -20,7 +20,7 @@ import generic._
* @author Lucien Pereira
*
*/
-trait SortedSet[A] extends collection.SortedSet[A] with collection.SortedSetLike[A,SortedSet[A]]
+trait SortedSet[A] extends scala.collection.SortedSet[A] with scala.collection.SortedSetLike[A,SortedSet[A]]
with mutable.Set[A] with mutable.SetLike[A, SortedSet[A]] {
/** Needs to be overridden in subclasses. */
diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala
index db9e48d1cf..1ba531ac82 100644
--- a/src/library/scala/collection/mutable/Stack.scala
+++ b/src/library/scala/collection/mutable/Stack.scala
@@ -12,9 +12,9 @@ package scala.collection
package mutable
import generic._
-import collection.immutable.{List, Nil}
-import collection.Iterator
-import annotation.migration
+import scala.collection.immutable.{List, Nil}
+import scala.collection.Iterator
+import scala.annotation.migration
/** Factory object for the `mutable.Stack` class.
*
diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala
index 08c881dbb8..92506548e9 100644
--- a/src/library/scala/collection/mutable/StringBuilder.scala
+++ b/src/library/scala/collection/mutable/StringBuilder.scala
@@ -10,7 +10,7 @@ package scala.collection
package mutable
import java.lang.{ StringBuilder => JavaStringBuilder }
-import annotation.migration
+import scala.annotation.migration
import immutable.StringLike
/** A builder for mutable sequence of characters. This class provides an API
diff --git a/src/library/scala/collection/mutable/SynchronizedMap.scala b/src/library/scala/collection/mutable/SynchronizedMap.scala
index 037b8ec5f5..6b3264a66d 100644
--- a/src/library/scala/collection/mutable/SynchronizedMap.scala
+++ b/src/library/scala/collection/mutable/SynchronizedMap.scala
@@ -11,7 +11,7 @@
package scala.collection
package mutable
-import annotation.migration
+import scala.annotation.migration
/** This class should be used as a mixin. It synchronizes the `Map`
* functions of the class into which it is mixed in.
@@ -41,14 +41,14 @@ trait SynchronizedMap[A, B] extends Map[A, B] {
override def transform(f: (A, B) => B): this.type = synchronized[this.type] { super.transform(f) }
override def retain(p: (A, B) => Boolean): this.type = synchronized[this.type] { super.retain(p) }
@migration("`values` returns `Iterable[B]` rather than `Iterator[B]`.", "2.8.0")
- override def values: collection.Iterable[B] = synchronized { super.values }
+ override def values: scala.collection.Iterable[B] = synchronized { super.values }
override def valuesIterator: Iterator[B] = synchronized { super.valuesIterator }
override def clone(): Self = synchronized { super.clone() }
override def foreach[U](f: ((A, B)) => U) = synchronized { super.foreach(f) }
override def apply(key: A): B = synchronized { super.apply(key) }
- override def keySet: collection.Set[A] = synchronized { super.keySet }
+ override def keySet: scala.collection.Set[A] = synchronized { super.keySet }
@migration("`keys` returns `Iterable[A]` rather than `Iterator[A]`.", "2.8.0")
- override def keys: collection.Iterable[A] = synchronized { super.keys }
+ override def keys: scala.collection.Iterable[A] = synchronized { super.keys }
override def keysIterator: Iterator[A] = synchronized { super.keysIterator }
override def isEmpty: Boolean = synchronized { super.isEmpty }
override def contains(key: A): Boolean = synchronized {super.contains(key) }
diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala
index 00675b9119..53b0c25a8f 100644
--- a/src/library/scala/collection/mutable/TreeSet.scala
+++ b/src/library/scala/collection/mutable/TreeSet.scala
@@ -103,7 +103,7 @@ class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with S
* the clone. So clone complexity in time is O(1).
*
*/
- override def clone: TreeSet[A] = {
+ override def clone(): TreeSet[A] = {
val clone = new TreeSet[A](base, from, until)
clone.avl = resolve.avl
clone.cardinality = resolve.cardinality
diff --git a/src/library/scala/collection/mutable/UnrolledBuffer.scala b/src/library/scala/collection/mutable/UnrolledBuffer.scala
index 6785aba10d..1c913c7ce7 100644
--- a/src/library/scala/collection/mutable/UnrolledBuffer.scala
+++ b/src/library/scala/collection/mutable/UnrolledBuffer.scala
@@ -8,11 +8,11 @@
package scala.collection.mutable
-import collection.AbstractIterator
-import collection.Iterator
-import collection.generic._
-import annotation.tailrec
-import reflect.ClassTag
+import scala.collection.AbstractIterator
+import scala.collection.Iterator
+import scala.collection.generic._
+import scala.annotation.tailrec
+import scala.reflect.ClassTag
/** A buffer that stores elements in an unrolled linked list.
*
@@ -43,11 +43,11 @@ import reflect.ClassTag
*/
@SerialVersionUID(1L)
class UnrolledBuffer[T](implicit val tag: ClassTag[T])
-extends collection.mutable.AbstractBuffer[T]
- with collection.mutable.Buffer[T]
- with collection.mutable.BufferLike[T, UnrolledBuffer[T]]
+extends scala.collection.mutable.AbstractBuffer[T]
+ with scala.collection.mutable.Buffer[T]
+ with scala.collection.mutable.BufferLike[T, UnrolledBuffer[T]]
with GenericClassTagTraversableTemplate[T, UnrolledBuffer]
- with collection.mutable.Builder[T, UnrolledBuffer[T]]
+ with scala.collection.mutable.Builder[T, UnrolledBuffer[T]]
with Serializable
{
import UnrolledBuffer.Unrolled
@@ -153,7 +153,7 @@ extends collection.mutable.AbstractBuffer[T]
this
}
- def insertAll(idx: Int, elems: collection.Traversable[T]) =
+ def insertAll(idx: Int, elems: scala.collection.Traversable[T]) =
if (idx >= 0 && idx <= sz) {
headptr.insertAll(idx, elems, this)
sz += elems.size
@@ -181,7 +181,7 @@ extends collection.mutable.AbstractBuffer[T]
}
override def clone(): UnrolledBuffer[T] = new UnrolledBuffer[T] ++= this
-
+
override def stringPrefix = "UnrolledBuffer"
}
@@ -285,7 +285,7 @@ object UnrolledBuffer extends ClassTagTraversableFactory[UnrolledBuffer] {
if (next eq null) true else false // checks if last node was thrown out
} else false
- @tailrec final def insertAll(idx: Int, t: collection.Traversable[T], buffer: UnrolledBuffer[T]): Unit = if (idx < size) {
+ @tailrec final def insertAll(idx: Int, t: scala.collection.Traversable[T], buffer: UnrolledBuffer[T]): Unit = if (idx < size) {
// divide this node at the appropriate position and insert all into head
// update new next
val newnextnode = new Unrolled[T](0, new Array(array.length), null, buff)
diff --git a/src/library/scala/collection/mutable/WrappedArray.scala b/src/library/scala/collection/mutable/WrappedArray.scala
index fec0fbaa3c..4d9b510e57 100644
--- a/src/library/scala/collection/mutable/WrappedArray.scala
+++ b/src/library/scala/collection/mutable/WrappedArray.scala
@@ -75,7 +75,7 @@ extends AbstractSeq[T]
override def stringPrefix = "WrappedArray"
/** Clones this object, including the underlying Array. */
- override def clone: WrappedArray[T] = WrappedArray make array.clone()
+ override def clone(): WrappedArray[T] = WrappedArray make array.clone()
/** Creates new builder for this collection ==> move to subclasses
*/
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 26877a32b1..f6fb32e152 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -24,13 +24,13 @@ import scala.collection.GenIterable
import scala.collection.GenTraversableOnce
import scala.collection.GenTraversable
import immutable.HashMapCombiner
-import reflect.{ClassTag, classTag}
+import scala.reflect.{ClassTag, classTag}
import java.util.concurrent.atomic.AtomicBoolean
-import annotation.unchecked.uncheckedVariance
-import annotation.unchecked.uncheckedStable
-import language.{ higherKinds, implicitConversions }
+import scala.annotation.unchecked.uncheckedVariance
+import scala.annotation.unchecked.uncheckedStable
+import scala.language.{ higherKinds, implicitConversions }
/** A template trait for parallel collections of type `ParIterable[T]`.
@@ -263,7 +263,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
/** The `newBuilder` operation returns a parallel builder assigned to this collection's fork/join pool.
* This method forwards the call to `newCombiner`.
*/
- //protected[this] def newBuilder: collection.mutable.Builder[T, Repr] = newCombiner
+ //protected[this] def newBuilder: scala.collection.mutable.Builder[T, Repr] = newCombiner
/** Optionally reuses an existing combiner for better performance. By default it doesn't - subclasses may override this behaviour.
* The provided combiner `oldc` that can potentially be reused will be either some combiner from the previous computational task, or `None` if there
@@ -453,7 +453,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def reduceRightOption[U >: T](op: (T, U) => U): Option[U] = seq.reduceRightOption(op)
- /** Applies a function `f` to all the elements of $coll in a sequential order.
+ /** Applies a function `f` to all the elements of $coll in an undefined order.
*
* @tparam U the result type of the function applied to each element, which is always discarded
* @param f function applied to each element
@@ -859,7 +859,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def toList: List[T] = seq.toList
- override def toIndexedSeq: collection.immutable.IndexedSeq[T] = seq.toIndexedSeq
+ override def toIndexedSeq: scala.collection.immutable.IndexedSeq[T] = seq.toIndexedSeq
override def toStream: Stream[T] = seq.toStream
@@ -867,7 +867,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
// the methods below are overridden
- override def toBuffer[U >: T]: collection.mutable.Buffer[U] = seq.toBuffer // have additional, parallel buffers?
+ override def toBuffer[U >: T]: scala.collection.mutable.Buffer[U] = seq.toBuffer // have additional, parallel buffers?
override def toTraversable: GenTraversable[T] = this.asInstanceOf[GenTraversable[T]]
@@ -1368,7 +1368,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
val until = from + len
val blocksize = scanBlockSize
while (i < until) {
- trees += scanBlock(i, math.min(blocksize, pit.remaining))
+ trees += scanBlock(i, scala.math.min(blocksize, pit.remaining))
i += blocksize
}
@@ -1496,7 +1496,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
debugBuffer += s
}
- import collection.DebugUtils._
+ import scala.collection.DebugUtils._
private[parallel] def printDebugBuffer() = println(buildString {
append =>
for (s <- debugBuffer) {
diff --git a/src/library/scala/collection/parallel/ParIterableViewLike.scala b/src/library/scala/collection/parallel/ParIterableViewLike.scala
index 91eefc2aa5..4f6962ff05 100644
--- a/src/library/scala/collection/parallel/ParIterableViewLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableViewLike.scala
@@ -18,7 +18,7 @@ import scala.collection.GenSeq
import scala.collection.generic.{ CanBuildFrom, SliceInterval }
import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.immutable.ParRange
-import language.implicitConversions
+import scala.language.implicitConversions
diff --git a/src/library/scala/collection/parallel/ParMap.scala b/src/library/scala/collection/parallel/ParMap.scala
index 58197ab2c6..2bc5e783e6 100644
--- a/src/library/scala/collection/parallel/ParMap.scala
+++ b/src/library/scala/collection/parallel/ParMap.scala
@@ -6,13 +6,8 @@
** |/ **
\* */
-
package scala.collection.parallel
-
-
-
-
import scala.collection.Map
import scala.collection.GenMap
import scala.collection.mutable.Builder
@@ -21,10 +16,6 @@ import scala.collection.generic.GenericParMapTemplate
import scala.collection.generic.GenericParMapCompanion
import scala.collection.generic.CanCombineFrom
-
-
-
-
/** A template trait for parallel maps.
*
* $sideeffects
@@ -75,31 +66,3 @@ object ParMap extends ParMapFactory[ParMap] {
override def default(key: A): B = d(key)
}
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala
index afd1f30903..8bf7334c5f 100644
--- a/src/library/scala/collection/parallel/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/ParMapLike.scala
@@ -16,7 +16,7 @@ import scala.collection.MapLike
import scala.collection.GenMapLike
import scala.collection.Map
import scala.collection.mutable.Builder
-import annotation.unchecked.uncheckedVariance
+import scala.annotation.unchecked.uncheckedVariance
import scala.collection.generic.IdleSignalling
import scala.collection.generic.Signalling
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index a67a4d8eb7..9bf287cc39 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -190,7 +190,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def slice2combiner[U >: T, This](from: Int, until: Int, cb: Combiner[U, This]): Combiner[U, This] = {
drop(from)
- var left = math.max(until - from, 0)
+ var left = scala.math.max(until - from, 0)
cb.sizeHint(left)
while (left > 0) {
cb += next
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index 7a0116b3b3..2556cd3f68 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -16,7 +16,7 @@ import scala.concurrent.forkjoin._
import scala.concurrent.ExecutionContext
import scala.util.control.Breaks._
-import annotation.unchecked.uncheckedVariance
+import scala.annotation.unchecked.uncheckedVariance
@@ -98,7 +98,7 @@ trait Task[R, +Tp] {
*/
trait Tasks {
- private[parallel] val debugMessages = collection.mutable.ArrayBuffer[String]()
+ private[parallel] val debugMessages = scala.collection.mutable.ArrayBuffer[String]()
private[parallel] def debuglog(s: String) = synchronized {
debugMessages += s
@@ -534,11 +534,11 @@ trait AdaptiveWorkStealingThreadPoolTasks extends ThreadPoolTasks with AdaptiveW
trait ExecutionContextTasks extends Tasks {
-
+
def executionContext = environment
-
+
val environment: ExecutionContext
-
+
// this part is a hack which allows switching
val driver: Tasks = executionContext match {
case eci: scala.concurrent.impl.ExecutionContextImpl => eci.executor match {
@@ -548,13 +548,13 @@ trait ExecutionContextTasks extends Tasks {
}
case _ => ???
}
-
+
def execute[R, Tp](task: Task[R, Tp]): () => R = driver execute task
-
+
def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = driver executeAndWaitResult task
-
+
def parallelismLevel = driver.parallelismLevel
-
+
}
diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
index c9876c4d74..187e4aaf92 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
@@ -20,8 +20,8 @@ import scala.collection.generic.CanCombineFrom
import scala.collection.generic.GenericParMapTemplate
import scala.collection.generic.GenericParMapCompanion
import scala.collection.immutable.{ HashMap, TrieIterator }
-import annotation.unchecked.uncheckedVariance
-import collection.parallel.Task
+import scala.annotation.unchecked.uncheckedVariance
+import scala.collection.parallel.Task
@@ -118,9 +118,9 @@ self =>
def remaining = sz - i
override def toString = "HashTrieIterator(" + sz + ")"
}
-
+
/* debug */
-
+
private[parallel] def printDebugInfo() {
println("Parallel hash trie")
println("Top level inner trie type: " + trie.getClass)
@@ -159,7 +159,7 @@ object ParHashMap extends ParMapFactory[ParHashMap] {
private[parallel] abstract class HashMapCombiner[K, V]
-extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), HashMapCombiner[K, V]](HashMapCombiner.rootsize) {
+extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), HashMapCombiner[K, V]](HashMapCombiner.rootsize) {
//self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] =>
import HashMapCombiner._
val emptyTrie = HashMap.empty[K, V]
@@ -202,7 +202,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), Has
def groupByKey[Repr](cbf: () => Combiner[V, Repr]): ParHashMap[K, Repr] = {
val bucks = buckets.filter(_ != null).map(_.headPtr)
val root = new Array[HashMap[K, AnyRef]](bucks.length)
-
+
combinerTaskSupport.executeAndWaitResult(new CreateGroupedTrie(cbf, bucks, root, 0, bucks.length))
var bitmap = 0
@@ -264,7 +264,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), Has
val fp = howmany / 2
List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
}
class CreateGroupedTrie[Repr](cbf: () => Combiner[V, Repr], bucks: Array[Unrolled[(K, V)]], root: Array[HashMap[K, AnyRef]], offset: Int, howmany: Int)
@@ -328,7 +328,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), Has
val fp = howmany / 2
List(new CreateGroupedTrie(cbf, bucks, root, offset, fp), new CreateGroupedTrie(cbf, bucks, root, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
index d1899601d7..85e2138c56 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
@@ -21,7 +21,7 @@ import scala.collection.generic.GenericParTemplate
import scala.collection.generic.GenericParCompanion
import scala.collection.generic.GenericCompanion
import scala.collection.immutable.{ HashSet, TrieIterator }
-import collection.parallel.Task
+import scala.collection.parallel.Task
@@ -132,7 +132,7 @@ object ParHashSet extends ParSetFactory[ParHashSet] {
private[immutable] abstract class HashSetCombiner[T]
-extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombiner[T]](HashSetCombiner.rootsize) {
+extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombiner[T]](HashSetCombiner.rootsize) {
//self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
import HashSetCombiner._
val emptyTrie = HashSet.empty[T]
@@ -209,7 +209,7 @@ extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombine
val fp = howmany / 2
List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/immutable/ParIterable.scala b/src/library/scala/collection/parallel/immutable/ParIterable.scala
index 349f4fa44c..5854844a8f 100644
--- a/src/library/scala/collection/parallel/immutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/immutable/ParIterable.scala
@@ -30,10 +30,10 @@ import scala.collection.GenIterable
* @since 2.9
*/
trait ParIterable[+T]
-extends collection/*.immutable*/.GenIterable[T]
- with collection.parallel.ParIterable[T]
+extends scala.collection/*.immutable*/.GenIterable[T]
+ with scala.collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
- with ParIterableLike[T, ParIterable[T], collection.immutable.Iterable[T]]
+ with ParIterableLike[T, ParIterable[T], scala.collection.immutable.Iterable[T]]
with Immutable
{
override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
diff --git a/src/library/scala/collection/parallel/immutable/ParMap.scala b/src/library/scala/collection/parallel/immutable/ParMap.scala
index 5060b36e7a..585e6bf541 100644
--- a/src/library/scala/collection/parallel/immutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParMap.scala
@@ -28,11 +28,11 @@ import scala.collection.GenMapLike
* @since 2.9
*/
trait ParMap[K, +V]
-extends collection/*.immutable*/.GenMap[K, V]
+extends scala.collection/*.immutable*/.GenMap[K, V]
with GenericParMapTemplate[K, V, ParMap]
with parallel.ParMap[K, V]
with ParIterable[(K, V)]
- with ParMapLike[K, V, ParMap[K, V], collection.immutable.Map[K, V]]
+ with ParMapLike[K, V, ParMap[K, V], scala.collection.immutable.Map[K, V]]
{
self =>
@@ -56,7 +56,7 @@ self =>
* @param d the function mapping keys to values, used for non-present keys
* @return a wrapper of the map with a default value
*/
- def withDefault[U >: V](d: K => U): collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, d)
+ def withDefault[U >: V](d: K => U): scala.collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, d)
/** The same map with a given default value.
*
@@ -65,7 +65,7 @@ self =>
* @param d the function mapping keys to values, used for non-present keys
* @return a wrapper of the map with a default value
*/
- def withDefaultValue[U >: V](d: U): collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, x => d)
+ def withDefaultValue[U >: V](d: U): scala.collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, x => d)
}
@@ -79,7 +79,7 @@ object ParMap extends ParMapFactory[ParMap] {
implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V]
class WithDefault[K, +V](underlying: ParMap[K, V], d: K => V)
- extends collection.parallel.ParMap.WithDefault[K, V](underlying, d) with ParMap[K, V] {
+ extends scala.collection.parallel.ParMap.WithDefault[K, V](underlying, d) with ParMap[K, V] {
override def empty = new WithDefault(underlying.empty, d)
override def updated[U >: V](key: K, value: U): WithDefault[K, U] = new WithDefault[K, U](underlying.updated[U](key, value), d)
override def + [U >: V](kv: (K, U)): WithDefault[K, U] = updated(kv._1, kv._2)
diff --git a/src/library/scala/collection/parallel/immutable/ParSeq.scala b/src/library/scala/collection/parallel/immutable/ParSeq.scala
index dde6533c82..265121286d 100644
--- a/src/library/scala/collection/parallel/immutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSeq.scala
@@ -28,11 +28,11 @@ import scala.collection.GenSeq
* @define coll mutable parallel sequence
*/
trait ParSeq[+T]
-extends collection/*.immutable*/.GenSeq[T]
- with collection.parallel.ParSeq[T]
+extends scala.collection/*.immutable*/.GenSeq[T]
+ with scala.collection.parallel.ParSeq[T]
with ParIterable[T]
with GenericParTemplate[T, ParSeq]
- with ParSeqLike[T, ParSeq[T], collection.immutable.Seq[T]]
+ with ParSeqLike[T, ParSeq[T], scala.collection.immutable.Seq[T]]
{
override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq
override def toSeq: ParSeq[T] = this
diff --git a/src/library/scala/collection/parallel/immutable/ParSet.scala b/src/library/scala/collection/parallel/immutable/ParSet.scala
index 40429280ac..c8da509ef5 100644
--- a/src/library/scala/collection/parallel/immutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSet.scala
@@ -20,11 +20,11 @@ import scala.collection.parallel.Combiner
* @define coll mutable parallel set
*/
trait ParSet[T]
-extends collection/*.immutable*/.GenSet[T]
+extends scala.collection/*.immutable*/.GenSet[T]
with GenericParTemplate[T, ParSet]
with parallel.ParSet[T]
with ParIterable[T]
- with ParSetLike[T, ParSet[T], collection.immutable.Set[T]]
+ with ParSetLike[T, ParSet[T], scala.collection.immutable.Set[T]]
{
self =>
override def empty: ParSet[T] = ParHashSet[T]()
diff --git a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
index 3694f40477..44ae7e2ce9 100644
--- a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
@@ -22,7 +22,7 @@ import scala.collection.parallel.Combiner
* @tparam Buff the type of the buffers that contain leaf results and this combiner chains together
*/
trait LazyCombiner[Elem, +To, Buff <: Growable[Elem] with Sizing] extends Combiner[Elem, To] {
-//self: collection.parallel.EnvironmentPassingCombiner[Elem, To] =>
+//self: scala.collection.parallel.EnvironmentPassingCombiner[Elem, To] =>
val chain: ArrayBuffer[Buff]
val lastbuff = chain.last
def +=(elem: Elem) = { lastbuff += elem; this }
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index 6889d8b472..56cc06f99e 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection.parallel.mutable
+package scala
+package collection.parallel.mutable
@@ -665,7 +666,7 @@ self =>
val fp = howmany / 2
List(new Map(f, targetarr, offset, fp), new Map(f, targetarr, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(length, tasksupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(length, tasksupport.parallelismLevel)
}
/* serialization */
diff --git a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
index d0c7f6050e..c7f025207c 100644
--- a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
@@ -9,7 +9,7 @@
package scala.collection
package parallel.mutable
-import collection.parallel.IterableSplitter
+import scala.collection.parallel.IterableSplitter
/** Parallel flat hash table.
*
@@ -19,13 +19,13 @@ import collection.parallel.IterableSplitter
*
* @author Aleksandar Prokopec
*/
-trait ParFlatHashTable[T] extends collection.mutable.FlatHashTable[T] {
+trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] {
override def alwaysInitSizeMap = true
abstract class ParFlatHashTableIterator(var idx: Int, val until: Int, val totalsize: Int)
extends IterableSplitter[T] with SizeMapUtils {
- import collection.DebugUtils._
+ import scala.collection.DebugUtils._
private var traversed = 0
private val itertable = table
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index 8d39d6e0de..fad7ddad59 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -12,12 +12,12 @@ package mutable
-import collection.generic._
-import collection.mutable.DefaultEntry
-import collection.mutable.HashEntry
-import collection.mutable.HashTable
-import collection.mutable.UnrolledBuffer
-import collection.parallel.Task
+import scala.collection.generic._
+import scala.collection.mutable.DefaultEntry
+import scala.collection.mutable.HashEntry
+import scala.collection.mutable.HashTable
+import scala.collection.mutable.UnrolledBuffer
+import scala.collection.parallel.Task
@@ -40,14 +40,14 @@ import collection.parallel.Task
class ParHashMap[K, V] private[collection] (contents: HashTable.Contents[K, DefaultEntry[K, V]])
extends ParMap[K, V]
with GenericParMapTemplate[K, V, ParHashMap]
- with ParMapLike[K, V, ParHashMap[K, V], collection.mutable.HashMap[K, V]]
+ with ParMapLike[K, V, ParHashMap[K, V], scala.collection.mutable.HashMap[K, V]]
with ParHashTable[K, DefaultEntry[K, V]]
with Serializable
{
self =>
initWithContents(contents)
- type Entry = collection.mutable.DefaultEntry[K, V]
+ type Entry = scala.collection.mutable.DefaultEntry[K, V]
def this() = this(null)
@@ -57,7 +57,7 @@ self =>
protected[this] override def newCombiner = ParHashMapCombiner[K, V]
- override def seq = new collection.mutable.HashMap[K, V](hashTableContents)
+ override def seq = new scala.collection.mutable.HashMap[K, V](hashTableContents)
def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]])
@@ -67,13 +67,13 @@ self =>
def get(key: K): Option[V] = {
val e = findEntry(key)
- if (e == null) None
+ if (e eq null) None
else Some(e.value)
}
def put(key: K, value: V): Option[V] = {
- val e = findEntry(key)
- if (e == null) { addEntry(new Entry(key, value)); None }
+ val e = findOrAddEntry(key, value)
+ if (e eq null) None
else { val v = e.value; e.value = value; Some(v) }
}
@@ -86,9 +86,8 @@ self =>
}
def += (kv: (K, V)): this.type = {
- val e = findEntry(kv._1)
- if (e == null) addEntry(new Entry(kv._1, kv._2))
- else e.value = kv._2
+ val e = findOrAddEntry(kv._1, kv._2)
+ if (e ne null) e.value = kv._2
this
}
@@ -103,12 +102,19 @@ self =>
new ParHashMapIterator(idxFrom, idxUntil, totalSz, es)
}
+ protected def createNewEntry[V1](key: K, value: V1): Entry = {
+ new Entry(key, value.asInstanceOf[V])
+ }
+
private def writeObject(out: java.io.ObjectOutputStream) {
- serializeTo(out, _.value)
+ serializeTo(out, { entry =>
+ out.writeObject(entry.key)
+ out.writeObject(entry.value)
+ })
}
private def readObject(in: java.io.ObjectInputStream) {
- init[V](in, new Entry(_, _))
+ init(in, createNewEntry(in.readObject().asInstanceOf[K], in.readObject()))
}
private[parallel] override def brokenInvariants = {
@@ -157,8 +163,8 @@ object ParHashMap extends ParMapFactory[ParHashMap] {
private[mutable] abstract class ParHashMapCombiner[K, V](private val tableLoadFactor: Int)
-extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntry[K, V], ParHashMapCombiner[K, V]](ParHashMapCombiner.numblocks)
- with collection.mutable.HashTable.HashUtils[K]
+extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntry[K, V], ParHashMapCombiner[K, V]](ParHashMapCombiner.numblocks)
+ with scala.collection.mutable.HashTable.HashUtils[K]
{
private var mask = ParHashMapCombiner.discriminantmask
private var nonmasklen = ParHashMapCombiner.nonmasklength
@@ -190,7 +196,9 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
// construct a normal table and fill it sequentially
// TODO parallelize by keeping separate sizemaps and merging them
object table extends HashTable[K, DefaultEntry[K, V]] {
- def insertEntry(e: DefaultEntry[K, V]) = if (super.findEntry(e.key) eq null) super.addEntry(e)
+ type Entry = DefaultEntry[K, V]
+ def insertEntry(e: Entry) { super.findOrAddEntry(e.key, e) }
+ def createNewEntry[E](key: K, entry: E): Entry = entry.asInstanceOf[Entry]
sizeMapInit(table.length)
}
var i = 0
@@ -251,6 +259,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
assert(h >= block * blocksize && h < (block + 1) * blocksize)
}
}
+ protected def createNewEntry[X](key: K, x: X) = ???
}
/* tasks */
@@ -302,7 +311,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
override def merge(that: FillBlocks) {
this.result += that.result
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
}
}
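
The parallel ParHashMap gets the same treatment as the sequential LinkedHashMap: put and += go through findOrAddEntry, createNewEntry builds a plain DefaultEntry, and serialization writes keys and values explicitly. A small usage sketch of the public behaviour (nothing in it depends on the internal hooks):

    import scala.collection.parallel.mutable.ParHashMap

    object ParHashMapPutDemo extends App {
      val m = ParHashMap[String, Int]()
      println(m.put("x", 1))   // None: no previous binding, a new entry is added
      println(m.put("x", 2))   // Some(1): the existing entry is updated in place
      println(m("x"))          // 2
    }
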
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 783f8dce77..aef9f6856b 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -10,11 +10,11 @@ package scala.collection.parallel.mutable
-import collection.generic._
-import collection.mutable.FlatHashTable
-import collection.parallel.Combiner
-import collection.mutable.UnrolledBuffer
-import collection.parallel.Task
+import scala.collection.generic._
+import scala.collection.mutable.FlatHashTable
+import scala.collection.parallel.Combiner
+import scala.collection.mutable.UnrolledBuffer
+import scala.collection.parallel.Task
@@ -36,7 +36,7 @@ import collection.parallel.Task
class ParHashSet[T] private[collection] (contents: FlatHashTable.Contents[T])
extends ParSet[T]
with GenericParTemplate[T, ParHashSet]
- with ParSetLike[T, ParHashSet[T], collection.mutable.HashSet[T]]
+ with ParSetLike[T, ParHashSet[T], scala.collection.mutable.HashSet[T]]
with ParFlatHashTable[T]
with Serializable
{
@@ -57,7 +57,7 @@ extends ParSet[T]
def clear() = clearTable()
- override def seq = new collection.mutable.HashSet(hashTableContents)
+ override def seq = new scala.collection.mutable.HashSet(hashTableContents)
def +=(elem: T) = {
addEntry(elem)
@@ -88,7 +88,7 @@ extends ParSet[T]
init(in, x => x)
}
- import collection.DebugUtils._
+ import scala.collection.DebugUtils._
override def debugInformation = buildString {
append =>
append("Parallel flat hash table set")
@@ -117,8 +117,8 @@ object ParHashSet extends ParSetFactory[ParHashSet] {
private[mutable] abstract class ParHashSetCombiner[T](private val tableLoadFactor: Int)
-extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, ParHashSetCombiner[T]](ParHashSetCombiner.numblocks)
-with collection.mutable.FlatHashTable.HashUtils[T] {
+extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, ParHashSetCombiner[T]](ParHashSetCombiner.numblocks)
+with scala.collection.mutable.FlatHashTable.HashUtils[T] {
//self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
private var mask = ParHashSetCombiner.discriminantmask
private var nonmasklen = ParHashSetCombiner.nonmasklength
@@ -158,12 +158,12 @@ with collection.mutable.FlatHashTable.HashUtils[T] {
val tbl = new FlatHashTable[T] {
sizeMapInit(table.length)
seedvalue = ParHashSetCombiner.this.seedvalue
+ for {
+ buffer <- buckets;
+ if buffer ne null;
+ elem <- buffer
+ } addEntry(elem.asInstanceOf[T])
}
- for {
- buffer <- buckets;
- if buffer ne null;
- elem <- buffer
- } tbl.addEntry(elem.asInstanceOf[T])
tbl.hashTableContents
}
@@ -310,7 +310,7 @@ with collection.mutable.FlatHashTable.HashUtils[T] {
// the total number of successfully inserted elements is adjusted accordingly
result = (this.result._1 + that.result._1 + inserted, remainingLeftovers concat that.result._2)
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
index 8c93732427..bb9a7b7823 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
@@ -13,8 +13,8 @@ package parallel.mutable
-import collection.mutable.HashEntry
-import collection.parallel.IterableSplitter
+import scala.collection.mutable.HashEntry
+import scala.collection.parallel.IterableSplitter
@@ -22,7 +22,7 @@ import collection.parallel.IterableSplitter
* enriching the data structure by fulfilling certain requirements
* for their parallel construction and iteration.
*/
-trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends collection.mutable.HashTable[K, Entry] {
+trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collection.mutable.HashTable[K, Entry] {
override def alwaysInitSizeMap = true
@@ -104,7 +104,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends collection.m
// otherwise, this is the last entry in the table - all what remains is the chain
// so split the rest of the chain
val arr = convertToArrayBuffer(es)
- val arrpit = new collection.parallel.BufferSplitter[T](arr, 0, arr.length, signalDelegate)
+ val arrpit = new scala.collection.parallel.BufferSplitter[T](arr, 0, arr.length, signalDelegate)
arrpit.split
}
} else Seq(this.asInstanceOf[IterRepr])
diff --git a/src/library/scala/collection/parallel/mutable/ParIterable.scala b/src/library/scala/collection/parallel/mutable/ParIterable.scala
index b5747a31cf..9281e84c03 100644
--- a/src/library/scala/collection/parallel/mutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParIterable.scala
@@ -26,8 +26,8 @@ import scala.collection.GenIterable
* @author Aleksandar Prokopec
* @since 2.9
*/
-trait ParIterable[T] extends collection/*.mutable*/.GenIterable[T]
- with collection.parallel.ParIterable[T]
+trait ParIterable[T] extends scala.collection/*.mutable*/.GenIterable[T]
+ with scala.collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
with ParIterableLike[T, ParIterable[T], Iterable[T]]
with Mutable {
@@ -39,7 +39,7 @@ trait ParIterable[T] extends collection/*.mutable*/.GenIterable[T]
override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T])
- def seq: collection.mutable.Iterable[T]
+ def seq: scala.collection.mutable.Iterable[T]
}
/** $factoryInfo
diff --git a/src/library/scala/collection/parallel/mutable/ParMap.scala b/src/library/scala/collection/parallel/mutable/ParMap.scala
index 4b3eae4ad1..34b3d465d2 100644
--- a/src/library/scala/collection/parallel/mutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMap.scala
@@ -12,8 +12,8 @@ package scala.collection.parallel.mutable
-import collection.generic._
-import collection.parallel.Combiner
+import scala.collection.generic._
+import scala.collection.parallel.Combiner
@@ -28,11 +28,11 @@ import collection.parallel.Combiner
* @since 2.9
*/
trait ParMap[K, V]
-extends collection/*.mutable*/.GenMap[K, V]
- with collection.parallel.ParMap[K, V]
+extends scala.collection/*.mutable*/.GenMap[K, V]
+ with scala.collection.parallel.ParMap[K, V]
with /* mutable */ ParIterable[(K, V)]
with GenericParMapTemplate[K, V, ParMap]
- with /* mutable */ ParMapLike[K, V, ParMap[K, V], collection.mutable.Map[K, V]]
+ with /* mutable */ ParMapLike[K, V, ParMap[K, V], scala.collection.mutable.Map[K, V]]
{
protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V]
@@ -41,7 +41,7 @@ extends collection/*.mutable*/.GenMap[K, V]
override def empty: ParMap[K, V] = new ParHashMap[K, V]
- def seq: collection.mutable.Map[K, V]
+ def seq: scala.collection.mutable.Map[K, V]
override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value))
@@ -53,7 +53,7 @@ extends collection/*.mutable*/.GenMap[K, V]
* @param d the function mapping keys to values, used for non-present keys
* @return a wrapper of the map with a default value
*/
- def withDefault(d: K => V): collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, d)
+ def withDefault(d: K => V): scala.collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, d)
/** The same map with a given default value.
*
@@ -62,7 +62,7 @@ extends collection/*.mutable*/.GenMap[K, V]
* @param d the function mapping keys to values, used for non-present keys
* @return a wrapper of the map with a default value
*/
- def withDefaultValue(d: V): collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, x => d)
+ def withDefaultValue(d: V): scala.collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, x => d)
}
@@ -76,7 +76,7 @@ object ParMap extends ParMapFactory[ParMap] {
implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V]
class WithDefault[K, V](underlying: ParMap[K, V], d: K => V)
- extends collection.parallel.ParMap.WithDefault(underlying, d) with ParMap[K, V] {
+ extends scala.collection.parallel.ParMap.WithDefault(underlying, d) with ParMap[K, V] {
override def += (kv: (K, V)) = {underlying += kv; this}
def -= (key: K) = {underlying -= key; this}
override def empty = new WithDefault(underlying.empty, d)
diff --git a/src/library/scala/collection/parallel/mutable/ParMapLike.scala b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
index 5c69c2e045..675b20949f 100644
--- a/src/library/scala/collection/parallel/mutable/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
@@ -11,11 +11,11 @@ package mutable
-import collection.generic._
-import collection.mutable.Builder
-import collection.mutable.Cloneable
-import collection.generic.Growable
-import collection.generic.Shrinkable
+import scala.collection.generic._
+import scala.collection.mutable.Builder
+import scala.collection.mutable.Cloneable
+import scala.collection.generic.Growable
+import scala.collection.generic.Shrinkable
@@ -33,9 +33,9 @@ import collection.generic.Shrinkable
trait ParMapLike[K,
V,
+Repr <: ParMapLike[K, V, Repr, Sequential] with ParMap[K, V],
- +Sequential <: collection.mutable.Map[K, V] with collection.mutable.MapLike[K, V, Sequential]]
-extends collection.GenMapLike[K, V, Repr]
- with collection.parallel.ParMapLike[K, V, Repr, Sequential]
+ +Sequential <: scala.collection.mutable.Map[K, V] with scala.collection.mutable.MapLike[K, V, Sequential]]
+extends scala.collection.GenMapLike[K, V, Repr]
+ with scala.collection.parallel.ParMapLike[K, V, Repr, Sequential]
with Growable[(K, V)]
with Shrinkable[K]
with Cloneable[Repr]
diff --git a/src/library/scala/collection/parallel/mutable/ParSeq.scala b/src/library/scala/collection/parallel/mutable/ParSeq.scala
index f46b369494..7322d5236f 100644
--- a/src/library/scala/collection/parallel/mutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSeq.scala
@@ -29,18 +29,18 @@ import scala.collection.GenSeq
* @define Coll `mutable.ParSeq`
* @define coll mutable parallel sequence
*/
-trait ParSeq[T] extends collection/*.mutable*/.GenSeq[T] // was: collection.mutable.Seq[T]
+trait ParSeq[T] extends scala.collection/*.mutable*/.GenSeq[T] // was: scala.collection.mutable.Seq[T]
with ParIterable[T]
- with collection.parallel.ParSeq[T]
+ with scala.collection.parallel.ParSeq[T]
with GenericParTemplate[T, ParSeq]
- with ParSeqLike[T, ParSeq[T], collection.mutable.Seq[T]] {
+ with ParSeqLike[T, ParSeq[T], scala.collection.mutable.Seq[T]] {
self =>
override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq
//protected[this] override def newBuilder = ParSeq.newBuilder[T]
def update(i: Int, elem: T): Unit
- def seq: collection.mutable.Seq[T]
+ def seq: scala.collection.mutable.Seq[T]
override def toSeq: ParSeq[T] = this
}
diff --git a/src/library/scala/collection/parallel/mutable/ParSet.scala b/src/library/scala/collection/parallel/mutable/ParSet.scala
index 6da4c8a7bc..540ecb8022 100644
--- a/src/library/scala/collection/parallel/mutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSet.scala
@@ -27,16 +27,16 @@ import scala.collection.GenSet
* @author Aleksandar Prokopec
*/
trait ParSet[T]
-extends collection/*.mutable*/.GenSet[T]
+extends scala.collection/*.mutable*/.GenSet[T]
with ParIterable[T]
- with collection.parallel.ParSet[T]
+ with scala.collection.parallel.ParSet[T]
with GenericParTemplate[T, ParSet]
- with ParSetLike[T, ParSet[T], collection.mutable.Set[T]]
+ with ParSetLike[T, ParSet[T], scala.collection.mutable.Set[T]]
{
self =>
override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet
override def empty: ParSet[T] = ParHashSet()
- def seq: collection.mutable.Set[T]
+ def seq: scala.collection.mutable.Set[T]
}
diff --git a/src/library/scala/collection/parallel/mutable/ParSetLike.scala b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
index 969fc2a405..e41d779a4d 100644
--- a/src/library/scala/collection/parallel/mutable/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
@@ -35,8 +35,8 @@ trait ParSetLike[T,
+Repr <: ParSetLike[T, Repr, Sequential] with ParSet[T],
+Sequential <: mutable.Set[T] with mutable.SetLike[T, Sequential]]
extends GenSetLike[T, Repr]
- with collection.parallel.ParIterableLike[T, Repr, Sequential]
- with collection.parallel.ParSetLike[T, Repr, Sequential]
+ with scala.collection.parallel.ParIterableLike[T, Repr, Sequential]
+ with scala.collection.parallel.ParSetLike[T, Repr, Sequential]
with Growable[T]
with Shrinkable[T]
with Cloneable[Repr]
diff --git a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
index 359c35f1dd..5c452f628c 100644
--- a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
@@ -34,7 +34,7 @@ import scala.collection.concurrent.TrieMapIterator
* @author Aleksandar Prokopec
* @since 2.10
* @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_concurrent_tries Scala's Parallel Collections Library overview]]
- * section on `ParTrieMap` for more information.
+ * section on `ParTrieMap` for more information.
*/
final class ParTrieMap[K, V] private[collection] (private val ctrie: TrieMap[K, V])
extends ParMap[K, V]
@@ -130,7 +130,7 @@ extends TrieMapIterator[K, V](lev, ct, mustInit)
protected override def newIterator(_lev: Int, _ct: TrieMap[K, V], _mustInit: Boolean) = new ParTrieMapSplitter[K, V](_lev, _ct, _mustInit)
- override def shouldSplitFurther[S](coll: collection.parallel.ParIterable[S], parallelismLevel: Int) = {
+ override def shouldSplitFurther[S](coll: scala.collection.parallel.ParIterable[S], parallelismLevel: Int) = {
val maxsplits = 3 + Integer.highestOneBit(parallelismLevel)
level < maxsplits
}
diff --git a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
index 01eb17024e..68f37137f8 100644
--- a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
@@ -81,7 +81,7 @@ trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedA
val fp = howmany / 2
List(new CopyChainToArray(array, offset, fp), new CopyChainToArray(array, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
index 9648791502..5600d0f68c 100644
--- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
@@ -8,10 +8,6 @@
package scala.collection.parallel.mutable
-
-
-
-
import scala.collection.generic.Sizing
import scala.collection.mutable.ArraySeq
import scala.collection.mutable.ArrayBuffer
@@ -23,16 +19,12 @@ import scala.collection.parallel.Combiner
import scala.collection.parallel.Task
import scala.reflect.ClassTag
-
-
-
private[mutable] class DoublingUnrolledBuffer[T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) {
override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz
protected override def newUnrolled = new Unrolled[T](0, new Array[T](4), null, this)
}
-
/** An array combiner that uses doubling unrolled buffers to store elements. */
trait UnrolledParArrayCombiner[T]
extends Combiner[T, ParArray[T]] {
@@ -85,7 +77,7 @@ extends Combiner[T, ParArray[T]] {
var pos = startpos
var arroffset = offset
while (totalleft > 0) {
- val lefthere = math.min(totalleft, curr.size - pos)
+ val lefthere = scala.math.min(totalleft, curr.size - pos)
Array.copy(curr.array, pos, array, arroffset, lefthere)
// println("from: " + arroffset + " elems " + lefthere + " - " + pos + ", " + curr + " -> " + array.toList + " by " + this + " !! " + buff.headPtr)
totalleft -= lefthere
@@ -107,13 +99,11 @@ extends Combiner[T, ParArray[T]] {
val fp = howmany / 2
List(new CopyUnrolledToArray(array, offset, fp), new CopyUnrolledToArray(array, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel)
override def toString = "CopyUnrolledToArray(" + offset + ", " + howmany + ")"
}
}
-
-
object UnrolledParArrayCombiner {
def apply[T](): UnrolledParArrayCombiner[T] = new UnrolledParArrayCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParArray[T]]
}
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index e3124af12e..a95090c15b 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -6,14 +6,15 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import scala.collection.generic.CanBuildFrom
import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.mutable.ParArray
import scala.collection.mutable.UnrolledBuffer
-import annotation.unchecked.uncheckedVariance
-import language.implicitConversions
+import scala.annotation.unchecked.uncheckedVariance
+import scala.language.implicitConversions
/** Package object for parallel collections.
*/
@@ -41,14 +42,14 @@ package object parallel {
private[parallel] def outofbounds(idx: Int) = throw new IndexOutOfBoundsException(idx.toString)
private[parallel] def getTaskSupport: TaskSupport =
- if (util.Properties.isJavaAtLeast("1.6")) {
- val vendor = util.Properties.javaVmVendor
+ if (scala.util.Properties.isJavaAtLeast("1.6")) {
+ val vendor = scala.util.Properties.javaVmVendor
if ((vendor contains "Oracle") || (vendor contains "Sun") || (vendor contains "Apple")) new ForkJoinTaskSupport
else new ThreadPoolTaskSupport
} else new ThreadPoolTaskSupport
val defaultTaskSupport: TaskSupport = getTaskSupport
-
+
def setTaskSupport[Coll](c: Coll, t: TaskSupport): Coll = {
c match {
case pc: ParIterableLike[_, _, _] => pc.tasksupport = t
@@ -56,7 +57,7 @@ package object parallel {
}
c
}
-
+
/* implicit conversions */
implicit def factory2ops[From, Elem, To](bf: CanBuildFrom[From, Elem, To]) = new FactoryOps[From, Elem, To] {
@@ -66,7 +67,7 @@ package object parallel {
def otherwise(notbody: => R) = if (isParallel) isbody(asParallel) else notbody
}
}
- implicit def traversable2ops[T](t: collection.GenTraversableOnce[T]) = new TraversableOps[T] {
+ implicit def traversable2ops[T](t: scala.collection.GenTraversableOnce[T]) = new TraversableOps[T] {
def isParallel = t.isInstanceOf[Parallel]
def isParIterable = t.isInstanceOf[ParIterable[_]]
def asParIterable = t.asInstanceOf[ParIterable[T]]
@@ -149,7 +150,7 @@ package parallel {
* Automatically forwards the signal delegate when splitting.
*/
private[parallel] class BufferSplitter[T]
- (private val buffer: collection.mutable.ArrayBuffer[T], private var index: Int, private val until: Int, _sigdel: collection.generic.Signalling)
+ (private val buffer: scala.collection.mutable.ArrayBuffer[T], private var index: Int, private val until: Int, _sigdel: scala.collection.generic.Signalling)
extends IterableSplitter[T] {
signalDelegate = _sigdel
def hasNext = index < until
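As a usage sketch for the task-support plumbing touched in the hunks above (`getTaskSupport`, `setTaskSupport`): both `ForkJoinTaskSupport` and `ThreadPoolTaskSupport` are public, so the default can be overridden per collection. The object name and the pool size of 4 below are arbitrary illustrations.

// Sketch only: overriding the default TaskSupport on one parallel collection (Scala 2.10 API).
import scala.collection.parallel._
import scala.concurrent.forkjoin.ForkJoinPool

object TaskSupportDemo extends App {
  val xs = (1 to 1000).toArray.par
  // Equivalent to xs.tasksupport = ...; setTaskSupport is the helper defined in this package object.
  setTaskSupport(xs, new ForkJoinTaskSupport(new ForkJoinPool(4)))
  println(xs.map(_ * 2).sum)
}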
diff --git a/src/library/scala/compat/Platform.scala b/src/library/scala/compat/Platform.scala
index f18ce12e6c..77c12a8e58 100644
--- a/src/library/scala/compat/Platform.scala
+++ b/src/library/scala/compat/Platform.scala
@@ -109,7 +109,7 @@ object Platform {
* `System.getProperty("line.separator")`
* with a default value of "\n".
*/
- val EOL = util.Properties.lineSeparator
+ val EOL = scala.util.Properties.lineSeparator
/** The current time in milliseconds. The time is counted since 1 January 1970
* UTC.
diff --git a/src/library/scala/concurrent/Awaitable.scala b/src/library/scala/concurrent/Awaitable.scala
index 99bdfbc5a9..655115349a 100644
--- a/src/library/scala/concurrent/Awaitable.scala
+++ b/src/library/scala/concurrent/Awaitable.scala
@@ -16,15 +16,34 @@ import scala.concurrent.util.Duration
trait Awaitable[+T] {
/**
- * Should throw [[scala.concurrent.TimeoutException]] if it times out
+ * Await the "resolved" state of this Awaitable.
* This method should not be called directly.
+ *
+ * @param atMost
+ * maximum wait time, which may be negative (no waiting is done),
+ * [[Duration.Inf]] for unbounded waiting, or a finite positive
+ * duration
+ * @return the Awaitable itself
+ * @throws InterruptedException if the wait call was interrupted
+ * @throws TimeoutException if after waiting for the specified time this Awaitable is still not ready
+ * @throws IllegalArgumentException if `atMost` is [[Duration.Undefined]]
*/
@throws(classOf[TimeoutException])
+ @throws(classOf[InterruptedException])
def ready(atMost: Duration)(implicit permit: CanAwait): this.type
/**
- * Throws exceptions if it cannot produce a T within the specified time.
+ * Await and return the result of this Awaitable, which is either of type T or a thrown exception (any Throwable).
* This method should not be called directly.
+ *
+ * @param atMost
+ * maximum wait time, which may be negative (no waiting is done),
+ * [[Duration.Inf]] for unbounded waiting, or a finite positive
+ * duration
+ * @return the value if the Awaitable was successful within the specific maximum wait time
+ * @throws InterruptedException if the wait call was interrupted
+ * @throws TimeoutException if after waiting for the specified time this Awaitable is still not ready
+ * @throws IllegalArgumentException if `atMost` is [[Duration.Undefined]]
*/
@throws(classOf[Exception])
def result(atMost: Duration)(implicit permit: CanAwait): T
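A minimal sketch, not from the patch, of what the documented contract requires from an implementation that is already resolved; the class name `Resolved` is hypothetical, and real code would go through `scala.concurrent.Await` rather than calling these methods directly.

// Illustrative only: a trivially "resolved" Awaitable honouring the contract above.
import scala.concurrent.{ Awaitable, CanAwait }
import scala.concurrent.util.Duration

final class Resolved[T](value: T) extends Awaitable[T] {
  // Already completed, so neither method needs to wait or throw TimeoutException.
  def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this
  def result(atMost: Duration)(implicit permit: CanAwait): T = value
}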
diff --git a/src/library/scala/concurrent/BlockContext.scala b/src/library/scala/concurrent/BlockContext.scala
index 640560a174..83333a9e94 100644
--- a/src/library/scala/concurrent/BlockContext.scala
+++ b/src/library/scala/concurrent/BlockContext.scala
@@ -8,9 +8,6 @@
package scala.concurrent
-import java.lang.Thread
-import scala.concurrent.util.Duration
-
/**
* A context to be notified by `scala.concurrent.blocking` when
* a thread is about to block. In effect this trait provides
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index 1be6050303..844ec14241 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -10,7 +10,6 @@ package scala.concurrent
import java.util.concurrent.{ ExecutorService, Executor }
-import scala.concurrent.util.Duration
import scala.annotation.implicitNotFound
import scala.util.Try
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index bc0b437a33..111900e7bc 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -8,7 +8,7 @@
package scala.concurrent
-import language.higherKinds
+import scala.language.higherKinds
import java.util.concurrent.{ ConcurrentLinkedQueue, TimeUnit, Callable }
import java.util.concurrent.TimeUnit.{ NANOSECONDS => NANOS, MILLISECONDS ⇒ MILLIS }
@@ -16,7 +16,6 @@ import java.lang.{ Iterable => JIterable }
import java.util.{ LinkedList => JLinkedList }
import java.util.concurrent.atomic.{ AtomicReferenceFieldUpdater, AtomicInteger, AtomicBoolean }
-import scala.concurrent.util.Duration
import scala.util.control.NonFatal
import scala.Option
import scala.util.{Try, Success, Failure}
@@ -213,7 +212,7 @@ trait Future[+T] extends Awaitable[T] {
* this future, or the 'f' function to the failed result. If there is any non-fatal
* exception thrown when 's' or 'f' is applied, that exception will be propagated
* to the resulting future.
- *
+ *
* @param s function that transforms a successful result of the receiver into a
* successful result of the returned future
* @param f function that transforms a failure of the receiver into a failure of
@@ -429,7 +428,7 @@ trait Future[+T] extends Awaitable[T] {
*/
def zip[U](that: Future[U]): Future[(T, U)] = {
val p = Promise[(T, U)]()
-
+
this onComplete {
case f: Failure[_] => p complete f.asInstanceOf[Failure[(T, U)]]
case Success(r) =>
@@ -440,7 +439,7 @@ trait Future[+T] extends Awaitable[T] {
case f => p failure f
}
}
-
+
p.future
}
@@ -556,7 +555,7 @@ trait Future[+T] extends Awaitable[T] {
* Note: using this method yields nondeterministic dataflow programs.
*/
object Future {
-
+
private[concurrent] val toBoxed = Map[Class[_], Class[_]](
classOf[Boolean] -> classOf[java.lang.Boolean],
classOf[Byte] -> classOf[java.lang.Byte],
@@ -570,19 +569,19 @@ object Future {
)
/** Creates an already completed Future with the specified exception.
- *
+ *
* @tparam T the type of the value in the future
* @return the newly created `Future` object
*/
def failed[T](exception: Throwable): Future[T] = Promise.failed(exception).future
/** Creates an already completed Future with the specified result.
- *
+ *
* @tparam T the type of the value in the future
* @return the newly created `Future` object
*/
def successful[T](result: T): Future[T] = Promise.successful(result).future
-
+
/** Starts an asynchronous computation and returns a `Future` object with the result of that computation.
*
* The result becomes available once the asynchronous computation is completed.
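Usage sketch for the factory methods touched in this hunk: `successful` and `failed` build already-completed futures without an ExecutionContext, while `apply` starts a computation on one (here the global context; the values are arbitrary).

// Sketch of the Future factory methods documented above.
import scala.concurrent._
import scala.concurrent.ExecutionContext.Implicits.global

val done: Future[Int]    = Future.successful(42)
val broken: Future[Int]  = Future.failed(new IllegalStateException("boom"))
val started: Future[Int] = Future { 6 * 7 }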
diff --git a/src/library/scala/concurrent/FutureTaskRunner.scala b/src/library/scala/concurrent/FutureTaskRunner.scala
index d7f1e1c2f9..eeadaddb5e 100644
--- a/src/library/scala/concurrent/FutureTaskRunner.scala
+++ b/src/library/scala/concurrent/FutureTaskRunner.scala
@@ -8,7 +8,7 @@
package scala.concurrent
-import language.{implicitConversions, higherKinds}
+import scala.language.{implicitConversions, higherKinds}
The `FutureTaskRunner` trait is a base trait of task runners
* that provide some sort of future abstraction.
diff --git a/src/library/scala/concurrent/JavaConversions.scala b/src/library/scala/concurrent/JavaConversions.scala
index ffb9926fef..f66d64bc3b 100644
--- a/src/library/scala/concurrent/JavaConversions.scala
+++ b/src/library/scala/concurrent/JavaConversions.scala
@@ -9,7 +9,7 @@
package scala.concurrent
import java.util.concurrent.{ExecutorService, Executor}
-import language.implicitConversions
+import scala.language.implicitConversions
/** The `JavaConversions` object provides implicit conversions supporting
* interoperability between Scala and Java concurrency classes.
diff --git a/src/library/scala/concurrent/TaskRunner.scala b/src/library/scala/concurrent/TaskRunner.scala
index 2e11ac42b0..2037c43cf8 100644
--- a/src/library/scala/concurrent/TaskRunner.scala
+++ b/src/library/scala/concurrent/TaskRunner.scala
@@ -8,7 +8,7 @@
package scala.concurrent
-import language.{higherKinds, implicitConversions}
+import scala.language.{higherKinds, implicitConversions}
/** The `TaskRunner` trait...
*
diff --git a/src/library/scala/concurrent/ThreadPoolRunner.scala b/src/library/scala/concurrent/ThreadPoolRunner.scala
index 594555d49b..4b777ba069 100644
--- a/src/library/scala/concurrent/ThreadPoolRunner.scala
+++ b/src/library/scala/concurrent/ThreadPoolRunner.scala
@@ -9,7 +9,7 @@
package scala.concurrent
import java.util.concurrent.{ExecutorService, Callable, TimeUnit}
-import language.implicitConversions
+import scala.language.implicitConversions
/** The `ThreadPoolRunner` trait uses a `java.util.concurrent.ExecutorService`
* to run submitted tasks.
diff --git a/src/library/scala/concurrent/ThreadRunner.scala b/src/library/scala/concurrent/ThreadRunner.scala
index ab709e0210..067269a911 100644
--- a/src/library/scala/concurrent/ThreadRunner.scala
+++ b/src/library/scala/concurrent/ThreadRunner.scala
@@ -9,7 +9,7 @@
package scala.concurrent
import java.lang.Thread
-import language.implicitConversions
+import scala.language.implicitConversions
/** The `ThreadRunner` trait...
*
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
index 875a558887..c517a05a81 100644
--- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -14,7 +14,6 @@ import java.util.concurrent.{ LinkedBlockingQueue, Callable, Executor, ExecutorS
import java.util.Collection
import scala.concurrent.forkjoin._
import scala.concurrent.{ BlockContext, ExecutionContext, Awaitable, CanAwait, ExecutionContextExecutor, ExecutionContextExecutorService }
-import scala.concurrent.util.Duration
import scala.util.control.NonFatal
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index b19bed004b..9228872f2b 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -12,7 +12,7 @@ package scala.concurrent.impl
import java.util.concurrent.TimeUnit.NANOSECONDS
import scala.concurrent.{ ExecutionContext, CanAwait, OnCompleteRunnable, TimeoutException, ExecutionException }
-import scala.concurrent.util.Duration
+import scala.concurrent.util.{ Duration, Deadline, FiniteDuration }
import scala.annotation.tailrec
import scala.util.control.NonFatal
import scala.util.{ Try, Success, Failure }
@@ -64,30 +64,40 @@ private[concurrent] object Promise {
protected final def tryAwait(atMost: Duration): Boolean = {
@tailrec
- def awaitUnsafe(waitTimeNanos: Long): Boolean = {
- if (value.isEmpty && waitTimeNanos > 0) {
- val ms = NANOSECONDS.toMillis(waitTimeNanos)
- val ns = (waitTimeNanos % 1000000l).toInt // as per object.wait spec
- val start = System.nanoTime()
- try {
- synchronized {
- if (!isCompleted) wait(ms, ns) // previously - this was a `while`, ending up in an infinite loop
- }
- } catch {
- case e: InterruptedException =>
- }
+ def awaitUnsafe(deadline: Deadline, nextWait: FiniteDuration): Boolean = {
+ if (!isCompleted && nextWait > Duration.Zero) {
+ val ms = nextWait.toMillis
+ val ns = (nextWait.toNanos % 1000000l).toInt // as per object.wait spec
+
+ synchronized { if (!isCompleted) wait(ms, ns) }
- awaitUnsafe(waitTimeNanos - (System.nanoTime() - start))
+ awaitUnsafe(deadline, deadline.timeLeft)
} else
isCompleted
}
- awaitUnsafe(if (atMost.isFinite) atMost.toNanos else Long.MaxValue)
+ @tailrec
+ def awaitUnbounded(): Boolean = {
+ if (isCompleted) true
+ else {
+ synchronized { if (!isCompleted) wait() }
+ awaitUnbounded()
+ }
+ }
+
+ import Duration.Undefined
+ atMost match {
+ case u if u eq Undefined => throw new IllegalArgumentException("cannot wait for Undefined period")
+ case Duration.Inf => awaitUnbounded
+ case Duration.MinusInf => isCompleted
+ case f: FiniteDuration => if (f > Duration.Zero) awaitUnsafe(f.fromNow, f) else isCompleted
+ }
}
@throws(classOf[TimeoutException])
+ @throws(classOf[InterruptedException])
def ready(atMost: Duration)(implicit permit: CanAwait): this.type =
if (isCompleted || tryAwait(atMost)) this
- else throw new TimeoutException("Futures timed out after [" + atMost.toMillis + "] milliseconds")
+ else throw new TimeoutException("Futures timed out after [" + atMost + "]")
@throws(classOf[Exception])
def result(atMost: Duration)(implicit permit: CanAwait): T =
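The rewrite above replaces manual nanosecond bookkeeping with a `Deadline` that is sampled once and re-queried via `timeLeft` after every wakeup. A standalone sketch of the same pattern; `awaitCondition` is a hypothetical helper, not library code.

// Sketch of deadline-based re-waiting on a monitor, mirroring awaitUnsafe above.
import scala.concurrent.util.{ Duration, FiniteDuration }

def awaitCondition(isDone: => Boolean, atMost: FiniteDuration, lock: AnyRef): Boolean = {
  val deadline = atMost.fromNow
  @annotation.tailrec
  def loop(left: FiniteDuration): Boolean =
    if (isDone || left <= Duration.Zero) isDone
    else {
      // Wait at most what is left; recompute the budget from the deadline after waking up.
      lock.synchronized { if (!isDone) lock.wait(left.toMillis, (left.toNanos % 1000000L).toInt) }
      loop(deadline.timeLeft)
    }
  loop(atMost)
}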
diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala
index a2ef42fac8..1d06341d4d 100644
--- a/src/library/scala/concurrent/package.scala
+++ b/src/library/scala/concurrent/package.scala
@@ -67,26 +67,39 @@ package concurrent {
*/
object Await {
/**
+ * Await the "resolved" state of this Awaitable.
* Invokes ready() on the awaitable, properly wrapped by a call to `scala.concurrent.blocking`.
- * ready() blocks until the awaitable has completed or the timeout expires.
*
- * Throws a TimeoutException if the timeout expires, as that is in the contract of `Awaitable.ready`.
- * @param awaitable the `Awaitable` on which `ready` is to be called
- * @param atMost the maximum timeout for which to wait
- * @return the result of `awaitable.ready` which is defined to be the awaitable itself.
+ * @param awaitable
+ * the `Awaitable` on which `ready` is to be called
+ * @param atMost
+ * maximum wait time, which may be negative (no waiting is done),
+ * [[Duration.Inf]] for unbounded waiting, or a finite positive
+ * duration
+ * @return the awaitable itself
+ * @throws InterruptedException if the wait call was interrupted
+ * @throws TimeoutException if after waiting for the specified time this Awaitable is still not ready
+ * @throws IllegalArgumentException if `atMost` is [[Duration.Undefined]]
*/
@throws(classOf[TimeoutException])
+ @throws(classOf[InterruptedException])
def ready[T](awaitable: Awaitable[T], atMost: Duration): awaitable.type =
blocking(awaitable.ready(atMost)(AwaitPermission))
/**
+ * Await and return the result of this Awaitable, which is either of type T or a thrown exception (any Throwable).
* Invokes result() on the awaitable, properly wrapped by a call to `scala.concurrent.blocking`.
- * result() blocks until the awaitable has completed or the timeout expires.
*
- * Throws a TimeoutException if the timeout expires, or any exception thrown by `Awaitable.result`.
- * @param awaitable the `Awaitable` on which `result` is to be called
- * @param atMost the maximum timeout for which to wait
- * @return the result of `awaitable.result`
+ * @param awaitable
+ * the `Awaitable` on which `result` is to be called
+ * @param atMost
+ * maximum wait time, which may be negative (no waiting is done),
+ * [[Duration.Inf]] for unbounded waiting, or a finite positive
+ * duration
+ * @return the value if the Awaitable was successful within the specific maximum wait time
+ * @throws InterruptedException if the wait call was interrupted
+ * @throws TimeoutException if after waiting for the specified time this Awaitable is still not ready
+ * @throws IllegalArgumentException if `atMost` is [[Duration.Undefined]]
*/
@throws(classOf[Exception])
def result[T](awaitable: Awaitable[T], atMost: Duration): T =
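A short usage sketch for the two entry points documented above; it assumes the global ExecutionContext, and the timeout values are arbitrary.

// Sketch: blocking on a Future via Await, per the contract documented above.
import scala.concurrent._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.util.Duration

val f = Future { 1 + 1 }
Await.ready(f, Duration("5 seconds"))        // returns f itself, or throws TimeoutException
val n: Int = Await.result(f, Duration.Inf)   // unbounded wait; returns the value or rethrows the failure
// Await.result(f, Duration.Undefined) would throw IllegalArgumentException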
diff --git a/src/library/scala/concurrent/util/Duration.scala b/src/library/scala/concurrent/util/Duration.scala
index bab664727e..3f8b98831e 100644
--- a/src/library/scala/concurrent/util/Duration.scala
+++ b/src/library/scala/concurrent/util/Duration.scala
@@ -10,212 +10,376 @@ package scala.concurrent.util
import java.util.concurrent.TimeUnit
import TimeUnit._
-import java.lang.{ Double => JDouble }
-import language.implicitConversions
-
-case class Deadline private (time: Duration) {
- def +(other: Duration): Deadline = copy(time = time + other)
- def -(other: Duration): Deadline = copy(time = time - other)
- def -(other: Deadline): Duration = time - other.time
- def timeLeft: Duration = this - Deadline.now
+import java.lang.{ Double => JDouble, Long => JLong }
+import scala.language.implicitConversions
+
+/**
+ * This class stores a deadline, as obtained via `Deadline.now` or the
+ * duration DSL:
+ *
+ * {{{
+ * import scala.concurrent.util.duration._
+ * 3.seconds.fromNow
+ * }}}
+ *
+ * Its main purpose is to manage repeated attempts to achieve something (like
+ * awaiting a condition) by offering the methods `hasTimeLeft` and `timeLeft`. All
+ * durations are measured according to `System.nanoTime` aka wall-time; this
+ * does not take into account changes to the system clock (such as leap
+ * seconds).
+ */
+case class Deadline private (time: FiniteDuration) extends Ordered[Deadline] {
+ /**
+ * Return a deadline advanced (i.e. moved into the future) by the given duration.
+ */
+ def +(other: FiniteDuration): Deadline = copy(time = time + other)
+ /**
+ * Return a deadline moved backwards (i.e. towards the past) by the given duration.
+ */
+ def -(other: FiniteDuration): Deadline = copy(time = time - other)
+ /**
+ * Calculate time difference between this and the other deadline, where the result is directed (i.e. may be negative).
+ */
+ def -(other: Deadline): FiniteDuration = time - other.time
+ /**
+ * Calculate time difference between this duration and now; the result is negative if the deadline has passed.
+ *
+ * '''''Note that on some systems this operation is costly because it entails a system call.'''''
+ * Check `System.nanoTime` for your platform.
+ */
+ def timeLeft: FiniteDuration = this - Deadline.now
+ /**
+ * Determine whether the deadline still lies in the future at the point where this method is called.
+ *
+ * '''''Note that on some systems this operation is costly because it entails a system call.'''''
+ * Check `System.nanoTime` for your platform.
+ */
def hasTimeLeft(): Boolean = !isOverdue()
+ /**
+ * Determine whether the deadline lies in the past at the point where this method is called.
+ *
+ * '''''Note that on some systems this operation is costly because it entails a system call.'''''
+ * Check `System.nanoTime` for your platform.
+ */
def isOverdue(): Boolean = (time.toNanos - System.nanoTime()) < 0
+ /**
+ * The natural ordering for deadline is determined by the natural order of the underlying (finite) duration.
+ */
+ def compare(other: Deadline) = time compare other.time
}
object Deadline {
+ /**
+ * Construct a deadline due exactly at the point where this method is called. Useful for then
+ * advancing it to obtain a future deadline, or for sampling the current time exactly once and
+ * then comparing it to multiple deadlines (using subtraction).
+ */
def now: Deadline = Deadline(Duration(System.nanoTime, NANOSECONDS))
+
+ /**
+ * The natural ordering for deadline is determined by the natural order of the underlying (finite) duration.
+ */
+ implicit object DeadlineIsOrdered extends Ordering[Deadline] {
+ def compare(a: Deadline, b: Deadline) = a compare b
+ }
}
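A usage sketch following the scaladoc above; the retried operation is left abstract and the 50 ms back-off is an arbitrary choice.

// Sketch: sample the clock once, then track the remaining budget through the deadline.
import scala.concurrent.util.duration._

val deadline = 3.seconds.fromNow
while (deadline.hasTimeLeft()) {
  // ... attempt the operation, giving it at most deadline.timeLeft ...
  Thread.sleep(50)   // back off before retrying (arbitrary interval)
}
if (deadline.isOverdue()) println("gave up after 3 seconds")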
object Duration {
+ /**
+ * This implicit conversion allows the use of a Deadline in place of a Duration, which will
+ * insert the time left until the deadline in its place.
+ */
implicit def timeLeft(implicit d: Deadline): Duration = d.timeLeft
+ /**
+ * Construct a Duration from the given length and unit. Observe that nanosecond precision may be lost if
+ *
+ * - the unit is NANOSECONDS
+ * - and the length has an absolute value greater than 2^53
+ *
+ * Infinite inputs (and NaN) are converted into [[Duration.Inf]], [[Duration.MinusInf]] and [[Duration.Undefined]], respectively.
+ *
+ * @throws IllegalArgumentException if the length was finite but the resulting duration cannot be expressed as a [[FiniteDuration]]
+ */
+ def apply(length: Double, unit: TimeUnit): Duration = fromNanos(unit.toNanos(1) * length)
+ /**
+ * Construct a finite duration from the given length and time unit. The unit given is retained
+ * throughout calculations as long as possible, so that it can be retrieved later.
+ */
def apply(length: Long, unit: TimeUnit): FiniteDuration = new FiniteDuration(length, unit)
- def apply(length: Double, unit: TimeUnit): FiniteDuration = fromNanos(unit.toNanos(1) * length)
- def apply(length: Long, unit: String): FiniteDuration = new FiniteDuration(length, Duration.timeUnit(unit))
-
/**
- * Construct a Duration by parsing a String. In case of a format error, a
- * RuntimeException is thrown. See `unapply(String)` for more information.
+ * Construct a finite duration from the given length and time unit, where the latter is
+ * looked up in a list of string representation. Valid choices are:
+ *
+ * `d, day, h, hour, min, minute, s, sec, second, ms, milli, millisecond, µs, micro, microsecond, ns, nano, nanosecond`
+ * and their pluralized forms (for every form except the first one listed for each unit, i.e. no "ds", but "days").
*/
- def apply(s: String): Duration = unapply(s) getOrElse sys.error("format error " + s)
+ def apply(length: Long, unit: String): FiniteDuration = new FiniteDuration(length, Duration.timeUnit(unit))
- private val RE = ("""^\s*([\+|-]?\d+(?:\.\d+)?)\s*""" + // length part
- "(?:" + // units are distinguished in separate match groups
- "(d|day|days)|" +
- "(h|hour|hours)|" +
- "(min|minute|minutes)|" +
- "(s|sec|second|seconds)|" +
- "(ms|milli|millis|millisecond|milliseconds)|" +
- "(µs|micro|micros|microsecond|microseconds)|" +
- "(ns|nano|nanos|nanosecond|nanoseconds)" +
- """)\s*$""").r // close the non-capturing group
- private val REinf = """^\s*(?:\+|Plus)?Inf\s*$""".r
- private val REminf = """^\s*(?:-|Minus)Inf\s*""".r
+ // Double stores 52 bits mantissa, but there is an implied '1' in front, making the limit 2^53
+ final val maxPreciseDouble = 9007199254740992d
/**
- * Deconstruct a Duration into `Long` length and [[java.util.concurrent.TimeUnit]] if it is a
- * [[scala.util.concurrent.FiniteDuration]].
+ * Parse String into Duration. Format is `"<length><unit>"`, where
+ * whitespace is allowed before, between and after the parts. Infinities are
+ * designated by `"Inf"`, `"PlusInf"`, `"+Inf"` and `"-Inf"` or `"MinusInf"`.
*
- * @param d Duration to be deconstructed.
+ * @throws NumberFormatException if format is not parseable
*/
- def unapply(d: Duration): Option[(Long, TimeUnit)] = {
- if (d.finite_?) {
- Some((d.length, d.unit))
- } else {
- None
+ def apply(s: String): Duration = {
+ val s1: String = s filterNot (_.isWhitespace)
+ s1 match {
+ case "Inf" | "PlusInf" | "+Inf" => Inf
+ case "MinusInf" | "-Inf" => MinusInf
+ case _ =>
+ val unitName = s1.reverse takeWhile (_.isLetter) reverse;
+ timeUnit get unitName match {
+ case Some(unit) =>
+ val valueStr = s1 dropRight unitName.length
+ val valueD = JDouble.parseDouble(valueStr)
+ if (valueD >= -maxPreciseDouble && valueD <= maxPreciseDouble) Duration(valueD, unit)
+ else Duration(JLong.parseLong(valueStr), unit)
+ case _ => throw new NumberFormatException("format error " + s)
+ }
}
}
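Parsing examples for the format accepted by `apply(String)` above, resolved against the unit lookup tables that follow; the value names are arbitrary.

// Sketch: string parsing per apply(String) above.
import scala.concurrent.util.Duration

val a = Duration("1.2 µs")      // 1200 nanoseconds (finite)
val b = Duration("100 millis")  // 100 milliseconds
val c = Duration("Inf")         // Duration.Inf
// Duration("bogus") would throw NumberFormatException: no unit named "bogus"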
+ // "ms milli millisecond" -> List("ms", "milli", "millis", "millisecond", "milliseconds")
+ private[this] def words(s: String) = (s.trim split "\\s+").toList
+ private[this] def expandLabels(labels: String): List[String] = {
+ val hd :: rest = words(labels)
+ hd :: rest.flatMap(s => List(s, s + "s"))
+ }
+ private[this] val timeUnitLabels = List(
+ DAYS -> "d day",
+ HOURS -> "h hour",
+ MINUTES -> "min minute",
+ SECONDS -> "s sec second",
+ MILLISECONDS -> "ms milli millisecond",
+ MICROSECONDS -> "µs micro microsecond",
+ NANOSECONDS -> "ns nano nanosecond"
+ )
+
+ // TimeUnit => standard label
+ protected[util] val timeUnitName: Map[TimeUnit, String] =
+ timeUnitLabels.toMap mapValues (s => words(s).last) toMap
+
+ // Label => TimeUnit
+ protected[util] val timeUnit: Map[String, TimeUnit] =
+ timeUnitLabels flatMap { case (unit, names) => expandLabels(names) map (_ -> unit) } toMap
+
/**
- * Parse String, return None if no match. Format is `"<length><unit>"`, where
- * whitespace is allowed before, between and after the parts. Infinities are
- * designated by `"Inf"`, `"PlusInf"`, `"+Inf"` and `"-Inf"` or `"MinusInf"`.
+ * Extract length and time unit out of a string, where the format must match the description for [[Duration$.apply(String):Duration apply(String)]].
+ * The extractor will not match for malformed strings or non-finite durations.
*/
- def unapply(s: String): Option[Duration] = s match {
- case RE(length, d, h, m, s, ms, mus, ns) ⇒
- if (d ne null)
- Some(Duration(JDouble.parseDouble(length), DAYS))
- else if (h ne null)
- Some(Duration(JDouble.parseDouble(length), HOURS))
- else if (m ne null)
- Some(Duration(JDouble.parseDouble(length), MINUTES))
- else if (s ne null)
- Some(Duration(JDouble.parseDouble(length), SECONDS))
- else if (ms ne null)
- Some(Duration(JDouble.parseDouble(length), MILLISECONDS))
- else if (mus ne null)
- Some(Duration(JDouble.parseDouble(length), MICROSECONDS))
- else if (ns ne null)
- Some(Duration(JDouble.parseDouble(length), NANOSECONDS))
- else
- sys.error("made some error in regex (should not be possible)")
- case REinf() ⇒ Some(Inf)
- case REminf() ⇒ Some(MinusInf)
- case _ ⇒ None
+ def unapply(s: String): Option[(Long, TimeUnit)] =
+ ( try Some(apply(s)) catch { case _: RuntimeException => None } ) flatMap unapply
+
+ /**
+ * Extract length and time unit out of a duration, if it is finite.
+ */
+ def unapply(d: Duration): Option[(Long, TimeUnit)] =
+ if (d.isFinite) Some((d.length, d.unit)) else None
+
+ /**
+ * Construct a possibly infinite or undefined Duration from the given number of nanoseconds.
+ *
+ * - `Double.PositiveInfinity` is mapped to [[Duration.Inf]]
+ * - `Double.NegativeInfinity` is mapped to [[Duration.MinusInf]]
+ * - `Double.NaN` is mapped to [[Duration.Undefined]]
+ * - `-0d` is mapped to [[Duration.Zero]] (exactly like `0d`)
+ *
+ * The semantics of the resulting Duration objects matches the semantics of their Double
+ * counterparts with respect to arithmetic operations.
+ *
+ * @throws IllegalArgumentException if the length was finite but the resulting duration cannot be expressed as a [[FiniteDuration]]
+ */
+ def fromNanos(nanos: Double): Duration = {
+ if (nanos.isInfinite)
+ if (nanos > 0) Inf else MinusInf
+ else if (nanos.isNaN)
+ Undefined
+ else if (nanos > Long.MaxValue || nanos < Long.MinValue)
+ throw new IllegalArgumentException("trying to construct too large duration with " + nanos + "ns")
+ else
+ fromNanos((nanos + 0.5).toLong)
}
- def fromNanos(nanos: Double): FiniteDuration =
- fromNanos((nanos + 0.5).asInstanceOf[Long])
+ private[this] final val µs_per_ns = 1000L
+ private[this] final val ms_per_ns = µs_per_ns * 1000
+ private[this] final val s_per_ns = ms_per_ns * 1000
+ private[this] final val min_per_ns = s_per_ns * 60
+ private[this] final val h_per_ns = min_per_ns * 60
+ private[this] final val d_per_ns = h_per_ns * 24
+ /**
+ * Construct a finite duration from the given number of nanoseconds. The
+ * result will have the coarsest possible time unit which can exactly express
+ * this duration.
+ *
+ * @throws IllegalArgumentException for `Long.MinValue` since that would lead to inconsistent behavior afterwards (cannot be negated)
+ */
def fromNanos(nanos: Long): FiniteDuration = {
- if (nanos % 86400000000000L == 0) {
- Duration(nanos / 86400000000000L, DAYS)
- } else if (nanos % 3600000000000L == 0) {
- Duration(nanos / 3600000000000L, HOURS)
- } else if (nanos % 60000000000L == 0) {
- Duration(nanos / 60000000000L, MINUTES)
- } else if (nanos % 1000000000L == 0) {
- Duration(nanos / 1000000000L, SECONDS)
- } else if (nanos % 1000000L == 0) {
- Duration(nanos / 1000000L, MILLISECONDS)
- } else if (nanos % 1000L == 0) {
- Duration(nanos / 1000L, MICROSECONDS)
- } else {
- Duration(nanos, NANOSECONDS)
- }
+ if (nanos % d_per_ns == 0) Duration(nanos / d_per_ns, DAYS)
+ else if (nanos % h_per_ns == 0) Duration(nanos / h_per_ns, HOURS)
+ else if (nanos % min_per_ns == 0) Duration(nanos / min_per_ns, MINUTES)
+ else if (nanos % s_per_ns == 0) Duration(nanos / s_per_ns, SECONDS)
+ else if (nanos % ms_per_ns == 0) Duration(nanos / ms_per_ns, MILLISECONDS)
+ else if (nanos % µs_per_ns == 0) Duration(nanos / µs_per_ns, MICROSECONDS)
+ else Duration(nanos, NANOSECONDS)
}
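A couple of worked values for `fromNanos(Long)` and its coarsest-exact-unit rule; the value names are arbitrary.

// Sketch: fromNanos picks the coarsest unit that still represents the value exactly.
import scala.concurrent.util.Duration
import java.util.concurrent.TimeUnit._

val oneMinute = Duration.fromNanos(60L * 1000 * 1000 * 1000)  // == Duration(1, MINUTES)
val odd       = Duration.fromNanos(1500L)                     // == Duration(1500, NANOSECONDS)
assert(oneMinute == Duration(1, MINUTES) && odd.unit == NANOSECONDS)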
/**
- * Parse TimeUnit from string representation.
+ * Preconstructed value of `0.days`.
*/
- protected[util] def timeUnit(unit: String): TimeUnit = unit.toLowerCase match {
- case "d" | "day" | "days" => DAYS
- case "h" | "hour" | "hours" => HOURS
- case "min" | "minute" | "minutes" => MINUTES
- case "s" | "sec" | "second" | "seconds" => SECONDS
- case "ms" | "milli" | "millis" | "millisecond" | "milliseconds" => MILLISECONDS
- case "µs" | "micro" | "micros" | "microsecond" | "microseconds" => MICROSECONDS
- case "ns" | "nano" | "nanos" | "nanosecond" | "nanoseconds" => NANOSECONDS
- }
+ // unit as coarse as possible to keep (_ + Zero) sane unit-wise
+ val Zero: FiniteDuration = new FiniteDuration(0, DAYS)
- val Zero: FiniteDuration = new FiniteDuration(0, NANOSECONDS)
- val Undefined: Duration = new Duration with Infinite {
+ /**
+ * The Undefined value corresponds closely to Double.NaN:
+ *
+ * - it is the result of otherwise invalid operations
+ * - it does not equal itself (according to `equals()`)
+ * - it compares greater than any other Duration apart from itself (for which `compare` returns 0)
+ *
+ * The particular comparison semantics mirror those of Double.NaN.
+ *
+ * '''''Use `eq` when checking an input of a method against this value.'''''
+ */
+ val Undefined: Infinite = new Infinite {
override def toString = "Duration.Undefined"
- override def equals(other: Any) = other.asInstanceOf[AnyRef] eq this
- override def +(other: Duration): Duration = throw new IllegalArgumentException("cannot add Undefined duration")
- override def -(other: Duration): Duration = throw new IllegalArgumentException("cannot subtract Undefined duration")
- override def *(factor: Double): Duration = throw new IllegalArgumentException("cannot multiply Undefined duration")
- override def /(factor: Double): Duration = throw new IllegalArgumentException("cannot divide Undefined duration")
- override def /(other: Duration): Double = throw new IllegalArgumentException("cannot divide Undefined duration")
- def compare(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration")
- def unary_- : Duration = throw new IllegalArgumentException("cannot negate Undefined duration")
+ override def equals(other: Any) = false
+ override def +(other: Duration): Duration = this
+ override def -(other: Duration): Duration = this
+ override def *(factor: Double): Duration = this
+ override def /(factor: Double): Duration = this
+ override def /(other: Duration): Double = Double.NaN
+ def compare(other: Duration) = if (other eq this) 0 else 1
+ def unary_- : Duration = this
+ def toUnit(unit: TimeUnit): Double = Double.NaN
}
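Consequences of the NaN-like semantics spelled out above, as a small sketch:

// Sketch: Undefined never equals anything (not even itself); test identity with eq.
import scala.concurrent.util.Duration

val u = Duration.Undefined
assert(!(u == u))                 // equals is always false, mirroring Double.NaN
assert(u eq Duration.Undefined)   // identity is the reliable check
assert(u > Duration.Inf)          // Undefined compares above every other duration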
- trait Infinite {
- this: Duration =>
-
- def +(other: Duration): Duration =
- other match {
- case _: this.type => this
- case _: Infinite => throw new IllegalArgumentException("illegal addition of infinities")
- case _ => this
- }
- def -(other: Duration): Duration =
- other match {
- case _: this.type => throw new IllegalArgumentException("illegal subtraction of infinities")
- case _ => this
- }
- def *(factor: Double): Duration = this
- def /(factor: Double): Duration = this
- def /(other: Duration): Double =
- other match {
- case _: Infinite => throw new IllegalArgumentException("illegal division of infinities")
- // maybe questionable but pragmatic: Inf / 0 => Inf
- case x => Double.PositiveInfinity * (if ((this > Zero) ^ (other >= Zero)) -1 else 1)
- }
-
- def finite_? = false
-
- def length: Long = throw new IllegalArgumentException("length not allowed on infinite Durations")
- def unit: TimeUnit = throw new IllegalArgumentException("unit not allowed on infinite Durations")
- def toNanos: Long = throw new IllegalArgumentException("toNanos not allowed on infinite Durations")
- def toMicros: Long = throw new IllegalArgumentException("toMicros not allowed on infinite Durations")
- def toMillis: Long = throw new IllegalArgumentException("toMillis not allowed on infinite Durations")
- def toSeconds: Long = throw new IllegalArgumentException("toSeconds not allowed on infinite Durations")
- def toMinutes: Long = throw new IllegalArgumentException("toMinutes not allowed on infinite Durations")
- def toHours: Long = throw new IllegalArgumentException("toHours not allowed on infinite Durations")
- def toDays: Long = throw new IllegalArgumentException("toDays not allowed on infinite Durations")
- def toUnit(unit: TimeUnit): Double = throw new IllegalArgumentException("toUnit not allowed on infinite Durations")
+ sealed abstract class Infinite extends Duration {
+ def +(other: Duration): Duration = other match {
+ case x if x eq Undefined => Undefined
+ case x: Infinite if x ne this => Undefined
+ case _ => this
+ }
+ def -(other: Duration): Duration = other match {
+ case x if x eq Undefined => Undefined
+ case x: Infinite if x eq this => Undefined
+ case _ => this
+ }
+
+ def *(factor: Double): Duration =
+ if (factor == 0d || factor.isNaN) Undefined
+ else if (factor < 0d) -this
+ else this
+ def /(factor: Double): Duration =
+ if (factor.isNaN || factor.isInfinite) Undefined
+ else if ((factor compare 0d) < 0) -this
+ else this
+ def /(other: Duration): Double = other match {
+ case _: Infinite => Double.NaN
+ case x => Double.PositiveInfinity * (if ((this > Zero) ^ (other >= Zero)) -1 else 1)
+ }
+ final def isFinite() = false
+
+ private[this] def fail(what: String) = throw new IllegalArgumentException(s"$what not allowed on infinite Durations")
+ final def length: Long = fail("length")
+ final def unit: TimeUnit = fail("unit")
+ final def toNanos: Long = fail("toNanos")
+ final def toMicros: Long = fail("toMicros")
+ final def toMillis: Long = fail("toMillis")
+ final def toSeconds: Long = fail("toSeconds")
+ final def toMinutes: Long = fail("toMinutes")
+ final def toHours: Long = fail("toHours")
+ final def toDays: Long = fail("toDays")
}
/**
- * Infinite duration: greater than any other and not equal to any other,
- * including itself.
+ * Infinite duration: greater than any other (apart from Undefined) and not equal to any other
+ * but itself. This value closely corresponds to Double.PositiveInfinity,
+ * matching its semantics in arithmetic operations.
*/
- val Inf: Duration = new Duration with Infinite {
+ val Inf: Infinite = new Infinite {
override def toString = "Duration.Inf"
- def compare(other: Duration) = if (other eq this) 0 else 1
+ def compare(other: Duration) = other match {
+ case x if x eq Undefined => -1 // Undefined != Undefined
+ case x if x eq this => 0 // `case Inf` will include null checks in the byte code
+ case _ => 1
+ }
def unary_- : Duration = MinusInf
+ def toUnit(unit: TimeUnit): Double = Double.PositiveInfinity
}
/**
- * Infinite negative duration: lesser than any other and not equal to any other,
- * including itself.
+ * Infinite duration: less than any other and not equal to any other
+ * but itself. This value closely corresponds to Double.NegativeInfinity,
+ * matching its semantics in arithmetic operations.
*/
- val MinusInf: Duration = new Duration with Infinite {
+ val MinusInf: Infinite = new Infinite {
override def toString = "Duration.MinusInf"
def compare(other: Duration) = if (other eq this) 0 else -1
def unary_- : Duration = Inf
+ def toUnit(unit: TimeUnit): Double = Double.NegativeInfinity
}
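Arithmetic on the infinite values follows the Double-like rules encoded in `Infinite` above; a small sketch (the `1.second` DSL is the one shown in the Deadline scaladoc):

// Sketch: infinities absorb finite operands; mixing opposite infinities yields Undefined.
import scala.concurrent.util.Duration
import scala.concurrent.util.duration._

assert((Duration.Inf + 1.second) == Duration.Inf)
assert((Duration.Inf - Duration.Inf) eq Duration.Undefined)
assert((Duration.Inf + Duration.MinusInf) eq Duration.Undefined)
assert(Duration.MinusInf < Duration.Zero)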
// Java Factories
+
+ /**
+ * Construct a finite duration from the given length and time unit. The unit given is retained
+ * throughout calculations as long as possible, so that it can be retrieved later.
+ */
def create(length: Long, unit: TimeUnit): FiniteDuration = apply(length, unit)
- def create(length: Double, unit: TimeUnit): FiniteDuration = apply(length, unit)
- def create(length: Long, unit: String): FiniteDuration = apply(length, unit)
- def parse(s: String): Duration = unapply(s).get
+ /**
+ * Construct a Duration from the given length and unit. Observe that nanosecond precision may be lost if
+ *
+ * - the unit is NANOSECONDS
+ * - and the length has an absolute value greater than 2^53
+ *
+ * Infinite inputs (and NaN) are converted into [[Duration.Inf]], [[Duration.MinusInf]] and [[Duration.Undefined]], respectively.
+ *
+ * @throws IllegalArgumentException if the length was finite but the resulting duration cannot be expressed as a [[FiniteDuration]]
+ */
+ def create(length: Double, unit: TimeUnit): Duration = apply(length, unit)
+ /**
+ * Construct a finite duration from the given length and time unit, where the latter is
+ * looked up in a list of string representation. Valid choices are:
+ *
+ * `d, day, h, hour, min, minute, s, sec, second, ms, milli, millisecond, µs, micro, microsecond, ns, nano, nanosecond`
+ * and their pluralized forms (for every form except the first one listed for each unit, i.e. no "ds", but "days").
+ */
+ def create(length: Long, unit: String): FiniteDuration = apply(length, unit)
+ /**
+ * Parse String into Duration. Format is `"<length><unit>"`, where
+ * whitespace is allowed before, between and after the parts. Infinities are
+ * designated by `"Inf"`, `"PlusInf"`, `"+Inf"` and `"-Inf"` or `"MinusInf"`.
+ *
+ * @throws NumberFormatException if format is not parseable
+ */
+ def create(s: String): Duration = apply(s)
+ /**
+ * The natural ordering of durations matches the natural ordering for Double, including non-finite values.
+ */
implicit object DurationIsOrdered extends Ordering[Duration] {
def compare(a: Duration, b: Duration) = a compare b
}
}
/**
- * Utility for working with java.util.concurrent.TimeUnit durations.
+ * <h2>Utility for working with java.util.concurrent.TimeUnit durations.</h2>
+ *
+ * '''''This class is not meant as a general purpose representation of time, it is
+ * optimized for the needs of `scala.concurrent`.'''''
+ *
+ * <h2>Basic Usage</h2>
*
* <p/>
* Examples:
- * <pre>
+ * {{{
* import scala.concurrent.util.Duration
* import java.util.concurrent.TimeUnit
*
@@ -225,59 +389,213 @@ object Duration {
* duration.toNanos
* duration < 1.second
* duration <= Duration.Inf
- * </pre>
+ * }}}
+ *
+ * '''''Invoking inexpressible conversions (like calling `toSeconds` on an infinite duration) will throw an IllegalArgumentException.'''''
*
* <p/>
* Implicits are also provided for Int, Long and Double. Example usage:
- * <pre>
+ * {{{
* import scala.concurrent.util.Duration._
*
* val duration = 100 millis
- * </pre>
+ * }}}
+ *
+ * '''''The DSL provided by the implicit conversions always allows construction of finite durations, even for infinite Double inputs; use Duration.Inf instead.'''''
*
* Extractors, parsing and arithmetic are also included:
- * <pre>
+ * {{{
* val d = Duration("1.2 µs")
* val Duration(length, unit) = 5 millis
* val d2 = d * 2.5
* val d3 = d2 + 1.millisecond
- * </pre>
+ * }}}
+ *
+ * <h2>Handling of Time Units</h2>
+ *
+ * Calculations performed on finite durations always retain the more precise unit of either operand, no matter
+ * whether a coarser unit would be able to exactly express the same duration. This means that Duration can be
+ * used as a lossless container for a (length, unit) pair if it is constructed using the corresponding methods
+ * and no arithmetic is performed on it; adding/subtracting durations should in that case be done with care.
+ *
+ * <h2>Correspondence to Double Semantics</h2>
+ *
+ * The semantics of arithmetic operations on Duration are two-fold:
+ *
+ * - exact addition/subtraction with nanosecond resolution for finite durations, independent of the summands' magnitude
+ * - isomorphic to `java.lang.Double` when it comes to infinite or undefined values
+ *
+ * The conversion between Duration and Double is done using [[Duration.toUnit]] (with unit NANOSECONDS)
+ * and [[Duration$.fromNanos(Double):Duration Duration.fromNanos(Double)]].
+ *
+ * <h2>Ordering</h2>
+ *
+ * The default ordering is consistent with the ordering of Double numbers, which means that Undefined is
+ * considered greater than all other durations, including [[Duration.Inf]].
+ *
+ * @define exc @throws IllegalArgumentException when invoked on a non-finite duration
+ *
+ * @define ovf @throws IllegalArgumentException in case of a finite overflow: the range of a finite duration is +-(2^63-1)ns, and no conversion to infinite durations takes place.
*/
-abstract class Duration extends Serializable with Ordered[Duration] {
+sealed abstract class Duration extends Serializable with Ordered[Duration] {
+ /**
+ * Obtain the length of this Duration measured in the unit obtained by the `unit` method.
+ *
+ * $exc
+ */
def length: Long
+ /**
+ * Obtain the time unit in which the length of this duration is measured.
+ *
+ * $exc
+ */
def unit: TimeUnit
+ /**
+ * Return the length of this duration measured in whole nanoseconds, rounding towards zero.
+ *
+ * $exc
+ */
def toNanos: Long
+ /**
+ * Return the length of this duration measured in whole microseconds, rounding towards zero.
+ *
+ * $exc
+ */
def toMicros: Long
+ /**
+ * Return the length of this duration measured in whole milliseconds, rounding towards zero.
+ *
+ * $exc
+ */
def toMillis: Long
+ /**
+ * Return the length of this duration measured in whole seconds, rounding towards zero.
+ *
+ * $exc
+ */
def toSeconds: Long
+ /**
+ * Return the length of this duration measured in whole minutes, rounding towards zero.
+ *
+ * $exc
+ */
def toMinutes: Long
+ /**
+ * Return the length of this duration measured in whole hours, rounding towards zero.
+ *
+ * $exc
+ */
def toHours: Long
+ /**
+ * Return the length of this duration measured in whole days, rounding towards zero.
+ *
+ * $exc
+ */
def toDays: Long
+ /**
+ * Return the number of nanoseconds as floating point number, scaled down to the given unit.
+ * The result may not precisely represent this duration due to the Double datatype's inherent
+ * limitations (mantissa size effectively 53 bits). Non-finite durations are represented as
+ * - [[Duration.Undefined]] is mapped to Double.NaN
+ * - [[Duration.Inf]] is mapped to Double.PositiveInfinity
+ * - [[Duration.MinusInf]] is mapped to Double.NegativeInfinity
+ */
def toUnit(unit: TimeUnit): Double
+ /**
+ * Return the sum of that duration and this. When involving non-finite summands the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
def +(other: Duration): Duration
+ /**
+ * Return the difference of that duration and this. When involving non-finite summands the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
def -(other: Duration): Duration
+ /**
+ * Return this duration multiplied by the scalar factor. When involving non-finite factors the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
def *(factor: Double): Duration
+ /**
+ * Return this duration divided by the scalar factor. When involving non-finite factors the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
def /(factor: Double): Duration
+ /**
+ * Return the quotient of this and that duration as floating-point number. The semantics are
+ * determined by Double as if calculating the quotient of the nanosecond lengths of both factors.
+ */
def /(other: Duration): Double
+ /**
+ * Negate this duration. The only two values which are mapped to themselves are [[Duration.Zero]] and [[Duration.Undefined]].
+ */
def unary_- : Duration
- def finite_? : Boolean
+ /**
+ * This method returns whether this duration is finite, which is not the same as
+ * `!isInfinite` for Double because this method also returns `false` for [[Duration.Undefined]].
+ */
+ def isFinite(): Boolean
+ /**
+ * Return the smaller of this and that duration as determined by the natural ordering.
+ */
def min(other: Duration): Duration = if (this < other) this else other
+ /**
+ * Return the larger of this and that duration as determined by the natural ordering.
+ */
def max(other: Duration): Duration = if (this > other) this else other
- def fromNow: Deadline = Deadline.now + this
// Java API
- def lt(other: Duration) = this < other
- def lteq(other: Duration) = this <= other
- def gt(other: Duration) = this > other
- def gteq(other: Duration) = this >= other
- def plus(other: Duration) = this + other
+
+ /**
+ * Return this duration divided by the scalar factor. When involving non-finite factors the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
+ def div(factor: Double) = this / factor
+ /**
+ * Return the quotient of this and that duration as floating-point number. The semantics are
+ * determined by Double as if calculating the quotient of the nanosecond lengths of both factors.
+ */
+ def div(other: Duration) = this / other
+ def gt(other: Duration) = this > other
+ def gteq(other: Duration) = this >= other
+ def lt(other: Duration) = this < other
+ def lteq(other: Duration) = this <= other
+ /**
+ * Return the difference of that duration and this. When involving non-finite summands the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
def minus(other: Duration) = this - other
- def mul(factor: Double) = this * factor
- def div(factor: Double) = this / factor
- def div(other: Duration) = this / other
- def neg() = -this
- def isFinite() = finite_?
+ /**
+ * Return this duration multiplied by the scalar factor. When involving non-finite factors the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
+ def mul(factor: Double) = this * factor
+ /**
+ * Negate this duration. The only two values which are mapped to themselves are [[Duration.Zero]] and [[Duration.Undefined]].
+ */
+ def neg() = -this
+ /**
+ * Return the sum of that duration and this. When involving non-finite summands the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
+ def plus(other: Duration) = this + other
}
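The ordering note above in action (Undefined sorts above everything, including Inf); the concrete values are arbitrary.

// Sketch: the natural ordering is Double-like, with Undefined greater than Inf.
import scala.concurrent.util.Duration
import scala.concurrent.util.duration._

val sorted = List(Duration.Inf, 1.second, Duration.MinusInf, Duration.Zero).sorted
assert(sorted.head == Duration.MinusInf && sorted.last == Duration.Inf)
assert(List(1.second, Duration.Undefined).max eq Duration.Undefined)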
object FiniteDuration {
@@ -286,252 +604,198 @@ object FiniteDuration {
}
def apply(length: Long, unit: TimeUnit) = new FiniteDuration(length, unit)
-
- def apply(length: Long, unit: String) = new FiniteDuration(length, Duration.timeUnit(unit))
-
+ def apply(length: Long, unit: String) = new FiniteDuration(length, Duration.timeUnit(unit))
+
+ // limit on abs. value of durations in their units
+ private final val max_ns = Long.MaxValue
+ private final val max_µs = max_ns / 1000
+ private final val max_ms = max_µs / 1000
+ private final val max_s = max_ms / 1000
+ private final val max_min= max_s / 60
+ private final val max_h = max_min / 60
+ private final val max_d = max_h / 24
}
-class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration {
+/**
+ * This class represents a finite duration. Its addition and subtraction operators are overloaded to retain
+ * this guarantee statically. The range of this class is limited to +-(2^63-1)ns, which is roughly 292 years.
+ */
+final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration {
+ import FiniteDuration._
import Duration._
- def toNanos = unit.toNanos(length)
- def toMicros = unit.toMicros(length)
- def toMillis = unit.toMillis(length)
+ private[this] def bounded(max: Long) = -max <= length && length <= max
+
+ require(unit match {
+ /*
+ * enforce the 2^63-1 ns limit, must be pos/neg symmetrical because of unary_-
+ */
+ case NANOSECONDS ⇒ bounded(max_ns)
+ case MICROSECONDS ⇒ bounded(max_µs)
+ case MILLISECONDS ⇒ bounded(max_ms)
+ case SECONDS ⇒ bounded(max_s)
+ case MINUTES ⇒ bounded(max_min)
+ case HOURS ⇒ bounded(max_h)
+ case DAYS ⇒ bounded(max_d)
+ case _ ⇒
+ val v = DAYS.convert(length, unit)
+ -max_d <= v && v <= max_d
+ }, "Duration is limited to +-(2^63-1)ns (ca. 292 years)")
+
+ def toNanos = unit.toNanos(length)
+ def toMicros = unit.toMicros(length)
+ def toMillis = unit.toMillis(length)
def toSeconds = unit.toSeconds(length)
def toMinutes = unit.toMinutes(length)
- def toHours = unit.toHours(length)
- def toDays = unit.toDays(length)
+ def toHours = unit.toHours(length)
+ def toDays = unit.toDays(length)
def toUnit(u: TimeUnit) = toNanos.toDouble / NANOSECONDS.convert(1, u)
- override def toString = this match {
- case Duration(1, DAYS) => "1 day"
- case Duration(x, DAYS) => x + " days"
- case Duration(1, HOURS) => "1 hour"
- case Duration(x, HOURS) => x + " hours"
- case Duration(1, MINUTES) => "1 minute"
- case Duration(x, MINUTES) => x + " minutes"
- case Duration(1, SECONDS) => "1 second"
- case Duration(x, SECONDS) => x + " seconds"
- case Duration(1, MILLISECONDS) => "1 millisecond"
- case Duration(x, MILLISECONDS) => x + " milliseconds"
- case Duration(1, MICROSECONDS) => "1 microsecond"
- case Duration(x, MICROSECONDS) => x + " microseconds"
- case Duration(1, NANOSECONDS) => "1 nanosecond"
- case Duration(x, NANOSECONDS) => x + " nanoseconds"
- }
+ /**
+ * Construct a [[Deadline]] from this duration by adding it to the current instant `Deadline.now`.
+ */
+ def fromNow: Deadline = Deadline.now + this
- def compare(other: Duration) =
- if (other.finite_?) {
- val me = toNanos
- val o = other.toNanos
- if (me > o) 1 else if (me < o) -1 else 0
- } else -other.compare(this)
-
- def +(other: Duration) = {
- if (!other.finite_?) {
- other
- } else {
- val nanos = toNanos + other.asInstanceOf[FiniteDuration].toNanos
- fromNanos(nanos)
- }
- }
+ private[this] def unitString = timeUnitName(unit) + ( if (length == 1) "" else "s" )
+ override def toString = "" + length + " " + unitString
- def -(other: Duration) = {
- if (!other.finite_?) {
- other
- } else {
- val nanos = toNanos - other.asInstanceOf[FiniteDuration].toNanos
- fromNanos(nanos)
- }
+ def compare(other: Duration) = other match {
+ case x: FiniteDuration => toNanos compare x.toNanos
+ case _ => -(other compare this)
}
- def *(factor: Double) = fromNanos(toNanos.toDouble * factor)
+ // see https://www.securecoding.cert.org/confluence/display/java/NUM00-J.+Detect+or+prevent+integer+overflow
+ private[this] def safeAdd(a: Long, b: Long): Long = {
+ if ((b > 0) && (a > Long.MaxValue - b) ||
+ (b < 0) && (a < Long.MinValue - b)) throw new IllegalArgumentException("integer overflow")
+ a + b
+ }
+ private[this] def add(otherLength: Long, otherUnit: TimeUnit): FiniteDuration = {
+ val commonUnit = if (otherUnit.convert(1, unit) == 0) unit else otherUnit
+ val totalLength = safeAdd(commonUnit.convert(length, unit), commonUnit.convert(otherLength, otherUnit))
+ new FiniteDuration(totalLength, commonUnit)
+ }
- def /(factor: Double) = fromNanos(toNanos.toDouble / factor)
+ def +(other: Duration) = other match {
+ case x: FiniteDuration => add(x.length, x.unit)
+ case _ => other
+ }
+ def -(other: Duration) = other match {
+ case x: FiniteDuration => add(-x.length, x.unit)
+ case _ => other
+ }
- def /(other: Duration) = if (other.finite_?) toNanos.toDouble / other.toNanos else 0
+ def *(factor: Double) =
+ if (!factor.isInfinite) fromNanos(toNanos * factor)
+ else if (factor.isNaN) Undefined
+ else if ((factor > 0) ^ (this < Zero)) Inf
+ else MinusInf
+
+ def /(factor: Double) =
+ if (!factor.isInfinite) fromNanos(toNanos / factor)
+ else if (factor.isNaN) Undefined
+ else Zero
+
+ // if this is made a constant, then scalac will elide the conditional and always return +0.0, SI-6331
+ private[this] def minusZero = -0d
+ def /(other: Duration): Double =
+ if (other.isFinite) toNanos.toDouble / other.toNanos
+ else if (other eq Undefined) Double.NaN
+ else if ((length < 0) ^ (other > Zero)) 0d
+ else minusZero
+
+ // overloaded and overridden methods taking FiniteDuration, so that you can calculate while statically staying finite
+ def +(other: FiniteDuration) = add(other.length, other.unit)
+ def -(other: FiniteDuration) = add(-other.length, other.unit)
+ def plus(other: FiniteDuration) = this + other
+ def minus(other: FiniteDuration) = this - other
+ override def div(factor: Double) = this / factor
+ override def mul(factor: Double) = this * factor
+ def min(other: FiniteDuration) = if (this < other) this else other
+ def max(other: FiniteDuration) = if (this > other) this else other
def unary_- = Duration(-length, unit)
- def finite_? = true
-
- override def equals(other: Any) =
- other.isInstanceOf[FiniteDuration] &&
- toNanos == other.asInstanceOf[FiniteDuration].toNanos
+ final def isFinite() = true
- override def hashCode = toNanos.asInstanceOf[Int]
+ override def equals(other: Any) = other match {
+ case x: FiniteDuration => toNanos == x.toNanos
+ case _ => super.equals(other)
+ }
+ override def hashCode = toNanos.toInt
}
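
The FiniteDuration arithmetic above reads best from the caller's side: `+` and `-` convert both operands to the finer common unit and guard the addition with the NUM00-J overflow check, while `*` and `/` map infinite or NaN factors onto `Inf`, `MinusInf`, `Undefined` and `Zero` rather than overflowing silently. A minimal usage sketch, assuming the enclosing `scala.concurrent.util` package and a `Duration.Inf` companion member as in the final 2.10 API:

    import java.util.concurrent.TimeUnit.MINUTES
    import scala.concurrent.util.Duration          // assumed location of Duration in this patch

    val d = Duration(90, MINUTES)                  // a FiniteDuration
    d + Duration(30, MINUTES)                      // 120 minutes, added in the common unit with the overflow guard
    d * Double.PositiveInfinity                    // an infinite factor short-circuits to the infinite Duration (Inf above)
    d / Duration.Inf                               // 0.0 for a non-negative dividend (-0.0 for a negative one, see SI-6331)
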
-class DurationInt(n: Int) {
+trait DurationConversions extends Any {
import duration.Classifier
+ protected def durationIn(unit: TimeUnit): FiniteDuration
- def nanoseconds = Duration(n, NANOSECONDS)
- def nanos = Duration(n, NANOSECONDS)
- def nanosecond = Duration(n, NANOSECONDS)
- def nano = Duration(n, NANOSECONDS)
+ def nanoseconds = durationIn(NANOSECONDS)
+ def nanos = nanoseconds
+ def nanosecond = nanoseconds
+ def nano = nanoseconds
- def microseconds = Duration(n, MICROSECONDS)
- def micros = Duration(n, MICROSECONDS)
- def microsecond = Duration(n, MICROSECONDS)
- def micro = Duration(n, MICROSECONDS)
+ def microseconds = durationIn(MICROSECONDS)
+ def micros = microseconds
+ def microsecond = microseconds
+ def micro = microseconds
- def milliseconds = Duration(n, MILLISECONDS)
- def millis = Duration(n, MILLISECONDS)
- def millisecond = Duration(n, MILLISECONDS)
- def milli = Duration(n, MILLISECONDS)
+ def milliseconds = durationIn(MILLISECONDS)
+ def millis = milliseconds
+ def millisecond = milliseconds
+ def milli = milliseconds
- def seconds = Duration(n, SECONDS)
- def second = Duration(n, SECONDS)
+ def seconds = durationIn(SECONDS)
+ def second = seconds
- def minutes = Duration(n, MINUTES)
- def minute = Duration(n, MINUTES)
+ def minutes = durationIn(MINUTES)
+ def minute = minutes
- def hours = Duration(n, HOURS)
- def hour = Duration(n, HOURS)
+ def hours = durationIn(HOURS)
+ def hour = hours
- def days = Duration(n, DAYS)
- def day = Duration(n, DAYS)
+ def days = durationIn(DAYS)
+ def day = days
- def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
- def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
- def nanosecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
- def nano[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+ def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(nanoseconds)
+ def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = nanoseconds(c)
+ def nanosecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = nanoseconds(c)
+ def nano[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = nanoseconds(c)
- def microseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
- def micros[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
- def microsecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
- def micro[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+ def microseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(microseconds)
+ def micros[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = microseconds(c)
+ def microsecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = microseconds(c)
+ def micro[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = microseconds(c)
- def milliseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
- def millis[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
- def millisecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
- def milli[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+ def milliseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(milliseconds)
+ def millis[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = milliseconds(c)
+ def millisecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = milliseconds(c)
+ def milli[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = milliseconds(c)
- def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
- def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
+ def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(seconds)
+ def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = seconds(c)
- def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MINUTES))
- def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MINUTES))
+ def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(minutes)
+ def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = minutes(c)
- def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, HOURS))
- def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, HOURS))
+ def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(hours)
+ def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = hours(c)
- def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, DAYS))
- def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, DAYS))
+ def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(days)
+ def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = days(c)
}
-class DurationLong(n: Long) {
- import duration.Classifier
-
- def nanoseconds = Duration(n, NANOSECONDS)
- def nanos = Duration(n, NANOSECONDS)
- def nanosecond = Duration(n, NANOSECONDS)
- def nano = Duration(n, NANOSECONDS)
-
- def microseconds = Duration(n, MICROSECONDS)
- def micros = Duration(n, MICROSECONDS)
- def microsecond = Duration(n, MICROSECONDS)
- def micro = Duration(n, MICROSECONDS)
-
- def milliseconds = Duration(n, MILLISECONDS)
- def millis = Duration(n, MILLISECONDS)
- def millisecond = Duration(n, MILLISECONDS)
- def milli = Duration(n, MILLISECONDS)
-
- def seconds = Duration(n, SECONDS)
- def second = Duration(n, SECONDS)
-
- def minutes = Duration(n, MINUTES)
- def minute = Duration(n, MINUTES)
-
- def hours = Duration(n, HOURS)
- def hour = Duration(n, HOURS)
-
- def days = Duration(n, DAYS)
- def day = Duration(n, DAYS)
-
- def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
- def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
- def nanosecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
- def nano[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
-
- def microseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
- def micros[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
- def microsecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
- def micro[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
-
- def milliseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
- def millis[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
- def millisecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
- def milli[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
-
- def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
- def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
-
- def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MINUTES))
- def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MINUTES))
-
- def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, HOURS))
- def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, HOURS))
-
- def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, DAYS))
- def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, DAYS))
+final class DurationInt(val n: Int) extends AnyVal with DurationConversions {
+ override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n, unit)
}
-class DurationDouble(d: Double) {
- import duration.Classifier
-
- def nanoseconds = Duration(d, NANOSECONDS)
- def nanos = Duration(d, NANOSECONDS)
- def nanosecond = Duration(d, NANOSECONDS)
- def nano = Duration(d, NANOSECONDS)
-
- def microseconds = Duration(d, MICROSECONDS)
- def micros = Duration(d, MICROSECONDS)
- def microsecond = Duration(d, MICROSECONDS)
- def micro = Duration(d, MICROSECONDS)
-
- def milliseconds = Duration(d, MILLISECONDS)
- def millis = Duration(d, MILLISECONDS)
- def millisecond = Duration(d, MILLISECONDS)
- def milli = Duration(d, MILLISECONDS)
-
- def seconds = Duration(d, SECONDS)
- def second = Duration(d, SECONDS)
-
- def minutes = Duration(d, MINUTES)
- def minute = Duration(d, MINUTES)
-
- def hours = Duration(d, HOURS)
- def hour = Duration(d, HOURS)
-
- def days = Duration(d, DAYS)
- def day = Duration(d, DAYS)
-
- def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
- def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
- def nanosecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
- def nano[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
-
- def microseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS))
- def micros[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS))
- def microsecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS))
- def micro[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS))
-
- def milliseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS))
- def millis[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS))
- def millisecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS))
- def milli[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS))
-
- def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, SECONDS))
- def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, SECONDS))
-
- def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MINUTES))
- def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MINUTES))
-
- def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, HOURS))
- def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, HOURS))
+final class DurationLong(val n: Long) extends AnyVal with DurationConversions {
+ override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n, unit)
+}
- def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, DAYS))
- def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, DAYS))
+final class DurationDouble(val d: Double) extends AnyVal with DurationConversions {
+ override protected def durationIn(unit: TimeUnit): FiniteDuration =
+ Duration(d, unit) match {
+ case f: FiniteDuration => f
+ case _ => throw new IllegalArgumentException("Duration DSL not applicable to " + d)
+ }
}
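
With the conversions now funneled through `DurationConversions.durationIn`, the three value classes above only have to say how a number plus a `TimeUnit` becomes a `FiniteDuration`. A usage sketch, assuming the `scala.concurrent.util.duration` package object (whose diff follows) provides the usual implicit enrichments for `Int`, `Long` and `Double`:

    import scala.concurrent.util.duration._   // assumed to bring the Int/Long/Double enrichments into scope

    val timeout = 5.seconds          // DurationInt: Duration(5, SECONDS), a FiniteDuration
    val tick    = 250.millis         // all alias methods delegate to the same durationIn(MILLISECONDS)
    val skew    = 1.5.minutes        // DurationDouble: throws IllegalArgumentException if the result is not finite
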
diff --git a/src/library/scala/concurrent/util/duration/package.scala b/src/library/scala/concurrent/util/duration/package.scala
index e3cf229c61..d5ae3f1544 100644
--- a/src/library/scala/concurrent/util/duration/package.scala
+++ b/src/library/scala/concurrent/util/duration/package.scala
@@ -1,7 +1,7 @@
package scala.concurrent.util
import java.util.concurrent.TimeUnit
-import language.implicitConversions
+import scala.language.implicitConversions
package object duration {
diff --git a/src/library/scala/deprecated.scala b/src/library/scala/deprecated.scala
index 111affc904..5ad61b811a 100644
--- a/src/library/scala/deprecated.scala
+++ b/src/library/scala/deprecated.scala
@@ -8,7 +8,7 @@
package scala
-import annotation.meta._
+import scala.annotation.meta._
/** An annotation that designates that a definition is deprecated.
* Access to the member then generates a deprecated warning.
@@ -18,4 +18,4 @@ import annotation.meta._
* @since 2.3
*/
@getter @setter @beanGetter @beanSetter
-class deprecated(message: String = "", since: String = "") extends annotation.StaticAnnotation
+class deprecated(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/deprecatedInheritance.scala b/src/library/scala/deprecatedInheritance.scala
new file mode 100644
index 0000000000..eb241d0d04
--- /dev/null
+++ b/src/library/scala/deprecatedInheritance.scala
@@ -0,0 +1,22 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/** An annotation that designates that inheriting from a class is deprecated.
+ *
+ * This is usually done to warn about a non-final class being made final in a future version.
+ * Sub-classing such a class then generates a warning.
+ *
+ * @param message the message to print during compilation if the class was sub-classed
+ * @param since a string identifying the first version in which inheritance was deprecated
+ * @since 2.10
+ * @see [[scala.deprecatedOverriding]]
+ */
+private[scala] // for now, this needs to be generalized to communicate other modifier deltas
+class deprecatedInheritance(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation
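
Because the annotation is `private[scala]` for now, only the standard library can apply it; this same patch uses it on `BigDecimal` and `BigInt` further down. A short sketch of the intended effect (the class name is illustrative and not part of the patch):

    package scala   // the annotation is private[scala] for now, so only library code can apply it

    @deprecatedInheritance("This class will be made final.", "2.10.0")
    class Versioned

    // A definition such as `class MyVersioned extends Versioned` elsewhere now
    // compiles with a deprecation warning instead of breaking only later, when
    // Versioned actually becomes final.
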
diff --git a/src/library/scala/deprecatedName.scala b/src/library/scala/deprecatedName.scala
index cc36be6775..07b35d1a61 100644
--- a/src/library/scala/deprecatedName.scala
+++ b/src/library/scala/deprecatedName.scala
@@ -8,7 +8,7 @@
package scala
-import annotation.meta._
+import scala.annotation.meta._
/**
* An annotation that designates the name of the parameter to which it is
@@ -29,4 +29,4 @@ import annotation.meta._
* @since 2.8.1
*/
@param
-class deprecatedName(name: Symbol) extends annotation.StaticAnnotation
+class deprecatedName(name: Symbol) extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/deprecatedOverriding.scala b/src/library/scala/deprecatedOverriding.scala
new file mode 100644
index 0000000000..c9fd3af91b
--- /dev/null
+++ b/src/library/scala/deprecatedOverriding.scala
@@ -0,0 +1,21 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/** An annotation that designates that overriding a member is deprecated.
+ *
+ * Overriding such a member in a sub-class then generates a warning.
+ *
+ * @param message the message to print during compilation if the member was overridden
+ * @param since a string identifying the first version in which overriding was deprecated
+ * @since 2.10
+ * @see [[scala.deprecatedInheritance]]
+ */
+private[scala] // for the same reasons as deprecatedInheritance
+class deprecatedOverriding(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/inline.scala b/src/library/scala/inline.scala
index a182fdf9ca..42ae28a347 100644
--- a/src/library/scala/inline.scala
+++ b/src/library/scala/inline.scala
@@ -17,4 +17,4 @@ package scala
* @author Lex Spoon
* @version 1.0, 2007-5-21
*/
-class inline extends annotation.StaticAnnotation
+class inline extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/io/BytePickle.scala b/src/library/scala/io/BytePickle.scala
index 3bb5ea9c2b..a199986141 100644
--- a/src/library/scala/io/BytePickle.scala
+++ b/src/library/scala/io/BytePickle.scala
@@ -19,6 +19,7 @@ import scala.collection.mutable
* @author Philipp Haller
* @version 1.1
*/
+@deprecated("This class will be removed.", "2.10.0")
object BytePickle {
abstract class SPU[T] {
def appP(a: T, state: PicklerState): PicklerState
diff --git a/src/library/scala/io/Codec.scala b/src/library/scala/io/Codec.scala
index 84cac88dcc..6522cd0cd8 100644
--- a/src/library/scala/io/Codec.scala
+++ b/src/library/scala/io/Codec.scala
@@ -10,8 +10,8 @@
package scala.io
import java.nio.charset.{ Charset, CharsetDecoder, CharsetEncoder, CharacterCodingException, CodingErrorAction => Action }
-import annotation.migration
-import language.implicitConversions
+import scala.annotation.migration
+import scala.language.implicitConversions
// Some notes about encodings for use in refining this implementation.
//
@@ -91,7 +91,7 @@ object Codec extends LowPriorityCodecImplicits {
* as an accident, with any anomalies considered "not a bug".
*/
def defaultCharsetCodec = apply(Charset.defaultCharset)
- def fileEncodingCodec = apply(util.Properties.encodingString)
+ def fileEncodingCodec = apply(scala.util.Properties.encodingString)
def default = defaultCharsetCodec
def apply(encoding: String): Codec = new Codec(Charset forName encoding)
diff --git a/src/library/scala/io/Position.scala b/src/library/scala/io/Position.scala
index 5d1e695add..dae478f31a 100644
--- a/src/library/scala/io/Position.scala
+++ b/src/library/scala/io/Position.scala
@@ -32,6 +32,7 @@ package scala.io
* }}}
* @author Burak Emir (translated from work by Matthias Zenger and others)
*/
+@deprecated("This class will be removed.", "2.10.0")
abstract class Position {
/** Definable behavior for overflow conditions.
*/
@@ -53,7 +54,7 @@ abstract class Position {
if (line >= LINE_MASK)
LINE_MASK << COLUMN_BITS
else
- (line << COLUMN_BITS) | math.min(COLUMN_MASK, column)
+ (line << COLUMN_BITS) | scala.math.min(COLUMN_MASK, column)
}
/** Returns the line number of the encoded position. */
diff --git a/src/library/scala/io/UTF8Codec.scala b/src/library/scala/io/UTF8Codec.scala
index df0a36ef21..aa6cccf1d1 100644
--- a/src/library/scala/io/UTF8Codec.scala
+++ b/src/library/scala/io/UTF8Codec.scala
@@ -13,6 +13,7 @@ package scala.io
* @author Martin Odersky
* @version 1.0, 04/10/2004
*/
+@deprecated("This class will be removed.", "2.10.0")
object UTF8Codec {
final val UNI_REPLACEMENT_CHAR: Int = 0x0000FFFD
final val UNI_REPLACEMENT_BYTES = Array[Byte](-17, -65, -67)
diff --git a/src/library/scala/language.scala b/src/library/scala/language.scala
index df2eb0b910..297f344f65 100644
--- a/src/library/scala/language.scala
+++ b/src/library/scala/language.scala
@@ -17,7 +17,7 @@ object language {
* of programs. Furthermore, dynamic member selection often relies on reflection,
* which is not available on all platforms.
*/
- implicit lazy val dynamics: dynamics = ???
+ implicit lazy val dynamics: dynamics = languageFeature.dynamics
/** Only where enabled, postfix operator notation `(expr op)` will be allowed.
*
@@ -26,10 +26,10 @@ object language {
* _Why control it?_ Postfix operators interact poorly with semicolon inference.
* Most programmers avoid them for this reason.
*/
- implicit lazy val postfixOps: postfixOps = ???
+ implicit lazy val postfixOps: postfixOps = languageFeature.postfixOps
/** Only where enabled, accesses to members of structural types that need
- * reflection are supported. Reminder: A structural type is a type of the form
+ * reflection are supported. Reminder: A structural type is a type of the form
* `Parents { Decls }` where `Decls` contains declarations of new members that do
* not override any member in `Parents`. To access one of these members, a
* reflective call is needed.
@@ -42,7 +42,7 @@ object language {
* such as ProGuard have problems dealing with it. Even where reflection is available,
* reflective dispatch can lead to surprising performance degradations.
*/
- implicit lazy val reflectiveCalls: reflectiveCalls = ???
+ implicit lazy val reflectiveCalls: reflectiveCalls = languageFeature.reflectiveCalls
/** Only where enabled, definitions of implicit conversions are allowed. An
* implicit conversion is an implicit value of unary function type `A => B`,
@@ -52,7 +52,7 @@ object language {
* implicit def stringToInt(s: String): Int = s.length
* implicit val conv = (s: String) => s.length
* implicit def listToX(xs: List[T])(implicit f: T => X): X = …
- *
+ *
* implicit values of other types are not affected, and neither are implicit
* classes.
*
@@ -65,7 +65,7 @@ object language {
* most situations using implicit parameters leads to a better design than
* implicit conversions.
*/
- implicit lazy val implicitConversions: implicitConversions = ???
+ implicit lazy val implicitConversions: implicitConversions = languageFeature.implicitConversions
/** Only where this flag is enabled, higher-kinded types can be written.
*
@@ -86,7 +86,7 @@ object language {
* enabling also serves as a warning that code involving higher-kinded types
* might have to be slightly revised in the future.
*/
- implicit lazy val higherKinds: higherKinds = ???
+ implicit lazy val higherKinds: higherKinds = languageFeature.higherKinds
/** Only where enabled, existential types that cannot be expressed as wildcard
* types can be written and are allowed in inferred types of values or return
@@ -95,14 +95,14 @@ object language {
*
* _Why keep the feature?_ Existential types are needed to make sense of Java’s wildcard
* types and raw types and the erased types of run-time values.
- *
+ *
* Why control it? Having complex existential types in a code base usually makes
* application code very brittle, with a tendency to produce type errors with
* obscure error messages. Therefore, going overboard with existential types
* is generally perceived not to be a good idea. Also, complicated existential types
* might be no longer supported in a future simplification of the language.
*/
- implicit lazy val existentials: existentials = ???
+ implicit lazy val existentials: existentials = languageFeature.existentials
object experimental {
@@ -110,7 +110,7 @@ object language {
/** Where enabled, macro definitions are allowed. Macro implementations and
* macro applications are unaffected; they can be used anywhere.
- *
+ *
* _Why introduce the feature?_ Macros promise to make the language more regular,
* replacing ad-hoc language constructs with a general powerful abstraction
* capability that can express them. Macros are also a more disciplined and
@@ -119,6 +119,6 @@ object language {
* _Why control it?_ For their very power, macros can lead to code that is hard
* to debug and understand.
*/
- implicit lazy val macros: macros = ???
+ implicit lazy val macros: macros = languageFeature.experimental.macros
}
}
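
For users nothing changes syntactically: enabling a feature is still an import of the corresponding implicit value. The difference is that the value now refers to a real singleton from `languageFeature` rather than `???`, so forcing it no longer throws `scala.NotImplementedError`. A small sketch:

    import scala.language.postfixOps        // brings the implicit postfixOps value into scope

    object PostfixDemo extends App {
      val words = List("one", "two", "three")
      val n = words length                  // postfix selection, permitted without a feature warning
      println(n)
    }
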
diff --git a/src/library/scala/languageFeature.scala b/src/library/scala/languageFeature.scala
index c32f1eb724..1f411c412a 100644
--- a/src/library/scala/languageFeature.scala
+++ b/src/library/scala/languageFeature.scala
@@ -1,30 +1,37 @@
package scala
-import annotation.meta
+import scala.annotation.meta
object languageFeature {
@meta.languageFeature("extension of type scala.Dynamic", enableRequired = true)
sealed trait dynamics
+ object dynamics extends dynamics
@meta.languageFeature("postfix operator #", enableRequired = false)
sealed trait postfixOps
+ object postfixOps extends postfixOps
@meta.languageFeature("reflective access of structural type member #", enableRequired = false)
sealed trait reflectiveCalls
+ object reflectiveCalls extends reflectiveCalls
@meta.languageFeature("implicit conversion #", enableRequired = false)
sealed trait implicitConversions
+ object implicitConversions extends implicitConversions
@meta.languageFeature("higher-kinded type", enableRequired = false)
sealed trait higherKinds
+ object higherKinds extends higherKinds
@meta.languageFeature("#, which cannot be expressed by wildcards, ", enableRequired = false)
sealed trait existentials
+ object existentials extends existentials
object experimental {
@meta.languageFeature("macro definition", enableRequired = true)
sealed trait macros
+ object macros extends macros
}
}
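
The companion objects added here are exactly the values the `language` object above now hands out. The idiom is a sealed trait with a single object inhabitant, so a value of the feature type can only come from the library itself. A generic sketch of the same pattern, with illustrative names that are not part of this patch:

    object features {
      // A capability marker with exactly one inhabitant, mirroring languageFeature.postfixOps and friends.
      sealed trait auditLogging
      object auditLogging extends auditLogging

      // The enabling implicit can now be a real value instead of the earlier `???` placeholder.
      implicit lazy val enableAuditLogging: auditLogging = auditLogging
    }
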
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index 74daa510ca..eb73d58d1c 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -12,7 +12,7 @@ package scala.math
import java.{ lang => jl }
import java.math.{ MathContext, BigDecimal => BigDec }
import scala.collection.immutable.NumericRange
-import language.implicitConversions
+import scala.language.implicitConversions
/**
@@ -159,6 +159,7 @@ object BigDecimal {
* @author Stephane Micheloud
* @version 1.0
*/
+@deprecatedInheritance("This class will me made final.", "2.10.0")
class BigDecimal(
val bigDecimal: BigDec,
val mc: MathContext)
@@ -211,7 +212,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
catch { case _: ArithmeticException => false }
}
- protected[math] def isWhole = (this remainder 1) == BigDecimal(0)
+ def isWhole() = (this remainder 1) == BigDecimal(0)
def underlying = bigDecimal
/** Compares this BigDecimal with the specified BigDecimal for equality.
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index 4471e417d9..3eb41053f7 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -9,7 +9,7 @@
package scala.math
import java.math.BigInteger
-import language.implicitConversions
+import scala.language.implicitConversions
/**
* @author Martin Odersky
@@ -114,6 +114,7 @@ object BigInt {
* @author Martin Odersky
* @version 1.0, 15/07/2003
*/
+@deprecatedInheritance("This class will be made final.", "2.10.0")
class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericConversions with Serializable {
/** Returns the hash code for this BigInt. */
override def hashCode(): Int =
@@ -162,7 +163,7 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
}
/** Some implementations of java.math.BigInteger allow huge values with bit length greater than Int.MaxValue .
* The BigInteger.bitLength method returns truncated bit length in this case .
- * This method tests if result of bitLength is valid.
+ * This method tests if result of bitLength is valid.
* This method will become unnecessary if BigInt constructors reject huge BigIntegers.
*/
private def bitLengthOverflow = {
@@ -170,7 +171,7 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
(shifted.signum != 0) && !(shifted equals BigInt.minusOne)
}
- protected[math] def isWhole = true
+ def isWhole() = true
def underlying = bigInteger
/** Compares this BigInt with the specified BigInt for equality.
diff --git a/src/library/scala/math/Fractional.scala b/src/library/scala/math/Fractional.scala
index 0686569c16..98fd325980 100644
--- a/src/library/scala/math/Fractional.scala
+++ b/src/library/scala/math/Fractional.scala
@@ -8,7 +8,7 @@
package scala.math
-import language.implicitConversions
+import scala.language.implicitConversions
/**
* @since 2.8
@@ -28,4 +28,4 @@ object Fractional {
implicit def infixFractionalOps[T](x: T)(implicit num: Fractional[T]): Fractional[T]#FractionalOps = new num.FractionalOps(x)
}
object Implicits extends ExtraImplicits
-}
\ No newline at end of file
+}
diff --git a/src/library/scala/math/Integral.scala b/src/library/scala/math/Integral.scala
index 4b4de28228..e5bfc8f687 100644
--- a/src/library/scala/math/Integral.scala
+++ b/src/library/scala/math/Integral.scala
@@ -10,7 +10,7 @@
package scala.math
-import language.implicitConversions
+import scala.language.implicitConversions
/**
* @since 2.8
@@ -36,4 +36,4 @@ object Integral {
implicit def infixIntegralOps[T](x: T)(implicit num: Integral[T]): Integral[T]#IntegralOps = new num.IntegralOps(x)
}
object Implicits extends ExtraImplicits
-}
\ No newline at end of file
+}
diff --git a/src/library/scala/math/Numeric.scala b/src/library/scala/math/Numeric.scala
index ee62706e49..4428d9c249 100644
--- a/src/library/scala/math/Numeric.scala
+++ b/src/library/scala/math/Numeric.scala
@@ -8,7 +8,7 @@
package scala.math
-import language.implicitConversions
+import scala.language.implicitConversions
/**
* @since 2.8
diff --git a/src/library/scala/math/Ordered.scala b/src/library/scala/math/Ordered.scala
index a5efa41b80..53b3ae81cb 100644
--- a/src/library/scala/math/Ordered.scala
+++ b/src/library/scala/math/Ordered.scala
@@ -8,7 +8,7 @@
package scala.math
-import language.implicitConversions
+import scala.language.implicitConversions
/** A trait for data that have a single, natural ordering. See
* [[scala.math.Ordering]] before using this trait for
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala
index 9020bb9edd..719f2e12a7 100644
--- a/src/library/scala/math/Ordering.scala
+++ b/src/library/scala/math/Ordering.scala
@@ -6,10 +6,11 @@
** |/ **
\* */
-package scala.math
+package scala
+package math
import java.util.Comparator
-import language.{implicitConversions, higherKinds}
+import scala.language.{implicitConversions, higherKinds}
/** Ordering is a trait whose instances each represent a strategy for sorting
* instances of a type.
@@ -165,7 +166,7 @@ object Ordering extends LowPriorityOrderingImplicits {
/** Not in the standard scope due to the potential for divergence:
* For instance `implicitly[Ordering[Any]]` diverges in its presence.
*/
- implicit def seqDerivedOrdering[CC[X] <: collection.Seq[X], T](implicit ord: Ordering[T]): Ordering[CC[T]] =
+ implicit def seqDerivedOrdering[CC[X] <: scala.collection.Seq[X], T](implicit ord: Ordering[T]): Ordering[CC[T]] =
new Ordering[CC[T]] {
def compare(x: CC[T], y: CC[T]): Int = {
val xe = x.iterator
diff --git a/src/library/scala/math/ScalaNumericConversions.scala b/src/library/scala/math/ScalaNumericConversions.scala
index 2b7ef7405c..edf243e5df 100644
--- a/src/library/scala/math/ScalaNumericConversions.scala
+++ b/src/library/scala/math/ScalaNumericConversions.scala
@@ -13,7 +13,17 @@ import java.{ lang => jl }
/** Conversions which present a consistent conversion interface
* across all the numeric types.
*/
-trait ScalaNumericConversions extends ScalaNumber {
+trait ScalaNumericConversions extends Any {
+ def isWhole(): Boolean
+ def underlying(): Any
+
+ def byteValue(): Byte = intValue().toByte
+ def shortValue(): Short = intValue().toShort
+ def intValue(): Int
+ def longValue(): Long
+ def floatValue(): Float
+ def doubleValue(): Double
+
/** Returns the value of this as a [[scala.Char]]. This may involve
* rounding or truncation.
*/
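
Making `ScalaNumericConversions` a universal trait (`extends Any`) means it can now be mixed into value classes as well as into `ScalaNumber` subclasses, with `byteValue`/`shortValue` defaulting to truncation of `intValue`. A minimal sketch of the same pattern with illustrative names (`IntLike` and `Meters` are not part of this patch):

    // A universal trait: no fields, extends Any, so a value class can mix it in.
    trait IntLike extends Any {
      def intValue(): Int
      def byteValue(): Byte   = intValue().toByte   // concrete methods may delegate to the abstract one
      def shortValue(): Short = intValue().toShort
    }

    final class Meters(val n: Int) extends AnyVal with IntLike {
      def intValue(): Int = n
    }
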
diff --git a/src/library/scala/native.scala b/src/library/scala/native.scala
index 28bb9f70fd..798af3a5da 100644
--- a/src/library/scala/native.scala
+++ b/src/library/scala/native.scala
@@ -20,4 +20,4 @@ package scala
* but it is type checked when present.
*
* @since 2.6 */
-class native extends annotation.StaticAnnotation {}
+class native extends scala.annotation.StaticAnnotation {}
diff --git a/src/library/scala/noinline.scala b/src/library/scala/noinline.scala
index de650ed209..7cb9b3d53c 100644
--- a/src/library/scala/noinline.scala
+++ b/src/library/scala/noinline.scala
@@ -18,4 +18,4 @@ package scala
* @version 1.0, 2007-5-21
* @since 2.5
*/
-class noinline extends annotation.StaticAnnotation
+class noinline extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala
index 5255c44f10..1a574836c0 100644
--- a/src/library/scala/reflect/ClassTag.scala
+++ b/src/library/scala/reflect/ClassTag.scala
@@ -1,24 +1,25 @@
-package scala.reflect
+package scala
+package reflect
import java.lang.{ Class => jClass }
-import language.{implicitConversions, existentials}
-import scala.runtime.ScalaRunTime.arrayClass
+import scala.language.{implicitConversions, existentials}
+import scala.runtime.ScalaRunTime.{ arrayClass, arrayElementClass }
-/** A `ClassTag[T]` wraps a runtime class, which can be accessed via the `runtimeClass` method.
+/** A `ClassTag[T]` wraps a runtime class (the erasure) and can create array instances.
*
- * This is useful in itself, but also enables very important use case.
- * Having this knowledge ClassTag can instantiate `Arrays`
- * in those cases where the element type is unknown at compile time.
+ * If an implicit value of type ClassTag[T] is requested, the compiler will create one.
+ * The runtime class (i.e. the erasure, a java.lang.Class on the JVM) of T can be accessed
+ * via the `runtimeClass` field. References to type parameters or abstract type members are
+ * replaced by the concrete types if ClassTags are available for them.
+ *
+ * Besides accessing the erasure, a ClassTag knows how to instantiate single- and multi-
+ * dimensional `Arrays` where the element type is unknown at compile time.
*
- * If an implicit value of type u.ClassTag[T] is required, the compiler will make one up on demand.
- * The implicitly created value contains in its `runtimeClass` field the runtime class that is the result of erasing type T.
- * In that value, any occurrences of type parameters or abstract types U which come themselves with a ClassTag
- * are represented by the type referenced by that tag.
- * If the type T contains unresolved references to type parameters or abstract types, a static error results.
+ * [[scala.reflect.ClassTag]] corresponds to a previous concept of [[scala.reflect.ClassManifest]].
*
* @see [[scala.reflect.base.TypeTags]]
*/
-@annotation.implicitNotFound(msg = "No ClassTag available for ${T}")
+@scala.annotation.implicitNotFound(msg = "No ClassTag available for ${T}")
trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serializable {
// please, don't add any APIs here, like it was with `newWrappedArray` and `newArrayBuilder`
// class tags, and all tags in general, should be as minimalistic as possible
@@ -54,35 +55,62 @@ trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serial
* `SomeExtractor(...)` is turned into `ct(SomeExtractor(...))` if `T` in `SomeExtractor.unapply(x: T)`
* is uncheckable, but we have an instance of `ClassTag[T]`.
*/
- def unapply(x: Any): Option[T] = if (x != null && runtimeClass.isAssignableFrom(x.getClass)) Some(x.asInstanceOf[T]) else None
+ def unapply(x: Any): Option[T] = unapply_impl(x)
+ def unapply(x: Byte): Option[T] = unapply_impl(x)
+ def unapply(x: Short): Option[T] = unapply_impl(x)
+ def unapply(x: Char): Option[T] = unapply_impl(x)
+ def unapply(x: Int): Option[T] = unapply_impl(x)
+ def unapply(x: Long): Option[T] = unapply_impl(x)
+ def unapply(x: Float): Option[T] = unapply_impl(x)
+ def unapply(x: Double): Option[T] = unapply_impl(x)
+ def unapply(x: Boolean): Option[T] = unapply_impl(x)
+ def unapply(x: Unit): Option[T] = unapply_impl(x)
- /** case class accessories */
+ private def unapply_impl[U: ClassTag](x: U): Option[T] =
+ if (x == null) None
+ else {
+ val staticClass = classTag[U].runtimeClass
+ val dynamicClass = x.getClass
+ val effectiveClass = if (staticClass.isPrimitive) staticClass else dynamicClass
+ val conforms = runtimeClass.isAssignableFrom(effectiveClass)
+ if (conforms) Some(x.asInstanceOf[T]) else None
+ }
+
+ // case class accessories
override def canEqual(x: Any) = x.isInstanceOf[ClassTag[_]]
override def equals(x: Any) = x.isInstanceOf[ClassTag[_]] && this.runtimeClass == x.asInstanceOf[ClassTag[_]].runtimeClass
override def hashCode = scala.runtime.ScalaRunTime.hash(runtimeClass)
- override def toString = "ClassTag[" + runtimeClass + "]"
+ override def toString = {
+ def prettyprint(clazz: jClass[_]): String =
+ if (clazz.isArray) s"Array[${prettyprint(arrayElementClass(clazz))}]" else
+ clazz.getName
+ prettyprint(runtimeClass)
+ }
}
+/**
+ * Class tags corresponding to primitive types and constructor/extractor for ClassTags.
+ */
object ClassTag {
+ private val ObjectTYPE = classOf[java.lang.Object]
private val NothingTYPE = classOf[scala.runtime.Nothing$]
private val NullTYPE = classOf[scala.runtime.Null$]
- private val ObjectTYPE = classOf[java.lang.Object]
- val Byte : ClassTag[scala.Byte] = new ClassTag[scala.Byte]{ def runtimeClass = java.lang.Byte.TYPE; private def readResolve() = ClassTag.Byte }
- val Short : ClassTag[scala.Short] = new ClassTag[scala.Short]{ def runtimeClass = java.lang.Short.TYPE; private def readResolve() = ClassTag.Short }
- val Char : ClassTag[scala.Char] = new ClassTag[scala.Char]{ def runtimeClass = java.lang.Character.TYPE; private def readResolve() = ClassTag.Char }
- val Int : ClassTag[scala.Int] = new ClassTag[scala.Int]{ def runtimeClass = java.lang.Integer.TYPE; private def readResolve() = ClassTag.Int }
- val Long : ClassTag[scala.Long] = new ClassTag[scala.Long]{ def runtimeClass = java.lang.Long.TYPE; private def readResolve() = ClassTag.Long }
- val Float : ClassTag[scala.Float] = new ClassTag[scala.Float]{ def runtimeClass = java.lang.Float.TYPE; private def readResolve() = ClassTag.Float }
- val Double : ClassTag[scala.Double] = new ClassTag[scala.Double]{ def runtimeClass = java.lang.Double.TYPE; private def readResolve() = ClassTag.Double }
- val Boolean : ClassTag[scala.Boolean] = new ClassTag[scala.Boolean]{ def runtimeClass = java.lang.Boolean.TYPE; private def readResolve() = ClassTag.Boolean }
- val Unit : ClassTag[scala.Unit] = new ClassTag[scala.Unit]{ def runtimeClass = java.lang.Void.TYPE; private def readResolve() = ClassTag.Unit }
- val Any : ClassTag[scala.Any] = new ClassTag[scala.Any]{ def runtimeClass = ObjectTYPE; private def readResolve() = ClassTag.Any }
- val Object : ClassTag[java.lang.Object] = new ClassTag[java.lang.Object]{ def runtimeClass = ObjectTYPE; private def readResolve() = ClassTag.Object }
- val AnyVal : ClassTag[scala.AnyVal] = ClassTag.Object.asInstanceOf[ClassTag[scala.AnyVal]]
- val AnyRef : ClassTag[scala.AnyRef] = ClassTag.Object.asInstanceOf[ClassTag[scala.AnyRef]]
- val Nothing : ClassTag[scala.Nothing] = new ClassTag[scala.Nothing]{ def runtimeClass = NothingTYPE; private def readResolve() = ClassTag.Nothing }
- val Null : ClassTag[scala.Null] = new ClassTag[scala.Null]{ def runtimeClass = NullTYPE; private def readResolve() = ClassTag.Null }
+ val Byte : ClassTag[scala.Byte] = Manifest.Byte
+ val Short : ClassTag[scala.Short] = Manifest.Short
+ val Char : ClassTag[scala.Char] = Manifest.Char
+ val Int : ClassTag[scala.Int] = Manifest.Int
+ val Long : ClassTag[scala.Long] = Manifest.Long
+ val Float : ClassTag[scala.Float] = Manifest.Float
+ val Double : ClassTag[scala.Double] = Manifest.Double
+ val Boolean : ClassTag[scala.Boolean] = Manifest.Boolean
+ val Unit : ClassTag[scala.Unit] = Manifest.Unit
+ val Any : ClassTag[scala.Any] = Manifest.Any
+ val Object : ClassTag[java.lang.Object] = Manifest.Object
+ val AnyVal : ClassTag[scala.AnyVal] = Manifest.AnyVal
+ val AnyRef : ClassTag[scala.AnyRef] = Manifest.AnyRef
+ val Nothing : ClassTag[scala.Nothing] = Manifest.Nothing
+ val Null : ClassTag[scala.Null] = Manifest.Null
def apply[T](runtimeClass1: jClass[_]): ClassTag[T] =
runtimeClass1 match {
@@ -96,8 +124,10 @@ object ClassTag {
case java.lang.Boolean.TYPE => ClassTag.Boolean.asInstanceOf[ClassTag[T]]
case java.lang.Void.TYPE => ClassTag.Unit.asInstanceOf[ClassTag[T]]
case ObjectTYPE => ClassTag.Object.asInstanceOf[ClassTag[T]]
+ case NothingTYPE => ClassTag.Nothing.asInstanceOf[ClassTag[T]]
+ case NullTYPE => ClassTag.Null.asInstanceOf[ClassTag[T]]
case _ => new ClassTag[T]{ def runtimeClass = runtimeClass1 }
}
def unapply[T](ctag: ClassTag[T]): Option[Class[_]] = Some(ctag.runtimeClass)
-}
\ No newline at end of file
+}
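
The net effect of the `ClassTag` rework is easiest to see from user code: the compiler materializes the tag, `runtimeClass` is the erasure, arrays of an erased element type can be created, and the new `unapply` overloads make type patterns on `T` checkable. A short usage sketch (the method names are illustrative):

    import scala.reflect.{ ClassTag, classTag }

    // The compiler materializes ClassTag[T]; runtimeClass is T's erasure.
    def erasureName[T: ClassTag]: String = classTag[T].runtimeClass.getName

    // ClassTag knows how to instantiate arrays even though T is erased.
    def pair[T: ClassTag](a: T, b: T): Array[T] = Array(a, b)

    // Type patterns on T become checkable, backed by ClassTag.unapply.
    def ofType[T: ClassTag](xs: List[Any]): List[T] =
      xs.collect { case x: T => x }
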
diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala
index 9347f5b6bb..8b021e0444 100644
--- a/src/library/scala/reflect/Manifest.scala
+++ b/src/library/scala/reflect/Manifest.scala
@@ -38,7 +38,7 @@ import scala.collection.mutable.{ ArrayBuilder, WrappedArray }
}}}
*
*/
-@annotation.implicitNotFound(msg = "No Manifest available for ${T}.")
+@scala.annotation.implicitNotFound(msg = "No Manifest available for ${T}.")
@deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
trait Manifest[T] extends ClassManifest[T] with Equals {
override def typeArguments: List[Manifest[_]] = Nil
@@ -155,28 +155,34 @@ object ManifestFactory {
private def readResolve(): Any = Manifest.Unit
}
- val Any: Manifest[scala.Any] = new PhantomManifest[scala.Any]("Any") {
+ private val ObjectTYPE = classOf[java.lang.Object]
+ private val NothingTYPE = classOf[scala.runtime.Nothing$]
+ private val NullTYPE = classOf[scala.runtime.Null$]
+
+ val Any: Manifest[scala.Any] = new PhantomManifest[scala.Any](ObjectTYPE, "Any") {
override def <:<(that: ClassManifest[_]): Boolean = (that eq this)
private def readResolve(): Any = Manifest.Any
}
- val Object: Manifest[java.lang.Object] = new PhantomManifest[java.lang.Object]("Object") {
+ val Object: Manifest[java.lang.Object] = new PhantomManifest[java.lang.Object](ObjectTYPE, "Object") {
override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any)
private def readResolve(): Any = Manifest.Object
}
- val AnyVal: Manifest[scala.AnyVal] = new PhantomManifest[scala.AnyVal]("AnyVal") {
+ val AnyRef: Manifest[scala.AnyRef] = Object.asInstanceOf[Manifest[scala.AnyRef]]
+
+ val AnyVal: Manifest[scala.AnyVal] = new PhantomManifest[scala.AnyVal](ObjectTYPE, "AnyVal") {
override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any)
private def readResolve(): Any = Manifest.AnyVal
}
- val Null: Manifest[scala.Null] = new PhantomManifest[scala.Null]("Null") {
+ val Null: Manifest[scala.Null] = new PhantomManifest[scala.Null](NullTYPE, "Null") {
override def <:<(that: ClassManifest[_]): Boolean =
(that ne null) && (that ne Nothing) && !(that <:< AnyVal)
private def readResolve(): Any = Manifest.Null
}
- val Nothing: Manifest[scala.Nothing] = new PhantomManifest[scala.Nothing]("Nothing") {
+ val Nothing: Manifest[scala.Nothing] = new PhantomManifest[scala.Nothing](NothingTYPE, "Nothing") {
override def <:<(that: ClassManifest[_]): Boolean = (that ne null)
private def readResolve(): Any = Manifest.Nothing
}
@@ -211,7 +217,8 @@ object ManifestFactory {
def classType[T](prefix: Manifest[_], clazz: Predef.Class[_], args: Manifest[_]*): Manifest[T] =
new ClassTypeManifest[T](Some(prefix), clazz, args.toList)
- private abstract class PhantomManifest[T](override val toString: String) extends ClassTypeManifest[T](None, classOf[java.lang.Object], Nil) {
+ private abstract class PhantomManifest[T](_runtimeClass: Predef.Class[_],
+ override val toString: String) extends ClassTypeManifest[T](None, _runtimeClass, Nil) {
override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
override val hashCode = System.identityHashCode(this)
}
@@ -257,4 +264,4 @@ object ManifestFactory {
def runtimeClass = parents.head.erasure
override def toString = parents.mkString(" with ")
}
-}
\ No newline at end of file
+}
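
Since the shared constants in `ClassTag` now reuse these manifests, giving the phantom manifests a real runtime class also fixes what the tags report as their erasure. A small sketch of the expected behaviour (the results are shown as comments, not captured from a run):

    import scala.reflect.ClassTag

    object PhantomErasures extends App {
      println(ClassTag.Any.runtimeClass)       // expected: class java.lang.Object
      println(ClassTag.Nothing.runtimeClass)   // expected: class scala.runtime.Nothing$
      println(ClassTag.Null.runtimeClass)      // expected: class scala.runtime.Null$
    }
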
diff --git a/src/library/scala/reflect/NameTransformer.scala b/src/library/scala/reflect/NameTransformer.scala
index ff56e20d52..77cbd20321 100755
--- a/src/library/scala/reflect/NameTransformer.scala
+++ b/src/library/scala/reflect/NameTransformer.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.reflect
+package scala
+package reflect
/** Provides functions to encode and decode Scala symbolic names.
* Also provides some constants.
diff --git a/src/library/scala/reflect/base/AnnotationInfos.scala b/src/library/scala/reflect/base/AnnotationInfos.scala
deleted file mode 100644
index f03644deef..0000000000
--- a/src/library/scala/reflect/base/AnnotationInfos.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-package scala.reflect
-package base
-
-trait AnnotationInfos { self: Universe =>
-
- type AnnotationInfo >: Null <: AnyRef
- implicit val AnnotationInfoTag: ClassTag[AnnotationInfo]
- val AnnotationInfo: AnnotationInfoExtractor
-
- abstract class AnnotationInfoExtractor {
- def apply(atp: Type, args: List[Tree], assocs: List[(Name, ClassfileAnnotArg)]): AnnotationInfo
- def unapply(info: AnnotationInfo): Option[(Type, List[Tree], List[(Name, ClassfileAnnotArg)])]
- }
-
- type ClassfileAnnotArg >: Null <: AnyRef
- implicit val ClassfileAnnotArgTag: ClassTag[ClassfileAnnotArg]
-
- type LiteralAnnotArg >: Null <: AnyRef with ClassfileAnnotArg
- implicit val LiteralAnnotArgTag: ClassTag[LiteralAnnotArg]
- val LiteralAnnotArg: LiteralAnnotArgExtractor
-
- abstract class LiteralAnnotArgExtractor {
- def apply(const: Constant): LiteralAnnotArg
- def unapply(arg: LiteralAnnotArg): Option[Constant]
- }
-
- type ArrayAnnotArg >: Null <: AnyRef with ClassfileAnnotArg
- implicit val ArrayAnnotArgTag: ClassTag[ArrayAnnotArg]
- val ArrayAnnotArg: ArrayAnnotArgExtractor
-
- abstract class ArrayAnnotArgExtractor {
- def apply(args: Array[ClassfileAnnotArg]): ArrayAnnotArg
- def unapply(arg: ArrayAnnotArg): Option[Array[ClassfileAnnotArg]]
- }
-
- type NestedAnnotArg >: Null <: AnyRef with ClassfileAnnotArg
- implicit val NestedAnnotArgTag: ClassTag[NestedAnnotArg]
- val NestedAnnotArg: NestedAnnotArgExtractor
-
- abstract class NestedAnnotArgExtractor {
- def apply(annInfo: AnnotationInfo): NestedAnnotArg
- def unapply(arg: NestedAnnotArg): Option[AnnotationInfo]
- }
-}
\ No newline at end of file
diff --git a/src/library/scala/reflect/base/Annotations.scala b/src/library/scala/reflect/base/Annotations.scala
new file mode 100644
index 0000000000..107443f09b
--- /dev/null
+++ b/src/library/scala/reflect/base/Annotations.scala
@@ -0,0 +1,106 @@
+package scala.reflect
+package base
+
+import scala.collection.immutable.ListMap
+
+/**
+ * Defines the type hierarchy for annotations.
+ */
+trait Annotations { self: Universe =>
+
+ /** Typed information about an annotation. It can be attached to either a symbol or an annotated type.
+ *
+ * Annotations are either ''Scala annotations'', which conform to [[scala.annotation.StaticAnnotation]]
+ * or ''Java annotations'', which conform to [[scala.annotation.ClassfileAnnotation]].
+ * Trait `ClassfileAnnotation` is automatically added to every Java annotation by the scalac classfile parser.
+ */
+ type Annotation >: Null <: AnyRef
+
+ /** A tag that preserves the identity of the `Annotation` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val AnnotationTag: ClassTag[Annotation]
+
+ /** The constructor/deconstructor for `Annotation` instances. */
+ val Annotation: AnnotationExtractor
+
+ /** An extractor class to create and pattern match with syntax `Annotation(atp, scalaArgs, javaArgs)`.
+ * Here, `atp` is the annotation type, `scalaArgs` the arguments, and `javaArgs` the annotation's key-value
+ * pairs.
+ *
+ * Annotations are pickled, i.e. written to scala symtab attribute in the classfile.
+ * Annotations are written to the classfile as Java annotations if `atp` conforms to `ClassfileAnnotation`.
+ *
+ * For Scala annotations, arguments are stored in `scalaArgs` and `javaArgs` is empty. Arguments in
+ * `scalaArgs` are represented as typed trees. Note that these trees are not transformed by any phases
+ * following the type-checker. For Java annotations, `scalaArgs` is empty and arguments are stored in
+ * `javaArgs`.
+ */
+ abstract class AnnotationExtractor {
+ def apply(tpe: Type, scalaArgs: List[Tree], javaArgs: ListMap[Name, JavaArgument]): Annotation
+ def unapply(ann: Annotation): Option[(Type, List[Tree], ListMap[Name, JavaArgument])]
+ }
+
+ /** A Java annotation argument */
+ type JavaArgument >: Null <: AnyRef
+ implicit val JavaArgumentTag: ClassTag[JavaArgument]
+
+ /** A literal argument to a Java annotation as `"Use X instead"` in `@Deprecated("Use X instead")`*/
+ type LiteralArgument >: Null <: AnyRef with JavaArgument
+
+ /** A tag that preserves the identity of the `LiteralArgument` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val LiteralArgumentTag: ClassTag[LiteralArgument]
+
+ /** The constructor/deconstructor for `LiteralArgument` instances. */
+ val LiteralArgument: LiteralArgumentExtractor
+
+ /** An extractor class to create and pattern match with syntax `LiteralArgument(value)`
+ * where `value` is the constant argument.
+ */
+ abstract class LiteralArgumentExtractor {
+ def apply(value: Constant): LiteralArgument
+ def unapply(arg: LiteralArgument): Option[Constant]
+ }
+
+ /** An array argument to a Java annotation as in `@Target(value={TYPE,FIELD,METHOD,PARAMETER})`
+ */
+ type ArrayArgument >: Null <: AnyRef with JavaArgument
+
+ /** A tag that preserves the identity of the `ArrayArgument` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ArrayArgumentTag: ClassTag[ArrayArgument]
+
+ /** The constructor/deconstructor for `ArrayArgument` instances. */
+ val ArrayArgument: ArrayArgumentExtractor
+
+ /** An extractor class to create and pattern match with syntax `ArrayArgument(args)`
+ * where `args` is the argument array.
+ */
+ abstract class ArrayArgumentExtractor {
+ def apply(args: Array[JavaArgument]): ArrayArgument
+ def unapply(arg: ArrayArgument): Option[Array[JavaArgument]]
+ }
+
+ /** A nested annotation argument to a Java annotation as `@Nested` in `@Outer(@Nested)`.
+ */
+ type NestedArgument >: Null <: AnyRef with JavaArgument
+
+ /** A tag that preserves the identity of the `NestedArgument` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val NestedArgumentTag: ClassTag[NestedArgument]
+
+ /** The constructor/deconstructor for `NestedArgument` instances. */
+ val NestedArgument: NestedArgumentExtractor
+
+ /** An extractor class to create and pattern match with syntax `NestedArgument(annotation)`
+ * where `annotation` is the nested annotation.
+ */
+ abstract class NestedArgumentExtractor {
+ def apply(annotation: Annotation): NestedArgument
+ def unapply(arg: NestedArgument): Option[Annotation]
+ }
+}
\ No newline at end of file
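
The extractor-based API above can be exercised against any `Universe` without committing to an implementation. A minimal sketch against the `scala.reflect.base` API added in this patch (the helper name is illustrative):

    import scala.reflect.base.Universe

    // Summarize an Annotation via the extractor; the `u.` prefix makes the pattern path-dependent.
    def describeAnnotation(u: Universe)(ann: u.Annotation): String = ann match {
      case u.Annotation(tpe, scalaArgs, javaArgs) =>
        tpe + " with " + scalaArgs.length + " Scala arg(s) and " + javaArgs.size + " Java arg(s)"
    }
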
diff --git a/src/library/scala/reflect/base/Attachments.scala b/src/library/scala/reflect/base/Attachments.scala
index 43e870fc4f..479ab9a857 100644
--- a/src/library/scala/reflect/base/Attachments.scala
+++ b/src/library/scala/reflect/base/Attachments.scala
@@ -1,34 +1,44 @@
package scala.reflect
package base
-/** Attachments is a generalisation of Position.
- * Typically it stores a Position of a tree, but this can be extended to encompass arbitrary payloads.
+/** Attachments is a generalization of Position. Typically it stores a Position of a tree, but this can be extended to
+ * encompass arbitrary payloads. Payloads are stored in type-indexed slots, which can be read with `get[T]`, written
+ * with `update[T]`, and cleared with `remove[T]`.
*
- * Attachments have to carry positions, because we don't want to introduce even a single additional field in Tree
+ * Attachments always carry positions because we don't want to introduce an additional field for attachments in `Tree`
* imposing an unnecessary memory tax because of something that will not be used in most cases.
*/
abstract class Attachments { self =>
+ /** The position type of this attachment */
type Pos >: Null
- /** Gets the underlying position */
+ /** The underlying position */
def pos: Pos
- /** Creates a copy of this attachment with its position updated */
+ /** Creates a copy of this attachment with the position replaced by `newPos` */
def withPos(newPos: Pos): Attachments { type Pos = self.Pos }
- /** Gets the underlying payload */
+ /** The underlying payload with the guarantee that no two elements have the same type. */
def all: Set[Any] = Set.empty
+ private def matchesTag[T: ClassTag](datum: Any) =
+ classTag[T].runtimeClass == datum.getClass
+
+ /** An underlying payload of the given class type `T`. */
def get[T: ClassTag]: Option[T] =
- (all find (_.getClass == classTag[T].runtimeClass)).asInstanceOf[Option[T]]
+ (all filter matchesTag[T]).headOption.asInstanceOf[Option[T]]
- /** Creates a copy of this attachment with its payload updated */
- def add(attachment: Any): Attachments { type Pos = self.Pos } =
- new NonemptyAttachments(this.pos, all + attachment)
+ /** Creates a copy of this attachment with the payload slot of T added/updated with the provided value.
+ *
+ * Replaces an existing payload of the same type, if exists.
+ */
+ def update[T: ClassTag](attachment: T): Attachments { type Pos = self.Pos } =
+ new NonemptyAttachments(this.pos, remove[T].all + attachment)
+ /** Creates a copy of this attachment with the payload of the given class type `T` removed. */
def remove[T: ClassTag]: Attachments { type Pos = self.Pos } = {
- val newAll = all filterNot (_.getClass == classTag[T].runtimeClass)
+ val newAll = all filterNot matchesTag[T]
if (newAll.isEmpty) pos.asInstanceOf[Attachments { type Pos = self.Pos }]
else new NonemptyAttachments(this.pos, newAll)
}
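
The reworked `get`/`update`/`remove` give `Attachments` the semantics of a type-indexed map: at most one payload per runtime class, and `update` replaces any existing payload of the same type. Since `Attachments` itself is abstract over `Pos`, the sketch below is a standalone analogue of those semantics rather than the reflection API itself (the `Slots` name is illustrative):

    import scala.reflect.{ ClassTag, classTag }

    final case class Slots(all: Set[Any] = Set.empty) {
      private def matchesTag[T: ClassTag](datum: Any): Boolean =
        classTag[T].runtimeClass == datum.getClass
      def get[T: ClassTag]: Option[T] =
        all.find(matchesTag[T](_)).asInstanceOf[Option[T]]
      def update[T: ClassTag](payload: T): Slots =
        Slots(remove[T].all + payload)               // replaces any existing payload of type T
      def remove[T: ClassTag]: Slots =
        Slots(all.filterNot(matchesTag[T](_)))
    }

    // Slots().update("a position").update(42).get[String]   // Some("a position")
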
diff --git a/src/library/scala/reflect/base/Base.scala b/src/library/scala/reflect/base/Base.scala
index 28ebdf4377..6ecfd384ab 100644
--- a/src/library/scala/reflect/base/Base.scala
+++ b/src/library/scala/reflect/base/Base.scala
@@ -4,8 +4,12 @@ package base
import java.io.PrintWriter
import scala.annotation.switch
import scala.ref.WeakReference
-import collection.mutable
+import scala.collection.mutable
+import scala.collection.immutable.ListMap
+/**
+ * This is an internal implementation class.
+ */
class Base extends Universe { self =>
private var nextId = 0
@@ -36,10 +40,10 @@ class Base extends Universe { self =>
def newClassSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): ClassSymbol =
new ClassSymbol(this, name, flags)
- def newFreeTermSymbol(name: TermName, info: Type, value: => Any, flags: FlagSet = NoFlags, origin: String = null) =
+ def newFreeTermSymbol(name: TermName, value: => Any, flags: FlagSet = NoFlags, origin: String = null) =
new FreeTermSymbol(this, name, flags)
- def newFreeTypeSymbol(name: TypeName, info: Type, value: => Any, flags: FlagSet = NoFlags, origin: String = null) =
+ def newFreeTypeSymbol(name: TypeName, flags: FlagSet = NoFlags, origin: String = null) =
new FreeTypeSymbol(this, name, flags)
private def kindString: String =
@@ -157,7 +161,7 @@ class Base extends Universe { self =>
object ExistentialType extends ExistentialTypeExtractor
implicit val ExistentialTypeTag = ClassTag[ExistentialType](classOf[ExistentialType])
- case class AnnotatedType(annotations: List[AnnotationInfo], underlying: Type, selfsym: Symbol) extends Type { override def typeSymbol = underlying.typeSymbol }
+ case class AnnotatedType(annotations: List[Annotation], underlying: Type, selfsym: Symbol) extends Type { override def typeSymbol = underlying.typeSymbol }
object AnnotatedType extends AnnotatedTypeExtractor
implicit val AnnotatedTypeTag = ClassTag[AnnotatedType](classOf[AnnotatedType])
@@ -249,24 +253,24 @@ class Base extends Universe { self =>
object Constant extends ConstantExtractor
implicit val ConstantTag = ClassTag[Constant](classOf[Constant])
- case class AnnotationInfo(atp: Type, args: List[Tree], assocs: List[(Name, ClassfileAnnotArg)])
- object AnnotationInfo extends AnnotationInfoExtractor
- implicit val AnnotationInfoTag = ClassTag[AnnotationInfo](classOf[AnnotationInfo])
+ case class Annotation(tpe: Type, scalaArgs: List[Tree], javaArgs: ListMap[Name, JavaArgument])
+ object Annotation extends AnnotationExtractor
+ implicit val AnnotationTag = ClassTag[Annotation](classOf[Annotation])
- abstract class ClassfileAnnotArg
- implicit val ClassfileAnnotArgTag = ClassTag[ClassfileAnnotArg](classOf[ClassfileAnnotArg])
+ abstract class JavaArgument
+ implicit val JavaArgumentTag = ClassTag[JavaArgument](classOf[JavaArgument])
- case class LiteralAnnotArg(const: Constant) extends ClassfileAnnotArg
- object LiteralAnnotArg extends LiteralAnnotArgExtractor
- implicit val LiteralAnnotArgTag = ClassTag[LiteralAnnotArg](classOf[LiteralAnnotArg])
+ case class LiteralArgument(value: Constant) extends JavaArgument
+ object LiteralArgument extends LiteralArgumentExtractor
+ implicit val LiteralArgumentTag = ClassTag[LiteralArgument](classOf[LiteralArgument])
- case class ArrayAnnotArg(args: Array[ClassfileAnnotArg]) extends ClassfileAnnotArg
- object ArrayAnnotArg extends ArrayAnnotArgExtractor
- implicit val ArrayAnnotArgTag = ClassTag[ArrayAnnotArg](classOf[ArrayAnnotArg])
+ case class ArrayArgument(args: Array[JavaArgument]) extends JavaArgument
+ object ArrayArgument extends ArrayArgumentExtractor
+ implicit val ArrayArgumentTag = ClassTag[ArrayArgument](classOf[ArrayArgument])
- case class NestedAnnotArg(annInfo: AnnotationInfo) extends ClassfileAnnotArg
- object NestedAnnotArg extends NestedAnnotArgExtractor
- implicit val NestedAnnotArgTag = ClassTag[NestedAnnotArg](classOf[NestedAnnotArg])
+ case class NestedArgument(annotation: Annotation) extends JavaArgument
+ object NestedArgument extends NestedArgumentExtractor
+ implicit val NestedArgumentTag = ClassTag[NestedArgument](classOf[NestedArgument])
class Position extends Attachments {
override type Pos = Position
@@ -311,18 +315,15 @@ class Base extends Universe { self =>
else new TypeSymbol(owner, name.toTypeName, flags)
else new TermSymbol(owner, name.toTermName, flags)
- def newFreeTerm(name: String, info: Type, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
+ def newFreeTerm(name: String, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
new FreeTermSymbol(rootMirror.RootClass, newTermName(name), flags)
- def newFreeType(name: String, info: Type, value: => Any, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
- new FreeTypeSymbol(rootMirror.RootClass, newTypeName(name), flags)
-
- def newFreeExistential(name: String, info: Type, value: => Any, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
+ def newFreeType(name: String, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
new FreeTypeSymbol(rootMirror.RootClass, newTypeName(name), flags)
def setTypeSignature[S <: Symbol](sym: S, tpe: Type): S = sym
- def setAnnotations[S <: Symbol](sym: S, annots: List[AnnotationInfo]): S = sym
+ def setAnnotations[S <: Symbol](sym: S, annots: List[Annotation]): S = sym
def flagsFromBits(bits: Long): FlagSet = bits
@@ -626,10 +627,6 @@ class Base extends Universe { self =>
extends GenericApply
object Apply extends ApplyExtractor
- case class ApplyDynamic(qual: Tree, args: List[Tree])
- extends TermTree with SymTree
- object ApplyDynamic extends ApplyDynamicExtractor
-
case class Super(qual: Tree, mix: TypeName) extends TermTree
object Super extends SuperExtractor
@@ -728,7 +725,6 @@ class Base extends Universe { self =>
implicit val GenericApplyTag = ClassTag[GenericApply](classOf[GenericApply])
implicit val TypeApplyTag = ClassTag[TypeApply](classOf[TypeApply])
implicit val ApplyTag = ClassTag[Apply](classOf[Apply])
- implicit val ApplyDynamicTag = ClassTag[ApplyDynamic](classOf[ApplyDynamic])
implicit val SuperTag = ClassTag[Super](classOf[Super])
implicit val ThisTag = ClassTag[This](classOf[This])
implicit val SelectTag = ClassTag[Select](classOf[Select])
diff --git a/src/library/scala/reflect/base/BuildUtils.scala b/src/library/scala/reflect/base/BuildUtils.scala
index 98f32231ad..5982329aef 100644
--- a/src/library/scala/reflect/base/BuildUtils.scala
+++ b/src/library/scala/reflect/base/BuildUtils.scala
@@ -1,6 +1,9 @@
package scala.reflect
package base
+/**
+ * This is an internal implementation trait.
+ */
trait BuildUtils { self: Universe =>
val build: BuildBase
@@ -29,36 +32,18 @@ trait BuildUtils { self: Universe =>
/** Create a fresh free term symbol.
* @param name the name of the free variable
- * @param info the type signature of the free variable
* @param value the value of the free variable at runtime
* @param flags (optional) flags of the free variable
* @param origin debug information that tells where this symbol comes from
*/
- def newFreeTerm(name: String, info: Type, value: => Any, flags: FlagSet = NoFlags, origin: String = null): FreeTermSymbol
+ def newFreeTerm(name: String, value: => Any, flags: FlagSet = NoFlags, origin: String = null): FreeTermSymbol
- /** Create a fresh free non-existential type symbol.
+ /** Create a fresh free type symbol.
* @param name the name of the free variable
- * @param info the type signature of the free variable
- * @param value a type tag that captures the value of the free variable
- * is completely phantom, since the captured type cannot be propagated to the runtime
- * if it could be, we wouldn't be creating a free type to begin with
- * the only usage for it is preserving the captured symbol for compile-time analysis
* @param flags (optional) flags of the free variable
* @param origin debug information that tells where this symbol comes from
*/
- def newFreeType(name: String, info: Type, value: => Any, flags: FlagSet = NoFlags, origin: String = null): FreeTypeSymbol
-
- /** Create a fresh free existential type symbol.
- * @param name the name of the free variable
- * @param info the type signature of the free variable
- * @param value a type tag that captures the value of the free variable
- * is completely phantom, since the captured type cannot be propagated to the runtime
- * if it could be, we wouldn't be creating a free type to begin with
- * the only usage for it is preserving the captured symbol for compile-time analysis
- * @param flags (optional) flags of the free variable
- * @param origin (optional) debug information that tells where this symbol comes from
- */
- def newFreeExistential(name: String, info: Type, value: => Any, flags: FlagSet = NoFlags, origin: String = null): FreeTypeSymbol
+ def newFreeType(name: String, flags: FlagSet = NoFlags, origin: String = null): FreeTypeSymbol
/** Set symbol's type signature to given type.
* @return the symbol itself
@@ -67,7 +52,7 @@ trait BuildUtils { self: Universe =>
/** Set symbol's annotations to given annotations `annots`.
*/
- def setAnnotations[S <: Symbol](sym: S, annots: List[AnnotationInfo]): S
+ def setAnnotations[S <: Symbol](sym: S, annots: List[Annotation]): S
def flagsFromBits(bits: Long): FlagSet
diff --git a/src/library/scala/reflect/base/Constants.scala b/src/library/scala/reflect/base/Constants.scala
index ba12b02e92..240434362d 100644
--- a/src/library/scala/reflect/base/Constants.scala
+++ b/src/library/scala/reflect/base/Constants.scala
@@ -6,13 +6,29 @@
package scala.reflect
package base
+/**
+ * Defines the type hierarchy for compile-time constants.
+ *
+ * @see [[scala.reflect]] for a description of how the class hierarchy is encoded here.
+ */
trait Constants {
self: Universe =>
+ /** The type of compile-time constants.
+ */
type Constant >: Null <: AnyRef
+
+ /** A tag that preserves the identity of the `Constant` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
implicit val ConstantTag: ClassTag[Constant]
+
+ /** The constructor/deconstructor for `Constant` instances. */
val Constant: ConstantExtractor
+ /** An extractor class to create and pattern match with syntax `Constant(value)`
+ * where `value` is the Scala value of the constant.
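+   *
+   * For example, the integer literal `42` in a tree wraps a constant (a sketch):
+   * {{{
+   * Literal(Constant(42))
+   * }}}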
+ */
abstract class ConstantExtractor {
def apply(value: Any): Constant
def unapply(arg: Constant): Option[Any]
diff --git a/src/library/scala/reflect/base/Exprs.scala b/src/library/scala/reflect/base/Exprs.scala
index 10c222722a..bd15c65711 100644
--- a/src/library/scala/reflect/base/Exprs.scala
+++ b/src/library/scala/reflect/base/Exprs.scala
@@ -8,16 +8,71 @@ package base
trait Exprs { self: Universe =>
- /** An expression tree tagged with its type */
+ /** Expr wraps an expression tree and tags it with its type. */
trait Expr[+T] extends Equals with Serializable {
val mirror: Mirror
+ /**
+ * Migrates the expression into another mirror, jumping into a different universe if necessary.
+ *
+ * This means that all symbolic references to classes/objects/packages in the expression
+ * will be re-resolved within the new mirror (typically using that mirror's classloader).
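+     *
+     * For example, assuming `expr` is an `Expr` value, it can be migrated into the
+     * current mirror of the runtime universe (a sketch):
+     * {{{
+     * import scala.reflect.runtime.{currentMirror => cm}
+     * expr.in(cm)
+     * }}}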
+ */
def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # Expr[T]
+ /**
+ * The Scala syntax tree representing the wrapped expression.
+ */
def tree: Tree
+
+ /**
+ * Representation of the type of the wrapped expression tree as found via type tags.
+ */
def staticType: Type
+ /**
+ * Representation of the type of the wrapped expression tree as found in the tree.
+ */
def actualType: Type
+ /**
+ * A dummy method to mark expression splicing in reification.
+ *
+ * It should only be used within a `reify` call, which eliminates the `splice` call and embeds
+ * the wrapped tree into the reified surrounding expression.
+ * If used alone `splice` throws an exception when called at runtime.
+ *
+ * If you want to use an Expr in reification of some Scala code, you need to splice it in.
+ * For an expr of type `Expr[T]`, where `T` has a method `foo`, the following code
+ * {{{
+ * reify{ expr.splice.foo }
+ * }}}
+ * uses splice to turn an expr of type Expr[T] into a value of type T in the context of `reify`.
+ *
+ * It is equivalent to
+ * {{{
+ * Select( expr.tree, newTermName("foo") )
+ * }}}
+ *
+ * The following example code however does not compile
+ * {{{
+ * reify{ expr.foo }
+ * }}}
+ * because expr of type Expr[T] itself does not have a method foo.
+ */
def splice: T
+ /**
+ * A dummy value to denote cross-stage path-dependent type dependencies.
+ *
+ * For example for the following macro definition:
+ * {{{
+ * class X { type T }
+ * object Macros { def foo(x: X): x.T = macro Impls.foo_impl }
+ * }}}
+ *
+ * The corresponding macro implementation should have the following signature (note how the return type denotes path-dependency on x):
+ * {{{
+ * object Impls { def foo_impl(c: Context)(x: c.Expr[X]): c.Expr[x.value.T] = ... }
+ * }}}
+ */
val value: T
/** case class accessories */
@@ -27,20 +82,26 @@ trait Exprs { self: Universe =>
override def toString = "Expr["+staticType+"]("+tree+")"
}
+ /**
+ * Constructor/Extractor for Expr.
+ *
+   * Can be useful when one has a tree and wants to splice it into a `reify` call,
+   * in which case the tree first needs to be wrapped in an `Expr`.
+ */
object Expr {
- def apply[T: AbsTypeTag](mirror: MirrorOf[self.type], treec: TreeCreator): Expr[T] = new ExprImpl[T](mirror.asInstanceOf[Mirror], treec)
+ def apply[T: WeakTypeTag](mirror: MirrorOf[self.type], treec: TreeCreator): Expr[T] = new ExprImpl[T](mirror.asInstanceOf[Mirror], treec)
def unapply[T](expr: Expr[T]): Option[Tree] = Some(expr.tree)
}
- private class ExprImpl[+T: AbsTypeTag](val mirror: Mirror, val treec: TreeCreator) extends Expr[T] {
+ private class ExprImpl[+T: WeakTypeTag](val mirror: Mirror, val treec: TreeCreator) extends Expr[T] {
def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # Expr[T] = {
val otherMirror1 = otherMirror.asInstanceOf[MirrorOf[otherMirror.universe.type]]
- val tag1 = (implicitly[AbsTypeTag[T]] in otherMirror).asInstanceOf[otherMirror.universe.AbsTypeTag[T]]
+ val tag1 = (implicitly[WeakTypeTag[T]] in otherMirror).asInstanceOf[otherMirror.universe.WeakTypeTag[T]]
otherMirror.universe.Expr[T](otherMirror1, treec)(tag1)
}
lazy val tree: Tree = treec(mirror)
- lazy val staticType: Type = implicitly[AbsTypeTag[T]].tpe
+ lazy val staticType: Type = implicitly[WeakTypeTag[T]].tpe
def actualType: Type = treeType(tree)
def splice: T = throw new UnsupportedOperationException("""
@@ -54,11 +115,11 @@ trait Exprs { self: Universe =>
|if you want to get a value of the underlying expression, add scala-compiler.jar to the classpath,
|import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.""".trim.stripMargin)
- private def writeReplace(): AnyRef = new SerializedExpr(treec, implicitly[AbsTypeTag[T]].in(scala.reflect.basis.rootMirror))
+ private def writeReplace(): AnyRef = new SerializedExpr(treec, implicitly[WeakTypeTag[T]].in(scala.reflect.basis.rootMirror))
}
}
-private[scala] class SerializedExpr(var treec: TreeCreator, var tag: scala.reflect.basis.AbsTypeTag[_]) extends Serializable {
+private[scala] class SerializedExpr(var treec: TreeCreator, var tag: scala.reflect.basis.WeakTypeTag[_]) extends Serializable {
private def writeObject(out: java.io.ObjectOutputStream): Unit = {
out.writeObject(treec)
out.writeObject(tag)
@@ -66,7 +127,7 @@ private[scala] class SerializedExpr(var treec: TreeCreator, var tag: scala.refle
private def readObject(in: java.io.ObjectInputStream): Unit = {
treec = in.readObject().asInstanceOf[TreeCreator]
- tag = in.readObject().asInstanceOf[scala.reflect.basis.AbsTypeTag[_]]
+ tag = in.readObject().asInstanceOf[scala.reflect.basis.WeakTypeTag[_]]
}
private def readResolve(): AnyRef = {
diff --git a/src/library/scala/reflect/base/FlagSets.scala b/src/library/scala/reflect/base/FlagSets.scala
index 96cdbe894c..0ce7613eb3 100644
--- a/src/library/scala/reflect/base/FlagSets.scala
+++ b/src/library/scala/reflect/base/FlagSets.scala
@@ -3,7 +3,7 @@ package base
trait FlagSets { self: Universe =>
- /** An abstract type representing sets of flags that apply to definition trees and symbols */
+ /** An abstract type representing sets of flags (like private, final, etc.) that apply to definition trees and symbols */
type FlagSet
/** A tag that preserves the identity of the `FlagSet` abstract type from erasure.
diff --git a/src/library/scala/reflect/base/MirrorOf.scala b/src/library/scala/reflect/base/MirrorOf.scala
index 1e9619d062..4e54a2fae7 100644
--- a/src/library/scala/reflect/base/MirrorOf.scala
+++ b/src/library/scala/reflect/base/MirrorOf.scala
@@ -1,14 +1,33 @@
package scala.reflect
package base
+/**
+ * The base interface for all mirrors.
+ *
+ * @tparam U the type of the universe this mirror belongs to.
+ *
+ * This is defined outside the reflection universe cake pattern implementation
+ * so that it can be referenced from outside. For example TypeCreator and TreeCreator
+ * reference MirrorOf and also need to be defined outside the cake as they are
+ * used by type tags, which can be migrated between different universes and consequently
+ * cannot be bound to a fixed one.
+ *
+ * @see [[Mirrors]]
+ */
abstract class MirrorOf[U <: base.Universe with Singleton] {
- /** .. */
+ /** The universe this mirror belongs to. */
val universe: U
- /** .. */
+ /** The class symbol of the `_root_` package */
def RootClass: U#ClassSymbol
+
+ /** The module symbol of the `_root_` package */
def RootPackage: U#ModuleSymbol
+
+ /** The module class symbol of the default (unnamed) package */
def EmptyPackageClass: U#ClassSymbol
+
+ /** The module symbol of the default (unnamed) package */
def EmptyPackage: U#ModuleSymbol
/** The symbol corresponding to the globally accessible class with the
diff --git a/src/library/scala/reflect/base/Mirrors.scala b/src/library/scala/reflect/base/Mirrors.scala
index 50866ef000..e38a3d1cdd 100644
--- a/src/library/scala/reflect/base/Mirrors.scala
+++ b/src/library/scala/reflect/base/Mirrors.scala
@@ -1,12 +1,22 @@
package scala.reflect
package base
+/**
+ * Defines a type hierarchy for mirrors.
+ *
+ * Every universe has one or more mirrors. A mirror defines a hierarchy of symbols starting with the root package `_root_`
+ * and provides methods to locate and define classes and singleton objects in that hierarchy.
+ *
+ * On the JVM, there is a one-to-one correspondence between class loaders and mirrors.
+ */
trait Mirrors {
self: Universe =>
- /** .. */
+ /** The base type of all mirrors of this universe */
type Mirror >: Null <: MirrorOf[self.type]
- /** .. */
+ /** The root mirror of this universe. This mirror contains standard Scala classes and types such as `Any`, `AnyRef`, `AnyVal`,
+ * `Nothing`, `Null`, and all classes loaded from scala-library, which are shared across all mirrors within the enclosing universe.
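+   *
+   * For example, classes can be looked up by their fully qualified names through a mirror (a sketch):
+   * {{{
+   * scala.reflect.runtime.universe.rootMirror.staticClass("scala.Option")
+   * }}}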
+ */
val rootMirror: Mirror
}
diff --git a/src/library/scala/reflect/base/Names.scala b/src/library/scala/reflect/base/Names.scala
index 532b780e7e..b02038a920 100644
--- a/src/library/scala/reflect/base/Names.scala
+++ b/src/library/scala/reflect/base/Names.scala
@@ -1,23 +1,29 @@
package scala.reflect
package base
-import language.implicitConversions
+import scala.language.implicitConversions
/** A trait that manages names.
- * A name is a string in one of two name universes: terms and types.
- * The same string can be a name in both universes.
- * Two names are equal if they represent the same string and they are
- * members of the same universe.
- *
- * Names are interned. That is, for two names `name11 and `name2`,
- * `name1 == name2` implies `name1 eq name2`.
+ *
+ * @see TermName
+ * @see TypeName
*/
trait Names {
- /** Intentionally no implicit from String => Name. */
+ // Intentionally no implicit from String => Name.
implicit def stringToTermName(s: String): TermName = newTermName(s)
implicit def stringToTypeName(s: String): TypeName = newTypeName(s)
- /** The abstract type of names */
+ /**
+ * The abstract type of names
+ *
+   * A Name wraps a string as the name for either a type ([[TypeName]]) or a term ([[TermName]]).
+   * Two names are equal if the wrapped strings are equal and they are either both `TypeName`s or both `TermName`s.
+   * The same string can co-exist as a `TypeName` and a `TermName`, but they would not be equal.
+   * Names are interned. That is, for two names `name1` and `name2`,
+   * `name1 == name2` implies `name1 eq name2`.
+   *
+   * One of the reasons for using names rather than plain strings is to be more explicit about whether a string is a name and, if so, whether it names a type or a term.
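+   *
+   * For example, in the runtime universe (a sketch):
+   * {{{
+   * import scala.reflect.runtime.universe._
+   * val term = newTermName("List")
+   * val tpe  = term.toTypeName
+   * term == tpe                   // false: a TermName is never equal to a TypeName
+   * newTermName("List") == term   // true: equal strings of the same kind yield equal (interned) names
+   * }}}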
+ */
type Name >: Null <: NameBase
implicit val NameTag: ClassTag[Name]
@@ -31,16 +37,16 @@ trait Names {
/** The base API that all names support */
abstract class NameBase {
- /** Is this name a term name? */
+    /** Checks whether the name is a term name */
def isTermName: Boolean
- /** Is this name a type name? */
+    /** Checks whether the name is a type name */
def isTypeName: Boolean
- /** Returns a term name that represents the same string as this name */
+ /** Returns a term name that wraps the same string as `this` */
def toTermName: TermName
- /** Returns a type name that represents the same string as this name */
+ /** Returns a type name that wraps the same string as `this` */
def toTypeName: TypeName
}
@@ -52,7 +58,11 @@ trait Names {
*/
def newTypeName(s: String): TypeName
+  /** Wraps the empty string. Can be used as the null object for a term name.
+ */
def EmptyTermName: TermName = newTermName("")
+  /** Wraps the empty string. Can be used as the null object for a type name.
+ */
def EmptyTypeName: TypeName = EmptyTermName.toTypeName
}
diff --git a/src/library/scala/reflect/base/Positions.scala b/src/library/scala/reflect/base/Positions.scala
index 76a7382e9e..70412a2f4b 100644
--- a/src/library/scala/reflect/base/Positions.scala
+++ b/src/library/scala/reflect/base/Positions.scala
@@ -1,17 +1,22 @@
package scala.reflect
package base
+/**
+ * Defines the type hierarchy for positions.
+ *
+ * @see [[scala.reflect]] for a description of how the class hierarchy is encoded here.
+ */
trait Positions {
self: Universe =>
- /** .. */
+ /** The base type for all positions of tree nodes in source files. */
type Position >: Null <: Attachments { type Pos = Position }
- /** A tag that preserves the identity of the `FlagSet` abstract type from erasure.
+ /** A tag that preserves the identity of the `Position` abstract type from erasure.
* Can be used for pattern matching, instance tests, serialization and likes.
*/
implicit val PositionTag: ClassTag[Position]
- /** .. */
+ /** A special "missing" position. */
val NoPosition: Position
}
diff --git a/src/library/scala/reflect/base/Scopes.scala b/src/library/scala/reflect/base/Scopes.scala
index a388fdc392..a8c498b814 100644
--- a/src/library/scala/reflect/base/Scopes.scala
+++ b/src/library/scala/reflect/base/Scopes.scala
@@ -1,8 +1,16 @@
package scala.reflect
package base
+/**
+ * Defines the type hierarchy for scopes.
+ *
+ * @see [[scala.reflect]] for a description of how the class hierarchy is encoded here.
+ */
trait Scopes { self: Universe =>
+ /** The base type of all scopes. A scope object generally maps names to symbols available in the current lexical scope.
+ * Scopes can be nested. This base type, however, only exposes a minimal interface, representing a scope as an iterable of symbols.
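+   *
+   * For example, the members of a type form a scope (a sketch, using the runtime universe):
+   * {{{
+   * import scala.reflect.runtime.universe._
+   * typeOf[List[Int]].members   // a scope containing all members of List[Int]
+   * }}}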
+ */
type Scope >: Null <: ScopeBase
/** The base API that all scopes support */
@@ -13,12 +21,13 @@ trait Scopes { self: Universe =>
*/
implicit val ScopeTag: ClassTag[Scope]
+ /** The base type of member scopes, as in class definitions, for example. */
type MemberScope >: Null <: Scope with MemberScopeBase
/** The base API that all member scopes support */
trait MemberScopeBase extends ScopeBase {
/** Sorts the symbols included in this scope so that:
- * 1) Symbols appear the linearization order of their owners.
+ * 1) Symbols appear in the linearization order of their owners.
* 2) Symbols with the same owner appear in reverse order of their declarations.
* 3) Synthetic members (e.g. getters/setters for vals/vars) might appear in arbitrary order.
*/
@@ -30,12 +39,12 @@ trait Scopes { self: Universe =>
*/
implicit val MemberScopeTag: ClassTag[MemberScope]
- /** Create a new scope */
+ /** Create a new scope. */
def newScope: Scope
- /** Create a new scope nested in another one with which it shares its elements */
+ /** Create a new scope nested in another one with which it shares its elements. */
def newNestedScope(outer: Scope): Scope
- /** Create a new scope with given initial elements */
+ /** Create a new scope with the given initial elements. */
def newScopeWith(elems: Symbol*): Scope
 }
\ No newline at end of file
diff --git a/src/library/scala/reflect/base/StandardDefinitions.scala b/src/library/scala/reflect/base/StandardDefinitions.scala
index 8f1c96ea3f..4df8501b3d 100644
--- a/src/library/scala/reflect/base/StandardDefinitions.scala
+++ b/src/library/scala/reflect/base/StandardDefinitions.scala
@@ -6,14 +6,21 @@
package scala.reflect
package base
+/**
+ * Defines standard symbols and types.
+ */
trait StandardDefinitions {
self: Universe =>
+  /** A value containing all standard definitions. */
val definitions: DefinitionsBase
+ /** Defines standard symbols (and types via its base class). */
trait DefinitionsBase extends StandardTypes {
- // packages
+ /** The class symbol of package `scala`. */
def ScalaPackageClass: ClassSymbol
+
+ /** The module class symbol of package `scala`. */
def ScalaPackage: ModuleSymbol
// top types
@@ -37,36 +44,67 @@ trait StandardDefinitions {
def DoubleClass : ClassSymbol
def BooleanClass: ClassSymbol
- // some special classes
+ /** The class symbol of class `String`. */
def StringClass : ClassSymbol
+
+ /** The class symbol of class `Class`. */
def ClassClass : ClassSymbol
+
+ /** The class symbol of class `Array`. */
def ArrayClass : ClassSymbol
+
+ /** The class symbol of class `List`. */
def ListClass : ClassSymbol
- // the Predef object
+ /** The module symbol of `scala.Predef`. */
def PredefModule: ModuleSymbol
}
+ /** Defines standard types. */
trait StandardTypes {
- // the scala value classes
+ /** The `Type` of type `Unit`. */
val UnitTpe: Type
+
+ /** The `Type` of primitive type `Byte`. */
val ByteTpe: Type
+
+ /** The `Type` of primitive type `Short`. */
val ShortTpe: Type
+
+ /** The `Type` of primitive type `Char`. */
val CharTpe: Type
+
+ /** The `Type` of primitive type `Int`. */
val IntTpe: Type
+
+ /** The `Type` of primitive type `Long`. */
val LongTpe: Type
+
+ /** The `Type` of primitive type `Float`. */
val FloatTpe: Type
+
+ /** The `Type` of primitive type `Double`. */
val DoubleTpe: Type
+
+ /** The `Type` of primitive type `Boolean`. */
val BooleanTpe: Type
- // top types
+ /** The `Type` of type `Any`. */
val AnyTpe: Type
+
+ /** The `Type` of type `AnyVal`. */
val AnyValTpe: Type
+
+ /** The `Type` of type `AnyRef`. */
val AnyRefTpe: Type
+
+ /** The `Type` of type `Object`. */
val ObjectTpe: Type
- // bottom types
+ /** The `Type` of type `Nothing`. */
val NothingTpe: Type
+
+ /** The `Type` of type `Null`. */
val NullTpe: Type
}
}
diff --git a/src/library/scala/reflect/base/StandardNames.scala b/src/library/scala/reflect/base/StandardNames.scala
index 3e569cd523..0b4ec3728a 100644
--- a/src/library/scala/reflect/base/StandardNames.scala
+++ b/src/library/scala/reflect/base/StandardNames.scala
@@ -11,6 +11,10 @@ package base
// Is it necessary to perform reflection (like ERROR or LOCAL_SUFFIX_STRING)? If yes, then it goes to api.StandardNames.
// Otherwise it goes nowhere - reflection API should stay minimalistic.
+// TODO: document better
+/**
+ * Names necessary to create Scala trees.
+ */
trait StandardNames {
self: Universe =>
diff --git a/src/library/scala/reflect/base/Symbols.scala b/src/library/scala/reflect/base/Symbols.scala
index 294fa19d62..4a1eef014c 100644
--- a/src/library/scala/reflect/base/Symbols.scala
+++ b/src/library/scala/reflect/base/Symbols.scala
@@ -1,9 +1,14 @@
package scala.reflect
package base
+/**
+ * Defines the type hierarchy for symbols.
+ *
+ * @see [[scala.reflect]] for a description of how the class hierarchy is encoded here.
+ */
trait Symbols { self: Universe =>
- /** The abstract type of symbols representing declarations */
+ /** The type of symbols representing declarations */
type Symbol >: Null <: SymbolBase
/** A tag that preserves the identity of the `Symbol` abstract type from erasure.
@@ -11,7 +16,7 @@ trait Symbols { self: Universe =>
*/
implicit val SymbolTag: ClassTag[Symbol]
- /** The abstract type of type symbols representing type, class, and trait declarations,
+ /** The type of type symbols representing type, class, and trait declarations,
* as well as type parameters
*/
type TypeSymbol >: Null <: Symbol with TypeSymbolBase
@@ -21,7 +26,7 @@ trait Symbols { self: Universe =>
*/
implicit val TypeSymbolTag: ClassTag[TypeSymbol]
- /** The abstract type of term symbols representing val, var, def, and object declarations as
+ /** The type of term symbols representing val, var, def, and object declarations as
* well as packages and value parameters.
*/
type TermSymbol >: Null <: Symbol with TermSymbolBase
@@ -31,7 +36,7 @@ trait Symbols { self: Universe =>
*/
implicit val TermSymbolTag: ClassTag[TermSymbol]
- /** The abstract type of method symbols representing def declarations */
+ /** The type of method symbols representing def declarations */
type MethodSymbol >: Null <: TermSymbol with MethodSymbolBase
/** A tag that preserves the identity of the `MethodSymbol` abstract type from erasure.
@@ -39,7 +44,7 @@ trait Symbols { self: Universe =>
*/
implicit val MethodSymbolTag: ClassTag[MethodSymbol]
- /** The abstract type of module symbols representing object declarations */
+ /** The type of module symbols representing object declarations */
type ModuleSymbol >: Null <: TermSymbol with ModuleSymbolBase
/** A tag that preserves the identity of the `ModuleSymbol` abstract type from erasure.
@@ -47,7 +52,7 @@ trait Symbols { self: Universe =>
*/
implicit val ModuleSymbolTag: ClassTag[ModuleSymbol]
- /** The abstract type of class symbols representing class and trait definitions */
+ /** The type of class symbols representing class and trait definitions */
type ClassSymbol >: Null <: TypeSymbol with ClassSymbolBase
/** A tag that preserves the identity of the `ClassSymbol` abstract type from erasure.
@@ -55,7 +60,7 @@ trait Symbols { self: Universe =>
*/
implicit val ClassSymbolTag: ClassTag[ClassSymbol]
- /** The abstract type of free terms introduced by reification */
+ /** The type of free terms introduced by reification */
type FreeTermSymbol >: Null <: TermSymbol with FreeTermSymbolBase
/** A tag that preserves the identity of the `FreeTermSymbol` abstract type from erasure.
@@ -63,7 +68,7 @@ trait Symbols { self: Universe =>
*/
implicit val FreeTermSymbolTag: ClassTag[FreeTermSymbol]
- /** The abstract type of free types introduced by reification */
+ /** The type of free types introduced by reification */
type FreeTypeSymbol >: Null <: TypeSymbol with FreeTypeSymbolBase
/** A tag that preserves the identity of the `FreeTypeSymbol` abstract type from erasure.
@@ -81,10 +86,10 @@ trait Symbols { self: Universe =>
* that directly contains the current symbol's definition.
* The `NoSymbol` symbol does not have an owner, and calling this method
* on one causes an internal error.
- * The owner of the Scala root class [[scala.reflect.api.mirror.RootClass]]
- * and the Scala root object [[scala.reflect.api.mirror.RootPackage]] is `NoSymbol`.
+ * The owner of the Scala root class [[scala.reflect.base.MirrorOf.RootClass]]
+ * and the Scala root object [[scala.reflect.base.MirrorOf.RootPackage]] is `NoSymbol`.
* Every other symbol has a chain of owners that ends in
- * [[scala.reflect.api.mirror.RootClass]].
+ * [[scala.reflect.base.MirrorOf.RootClass]].
*/
def owner: Symbol
@@ -107,8 +112,8 @@ trait Symbols { self: Universe =>
/** Does this symbol represent the definition of a type?
* Note that every symbol is either a term or a type.
- * So for every symbol `sym`, either `sym.isTerm` is true
- * or `sym.isType` is true.
+ * So for every symbol `sym` (except for `NoSymbol`),
+ * either `sym.isTerm` is true or `sym.isType` is true.
*/
def isType: Boolean = false
@@ -118,9 +123,9 @@ trait Symbols { self: Universe =>
def asType: TypeSymbol = throw new ScalaReflectionException(s"$this is not a type")
/** Does this symbol represent the definition of a term?
- * Note that every symbol is either a term or a term.
- * So for every symbol `sym`, either `sym.isTerm` is true
- * or `sym.isTerm` is true.
+ * Note that every symbol is either a term or a type.
+ * So for every symbol `sym` (except for `NoSymbol`),
+     * either `sym.isTerm` is true or `sym.isType` is true.
*/
def isTerm: Boolean = false
@@ -234,10 +239,10 @@ trait Symbols { self: Universe =>
* `PolyType(ClassInfoType(...))` that describes type parameters, value
* parameters, parent types, and members of `C`.
*/
- def toType: Type
+ def toType: Type
- override def isType = true
- override def asType = this
+ final override def isType = true
+ final override def asType = this
}
/** The base API that all term symbols support */
diff --git a/src/library/scala/reflect/base/TagInterop.scala b/src/library/scala/reflect/base/TagInterop.scala
index ec054106eb..e989631abf 100644
--- a/src/library/scala/reflect/base/TagInterop.scala
+++ b/src/library/scala/reflect/base/TagInterop.scala
@@ -4,12 +4,26 @@ package base
import scala.runtime.ScalaRunTime._
trait TagInterop { self: Universe =>
- // todo. `mirror` parameters are now of type `Any`, because I can't make these path-dependent types work
+ // TODO `mirror` parameters are now of type `Any`, because I can't make these path-dependent types work
// if you're brave enough, replace `Any` with `Mirror`, recompile and run interop_typetags_are_manifests.scala
+ /**
+   * Convert a typetag to a pre-Scala-2.10 manifest.
+ * For example
+ * {{{
+ * typeTagToManifest( scala.reflect.runtime.currentMirror, implicitly[TypeTag[String]] )
+ * }}}
+ */
def typeTagToManifest[T: ClassTag](mirror: Any, tag: base.Universe # TypeTag[T]): Manifest[T] =
throw new UnsupportedOperationException("This universe does not support tag -> manifest conversions. Use scala.reflect.runtime.universe from scala-reflect.jar.")
+ /**
+   * Convert a pre-Scala-2.10 manifest to a typetag.
+ * For example
+ * {{{
+ * manifestToTypeTag( scala.reflect.runtime.currentMirror, implicitly[Manifest[String]] )
+ * }}}
+ */
def manifestToTypeTag[T](mirror: Any, manifest: Manifest[T]): base.Universe # TypeTag[T] =
throw new UnsupportedOperationException("This universe does not support manifest -> tag conversions. Use scala.reflect.runtime.universe from scala-reflect.jar.")
}
diff --git a/src/library/scala/reflect/base/TreeCreator.scala b/src/library/scala/reflect/base/TreeCreator.scala
index c9c8de2307..5de0094f1f 100644
--- a/src/library/scala/reflect/base/TreeCreator.scala
+++ b/src/library/scala/reflect/base/TreeCreator.scala
@@ -1,6 +1,26 @@
package scala.reflect
package base
+/** A mirror-aware factory for trees.
+ *
+ * In the reflection API, artifacts are specific to universes and
+ * symbolic references used in artifacts (e.g. `scala.Int`) are resolved by mirrors.
+ *
+ * Therefore to build a tree one needs to know a universe that the tree is going to be bound to
+ * and a mirror that is going to resolve symbolic references (e.g. to determine that `scala.Int`
+ * points to a core class `Int` from scala-library.jar).
+ *
+ * `TreeCreator` implements this notion by providing a standalone tree factory.
+ *
+ * This is immediately useful for reification. When the compiler reifies an expression,
+ * the end result needs to make sense in any mirror. That's because the compiler knows
+ * the universe it's reifying an expression into (specified by the target of the `reify` call),
+ * but it cannot know in advance the mirror to instantiate the result in (e.g. on JVM
+ * it doesn't know what classloader to use to resolve symbolic names in the reifee).
+ *
+ * Due to a typechecker restriction (no eta-expansion for dependent method types),
+ * `TreeCreator` can't have a functional type, so it's implemented as a class with an apply method.
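+ *
+ * For example, a creator that produces the empty tree in whatever universe it is applied to
+ * might look like this (a sketch):
+ * {{{
+ * val emptyTreeCreator = new TreeCreator {
+ *   def apply[U <: Universe with Singleton](m: MirrorOf[U]): U # Tree =
+ *     m.universe.EmptyTree
+ * }
+ * }}}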
+ */
abstract class TreeCreator {
def apply[U <: Universe with Singleton](m: MirrorOf[U]): U # Tree
}
diff --git a/src/library/scala/reflect/base/Trees.scala b/src/library/scala/reflect/base/Trees.scala
index 224965a2b7..428b493478 100644
--- a/src/library/scala/reflect/base/Trees.scala
+++ b/src/library/scala/reflect/base/Trees.scala
@@ -9,9 +9,11 @@ trait Trees { self: Universe =>
/** The base API that all trees support */
abstract class TreeBase extends Product { this: Tree =>
+ // TODO
/** ... */
def isDef: Boolean
+ // TODO
/** ... */
def isEmpty: Boolean
@@ -52,7 +54,7 @@ trait Trees { self: Universe =>
* Transformer found around the compiler.
*
* Copying Trees should be done with care depending on whether
- * it need be done lazily or strictly (see LazyTreeCopier and
+   * it needs to be done lazily or strictly (see LazyTreeCopier and
* StrictTreeCopier) and on whether the contents of the mutable
* fields should be copied. The tree copiers will copy the mutable
* attributes to the new tree; calling Tree#duplicate will copy
@@ -67,8 +69,8 @@ trait Trees { self: Universe =>
*
* SymTrees include important nodes Ident and Select, which are
* used as both terms and types; they are distinguishable based on
- * whether the Name is a TermName or TypeName. The correct way for
- * to test for a type or a term (on any Tree) are the isTerm/isType
+   * whether the Name is a TermName or TypeName. The correct way
+   * to test any Tree for a type or a term is to use the `isTerm`/`isType`
* methods on Tree.
*
* "Others" are mostly syntactic or short-lived constructs. Examples
@@ -86,7 +88,7 @@ trait Trees { self: Universe =>
/** The empty tree */
val EmptyTree: Tree
- /** A tree for a term. Not all terms are TermTrees; use isTerm
+ /** A tree for a term. Not all trees representing terms are TermTrees; use isTerm
* to reliably identify terms.
*/
type TermTree >: Null <: AnyRef with Tree
@@ -96,7 +98,7 @@ trait Trees { self: Universe =>
*/
implicit val TermTreeTag: ClassTag[TermTree]
- /** A tree for a type. Not all types are TypTrees; use isType
+ /** A tree for a type. Not all trees representing types are TypTrees; use isType
* to reliably identify types.
*/
type TypTree >: Null <: AnyRef with Tree
@@ -213,7 +215,7 @@ trait Trees { self: Universe =>
/** An object definition, e.g. `object Foo`. Internally, objects are
* quite frequently called modules to reduce ambiguity.
- * Eliminated by refcheck.
+ * Eliminated by compiler phase refcheck.
*/
type ModuleDef >: Null <: ImplDef
@@ -253,8 +255,8 @@ trait Trees { self: Universe =>
* - immutable values, e.g. "val x"
* - mutable values, e.g. "var x" - the MUTABLE flag set in mods
* - lazy values, e.g. "lazy val x" - the LAZY flag set in mods
- * - method parameters, see vparamss in DefDef - the PARAM flag is set in mods
- * - explicit self-types, e.g. class A { self: Bar => } - !!! not sure what is set.
+ * - method parameters, see vparamss in [[scala.reflect.base.Trees#DefDef]] - the PARAM flag is set in mods
+ * - explicit self-types, e.g. class A { self: Bar => }
*/
type ValDef >: Null <: ValOrDefDef
@@ -267,7 +269,7 @@ trait Trees { self: Universe =>
val ValDef: ValDefExtractor
/** An extractor class to create and pattern match with syntax `ValDef(mods, name, tpt, rhs)`.
- * This AST node corresponds to the following Scala code:
+ * This AST node corresponds to any of the following Scala code:
*
* mods `val` name: tpt = rhs
*
@@ -275,7 +277,7 @@ trait Trees { self: Universe =>
*
* mods name: tpt = rhs // in signatures of function and method definitions
*
- * self: Bar => // self-types (!!! not sure what is set)
+ * self: Bar => // self-types
*
* If the type of a value is not specified explicitly (i.e. is meant to be inferred),
* this is expressed by having `tpt` set to `TypeTree()` (but not to an `EmptyTree`!).
@@ -369,9 +371,12 @@ trait Trees { self: Universe =>
* This AST node does not have direct correspondence to Scala code.
* It is used for tailcalls and like.
* For example, while/do are desugared to label defs as follows:
- *
+ * {{{
* while (cond) body ==> LabelDef($L, List(), if (cond) { body; L$() } else ())
+ * }}}
+ * {{{
* do body while (cond) ==> LabelDef($L, List(), body; if (cond) L$() else ())
+ * }}}
*/
abstract class LabelDefExtractor {
def apply(name: TermName, params: List[Ident], rhs: Tree): LabelDef
@@ -427,7 +432,7 @@ trait Trees { self: Universe =>
*
* import expr.{selectors}
*
- * Selectors are a list of pairs of names (from, to). // [Eugene++] obviously, they no longer are. please, document!
+ * Selectors are a list of ImportSelectors, which conceptually are pairs of names (from, to).
* The last (and maybe only name) may be a nme.WILDCARD. For instance:
*
* import qual.{x, y => z, _}
@@ -498,16 +503,16 @@ trait Trees { self: Universe =>
*
* { stats; expr }
*
- * If the block is empty, the `expr` is set to `Literal(Constant(()))`. // [Eugene++] check this
+ * If the block is empty, the `expr` is set to `Literal(Constant(()))`.
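+   *
+   * For example, `{ println(1); 2 }` is represented (before typechecking) roughly as (a sketch):
+   * {{{
+   * Block(
+   *   List(Apply(Ident(newTermName("println")), List(Literal(Constant(1))))),
+   *   Literal(Constant(2)))
+   * }}}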
*/
abstract class BlockExtractor {
def apply(stats: List[Tree], expr: Tree): Block
def unapply(block: Block): Option[(List[Tree], Tree)]
}
- /** Case clause in a pattern match, eliminated during explicitouter
+ /** Case clause in a pattern match.
* (except for occurrences in switch statements).
- * Eliminated by patmat/explicitouter.
+ * Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher)
*/
type CaseDef >: Null <: AnyRef with Tree
@@ -524,17 +529,19 @@ trait Trees { self: Universe =>
*
* `case` pat `if` guard => body
*
- * If the guard is not present, the `guard` is set to `EmptyTree`. // [Eugene++] check this
- * If the body is not specified, the `body` is set to `EmptyTree`. // [Eugene++] check this
+ * If the guard is not present, the `guard` is set to `EmptyTree`.
+   * If the body is not specified, the `body` is set to `Literal(Constant(()))`.
*/
abstract class CaseDefExtractor {
def apply(pat: Tree, guard: Tree, body: Tree): CaseDef
def unapply(caseDef: CaseDef): Option[(Tree, Tree, Tree)]
}
- /** Alternatives of patterns, eliminated by explicitouter, except for
- * occurrences in encoded Switch stmt (=remaining Match(CaseDef(...)))
- * Eliminated by patmat/explicitouter.
+ /** Alternatives of patterns.
+ *
+   * Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher),
+   * except for occurrences in encoded Switch statements (i.e. remaining Match(CaseDef(...))).
*/
type Alternative >: Null <: TermTree
@@ -557,7 +564,8 @@ trait Trees { self: Universe =>
}
/** Repetition of pattern.
- * Eliminated by patmat/explicitouter.
+ *
+ * Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher).
*/
type Star >: Null <: TermTree
@@ -579,8 +587,9 @@ trait Trees { self: Universe =>
def unapply(star: Star): Option[Tree]
}
- /** Bind of a variable to a rhs pattern, eliminated by explicitouter
- * Eliminated by patmat/explicitouter.
+ /** Bind a variable to a rhs pattern.
+ *
+ * Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher).
*
* @param name
* @param body
@@ -605,10 +614,32 @@ trait Trees { self: Universe =>
def unapply(bind: Bind): Option[(Name, Tree)]
}
- /** Noone knows what this is.
- * It is not idempotent w.r.t typechecking.
- * Can we, please, remove it?
- * Introduced by typer, eliminated by patmat/explicitouter.
+ /**
+ * Used to represent `unapply` methods in pattern matching.
+ *
+ * For example:
+ * {{{
+ * 2 match { case Foo(x) => x }
+ * }}}
+ *
+ * Is represented as:
+ * {{{
+ * Match(
+ * Literal(Constant(2)),
+ * List(
+ * CaseDef(
+ * UnApply(
+ * // a dummy node that carries the type of unapplication to patmat
+ * // the <unapply-selector> here doesn't have an underlying symbol
+ * // it only has a type assigned, therefore after `resetAllAttrs` this tree is no longer typeable
+ * Apply(Select(Ident(Foo), newTermName("unapply")), List(Ident(newTermName("<unapply-selector>")))),
+ * // arguments of the unapply => nothing synthetic here
+ * List(Bind(newTermName("x"), Ident(nme.WILDCARD)))),
+ * EmptyTree,
+ * Ident(newTermName("x")))))
+ * }}}
+ *
+ * Introduced by typer. Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher).
*/
type UnApply >: Null <: TermTree
@@ -629,9 +660,9 @@ trait Trees { self: Universe =>
def unapply(unApply: UnApply): Option[(Tree, List[Tree])]
}
- /** Array of expressions, needs to be translated in backend.
- * This AST node is used to pass arguments to vararg arguments.
- * Introduced by uncurry.
+  /** An array of expressions. This AST node needs to be translated in the backend.
+   * It is used to pass arguments to vararg parameters.
+ * Introduced by compiler phase uncurry.
*/
type ArrayValue >: Null <: TermTree
@@ -649,7 +680,7 @@ trait Trees { self: Universe =>
*
* printf("%s%d", foo, 42)
*
- * Is translated to after uncurry to:
+   * Is translated after compiler phase uncurry to:
*
* Apply(
* Ident("printf"),
@@ -661,7 +692,7 @@ trait Trees { self: Universe =>
def unapply(arrayValue: ArrayValue): Option[(Tree, List[Tree])]
}
- /** Anonymous function, eliminated by lambdalift */
+ /** Anonymous function, eliminated by compiler phase lambdalift */
type Function >: Null <: TermTree with SymTree
/** A tag that preserves the identity of the `Function` abstract type from erasure.
@@ -674,10 +705,10 @@ trait Trees { self: Universe =>
/** An extractor class to create and pattern match with syntax `Function(vparams, body)`.
* This AST node corresponds to the following Scala code:
- *
+ *
* vparams => body
*
- * The symbol of a Function is a synthetic value of name nme.ANON_FUN_NAME
+ * The symbol of a Function is a synthetic TermSymbol.
* It is the owner of the function's parameters.
*/
abstract class FunctionExtractor {
@@ -707,7 +738,7 @@ trait Trees { self: Universe =>
}
/** Either an assignment or a named argument. Only appears in argument lists,
- * eliminated by typecheck (doTypedApply), resurrected by reifier.
+ * eliminated by compiler phase typecheck (doTypedApply), resurrected by reifier.
*/
type AssignOrNamedArg >: Null <: TermTree
@@ -722,9 +753,13 @@ trait Trees { self: Universe =>
/** An extractor class to create and pattern match with syntax `AssignOrNamedArg(lhs, rhs)`.
* This AST node corresponds to the following Scala code:
*
+ * {{{
+ * m.f(lhs = rhs)
+ * }}}
+ * {{{
* @annotation(lhs = rhs)
+ * }}}
*
- * m.f(lhs = rhs)
*/
abstract class AssignOrNamedArgExtractor {
def apply(lhs: Tree, rhs: Tree): AssignOrNamedArg
@@ -747,17 +782,17 @@ trait Trees { self: Universe =>
*
* `if` (cond) thenp `else` elsep
*
- * If the alternative is not present, the `elsep` is set to `EmptyTree`. // [Eugene++] check this
+ * If the alternative is not present, the `elsep` is set to `Literal(Constant(()))`.
*/
abstract class IfExtractor {
def apply(cond: Tree, thenp: Tree, elsep: Tree): If
def unapply(if_ : If): Option[(Tree, Tree, Tree)]
}
- /** - Pattern matching expression (before explicitouter)
- * - Switch statements (after explicitouter)
+  /** - Pattern matching expression (before the pattern matching compiler phase: explicitouter prior to 2.10, patmat as of 2.10)
+   *  - Switch statements (after that phase)
*
- * After explicitouter, cases will satisfy the following constraints:
+   * After the pattern matching phase (explicitouter prior to 2.10, patmat as of 2.10), cases will satisfy the following constraints:
*
* - all guards are `EmptyTree`,
* - all patterns will be either `Literal(Constant(x:Int))`
@@ -780,7 +815,7 @@ trait Trees { self: Universe =>
*
* selector `match` { cases }
*
- * // [Eugene++] say something about `val (foo, bar) = baz` and likes.
+ * `Match` is also used in pattern matching assignments like `val (foo, bar) = baz`.
*/
abstract class MatchExtractor {
def apply(selector: Tree, cases: List[CaseDef]): Match
@@ -803,7 +838,7 @@ trait Trees { self: Universe =>
*
* `return` expr
*
- * The symbol of a Return node is the enclosing method
+ * The symbol of a Return node is the enclosing method.
*/
abstract class ReturnExtractor {
def apply(expr: Tree): Return
@@ -826,7 +861,7 @@ trait Trees { self: Universe =>
*
* `try` block `catch` { catches } `finally` finalizer
*
- * If the finalizer is not present, the `finalizer` is set to `EmptyTree`. // [Eugene++] check this
+ * If the finalizer is not present, the `finalizer` is set to `EmptyTree`.
*/
abstract class TryExtractor {
def apply(block: Tree, catches: List[CaseDef], finalizer: Tree): Try
@@ -855,9 +890,6 @@ trait Trees { self: Universe =>
}
/** Object instantiation
- * One should always use factory method below to build a user level new.
- *
- * @param tpt a class type
*/
type New >: Null <: TermTree
@@ -866,7 +898,8 @@ trait Trees { self: Universe =>
*/
implicit val NewTag: ClassTag[New]
- /** The constructor/deconstructor for `New` instances. */
+ /** The constructor/deconstructor for `New` instances.
+ */
val New: NewExtractor
/** An extractor class to create and pattern match with syntax `New(tpt)`.
@@ -879,11 +912,16 @@ trait Trees { self: Universe =>
* (`new` tpt).<init>[targs](args)
*/
abstract class NewExtractor {
+ /** A user level `new`.
+ * One should always use this factory method to build a user level `new`.
+ *
+ * @param tpt a class type
+ */
def apply(tpt: Tree): New
def unapply(new_ : New): Option[Tree]
}
- /** Type annotation, eliminated by cleanup */
+ /** Type annotation, eliminated by compiler phase cleanup */
type Typed >: Null <: TermTree
/** A tag that preserves the identity of the `Typed` abstract type from erasure.
@@ -904,11 +942,7 @@ trait Trees { self: Universe =>
def unapply(typed: Typed): Option[(Tree, Tree)]
}
- /** Common base class for Apply and TypeApply. This could in principle
- * be a SymTree, but whether or not a Tree is a SymTree isn't used
- * to settle any interesting questions, and it would add a useless
- * field to all the instances (useless, since GenericApply forwards to
- * the underlying fun.)
+ /** Common base class for Apply and TypeApply.
*/
type GenericApply >: Null <: TermTree
@@ -917,11 +951,11 @@ trait Trees { self: Universe =>
*/
implicit val GenericApplyTag: ClassTag[GenericApply]
- /** Explicit type application.
- * @PP: All signs point toward it being a requirement that args.nonEmpty,
+ /* @PP: All signs point toward it being a requirement that args.nonEmpty,
* but I can't find that explicitly stated anywhere. Unless your last name
* is odersky, you should probably treat it as true.
*/
+ /** Explicit type application. */
type TypeApply >: Null <: GenericApply
/** A tag that preserves the identity of the `TypeApply` abstract type from erasure.
@@ -971,38 +1005,8 @@ trait Trees { self: Universe =>
def unapply(apply: Apply): Option[(Tree, List[Tree])]
}
- /** Dynamic value application.
- * In a dynamic application q.f(as)
- * - q is stored in qual
- * - as is stored in args
- * - f is stored as the node's symbol field.
- * [Eugene++] what is it used for?
- * Introduced by erasure, eliminated by cleanup.
- */
- type ApplyDynamic >: Null <: TermTree with SymTree
-
- /** A tag that preserves the identity of the `ApplyDynamic` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ApplyDynamicTag: ClassTag[ApplyDynamic]
-
- /** The constructor/deconstructor for `ApplyDynamic` instances. */
- val ApplyDynamic: ApplyDynamicExtractor
-
- /** An extractor class to create and pattern match with syntax `ApplyDynamic(qual, args)`.
- * This AST node corresponds to the following Scala code:
- *
- * fun(args)
- *
- * The symbol of an ApplyDynamic is the function symbol of `qual`, or NoSymbol, if there is none.
- */
- abstract class ApplyDynamicExtractor {
- def apply(qual: Tree, args: List[Tree]): ApplyDynamic
- def unapply(applyDynamic: ApplyDynamic): Option[(Tree, List[Tree])]
- }
-
- /** Super reference, qual = corresponding this reference
- * A super reference C.super[M] is represented as Super(This(C), M).
+ /** Super reference, where `qual` is the corresponding `this` reference.
+ * A super reference `C.super[M]` is represented as `Super(This(C), M)`.
*/
type Super >: Null <: TermTree
diff --git a/src/library/scala/reflect/base/TypeCreator.scala b/src/library/scala/reflect/base/TypeCreator.scala
index 8a14e53dd3..0260fe1410 100644
--- a/src/library/scala/reflect/base/TypeCreator.scala
+++ b/src/library/scala/reflect/base/TypeCreator.scala
@@ -1,6 +1,26 @@
package scala.reflect
package base
+/** A mirror-aware factory for types.
+ *
+ * In the reflection API, artifacts are specific to universes and
+ * symbolic references used in artifacts (e.g. `scala.Int`) are resolved by mirrors.
+ *
+ * Therefore to build a type one needs to know a universe that the type is going to be bound to
+ * and a mirror that is going to resolve symbolic references (e.g. to determine that `scala.Int`
+ * points to a core class `Int` from scala-library.jar).
+ *
+ * `TypeCreator` implements this notion by providing a standalone type factory.
+ *
+ * This is immediately useful for type tags. When the compiler creates a type tag,
+ * the end result needs to make sense in any mirror. That's because the compiler knows
+ * the universe it's creating a type tag for (since `TypeTag` is path-dependent on a universe),
+ * but it cannot know in advance the mirror to instantiate the result in (e.g. on JVM
+ * it doesn't know what classloader to use to resolve symbolic names in the type tag).
+ *
+ * Due to a typechecker restriction (no eta-expansion for dependent method types),
+ * `TypeCreator` can't have a functional type, so it's implemented as a class with an apply method.
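+ *
+ * For example, a creator that produces the `Int` type in whatever universe it is applied to
+ * might look like this (a sketch):
+ * {{{
+ * val intTpeCreator = new TypeCreator {
+ *   def apply[U <: Universe with Singleton](m: MirrorOf[U]): U # Type =
+ *     m.universe.definitions.IntTpe
+ * }
+ * }}}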
+ */
abstract class TypeCreator {
def apply[U <: Universe with Singleton](m: MirrorOf[U]): U # Type
}
diff --git a/src/library/scala/reflect/base/TypeTags.scala b/src/library/scala/reflect/base/TypeTags.scala
index b7e0c37a4b..db9fa95553 100644
--- a/src/library/scala/reflect/base/TypeTags.scala
+++ b/src/library/scala/reflect/base/TypeTags.scala
@@ -3,76 +3,113 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package base
import java.lang.{ Class => jClass }
-import language.implicitConversions
+import scala.language.implicitConversions
+/*
+ * TODO
+ * add @see to docs about universes
+ * [Eugene++] also mention sensitivity to prefixes, i.e. that rb.TypeTag is different from ru.TypeTag
+ * [Chris++] tag.in(some mirror) or expr.in(some mirror) (does not work for tag and exprs in macros)
+ * Backwards compat item1: [Eugene++] it might be useful, though, to guard against abstractness of the incoming type.
+ */
/**
- * Type tags encapsulate a representation of type T.
- * They are supposed to replace the pre-2.10 concept of a [[scala.reflect.Manifest]].
- * TypeTags are much better integrated with reflection than manifests are, and are consequently much simpler.
+ * A type tag encapsulates a representation of type T.
+ *
+ * Type tags replace the pre-2.10 concept of a [[scala.reflect.Manifest]] and are integrated with reflection.
*
- * === Overview ===
+ * === Overview and examples ===
*
* Type tags are organized in a hierarchy of three classes:
- * [[scala.reflect.ClassTag]], [[scala.reflect.base.Universe#TypeTag]] and [[scala.reflect.base.Universe#AbsTypeTag]].
- *
- * A [[scala.reflect.ClassTag]] carries a runtime class that corresponds to the source type T.
- * As of such, it possesses the knowledge about how to build single- and multi-dimensional arrays of elements of that type.
- * It guarantees that the source type T did not to contain any references to type parameters or abstract types.
- * [[scala.reflect.ClassTag]] corresponds to a previous notion of [[scala.reflect.ClassManifest]].
- *
- * A [[scala.reflect.base.Universe#AbsTypeTag]] value wraps a full Scala type in its tpe field.
- * A [[scala.reflect.base.Universe#TypeTag]] value is an [[scala.reflect.base.Universe#AbsTypeTag]]
- * that is guaranteed not to contain any references to type parameters or abstract types.
- *
- * [Eugene++] also mention sensitivity to prefixes, i.e. that rb.TypeTag is different from ru.TypeTag
- * [Eugene++] migratability between mirrors and universes is also worth mentioning
+ * [[scala.reflect.ClassTag]], [[scala.reflect.base.Universe#TypeTag]] and [[scala.reflect.base.Universe#WeakTypeTag]].
+ *
+ * @see [[scala.reflect.ClassTag]], [[scala.reflect.base.Universe#TypeTag]], [[scala.reflect.base.Universe#WeakTypeTag]]
*
- * === Splicing ===
+ * Examples:
+ * {{{
+ * scala> class Person
+ * scala> class Container[T]
+ * scala> import scala.reflect.ClassTag
+ * scala> import scala.reflect.runtime.universe._
+ * scala> def firstTypeArg( tag: WeakTypeTag[_] ) = (tag.tpe match {case TypeRef(_,_,typeArgs) => typeArgs})(0)
+ * }}}
+ * TypeTag contains concrete type arguments:
+ * {{{
+ * scala> firstTypeArg( implicitly[TypeTag[Container[Person]]] )
+ * res0: reflect.runtime.universe.Type = Person
+ * }}}
+ * TypeTag guarantees concrete type arguments (fails for references to unbound type arguments):
+ * {{{
+ * scala> def foo1[T] = implicitly[TypeTag[Container[T]]]
+ * <console>:11: error: No TypeTag available for Container[T]
+ * def foo1[T] = implicitly[TypeTag[Container[T]]]
+ * }}}
+ * WeakTypeTag allows references to unbound type arguments:
+ * {{{
+ * scala> def foo2[T] = firstTypeArg( implicitly[WeakTypeTag[Container[T]]] )
+ * foo2: [T]=> reflect.runtime.universe.Type
+ * scala> foo2[Person]
+ * res1: reflect.runtime.universe.Type = T
+ * }}}
+ * TypeTag allows unbound type arguments for which type tags are available:
+ * {{{
+ * scala> def foo3[T:TypeTag] = firstTypeArg( implicitly[TypeTag[Container[T]]] )
+ * foo3: [T](implicit evidence$1: reflect.runtime.universe.TypeTag[T])reflect.runtime.universe.Type
+ * scala> foo3[Person]
+ * res1: reflect.runtime.universe.Type = Person
+ * }}}
+ * WeakTypeTag contains concrete type arguments if available via existing tags:
+ * {{{
+ * scala> def foo4[T:WeakTypeTag] = firstTypeArg( implicitly[WeakTypeTag[Container[T]]] )
+ * foo4: [T](implicit evidence$1: reflect.runtime.universe.WeakTypeTag[T])reflect.runtime.universe.Type
+ * scala> foo4[Person]
+ * res1: reflect.runtime.universe.Type = Person
+ * }}}
*
- * Tags can be spliced, i.e. if compiler generates a tag for a type that contains references to tagged
- * type parameters or abstract type members, it will retrieve the corresponding tag and embed it into the result.
- * An example that illustrates the TypeTag embedding, consider the following function:
*
- * import reflect.mirror._
- * def f[T: TypeTag, U] = {
- * type L = T => U
- * implicitly[AbsTypeTag[L]]
- * }
+ * [[scala.reflect.base.Universe#TypeTag]] and [[scala.reflect.base.Universe#WeakTypeTag]] are path dependent on their universe.
*
- * Then a call of f[String, Int] will yield a result of the form
+ * The default universe is [[scala.reflect.runtime.universe]].
+ *
+ * Type tags can be migrated to another universe given the corresponding mirror using
*
- * AbsTypeTag(<[ String => U ]>).
+ * {{{
+ * tag.in( other_mirror )
+ * }}}
+ *
+ * See [[scala.reflect.base.TypeTags#WeakTypeTag.in]].
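+ *
+ * For instance, a tag created in the basis universe might be migrated into the runtime universe
+ * along these lines (a sketch; it assumes implicit tag materialization for `scala.reflect.basis`
+ * and uses the runtime universe's `rootMirror` as the target mirror):
+ * {{{
+ * import scala.reflect.runtime.{universe => ru}
+ * val baseTag = scala.reflect.basis.typeTag[List[Int]]
+ * val runtimeTag = baseTag.in(ru.rootMirror) // symbolic references are re-resolved in ru's root mirror
+ * }}}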
*
- * Note that T has been replaced by String, because it comes with a TypeTag in f, whereas U was left as a type parameter.
+ * === WeakTypeTag vs TypeTag ===
*
- * === AbsTypeTag vs TypeTag ===
- *
- * Be careful with AbsTypeTag, because it will reify types even if these types are abstract.
+ * Be careful with WeakTypeTag, because it will reify types even if these types are abstract.
* This makes it easy to forget to tag one of the methods in the call chain and discover it much later in the runtime
* by getting cryptic errors far away from their source. For example, consider the following snippet:
*
- * def bind[T: AbsTypeTag](name: String, value: T): IR.Result = bind((name, value))
- * def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value)
+ * {{{
+ * def bind[T: WeakTypeTag](name: String, value: T): IR.Result = bind((name, value))
+ * def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value)
* object NamedParam {
- * implicit def namedValue[T: AbsTypeTag](name: String, x: T): NamedParam = apply(name, x)
- * def apply[T: AbsTypeTag](name: String, x: T): NamedParam = new Typed[T](name, x)
+ * implicit def namedValue[T: WeakTypeTag](name: String, x: T): NamedParam = apply(name, x)
+ * def apply[T: WeakTypeTag](name: String, x: T): NamedParam = new Typed[T](name, x)
* }
- *
- * This fragment of Scala REPL implementation defines a `bind` function that carries a named value along with its type
- * into the heart of the REPL. Using a [[scala.reflect.base.Universe#AbsTypeTag]] here is reasonable, because it is desirable
+ * }}}
+ *
+ * This fragment of the Scala REPL implementation defines a `bind` function that carries a named value along with its type
+ * into the heart of the REPL. Using a [[scala.reflect.base.Universe#WeakTypeTag]] here is reasonable, because it is desirable
* to work with all types, even if they are type parameters or abstract type members.
*
- * However if any of the three `AbsTypeTag` context bounds is omitted, the resulting code will be incorrect,
- * because the missing `AbsTypeTag` will be transparently generated by the compiler, carrying meaningless information.
+ * However if any of the three `WeakTypeTag` context bounds is omitted, the resulting code will be incorrect,
+ * because the missing `WeakTypeTag` will be transparently generated by the compiler, carrying meaningless information.
* Most likely, this problem will manifest itself elsewhere, making debugging complicated.
- * If `AbsTypeTag` context bounds were replaced with `TypeTag`, then such errors would be reported statically.
+ * If `WeakTypeTag` context bounds were replaced with `TypeTag`, then such errors would be reported statically.
* But in that case we wouldn't be able to use `bind` in arbitrary contexts.
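+ *
+ * A condensed sketch of the pitfall, with hypothetical helpers `describe` and `forward`:
+ * {{{
+ * scala> import scala.reflect.runtime.universe._
+ * scala> def describe[T: WeakTypeTag](x: T) = weakTypeOf[T]
+ * scala> def forward[T](x: T) = describe(x) // WeakTypeTag context bound forgotten here
+ * scala> forward(42) // yields the meaningless type T instead of Int
+ * }}}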
*
- * === Backward compatibility ===
+ * === Backward compatibility with Manifests ===
*
* Type tags correspond loosely to manifests.
*
@@ -80,110 +117,137 @@ import language.implicitConversions
* The previous notion of a [[scala.reflect.ClassManifest]] corresponds to a scala.reflect.ClassTag,
* The previous notion of a [[scala.reflect.Manifest]] corresponds to scala.reflect.runtime.universe.TypeTag,
*
- * In Scala 2.10, manifests are deprecated, so it's adviseable to migrate them to tags,
- * because manifests might be removed in the next major release.
+ * In Scala 2.10, manifests are deprecated, so it's advisable to migrate them to tags,
+ * because manifests will probably be removed in the next major release.
*
- * In most cases it will be enough to replace ClassManifests with ClassTags and Manifests with TypeTags,
- * however there are a few caveats:
+ * In most cases it will be enough to replace ClassManifest with ClassTag and Manifest with TypeTag.
+ * There are however a few caveats:
*
* 1) The notion of OptManifest is no longer supported. Tags can reify arbitrary types, so they are always available.
- * // [Eugene++] it might be useful, though, to guard against abstractness of the incoming type.
*
* 2) There's no equivalent for AnyValManifest. Consider comparing your tag with one of the base tags
* (defined in the corresponding companion objects) to find out whether it represents a primitive value class.
* You can also use `<tag>.tpe.typeSymbol.isPrimitiveValueClass` for that purpose (requires scala-reflect.jar).
*
* 3) There's no replacement for factory methods defined in `ClassManifest` and `Manifest` companion objects.
- * Consider assembling corresponding types using reflection API provided by Java (for classes) and Scala (for types).
+ * Consider assembling corresponding types using the reflection APIs provided by Java (for classes) and Scala (for types).
*
* 4) Certain manifest functions (such as `<:<`, `>:>` and `typeArguments`) weren't included in the tag API.
- * Consider using reflection API provided by Java (for classes) and Scala (for types) instead.
+ * Consider using the reflection APIs provided by Java (for classes) and Scala (for types) instead.
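+ *
+ * For instance, caveats 2 and 4 might be addressed along these lines (a sketch, assuming the runtime universe):
+ * {{{
+ * import scala.reflect.runtime.universe._
+ *
+ * // caveat 2: compare against the predefined base tags
+ * def isPrimitiveValueTag(tag: TypeTag[_]): Boolean =
+ *   List(TypeTag.Byte, TypeTag.Short, TypeTag.Char, TypeTag.Int, TypeTag.Long,
+ *        TypeTag.Float, TypeTag.Double, TypeTag.Boolean, TypeTag.Unit) contains tag
+ *
+ * // caveat 4: use the Type API instead of the manifest's `<:<`
+ * typeOf[List[Int]] <:< typeOf[Seq[Int]] // true
+ * }}}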
*/
trait TypeTags { self: Universe =>
import definitions._
/**
- * If an implicit value of type u.AbsTypeTag[T] is required, the compiler will make one up on demand.
- * The implicitly created value contains in its tpe field a value of type u.Type that is a reflective representation of T.
- * In that value, any occurrences of type parameters or abstract types U
- * which come themselves with a TypeTag are represented by the type referenced by that TypeTag.
+ * If an implicit value of type WeakTypeTag[T] is required, the compiler will create one.
+ * A reflective representation of T can be accessed via the tpe field.
+ * Components of T can be references to type parameters or abstract types. WeakTypeTag makes an effort to
+ * be as concrete as possible, i.e. if type tags are available for the referenced type arguments or abstract types,
+ * they are used to embed the concrete types into the WeakTypeTag. Otherwise the WeakTypeTag will contain a reference
+ * to an abstract type. This behavior can be useful when one expects T to be possibly partially abstract, but
+ * it requires special care to handle this case. If, however, T is expected to be fully known, use
+ * [[scala.reflect.base.Universe#TypeTag]] instead, which statically guarantees this property.
*
* @see [[scala.reflect.base.TypeTags]]
*/
- @annotation.implicitNotFound(msg = "No AbsTypeTag available for ${T}")
- trait AbsTypeTag[T] extends Equals with Serializable {
+ @annotation.implicitNotFound(msg = "No WeakTypeTag available for ${T}")
+ trait WeakTypeTag[T] extends Equals with Serializable {
+ /**
+ * Mirror corresponding to the universe of this WeakTypeTag.
+ */
val mirror: Mirror
- def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # AbsTypeTag[T]
+ /**
+ * Migrates the type tag to another universe.
+ *
+ * Type tags are path dependent on their universe. This method allows migration
+ * given the mirror corresponding to the target universe.
+ *
+ * Migration means that all symbolic references to classes/objects/packages in the expression
+ * will be re-resolved within the new mirror (typically using that mirror's classloader).
+ */
+ def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # WeakTypeTag[T]
+
+ /**
+ * Reflective representation of type T.
+ */
def tpe: Type
- /** case class accessories */
- override def canEqual(x: Any) = x.isInstanceOf[AbsTypeTag[_]]
- override def equals(x: Any) = x.isInstanceOf[AbsTypeTag[_]] && this.mirror == x.asInstanceOf[AbsTypeTag[_]].mirror && this.tpe == x.asInstanceOf[AbsTypeTag[_]].tpe
+ // case class accessories
+ override def canEqual(x: Any) = x.isInstanceOf[WeakTypeTag[_]]
+ override def equals(x: Any) = x.isInstanceOf[WeakTypeTag[_]] && this.mirror == x.asInstanceOf[WeakTypeTag[_]].mirror && this.tpe == x.asInstanceOf[WeakTypeTag[_]].tpe
override def hashCode = mirror.hashCode * 31 + tpe.hashCode
- override def toString = "AbsTypeTag[" + tpe + "]"
+ override def toString = "WeakTypeTag[" + tpe + "]"
}
- object AbsTypeTag {
- val Byte : AbsTypeTag[scala.Byte] = TypeTag.Byte
- val Short : AbsTypeTag[scala.Short] = TypeTag.Short
- val Char : AbsTypeTag[scala.Char] = TypeTag.Char
- val Int : AbsTypeTag[scala.Int] = TypeTag.Int
- val Long : AbsTypeTag[scala.Long] = TypeTag.Long
- val Float : AbsTypeTag[scala.Float] = TypeTag.Float
- val Double : AbsTypeTag[scala.Double] = TypeTag.Double
- val Boolean : AbsTypeTag[scala.Boolean] = TypeTag.Boolean
- val Unit : AbsTypeTag[scala.Unit] = TypeTag.Unit
- val Any : AbsTypeTag[scala.Any] = TypeTag.Any
- val AnyVal : AbsTypeTag[scala.AnyVal] = TypeTag.AnyVal
- val AnyRef : AbsTypeTag[scala.AnyRef] = TypeTag.AnyRef
- val Object : AbsTypeTag[java.lang.Object] = TypeTag.Object
- val Nothing : AbsTypeTag[scala.Nothing] = TypeTag.Nothing
- val Null : AbsTypeTag[scala.Null] = TypeTag.Null
+ /**
+ * Predefined type tags for primitive and other standard types, plus a constructor/extractor for WeakTypeTags.
+ */
+ object WeakTypeTag {
+ val Byte : WeakTypeTag[scala.Byte] = TypeTag.Byte
+ val Short : WeakTypeTag[scala.Short] = TypeTag.Short
+ val Char : WeakTypeTag[scala.Char] = TypeTag.Char
+ val Int : WeakTypeTag[scala.Int] = TypeTag.Int
+ val Long : WeakTypeTag[scala.Long] = TypeTag.Long
+ val Float : WeakTypeTag[scala.Float] = TypeTag.Float
+ val Double : WeakTypeTag[scala.Double] = TypeTag.Double
+ val Boolean : WeakTypeTag[scala.Boolean] = TypeTag.Boolean
+ val Unit : WeakTypeTag[scala.Unit] = TypeTag.Unit
+ val Any : WeakTypeTag[scala.Any] = TypeTag.Any
+ val AnyVal : WeakTypeTag[scala.AnyVal] = TypeTag.AnyVal
+ val AnyRef : WeakTypeTag[scala.AnyRef] = TypeTag.AnyRef
+ val Object : WeakTypeTag[java.lang.Object] = TypeTag.Object
+ val Nothing : WeakTypeTag[scala.Nothing] = TypeTag.Nothing
+ val Null : WeakTypeTag[scala.Null] = TypeTag.Null
+
- def apply[T](mirror1: MirrorOf[self.type], tpec1: TypeCreator): AbsTypeTag[T] =
+ def apply[T](mirror1: MirrorOf[self.type], tpec1: TypeCreator): WeakTypeTag[T] =
tpec1(mirror1) match {
- case ByteTpe => AbsTypeTag.Byte.asInstanceOf[AbsTypeTag[T]]
- case ShortTpe => AbsTypeTag.Short.asInstanceOf[AbsTypeTag[T]]
- case CharTpe => AbsTypeTag.Char.asInstanceOf[AbsTypeTag[T]]
- case IntTpe => AbsTypeTag.Int.asInstanceOf[AbsTypeTag[T]]
- case LongTpe => AbsTypeTag.Long.asInstanceOf[AbsTypeTag[T]]
- case FloatTpe => AbsTypeTag.Float.asInstanceOf[AbsTypeTag[T]]
- case DoubleTpe => AbsTypeTag.Double.asInstanceOf[AbsTypeTag[T]]
- case BooleanTpe => AbsTypeTag.Boolean.asInstanceOf[AbsTypeTag[T]]
- case UnitTpe => AbsTypeTag.Unit.asInstanceOf[AbsTypeTag[T]]
- case AnyTpe => AbsTypeTag.Any.asInstanceOf[AbsTypeTag[T]]
- case AnyValTpe => AbsTypeTag.AnyVal.asInstanceOf[AbsTypeTag[T]]
- case AnyRefTpe => AbsTypeTag.AnyRef.asInstanceOf[AbsTypeTag[T]]
- case ObjectTpe => AbsTypeTag.Object.asInstanceOf[AbsTypeTag[T]]
- case NothingTpe => AbsTypeTag.Nothing.asInstanceOf[AbsTypeTag[T]]
- case NullTpe => AbsTypeTag.Null.asInstanceOf[AbsTypeTag[T]]
- case _ => new AbsTypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
+ case ByteTpe => WeakTypeTag.Byte.asInstanceOf[WeakTypeTag[T]]
+ case ShortTpe => WeakTypeTag.Short.asInstanceOf[WeakTypeTag[T]]
+ case CharTpe => WeakTypeTag.Char.asInstanceOf[WeakTypeTag[T]]
+ case IntTpe => WeakTypeTag.Int.asInstanceOf[WeakTypeTag[T]]
+ case LongTpe => WeakTypeTag.Long.asInstanceOf[WeakTypeTag[T]]
+ case FloatTpe => WeakTypeTag.Float.asInstanceOf[WeakTypeTag[T]]
+ case DoubleTpe => WeakTypeTag.Double.asInstanceOf[WeakTypeTag[T]]
+ case BooleanTpe => WeakTypeTag.Boolean.asInstanceOf[WeakTypeTag[T]]
+ case UnitTpe => WeakTypeTag.Unit.asInstanceOf[WeakTypeTag[T]]
+ case AnyTpe => WeakTypeTag.Any.asInstanceOf[WeakTypeTag[T]]
+ case AnyValTpe => WeakTypeTag.AnyVal.asInstanceOf[WeakTypeTag[T]]
+ case AnyRefTpe => WeakTypeTag.AnyRef.asInstanceOf[WeakTypeTag[T]]
+ case ObjectTpe => WeakTypeTag.Object.asInstanceOf[WeakTypeTag[T]]
+ case NothingTpe => WeakTypeTag.Nothing.asInstanceOf[WeakTypeTag[T]]
+ case NullTpe => WeakTypeTag.Null.asInstanceOf[WeakTypeTag[T]]
+ case _ => new WeakTypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
}
- def unapply[T](ttag: AbsTypeTag[T]): Option[Type] = Some(ttag.tpe)
+ def unapply[T](ttag: WeakTypeTag[T]): Option[Type] = Some(ttag.tpe)
}
- private class AbsTypeTagImpl[T](val mirror: Mirror, val tpec: TypeCreator) extends AbsTypeTag[T] {
+ private class WeakTypeTagImpl[T](val mirror: Mirror, val tpec: TypeCreator) extends WeakTypeTag[T] {
lazy val tpe: Type = tpec(mirror)
- def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # AbsTypeTag[T] = {
+ def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # WeakTypeTag[T] = {
val otherMirror1 = otherMirror.asInstanceOf[MirrorOf[otherMirror.universe.type]]
- otherMirror.universe.AbsTypeTag[T](otherMirror1, tpec)
+ otherMirror.universe.WeakTypeTag[T](otherMirror1, tpec)
}
private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = false)
}
/**
- * If an implicit value of type u.TypeTag[T] is required, the compiler will make one up on demand following the same procedure as for TypeTags.
- * However, if the resulting type still contains references to type parameters or abstract types, a static error results.
+ * A `TypeTag` is a [[scala.reflect.base.Universe#WeakTypeTag]] with the additional
+ * static guarantee that all type references are concrete, i.e. it does '''not''' contain any references to
+ * unresolved type parameters or abstract types.
*
* @see [[scala.reflect.base.TypeTags]]
*/
@annotation.implicitNotFound(msg = "No TypeTag available for ${T}")
- trait TypeTag[T] extends AbsTypeTag[T] with Equals with Serializable {
+ trait TypeTag[T] extends WeakTypeTag[T] with Equals with Serializable {
+ /**
+ * @inheritdoc
+ */
override def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # TypeTag[T]
- /** case class accessories */
+ // case class accessories
override def canEqual(x: Any) = x.isInstanceOf[TypeTag[_]]
override def equals(x: Any) = x.isInstanceOf[TypeTag[_]] && this.mirror == x.asInstanceOf[TypeTag[_]].mirror && this.tpe == x.asInstanceOf[TypeTag[_]].tpe
override def hashCode = mirror.hashCode * 31 + tpe.hashCode
@@ -230,7 +294,7 @@ trait TypeTags { self: Universe =>
def unapply[T](ttag: TypeTag[T]): Option[Type] = Some(ttag.tpe)
}
- private class TypeTagImpl[T](mirror: Mirror, tpec: TypeCreator) extends AbsTypeTagImpl[T](mirror, tpec) with TypeTag[T] {
+ private class TypeTagImpl[T](mirror: Mirror, tpec: TypeCreator) extends WeakTypeTagImpl[T](mirror, tpec) with TypeTag[T] {
override def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # TypeTag[T] = {
val otherMirror1 = otherMirror.asInstanceOf[MirrorOf[otherMirror.universe.type]]
otherMirror.universe.TypeTag[T](otherMirror1, tpec)
@@ -238,23 +302,36 @@ trait TypeTags { self: Universe =>
private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = true)
}
- private class PredefTypeCreator[T](copyIn: Universe => Universe # TypeTag[T]) extends TypeCreator {
+ private class PredefTypeCreator[T](copyIn: Universe => Universe#TypeTag[T]) extends TypeCreator {
def apply[U <: Universe with Singleton](m: MirrorOf[U]): U # Type = {
copyIn(m.universe).asInstanceOf[U # TypeTag[T]].tpe
}
}
- private class PredefTypeTag[T](_tpe: Type, copyIn: Universe => Universe # TypeTag[T]) extends TypeTagImpl[T](rootMirror, new PredefTypeCreator(copyIn)) {
+ private class PredefTypeTag[T](_tpe: Type, copyIn: Universe => Universe#TypeTag[T]) extends TypeTagImpl[T](rootMirror, new PredefTypeCreator(copyIn)) {
override lazy val tpe: Type = _tpe
private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = true)
}
- // incantations
- def absTypeTag[T](implicit attag: AbsTypeTag[T]) = attag
+ /**
+ * Shortcut for `implicitly[WeakTypeTag[T]]`
+ */
+ def weakTypeTag[T](implicit attag: WeakTypeTag[T]) = attag
+
+ /**
+ * Shortcut for `implicitly[TypeTag[T]]`
+ */
def typeTag[T](implicit ttag: TypeTag[T]) = ttag
// big thanks to Viktor Klang for this brilliant idea!
- def absTypeOf[T](implicit attag: AbsTypeTag[T]): Type = attag.tpe
+ /**
+ * Shortcut for `implicitly[WeakTypeTag[T]].tpe`
+ */
+ def weakTypeOf[T](implicit attag: WeakTypeTag[T]): Type = attag.tpe
+
+ /**
+ * Shortcut for `implicitly[TypeTag[T]].tpe`
+ */
def typeOf[T](implicit ttag: TypeTag[T]): Type = ttag.tpe
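+
+  // Example usage (a sketch, assuming the runtime universe is imported):
+  //   import scala.reflect.runtime.universe._
+  //   typeOf[List[Int]]                        // the Type representing List[Int]
+  //   def show[T: WeakTypeTag] = weakTypeOf[T] // also available when T is not fully known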
}
@@ -272,6 +349,7 @@ private[scala] class SerializedTypeTag(var tpec: TypeCreator, var concrete: Bool
private def readResolve(): AnyRef = {
import scala.reflect.basis._
if (concrete) TypeTag(rootMirror, tpec)
- else AbsTypeTag(rootMirror, tpec)
+ else WeakTypeTag(rootMirror, tpec)
}
-} \ No newline at end of file
+}
+ \ No newline at end of file
diff --git a/src/library/scala/reflect/base/Types.scala b/src/library/scala/reflect/base/Types.scala
index b016b77f36..b2ee3bc4d3 100644
--- a/src/library/scala/reflect/base/Types.scala
+++ b/src/library/scala/reflect/base/Types.scala
@@ -1,6 +1,14 @@
package scala.reflect
package base
+/**
+ * Defines the type hierarchy for types.
+ *
+ * Note: Because of implementation details, some type factories have return type `Type`
+ * instead of a more precise type.
+ *
+ * @see [[scala.reflect]] for a description of how the class hierarchy is encoded here.
+ */
trait Types { self: Universe =>
/** The type of Scala types, and also Scala type signatures.
@@ -26,14 +34,14 @@ trait Types { self: Universe =>
*/
val NoPrefix: Type
- /** The type of Scala singleton types, i.e. types that are inhabited
+ /** The type of Scala singleton types, i.e., types that are inhabited
* by only one non-null value. These include types of the forms
* {{{
* C.this.type
* C.super.type
* x.type
* }}}
- * as well as constant types.
+ * as well as [[ConstantType constant types]].
*/
type SingletonType >: Null <: Type
@@ -42,8 +50,8 @@ trait Types { self: Universe =>
*/
implicit val SingletonTypeTag: ClassTag[SingletonType]
- /** The `ThisType` type describes types of the form on the left with the
- * correspnding ThisType representations to the right.
+ /** A singleton type that describes types of the form on the left with the
+ * corresponding `ThisType` representation to the right:
* {{{
* C.this.type ThisType(C)
* }}}
@@ -62,7 +70,10 @@ trait Types { self: Universe =>
* where `sym` is the class prefix of the this type.
*/
abstract class ThisTypeExtractor {
- def apply(sym: Symbol): Type // not ThisTypebecause of implementation details
+ /**
+ * Creates a ThisType from the given class symbol.
+ */
+ def apply(sym: Symbol): Type
def unapply(tpe: ThisType): Option[Symbol]
}
@@ -120,7 +131,7 @@ trait Types { self: Universe =>
}
/** The `ConstantType` type is not directly written in user programs, but arises as the type of a constant.
- * The REPL expresses constant types like Int(11). Here are some constants with their types.
+ * The REPL expresses constant types like `Int(11)`. Here are some constants with their types:
* {{{
* 1 ConstantType(Constant(1))
* "abc" ConstantType(Constant("abc"))
@@ -362,8 +373,8 @@ trait Types { self: Universe =>
* `selfSym` is a symbol representing the annotated type itself.
*/
abstract class AnnotatedTypeExtractor {
- def apply(annotations: List[AnnotationInfo], underlying: Type, selfsym: Symbol): AnnotatedType
- def unapply(tpe: AnnotatedType): Option[(List[AnnotationInfo], Type, Symbol)]
+ def apply(annotations: List[Annotation], underlying: Type, selfsym: Symbol): AnnotatedType
+ def unapply(tpe: AnnotatedType): Option[(List[Annotation], Type, Symbol)]
}
/** The `TypeBounds` type signature is used to indicate lower and upper type bounds
@@ -401,7 +412,7 @@ trait Types { self: Universe =>
val WildcardType: Type
/** BoundedWildcardTypes, used only during type inference, are created in
- * two places that I can find:
+ * two places:
*
* 1. If the expected type of an expression is an existential type,
* its hidden symbols are replaced with bounded wildcards.
@@ -417,8 +428,12 @@ trait Types { self: Universe =>
*/
implicit val BoundedWildcardTypeTag: ClassTag[BoundedWildcardType]
+ /** The constructor/deconstructor for `BoundedWildcardType` instances. */
val BoundedWildcardType: BoundedWildcardTypeExtractor
+ /** An extractor class to create and pattern match with syntax `BoundedWildcardType(bounds)`,
+ * where `bounds` denotes the type bounds.
+ */
abstract class BoundedWildcardTypeExtractor {
def apply(bounds: TypeBounds): BoundedWildcardType
def unapply(tpe: BoundedWildcardType): Option[TypeBounds]
diff --git a/src/library/scala/reflect/base/Universe.scala b/src/library/scala/reflect/base/Universe.scala
index f098876c18..0b5d5ed685 100644
--- a/src/library/scala/reflect/base/Universe.scala
+++ b/src/library/scala/reflect/base/Universe.scala
@@ -8,7 +8,7 @@ abstract class Universe extends Symbols
with Names
with Trees
with Constants
- with AnnotationInfos
+ with Annotations
with Positions
with Exprs
with TypeTags
@@ -18,24 +18,39 @@ abstract class Universe extends Symbols
with BuildUtils
with Mirrors
{
- /** Given an expression, generate a tree that when compiled and executed produces the original tree.
- * The produced tree will be bound to the Universe it was called from.
+ /** Produce the abstract syntax tree representing the given Scala expression.
+ *
+ * For example
+ *
+ * {{{
+ * val five = reify{ 5 } // Literal(Constant(5))
+ * reify{ 2 + 4 } // Apply( Select( Literal(Constant(2)), newTermName("$plus")), List( Literal(Constant(4)) ) )
+ * reify{ five.splice + 4 } // Apply( Select( Literal(Constant(5)), newTermName("$plus")), List( Literal(Constant(4)) ) )
+ * }}}
+ *
+ * The produced tree is path dependent on the Universe `reify` was called from.
+ *
+ * Use [[scala.reflect.base.Exprs#Expr.splice]] to embed an existing expression into a reify call. Use [[Expr]] to turn a [[Tree]] into an expression that can be spliced.
+ *
+ * == Further info and implementation details ==
+ *
+ * `reify` is implemented as a macro which, given an expression, generates a tree that, when compiled and executed, produces the original tree.
*
- * For instance, given the abstract syntax tree representation of the <[ x + 1 ]> expression:
+ * For instance in `reify{ x + 1 }` the macro `reify` receives the abstract syntax tree of `x + 1` as its argument, which is
*
* {{{
* Apply(Select(Ident("x"), "+"), List(Literal(Constant(1))))
* }}}
*
- * The reifier transforms it to the following expression:
+ * and returns a tree which, when compiled and executed, produces the tree above. In other words, the reify call expands to something like
*
* {{{
- * <[
* val $u: u.type = u // where u is a reference to the Universe that calls the reify
* $u.Expr[Int]($u.Apply($u.Select($u.Ident($u.newFreeVar("x", <Int>, x), "+"), List($u.Literal($u.Constant(1))))))
- * ]>
* }}}
- *
+ *
+ * === Splicing ===
+ *
* Reification performs expression splicing (when processing Expr.splice)
* and type splicing (for every type T that has a TypeTag[T] implicit in scope):
*
@@ -46,7 +61,7 @@ abstract class Universe extends Symbols
* def macroImpl[T](c: Context) = {
* ...
* // T here is just a type parameter, so the tree produced by reify won't be of much use in a macro expansion
- * // however, if T were annotated with c.AbsTypeTag (which would declare an implicit parameter for macroImpl)
+ * // however, if T were annotated with c.WeakTypeTag (which would declare an implicit parameter for macroImpl)
* // then reification would substitute T with the TypeTree that was used in a TypeApply of this particular macro invocation
* val factory = c.reify{ new Queryable[T] }
* ...
@@ -54,13 +69,12 @@ abstract class Universe extends Symbols
* }}}
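+ *
+ * Outside of macros, type splicing can be sketched as follows (assuming the runtime universe):
+ *
+ * {{{
+ * import scala.reflect.runtime.universe._
+ * def makeList[T: TypeTag] = reify{ List.empty[T] } // T is spliced using the TypeTag in scope
+ * }}}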
*
* The transformation looks mostly straightforward, but it has its tricky parts:
- * * Reifier retains symbols and types defined outside the reified tree, however
+ * - Reifier retains symbols and types defined outside the reified tree, however
* locally defined entities get erased and replaced with their original trees
- * * Free variables are detected and wrapped in symbols of the type FreeVar
- * * Mutable variables that are accessed from a local function are wrapped in refs
- * * Since reified trees can be compiled outside of the scope they've been created in,
- * special measures are taken to ensure that all members accessed in the reifee remain visible
+ * - Free variables are detected and wrapped in symbols of the type `FreeTermSymbol` or `FreeTypeSymbol`
+ * - Mutable variables that are accessed from a local function are wrapped in refs
*/
- // implementation is magically hardwired to `scala.reflect.reify.Taggers`
+ // implementation is hardwired to `scala.reflect.reify.Taggers`
+ // using the mechanism implemented in `scala.tools.reflect.FastTrack`
def reify[T](expr: T): Expr[T] = ??? // macro
} \ No newline at end of file
diff --git a/src/library/scala/reflect/macros/internal/macroImpl.scala b/src/library/scala/reflect/macros/internal/macroImpl.scala
index a7b2bf482c..b281fb7d12 100644
--- a/src/library/scala/reflect/macros/internal/macroImpl.scala
+++ b/src/library/scala/reflect/macros/internal/macroImpl.scala
@@ -15,4 +15,4 @@ package internal
* To lessen the weirdness we define this annotation as `private[scala]`.
* It will not prevent pickling, but it will prevent application developers (and scaladocs) from seeing the annotation.
*/
-private[scala] class macroImpl(val referenceToMacroImpl: Any) extends annotation.StaticAnnotation
+private[scala] class macroImpl(val referenceToMacroImpl: Any) extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/reflect/macros/internal/package.scala b/src/library/scala/reflect/macros/internal/package.scala
index 0a0e6c5b51..8457285752 100644
--- a/src/library/scala/reflect/macros/internal/package.scala
+++ b/src/library/scala/reflect/macros/internal/package.scala
@@ -4,10 +4,11 @@ import scala.reflect.base.{Universe => BaseUniverse}
import scala.reflect.ClassTag
// anchors for materialization macros emitted during tag materialization in Implicits.scala
-// implementation is magically hardwired into `scala.reflect.reify.Taggers`
+// implementation is hardwired into `scala.reflect.reify.Taggers`
+// using the mechanism implemented in `scala.tools.reflect.FastTrack`
// todo. once we have implicit macros for tag generation, we can remove these anchors
package object internal {
private[scala] def materializeClassTag[T](u: BaseUniverse): ClassTag[T] = ??? // macro
- private[scala] def materializeAbsTypeTag[T](u: BaseUniverse): u.AbsTypeTag[T] = ??? // macro
+ private[scala] def materializeWeakTypeTag[T](u: BaseUniverse): u.WeakTypeTag[T] = ??? // macro
private[scala] def materializeTypeTag[T](u: BaseUniverse): u.TypeTag[T] = ??? // macro
}
diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala
index d97f2ec633..046491ae10 100644
--- a/src/library/scala/reflect/package.scala
+++ b/src/library/scala/reflect/package.scala
@@ -1,5 +1,74 @@
package scala
+/**
+ * The base package of Scala's reflection library.
+ *
+ * The reflection library is structured according to the 'cake pattern'. The base layer
+ * resides in package [[scala.reflect.base]] and defines an interface to the following main types:
+ *
+ * - [[scala.reflect.base.Types#Type Types]] represent types
+ * - [[scala.reflect.base.Symbols#Symbol Symbols]] represent definitions
+ * - [[scala.reflect.base.Trees#Tree Trees]] represent abstract syntax trees
+ * - [[scala.reflect.base.Names#Name Names]] represent term and type names
+ * - [[scala.reflect.base.Annotations#Annotation Annotations]] represent annotations
+ * - [[scala.reflect.base.Positions#Position Positions]] represent source positions of tree nodes
+ * - [[scala.reflect.base.FlagSets#FlagSet FlagSets]] represent sets of flags that apply to symbols and
+ * definition trees
+ * - [[scala.reflect.base.Constants#Constant Constants]] represent compile-time constants.
+ *
+ * Each of these types is defined in its own enclosing trait, all of which are ultimately inherited by class
+ * [[scala.reflect.base.Universe Universe]]. The base universe defines a minimal interface to the above types.
+ * Universes that provide additional functionality, such as deeper introspection or runtime code generation,
+ * are defined in packages [[scala.reflect.api]] and `scala.tools.reflect`.
+ *
+ * The cake pattern employed here requires writing certain Scala idioms with more indirection than usual.
+ * What follows is a description of these indirections, which should help you navigate the Scaladocs.
+ *
+ * For instance, consider the base type of all abstract syntax trees: [[scala.reflect.base.Trees#Tree]].
+ * This type is not a class but is abstract and has an upper bound of [[scala.reflect.base.Trees#TreeBase]],
+ * which is a class defining the minimal base interface for all trees.
+ *
+ * For a more interesting tree type, consider [[scala.reflect.base.Trees#If]] representing if-expressions.
+ * It does not come with a class `IfBase`, since it does not add anything to the interface of its upper
+ * bound `TermTree`. However, it is defined next to a value `If` of type [[scala.reflect.base.Trees#IfExtractor]].
+ * This value serves as the companion object defining a factory method `apply` and a corresponding `unapply`
+ * for pattern matching.
+ *
+ * {{{
+ * import scala.reflect.runtime.universe._
+ * val cond = reify{ condition }.tree // <- just some tree representing a condition
+ * val body = Literal(Constant(1))
+ * val other = Literal(Constant(2))
+ * val iftree = If(cond,body,other)
+ * }}}
+ *
+ * is equivalent to
+ *
+ * {{{
+ * import scala.reflect.runtime.universe._
+ * val iftree = reify{ if( condition ) 1 else 2 }.tree
+ * }}}
+ *
+ * and can be pattern matched as
+ *
+ * {{{
+ * iftree match { case If(cond,body,other) => ... }
+ * }}}
+ *
+ * Moreover, there is an implicit value [[scala.reflect.base.Trees#IfTag]] of type
+ * `ClassTag[If]` that is used by the Scala compiler so that we can indeed pattern match on `If`:
+ * {{{
+ * iftree match { case _:If => ... }
+ * }}}
+ * Without the given implicit value, this pattern match would raise an "unchecked" warning at compile time
+ * since `If` is an abstract type that gets erased at runtime. See [[scala.reflect.ClassTag]] for details.
+ *
+ * To summarize: each tree type `X` (and similarly for other types such as `Type` or `Symbol`) is represented
+ * by an abstract type `X`, optionally together with a class `XBase` that defines `X`'s interface.
+ * `X`'s companion object, if it exists, is represented by a value `X` that is of type `XExtractor`.
+ * Moreover, for each type `X`, there is a value `XTag` of type `ClassTag[X]` that makes it possible to pattern match
+ * on `X`.
+ */
package object reflect {
lazy val basis: base.Universe = new base.Base
diff --git a/src/library/scala/remote.scala b/src/library/scala/remote.scala
index bf6b440031..36893da298 100644
--- a/src/library/scala/remote.scala
+++ b/src/library/scala/remote.scala
@@ -24,4 +24,4 @@ package scala
* }
* }}}
*/
-class remote extends annotation.StaticAnnotation {}
+class remote extends scala.annotation.StaticAnnotation {}
diff --git a/src/library/scala/runtime/AbstractPartialFunction.scala b/src/library/scala/runtime/AbstractPartialFunction.scala
index f499350ce9..c1f245590b 100644
--- a/src/library/scala/runtime/AbstractPartialFunction.scala
+++ b/src/library/scala/runtime/AbstractPartialFunction.scala
@@ -8,7 +8,8 @@
package scala.runtime
-/** `AbstractPartialFunction` reformulates all operations of its supertrait `PartialFunction` in terms of `isDefinedAt` and `applyOrElse`.
+/** `AbstractPartialFunction` reformulates all operations of its supertrait `PartialFunction`
+ * in terms of `isDefinedAt` and `applyOrElse`.
*
* This allows more efficient implementations in many cases:
* - optimized `orElse` method supports chained `orElse` in linear time,
@@ -16,12 +17,7 @@ package scala.runtime
* - optimized `lift` method helps to avoid double evaluation of pattern matchers & guards
* of partial function literals.
*
- * This trait is used as a basis for implementation of all partial function literals
- * with non-exhaustive matchers.
- *
- * Use of `AbstractPartialFunction` instead of `PartialFunction` as a base trait for
- * user-defined partial functions may result in better performance
- * and more predictable behavior w.r.t. side effects.
+ * This trait is used as a basis for the implementation of all partial function literals.
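+ *
+ * For instance, a non-exhaustive literal such as `{ case x: Int if x > 0 => x }` compiles to
+ * roughly the following (a sketch of the generated code):
+ * {{{
+ * new AbstractPartialFunction[Any, Int] {
+ *   def isDefinedAt(x: Any) = x match {
+ *     case x: Int if x > 0 => true
+ *     case _               => false
+ *   }
+ *   override def applyOrElse[A1 <: Any, B1 >: Int](x: A1, default: A1 => B1): B1 = x match {
+ *     case x: Int if x > 0 => x
+ *     case _               => default(x)
+ *   }
+ * }
+ * }}}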
*
* @author Pavel Pavlov
* @since 2.10
@@ -35,34 +31,4 @@ abstract class AbstractPartialFunction[@specialized(scala.Int, scala.Long, scala
// probably okay to make final since classes compiled before have overridden against the old version of AbstractPartialFunction
// let's not make it final so as not to confuse anyone
/*final*/ def apply(x: T1): R = applyOrElse(x, PartialFunction.empty)
-
- @annotation.unspecialized override final def andThen[C](k: R => C) : PartialFunction[T1, C] =
- new AbstractPartialFunction[T1, C] {
- def isDefinedAt(x: T1): Boolean = self.isDefinedAt(x)
- override def applyOrElse[A1 <: T1, C1 >: C](x: A1, default: A1 => C1): C1 =
- self.applyOrElse(x, PartialFunction.fallbackToken) match {
- case PartialFunction.FallbackToken => default(x)
- case z => k(z)
- }
- }
-
- // TODO: remove
- protected def missingCase(x: T1): R = throw new MatchError(x)
-}
-
-
-/** `AbstractTotalFunction` is a partial function whose `isDefinedAt` method always returns `true`.
- *
- * This class is used as base class for partial function literals with
- * certainly exhaustive pattern matchers.
- *
- * @author Pavel Pavlov
- * @since 2.10
- */
-abstract class AbstractTotalFunction[@specialized(scala.Int, scala.Long, scala.Float, scala.Double, scala.AnyRef) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double, scala.AnyRef) +R] extends Function1[T1, R] with PartialFunction[T1, R] {
- final def isDefinedAt(x: T1): Boolean = true
- @annotation.unspecialized override final def applyOrElse[A1 <: T1, B1 >: R](x: A1, default: A1 => B1): B1 = apply(x)
- @annotation.unspecialized override final def orElse[A1 <: T1, B1 >: R](that: PartialFunction[A1, B1]): PartialFunction[A1, B1] = this
- //TODO: check generated code for PF literal here
- @annotation.unspecialized override final def andThen[C](k: R => C): PartialFunction[T1, C] = { case x => k(apply(x)) }
}
diff --git a/src/library/scala/runtime/RichBoolean.scala b/src/library/scala/runtime/RichBoolean.scala
index a14160a71e..92cc6ccf98 100644
--- a/src/library/scala/runtime/RichBoolean.scala
+++ b/src/library/scala/runtime/RichBoolean.scala
@@ -8,6 +8,6 @@
package scala.runtime
-final class RichBoolean(val self: Boolean) extends OrderedProxy[Boolean] {
- protected val ord = math.Ordering[Boolean]
+final class RichBoolean(val self: Boolean) extends AnyVal with OrderedProxy[Boolean] {
+ protected def ord = scala.math.Ordering.Boolean
}
diff --git a/src/library/scala/runtime/RichByte.scala b/src/library/scala/runtime/RichByte.scala
index c42a2dd183..9d88ed3689 100644
--- a/src/library/scala/runtime/RichByte.scala
+++ b/src/library/scala/runtime/RichByte.scala
@@ -8,4 +8,7 @@
package scala.runtime
-final class RichByte(val self: Byte) extends ScalaWholeNumberProxy[Byte] { }
+final class RichByte(val self: Byte) extends AnyVal with ScalaWholeNumberProxy[Byte] {
+ protected def num = scala.math.Numeric.ByteIsIntegral
+ protected def ord = scala.math.Ordering.Byte
+}
diff --git a/src/library/scala/runtime/RichChar.scala b/src/library/scala/runtime/RichChar.scala
index ba939d6633..918fe70f5c 100644
--- a/src/library/scala/runtime/RichChar.scala
+++ b/src/library/scala/runtime/RichChar.scala
@@ -10,7 +10,10 @@ package scala.runtime
import java.lang.Character
-final class RichChar(val self: Char) extends IntegralProxy[Char] {
+final class RichChar(val self: Char) extends AnyVal with IntegralProxy[Char] {
+ protected def num = scala.math.Numeric.CharIsIntegral
+ protected def ord = scala.math.Ordering.Char
+
def asDigit: Int = Character.digit(self, Character.MAX_RADIX)
def isControl: Boolean = Character.isISOControl(self)
diff --git a/src/library/scala/runtime/RichDouble.scala b/src/library/scala/runtime/RichDouble.scala
index 396323d1e8..d7d2603ef7 100644
--- a/src/library/scala/runtime/RichDouble.scala
+++ b/src/library/scala/runtime/RichDouble.scala
@@ -6,10 +6,13 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
-final class RichDouble(val self: Double) extends FractionalProxy[Double] {
- protected val integralNum = Numeric.DoubleAsIfIntegral
+final class RichDouble(val self: Double) extends AnyVal with FractionalProxy[Double] {
+ protected def num = scala.math.Numeric.DoubleIsFractional
+ protected def ord = scala.math.Ordering.Double
+ protected def integralNum = scala.math.Numeric.DoubleAsIfIntegral
def round: Long = math.round(self)
def ceil: Double = math.ceil(self)
diff --git a/src/library/scala/runtime/RichException.scala b/src/library/scala/runtime/RichException.scala
index bb20ec61bb..b9289562f8 100644
--- a/src/library/scala/runtime/RichException.scala
+++ b/src/library/scala/runtime/RichException.scala
@@ -8,7 +8,7 @@
package scala.runtime
-import compat.Platform.EOL
+import scala.compat.Platform.EOL
final class RichException(exc: Throwable) {
def getStackTraceString = exc.getStackTrace().mkString("", EOL, EOL)
diff --git a/src/library/scala/runtime/RichFloat.scala b/src/library/scala/runtime/RichFloat.scala
index 4fc9e8864a..9c3a14d3be 100644
--- a/src/library/scala/runtime/RichFloat.scala
+++ b/src/library/scala/runtime/RichFloat.scala
@@ -6,10 +6,13 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
-final class RichFloat(val self: Float) extends FractionalProxy[Float] {
- protected val integralNum = Numeric.FloatAsIfIntegral
+final class RichFloat(val self: Float) extends AnyVal with FractionalProxy[Float] {
+ protected def num = scala.math.Numeric.FloatIsFractional
+ protected def ord = scala.math.Ordering.Float
+ protected def integralNum = scala.math.Numeric.FloatAsIfIntegral
def round: Int = math.round(self)
def ceil: Float = math.ceil(self).toFloat
diff --git a/src/library/scala/runtime/RichInt.scala b/src/library/scala/runtime/RichInt.scala
index d03968212f..619574264a 100644
--- a/src/library/scala/runtime/RichInt.scala
+++ b/src/library/scala/runtime/RichInt.scala
@@ -12,7 +12,9 @@ import scala.collection.immutable.Range
// Note that this does not implement IntegralProxy[Int] so that it can return
// the Int-specific Range class from until/to.
-final class RichInt(val self: Int) extends ScalaNumberProxy[Int] with RangedProxy[Int] {
+final class RichInt(val self: Int) extends AnyVal with ScalaNumberProxy[Int] with RangedProxy[Int] {
+ protected def num = scala.math.Numeric.IntIsIntegral
+ protected def ord = scala.math.Ordering.Int
type ResultWithoutStep = Range
/**
diff --git a/src/library/scala/runtime/RichLong.scala b/src/library/scala/runtime/RichLong.scala
index 5784934ffd..7c052851a9 100644
--- a/src/library/scala/runtime/RichLong.scala
+++ b/src/library/scala/runtime/RichLong.scala
@@ -8,7 +8,10 @@
package scala.runtime
-final class RichLong(val self: Long) extends IntegralProxy[Long] {
+final class RichLong(val self: Long) extends AnyVal with IntegralProxy[Long] {
+ protected def num = scala.math.Numeric.LongIsIntegral
+ protected def ord = scala.math.Ordering.Long
+
def toBinaryString: String = java.lang.Long.toBinaryString(self)
def toHexString: String = java.lang.Long.toHexString(self)
def toOctalString: String = java.lang.Long.toOctalString(self)
diff --git a/src/library/scala/runtime/RichShort.scala b/src/library/scala/runtime/RichShort.scala
index a174438c06..4dfa237b38 100644
--- a/src/library/scala/runtime/RichShort.scala
+++ b/src/library/scala/runtime/RichShort.scala
@@ -8,4 +8,7 @@
package scala.runtime
-final class RichShort(val self: Short) extends ScalaWholeNumberProxy[Short] { }
+final class RichShort(val self: Short) extends AnyVal with ScalaWholeNumberProxy[Short] {
+ protected def num = scala.math.Numeric.ShortIsIntegral
+ protected def ord = scala.math.Ordering.Short
+}
diff --git a/src/library/scala/runtime/ScalaNumberProxy.scala b/src/library/scala/runtime/ScalaNumberProxy.scala
index d9b9a7843f..df2d209e3e 100644
--- a/src/library/scala/runtime/ScalaNumberProxy.scala
+++ b/src/library/scala/runtime/ScalaNumberProxy.scala
@@ -9,7 +9,7 @@
package scala.runtime
import scala.collection.{ mutable, immutable }
-import math.ScalaNumericConversions
+import scala.math.ScalaNumericConversions
import immutable.NumericRange
import Proxy.Typed
@@ -20,9 +20,8 @@ import Proxy.Typed
* @version 2.9
* @since 2.9
*/
-abstract class ScalaNumberProxy[T: Numeric] extends ScalaNumericConversions with Typed[T] with OrderedProxy[T] {
- private val num = implicitly[Numeric[T]]
- protected val ord: Ordering[T] = num
+trait ScalaNumberProxy[T] extends Any with ScalaNumericConversions with Typed[T] with OrderedProxy[T] {
+ protected implicit def num: Numeric[T]
def underlying() = self.asInstanceOf[AnyRef]
def doubleValue() = num.toDouble(self)
@@ -35,11 +34,11 @@ abstract class ScalaNumberProxy[T: Numeric] extends ScalaNumericConversions with
def abs = num.abs(self)
def signum = num.signum(self)
}
-abstract class ScalaWholeNumberProxy[T: Numeric] extends ScalaNumberProxy[T] {
+trait ScalaWholeNumberProxy[T] extends Any with ScalaNumberProxy[T] {
def isWhole() = true
}
-abstract class IntegralProxy[T : Integral] extends ScalaWholeNumberProxy[T] with RangedProxy[T] {
- private lazy val num = implicitly[Integral[T]]
+trait IntegralProxy[T] extends Any with ScalaWholeNumberProxy[T] with RangedProxy[T] {
+ protected implicit def num: Integral[T]
type ResultWithoutStep = NumericRange[T]
def until(end: T): NumericRange.Exclusive[T] = NumericRange(self, end, num.one)
@@ -47,17 +46,17 @@ abstract class IntegralProxy[T : Integral] extends ScalaWholeNumberProxy[T] with
def to(end: T): NumericRange.Inclusive[T] = NumericRange.inclusive(self, end, num.one)
def to(end: T, step: T): NumericRange.Inclusive[T] = NumericRange.inclusive(self, end, step)
}
-abstract class FractionalProxy[T : Fractional] extends ScalaNumberProxy[T] with RangedProxy[T] {
- def isWhole() = false
+trait FractionalProxy[T] extends Any with ScalaNumberProxy[T] with RangedProxy[T] {
+ protected implicit def num: Fractional[T]
+ protected implicit def integralNum: Integral[T]
/** In order to supply predictable ranges, we require an Integral[T] which provides
* us with discrete operations on the (otherwise fractional) T. See Numeric.DoubleAsIfIntegral
* for an example.
*/
- protected implicit def integralNum: Integral[T]
- private lazy val num = implicitly[Fractional[T]]
type ResultWithoutStep = Range.Partial[T, NumericRange[T]]
+ def isWhole() = false
def until(end: T): ResultWithoutStep = new Range.Partial(NumericRange(self, end, _))
def until(end: T, step: T): NumericRange.Exclusive[T] = NumericRange(self, end, step)
def to(end: T): ResultWithoutStep = new Range.Partial(NumericRange.inclusive(self, end, _))
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index e5f5e9dc5d..c7f1d2fcac 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator }
import scala.collection.mutable.WrappedArray
@@ -15,6 +16,7 @@ import scala.collection.generic.{ Sorted }
import scala.reflect.{ ClassTag, classTag }
import scala.util.control.ControlThrowable
import scala.xml.{ Node, MetaData }
+import java.lang.{ Class => jClass }
import java.lang.Double.doubleToLongBits
import java.lang.reflect.{ Modifier, Method => JMethod }
@@ -28,10 +30,10 @@ object ScalaRunTime {
def isArray(x: Any, atLevel: Int): Boolean =
x != null && isArrayClass(x.getClass, atLevel)
- private def isArrayClass(clazz: Class[_], atLevel: Int): Boolean =
+ private def isArrayClass(clazz: jClass[_], atLevel: Int): Boolean =
clazz.isArray && (atLevel == 1 || isArrayClass(clazz.getComponentType, atLevel - 1))
- def isValueClass(clazz: Class[_]) = clazz.isPrimitive()
+ def isValueClass(clazz: jClass[_]) = clazz.isPrimitive()
def isTuple(x: Any) = x != null && tupleNames(x.getClass.getName)
def isAnyVal(x: Any) = x match {
case _: Byte | _: Short | _: Char | _: Int | _: Long | _: Float | _: Double | _: Boolean | _: Unit => true
@@ -50,7 +52,7 @@ object ScalaRunTime {
/** Return the class object representing an array with element class `clazz`.
*/
- def arrayClass(clazz: Class[_]): Class[_] = {
+ def arrayClass(clazz: jClass[_]): jClass[_] = {
// newInstance throws an exception if the erasure is Void.TYPE. see SI-5680
if (clazz == java.lang.Void.TYPE) classOf[Array[Unit]]
else java.lang.reflect.Array.newInstance(clazz, 0).getClass
@@ -58,18 +60,19 @@ object ScalaRunTime {
/** Return the class object representing elements in arrays described by a given schematic.
*/
- def arrayElementClass(schematic: Any): Class[_] = schematic match {
- case cls: Class[_] => cls.getComponentType
+ def arrayElementClass(schematic: Any): jClass[_] = schematic match {
+ case cls: jClass[_] => cls.getComponentType
case tag: ClassTag[_] => tag.runtimeClass
- case _ => throw new UnsupportedOperationException("unsupported schematic %s (%s)".format(schematic, if (schematic == null) "null" else schematic.getClass))
+ case _ =>
+ throw new UnsupportedOperationException(s"unsupported schematic $schematic (${schematic.getClass})")
}
/** Return the class object representing an unboxed value type,
* e.g. classOf[int], not classOf[java.lang.Integer]. The compiler
* rewrites expressions like 5.getClass to come here.
*/
- def anyValClass[T <: AnyVal : ClassTag](value: T): Class[T] =
- classTag[T].runtimeClass.asInstanceOf[Class[T]]
+ def anyValClass[T <: AnyVal : ClassTag](value: T): jClass[T] =
+ classTag[T].runtimeClass.asInstanceOf[jClass[T]]
/** Retrieve generic array element */
def array_apply(xs: AnyRef, idx: Int): Any = xs match {
@@ -144,7 +147,7 @@ object ScalaRunTime {
dest
}
- def toArray[T](xs: collection.Seq[T]) = {
+ def toArray[T](xs: scala.collection.Seq[T]) = {
val arr = new Array[AnyRef](xs.length)
var i = 0
for (x <- xs) {
@@ -167,35 +170,6 @@ object ScalaRunTime {
def checkInitialized[T <: AnyRef](x: T): T =
if (x == null) throw new UninitializedError else x
- abstract class Try[+A] {
- def Catch[B >: A](handler: PartialFunction[Throwable, B]): B
- def Finally(fin: => Unit): A
- }
-
- def Try[A](block: => A): Try[A] = new Try[A] with Runnable {
- private var result: A = _
- private var exception: Throwable =
- try { run() ; null }
- catch {
- case e: ControlThrowable => throw e // don't catch non-local returns etc
- case e: Throwable => e
- }
-
- def run() { result = block }
-
- def Catch[B >: A](handler: PartialFunction[Throwable, B]): B =
- if (exception == null) result
- else if (handler isDefinedAt exception) handler(exception)
- else throw exception
-
- def Finally(fin: => Unit): A = {
- fin
-
- if (exception == null) result
- else throw exception
- }
- }
-
def _toString(x: Product): String =
x.productIterator.mkString(x.productPrefix + "(", ",", ")")
@@ -279,7 +253,7 @@ object ScalaRunTime {
* it's performing a series of Any/Any equals comparisons anyway.
* See ticket #2867 for specifics.
*/
- def sameElements(xs1: collection.Seq[Any], xs2: collection.Seq[Any]) = xs1 sameElements xs2
+ def sameElements(xs1: scala.collection.Seq[Any], xs2: scala.collection.Seq[Any]) = xs1 sameElements xs2
/** Given any Scala value, convert it to a String.
*
@@ -346,7 +320,7 @@ object ScalaRunTime {
case x: String => if (x.head.isWhitespace || x.last.isWhitespace) "\"" + x + "\"" else x
case x if useOwnToString(x) => x.toString
case x: AnyRef if isArray(x) => arrayToString(x)
- case x: collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")")
+ case x: scala.collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")")
case x: Iterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
case x: Traversable[_] => x take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
case x: Product1[_] if isTuple(x) => "(" + inner(x._1) + ",)" // that special trailing comma
diff --git a/src/library/scala/runtime/SeqCharSequence.scala b/src/library/scala/runtime/SeqCharSequence.scala
index 8ef1a9a33e..8cb958c05f 100644
--- a/src/library/scala/runtime/SeqCharSequence.scala
+++ b/src/library/scala/runtime/SeqCharSequence.scala
@@ -6,11 +6,12 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
import java.util.Arrays.copyOfRange
-final class SeqCharSequence(val xs: collection.IndexedSeq[Char]) extends CharSequence {
+final class SeqCharSequence(val xs: scala.collection.IndexedSeq[Char]) extends CharSequence {
def length: Int = xs.length
def charAt(index: Int): Char = xs(index)
def subSequence(start: Int, end: Int): CharSequence = new SeqCharSequence(xs.slice(start, end))
diff --git a/src/library/scala/runtime/StringAdd.scala b/src/library/scala/runtime/StringAdd.scala
index a7e78ea9a3..f074b5407e 100644
--- a/src/library/scala/runtime/StringAdd.scala
+++ b/src/library/scala/runtime/StringAdd.scala
@@ -9,14 +9,6 @@
package scala.runtime
/** A wrapper class that adds string concatenation `+` to any value */
-final class StringAdd(val self: Any) {
-
- // Note: The implicit conversion from Any to StringAdd is one of two
- // implicit conversions from Any to AnyRef in Predef. It is important to have at least
- // two such conversions, so that silent conversions from value types to AnyRef
- // are avoided. If StringFormat should become a value class, another
- // implicit conversion from Any to AnyRef has to be introduced in Predef
-
+final class StringAdd(val self: Any) extends AnyVal {
def +(other: String) = String.valueOf(self) + other
-
}
diff --git a/src/library/scala/runtime/StringFormat.scala b/src/library/scala/runtime/StringFormat.scala
index c120cbb14d..7d34e82812 100644
--- a/src/library/scala/runtime/StringFormat.scala
+++ b/src/library/scala/runtime/StringFormat.scala
@@ -10,18 +10,10 @@ package scala.runtime
/** A wrapper class that adds a `formatted` operation to any value
*/
-final class StringFormat(val self: Any) {
-
- // Note: The implicit conversion from Any to StringFormat is one of two
- // implicit conversions from Any to AnyRef in Predef. It is important to have at least
- // two such conversions, so that silent conversions from value types to AnyRef
- // are avoided. If StringFormat should become a value class, another
- // implicit conversion from Any to AnyRef has to be introduced in Predef
-
+final class StringFormat(val self: Any) extends AnyVal {
/** Returns string formatted according to given `format` string.
* Format strings are as for `String.format`
* (@see java.lang.String.format).
*/
@inline def formatted(fmtstr: String): String = fmtstr format self
-
}
diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala
index dce7eef08d..6030c9ea90 100644
--- a/src/library/scala/runtime/Tuple2Zipped.scala
+++ b/src/library/scala/runtime/Tuple2Zipped.scala
@@ -10,33 +10,32 @@ package scala.runtime
import scala.collection.{ TraversableLike, IterableLike }
import scala.collection.generic.{ CanBuildFrom => CBF }
-import language.{ higherKinds, implicitConversions }
+import scala.language.{ higherKinds, implicitConversions }
/** This interface is intended as a minimal interface, not complicated
* by the requirement to resolve type constructors, for implicit search (which only
* needs to find an implicit conversion to Traversable for our purposes.)
*/
-trait ZippedTraversable2[+El1, +El2] {
+trait ZippedTraversable2[+El1, +El2] extends Any {
def foreach[U](f: (El1, El2) => U): Unit
}
object ZippedTraversable2 {
implicit def zippedTraversable2ToTraversable[El1, El2](zz: ZippedTraversable2[El1, El2]): Traversable[(El1, El2)] = {
- new collection.AbstractTraversable[(El1, El2)] {
+ new scala.collection.AbstractTraversable[(El1, El2)] {
def foreach[U](f: ((El1, El2)) => U): Unit = zz foreach Function.untupled(f)
}
}
}
-class Tuple2Zipped[El1, Repr1, El2, Repr2](
- coll1: TraversableLike[El1, Repr1],
- coll2: IterableLike[El2, Repr2]
-) extends ZippedTraversable2[El1, El2] {
+final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1, Repr1], IterableLike[El2, Repr2])) extends AnyVal with ZippedTraversable2[El1, El2] {
+ // This would be better as "private def coll1 = colls._1" but
+ // SI-6215 precludes private methods in value classes.
def map[B, To](f: (El1, El2) => B)(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- b.sizeHint(coll1)
- val elems2 = coll2.iterator
+ val b = cbf(colls._1.repr)
+ b.sizeHint(colls._1)
+ val elems2 = colls._2.iterator
- for (el1 <- coll1) {
+ for (el1 <- colls._1) {
if (elems2.hasNext)
b += f(el1, elems2.next)
else
@@ -47,10 +46,10 @@ class Tuple2Zipped[El1, Repr1, El2, Repr2](
}
def flatMap[B, To](f: (El1, El2) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- val elems2 = coll2.iterator
+ val b = cbf(colls._1.repr)
+ val elems2 = colls._2.iterator
- for (el1 <- coll1) {
+ for (el1 <- colls._1) {
if (elems2.hasNext)
b ++= f(el1, elems2.next)
else
@@ -61,11 +60,11 @@ class Tuple2Zipped[El1, Repr1, El2, Repr2](
}
def filter[To1, To2](f: (El1, El2) => Boolean)(implicit cbf1: CBF[Repr1, El1, To1], cbf2: CBF[Repr2, El2, To2]): (To1, To2) = {
- val b1 = cbf1(coll1.repr)
- val b2 = cbf2(coll2.repr)
- val elems2 = coll2.iterator
+ val b1 = cbf1(colls._1.repr)
+ val b2 = cbf2(colls._2.repr)
+ val elems2 = colls._2.iterator
- for (el1 <- coll1) {
+ for (el1 <- colls._1) {
if (elems2.hasNext) {
val el2 = elems2.next
if (f(el1, el2)) {
@@ -80,9 +79,9 @@ class Tuple2Zipped[El1, Repr1, El2, Repr2](
}
def exists(f: (El1, El2) => Boolean): Boolean = {
- val elems2 = coll2.iterator
+ val elems2 = colls._2.iterator
- for (el1 <- coll1) {
+ for (el1 <- colls._1) {
if (elems2.hasNext) {
if (f(el1, elems2.next))
return true
@@ -96,9 +95,9 @@ class Tuple2Zipped[El1, Repr1, El2, Repr2](
!exists((x, y) => !f(x, y))
def foreach[U](f: (El1, El2) => U): Unit = {
- val elems2 = coll2.iterator
+ val elems2 = colls._2.iterator
- for (el1 <- coll1) {
+ for (el1 <- colls._1) {
if (elems2.hasNext)
f(el1, elems2.next)
else
@@ -108,24 +107,24 @@ class Tuple2Zipped[El1, Repr1, El2, Repr2](
}
object Tuple2Zipped {
- class Ops[T1, T2](x: (T1, T2)) {
+ final class Ops[T1, T2](val x: (T1, T2)) extends AnyVal {
def invert[El1, CC1[X] <: TraversableOnce[X], El2, CC2[X] <: TraversableOnce[X], That]
(implicit w1: T1 <:< CC1[El1],
w2: T2 <:< CC2[El2],
- bf: collection.generic.CanBuildFrom[CC1[_], (El1, El2), That]
+ bf: scala.collection.generic.CanBuildFrom[CC1[_], (El1, El2), That]
): That = {
val buf = bf(x._1)
val it1 = x._1.toIterator
val it2 = x._2.toIterator
while (it1.hasNext && it2.hasNext)
buf += ((it1.next, it2.next))
-
+
buf.result
}
def zipped[El1, Repr1, El2, Repr2]
(implicit w1: T1 => TraversableLike[El1, Repr1],
w2: T2 => IterableLike[El2, Repr2]
- ): Tuple2Zipped[El1, Repr1, El2, Repr2] = new Tuple2Zipped(x._1, x._2)
+ ): Tuple2Zipped[El1, Repr1, El2, Repr2] = new Tuple2Zipped((x._1, x._2))
}
}
diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala
index f3ca08649d..3970c9973d 100644
--- a/src/library/scala/runtime/Tuple3Zipped.scala
+++ b/src/library/scala/runtime/Tuple3Zipped.scala
@@ -10,31 +10,29 @@ package scala.runtime
import scala.collection.{ TraversableLike, IterableLike }
import scala.collection.generic.{ CanBuildFrom => CBF }
-import language.{ higherKinds, implicitConversions }
+import scala.language.{ higherKinds, implicitConversions }
/** See comment on ZippedTraversable2. */
-trait ZippedTraversable3[+El1, +El2, +El3] {
+trait ZippedTraversable3[+El1, +El2, +El3] extends Any {
def foreach[U](f: (El1, El2, El3) => U): Unit
}
object ZippedTraversable3 {
implicit def zippedTraversable3ToTraversable[El1, El2, El3](zz: ZippedTraversable3[El1, El2, El3]): Traversable[(El1, El2, El3)] = {
- new collection.AbstractTraversable[(El1, El2, El3)] {
+ new scala.collection.AbstractTraversable[(El1, El2, El3)] {
def foreach[U](f: ((El1, El2, El3)) => U): Unit = zz foreach Function.untupled(f)
}
}
}
-class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](
- coll1: TraversableLike[El1, Repr1],
- coll2: IterableLike[El2, Repr2],
- coll3: IterableLike[El3, Repr3]
-) extends ZippedTraversable3[El1, El2, El3] {
+final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (TraversableLike[El1, Repr1], IterableLike[El2, Repr2], IterableLike[El3, Repr3]))
+ extends AnyVal with ZippedTraversable3[El1, El2, El3] {
+
def map[B, To](f: (El1, El2, El3) => B)(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
+ val b = cbf(colls._1.repr)
+ val elems2 = colls._2.iterator
+ val elems3 = colls._3.iterator
- for (el1 <- coll1) {
+ for (el1 <- colls._1) {
if (elems2.hasNext && elems3.hasNext)
b += f(el1, elems2.next, elems3.next)
else
@@ -44,11 +42,11 @@ class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](
}
def flatMap[B, To](f: (El1, El2, El3) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
+ val b = cbf(colls._1.repr)
+ val elems2 = colls._2.iterator
+ val elems3 = colls._3.iterator
- for (el1 <- coll1) {
+ for (el1 <- colls._1) {
if (elems2.hasNext && elems3.hasNext)
b ++= f(el1, elems2.next, elems3.next)
else
@@ -61,14 +59,14 @@ class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](
implicit cbf1: CBF[Repr1, El1, To1],
cbf2: CBF[Repr2, El2, To2],
cbf3: CBF[Repr3, El3, To3]): (To1, To2, To3) = {
- val b1 = cbf1(coll1.repr)
- val b2 = cbf2(coll2.repr)
- val b3 = cbf3(coll3.repr)
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
+ val b1 = cbf1(colls._1.repr)
+ val b2 = cbf2(colls._2.repr)
+ val b3 = cbf3(colls._3.repr)
+ val elems2 = colls._2.iterator
+ val elems3 = colls._3.iterator
def result = (b1.result, b2.result, b3.result)
- for (el1 <- coll1) {
+ for (el1 <- colls._1) {
if (elems2.hasNext && elems3.hasNext) {
val el2 = elems2.next
val el3 = elems3.next
@@ -86,10 +84,10 @@ class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](
}
def exists(f: (El1, El2, El3) => Boolean): Boolean = {
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
+ val elems2 = colls._2.iterator
+ val elems3 = colls._3.iterator
- for (el1 <- coll1) {
+ for (el1 <- colls._1) {
if (elems2.hasNext && elems3.hasNext) {
if (f(el1, elems2.next, elems3.next))
return true
@@ -103,10 +101,10 @@ class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](
!exists((x, y, z) => !f(x, y, z))
def foreach[U](f: (El1, El2, El3) => U): Unit = {
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
+ val elems2 = colls._2.iterator
+ val elems3 = colls._3.iterator
- for (el1 <- coll1) {
+ for (el1 <- colls._1) {
if (elems2.hasNext && elems3.hasNext)
f(el1, elems2.next, elems3.next)
else
@@ -116,12 +114,12 @@ class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](
}
object Tuple3Zipped {
- class Ops[T1, T2, T3](x: (T1, T2, T3)) {
+ final class Ops[T1, T2, T3](val x: (T1, T2, T3)) extends AnyVal {
def invert[El1, CC1[X] <: TraversableOnce[X], El2, CC2[X] <: TraversableOnce[X], El3, CC3[X] <: TraversableOnce[X], That]
(implicit w1: T1 <:< CC1[El1],
w2: T2 <:< CC2[El2],
w3: T3 <:< CC3[El3],
- bf: collection.generic.CanBuildFrom[CC1[_], (El1, El2, El3), That]
+ bf: scala.collection.generic.CanBuildFrom[CC1[_], (El1, El2, El3), That]
): That = {
val buf = bf(x._1)
val it1 = x._1.toIterator
@@ -129,14 +127,14 @@ object Tuple3Zipped {
val it3 = x._3.toIterator
while (it1.hasNext && it2.hasNext && it3.hasNext)
buf += ((it1.next, it2.next, it3.next))
-
+
buf.result
}
-
+
def zipped[El1, Repr1, El2, Repr2, El3, Repr3]
(implicit w1: T1 => TraversableLike[El1, Repr1],
w2: T2 => IterableLike[El2, Repr2],
w3: T3 => IterableLike[El3, Repr3]
- ): Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3] = new Tuple3Zipped(x._1, x._2, x._3)
+ ): Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3] = new Tuple3Zipped((x._1, x._2, x._3))
}
}
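Tuple3Zipped gets the same treatment: one tuple-valued field so the class can extend AnyVal, plus fully qualified scala.collection references. A short sketch of the three-way view and of Ops.invert, which turns a triple of collections into a collection of triples (values made up for illustration, and assuming the Predef conversion to Tuple3Zipped.Ops is in scope):

    object Zipped3Demo extends App {
      val ids    = List(1, 2, 3)
      val names  = List("a", "b", "c")
      val scores = List(0.5, 1.5, 2.5)

      val rows = (ids, names, scores).zipped.map((i, n, s) => i + ":" + n + ":" + s)
      println(rows)       // List(1:a:0.5, 2:b:1.5, 3:c:2.5)

      val triples = (ids, names, scores).invert
      println(triples)    // List((1,a,0.5), (2,b,1.5), (3,c,2.5))
    }
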
diff --git a/src/library/scala/runtime/WorksheetSupport.scala b/src/library/scala/runtime/WorksheetSupport.scala
index 6f2a4d382d..a003bba034 100644
--- a/src/library/scala/runtime/WorksheetSupport.scala
+++ b/src/library/scala/runtime/WorksheetSupport.scala
@@ -40,9 +40,9 @@ object WorksheetSupport {
write((currentOffset+" ").getBytes)
}
out.write(c)
- col =
+ col =
if (c == '\n') -1
- else if (c == '\t') (col / tabInc) * tabInc + tabInc
+ else if (c == '\t') (col / tabInc) * tabInc + tabInc
else col + 1
if (col >= width) writeOne('\n')
}
@@ -86,7 +86,7 @@ object WorksheetSupport {
def $stop() = throw new StopException
- def $show(x: Any): String = stringOf(x, scala.Int.MaxValue)
+ def $show(x: Any): String = stringOf(x)
}
class StopException extends Exception
diff --git a/src/library/scala/specialized.scala b/src/library/scala/specialized.scala
index 761c7cb25e..d349b7e0c2 100644
--- a/src/library/scala/specialized.scala
+++ b/src/library/scala/specialized.scala
@@ -24,9 +24,9 @@ import Specializable._
*
* @since 2.8
*/
-// class tspecialized[T](group: Group[T]) extends annotation.StaticAnnotation {
+// class tspecialized[T](group: Group[T]) extends scala.annotation.StaticAnnotation {
-class specialized(group: SpecializedGroup) extends annotation.StaticAnnotation {
+class specialized(group: SpecializedGroup) extends scala.annotation.StaticAnnotation {
def this(types: Specializable*) = this(new Group(types.toList))
def this() = this(Primitives)
}
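The commented-out tspecialized sketch and the live constructors above show the two ways to drive specialization: a whole SpecializedGroup, or an explicit list of Specializable types, with the Primitives group as the default. Typical call sites look like this (the classes are illustrative only):

    class Box[@specialized(Int, Double) T](val value: T) {
      def get: T = value        // the compiler emits Int- and Double-specialized variants
    }

    class Cell[@specialized T](val value: T)    // no argument: falls back to the Primitives group
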
diff --git a/src/library/scala/sys/BooleanProp.scala b/src/library/scala/sys/BooleanProp.scala
index 45fc6f5897..7213fdeb65 100644
--- a/src/library/scala/sys/BooleanProp.scala
+++ b/src/library/scala/sys/BooleanProp.scala
@@ -8,7 +8,7 @@
package scala.sys
-import language.implicitConversions
+import scala.language.implicitConversions
/** A few additional conveniences for Boolean properties.
*/
diff --git a/src/library/scala/sys/Prop.scala b/src/library/scala/sys/Prop.scala
index 687a32cf7d..123a729748 100644
--- a/src/library/scala/sys/Prop.scala
+++ b/src/library/scala/sys/Prop.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
/** A lightweight interface wrapping a property contained in some
* unspecified map. Generally it'll be the system properties but this
diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala
index d5777922b4..5777c255c3 100644
--- a/src/library/scala/sys/SystemProperties.scala
+++ b/src/library/scala/sys/SystemProperties.scala
@@ -11,7 +11,7 @@ package scala.sys
import scala.collection.{ mutable, Iterator }
import scala.collection.JavaConverters._
import java.security.AccessControlException
-import language.implicitConversions
+import scala.language.implicitConversions
/** A bidirectional map wrapping the java System properties.
diff --git a/src/library/scala/sys/package.scala b/src/library/scala/sys/package.scala
index 119ab59c22..445b30e480 100644
--- a/src/library/scala/sys/package.scala
+++ b/src/library/scala/sys/package.scala
@@ -9,7 +9,7 @@
package scala
import scala.collection.immutable
-import collection.JavaConverters._
+import scala.collection.JavaConverters._
/** The package object `scala.sys` contains methods for reading
* and altering core aspects of the virtual machine as well as the
@@ -85,4 +85,4 @@ package object sys {
tarray take got
}
-}
\ No newline at end of file
+}
diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala
index 77e36f6196..94a2125393 100644
--- a/src/library/scala/sys/process/BasicIO.scala
+++ b/src/library/scala/sys/process/BasicIO.scala
@@ -45,7 +45,7 @@ object BasicIO {
val q = new LinkedBlockingQueue[Either[Int, T]]
def next(): Stream[T] = q.take match {
case Left(0) => Stream.empty
- case Left(code) => if (nonzeroException) sys.error("Nonzero exit code: " + code) else Stream.empty
+ case Left(code) => if (nonzeroException) scala.sys.error("Nonzero exit code: " + code) else Stream.empty
case Right(s) => Stream.cons(s, next)
}
new Streamed((s: T) => q put Right(s), code => q put Left(code), () => next())
diff --git a/src/library/scala/sys/process/Process.scala b/src/library/scala/sys/process/Process.scala
index d56c6f2c9d..4950758a1a 100644
--- a/src/library/scala/sys/process/Process.scala
+++ b/src/library/scala/sys/process/Process.scala
@@ -11,7 +11,7 @@ package process
import processInternal._
import ProcessBuilder._
-import language.implicitConversions
+import scala.language.implicitConversions
/** Represents a process that is running or has finished running.
* It may be a compound process with several underlying native processes (such as `a #&& b`).
diff --git a/src/library/scala/sys/process/ProcessBuilderImpl.scala b/src/library/scala/sys/process/ProcessBuilderImpl.scala
index 58f06e1039..2c83a59e4f 100644
--- a/src/library/scala/sys/process/ProcessBuilderImpl.scala
+++ b/src/library/scala/sys/process/ProcessBuilderImpl.scala
@@ -128,7 +128,7 @@ private[process] trait ProcessBuilderImpl {
val code = this ! BasicIO(withIn, buffer, log)
if (code == 0) buffer.toString
- else sys.error("Nonzero exit value: " + code)
+ else scala.sys.error("Nonzero exit value: " + code)
}
private[this] def lines(
@@ -213,4 +213,4 @@ private[process] trait ProcessBuilderImpl {
) extends SequentialBuilder(first, second, "###") {
override def createProcess(io: ProcessIO) = new ProcessSequence(first, second, io)
}
-}
\ No newline at end of file
+}
diff --git a/src/library/scala/sys/process/ProcessImpl.scala b/src/library/scala/sys/process/ProcessImpl.scala
index b7549eeb06..cdf7d72caa 100644
--- a/src/library/scala/sys/process/ProcessImpl.scala
+++ b/src/library/scala/sys/process/ProcessImpl.scala
@@ -84,7 +84,7 @@ private[process] trait ProcessImpl {
private[process] abstract class CompoundProcess extends BasicProcess {
def destroy() = destroyer()
- def exitValue() = getExitValue() getOrElse sys.error("No exit code: process destroyed.")
+ def exitValue() = getExitValue() getOrElse scala.sys.error("No exit code: process destroyed.")
def start() = getExitValue
protected lazy val (getExitValue, destroyer) = {
diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala
index c1bf470831..7c73fd587c 100644
--- a/src/library/scala/sys/process/package.scala
+++ b/src/library/scala/sys/process/package.scala
@@ -205,7 +205,7 @@ package scala.sys {
package object process extends ProcessImplicits {
/** The arguments passed to `java` when creating this process */
def javaVmArguments: List[String] = {
- import collection.JavaConversions._
+ import scala.collection.JavaConversions._
java.lang.management.ManagementFactory.getRuntimeMXBean().getInputArguments().toList
}
diff --git a/src/library/scala/testing/Benchmark.scala b/src/library/scala/testing/Benchmark.scala
index 9acae34d4e..3794fb3f2b 100644
--- a/src/library/scala/testing/Benchmark.scala
+++ b/src/library/scala/testing/Benchmark.scala
@@ -8,7 +8,7 @@
package scala.testing
-import compat.Platform
+import scala.compat.Platform
/** `Benchmark` can be used to quickly turn an existing class into a
* benchmark. Here is a short example:
@@ -33,6 +33,7 @@ import compat.Platform
*
* @author Iulian Dragos, Burak Emir
*/
+@deprecated("This class will be removed.", "2.10.0")
trait Benchmark {
/** this method should be implemented by the concrete benchmark.
diff --git a/src/library/scala/testing/Show.scala b/src/library/scala/testing/Show.scala
index 5ab46b8985..da1868c7f6 100644
--- a/src/library/scala/testing/Show.scala
+++ b/src/library/scala/testing/Show.scala
@@ -25,6 +25,7 @@ package scala.testing
* where `&lt;result&gt;` is the result of evaluating the call.
*
*/
+@deprecated("This class will be removed.", "2.10.0")
trait Show {
/** An implicit definition that adds an apply method to Symbol which forwards to `test`.
diff --git a/src/library/scala/throws.scala b/src/library/scala/throws.scala
index 4621c789ab..0aa0d31c9f 100644
--- a/src/library/scala/throws.scala
+++ b/src/library/scala/throws.scala
@@ -23,4 +23,4 @@ package scala
* @version 1.0, 19/05/2006
* @since 2.1
*/
-class throws(clazz: Class[_]) extends annotation.StaticAnnotation
+class throws(clazz: Class[_]) extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/transient.scala b/src/library/scala/transient.scala
index 3dcff0664c..36dcb996cf 100644
--- a/src/library/scala/transient.scala
+++ b/src/library/scala/transient.scala
@@ -10,7 +10,7 @@
package scala
-import annotation.meta._
+import scala.annotation.meta._
@field
-class transient extends annotation.StaticAnnotation
+class transient extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/unchecked.scala b/src/library/scala/unchecked.scala
index 5b05792d97..281f2ef4d7 100644
--- a/src/library/scala/unchecked.scala
+++ b/src/library/scala/unchecked.scala
@@ -33,4 +33,4 @@ package scala
*
* @since 2.4
*/
-class unchecked extends annotation.Annotation {}
+class unchecked extends scala.annotation.Annotation {}
diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala
index dcfdc16d33..f0253eee07 100644
--- a/src/library/scala/util/Either.scala
+++ b/src/library/scala/util/Either.scala
@@ -10,7 +10,7 @@
package scala.util
-import language.implicitConversions
+import scala.language.implicitConversions
/** Represents a value of one of two possible types (a disjoint union.)
* Instances of Either are either an instance of [[scala.util.Left]] or [[scala.util.Right]].
diff --git a/src/library/scala/util/MurmurHash.scala b/src/library/scala/util/MurmurHash.scala
index 029fe095af..c087b0d8c8 100644
--- a/src/library/scala/util/MurmurHash.scala
+++ b/src/library/scala/util/MurmurHash.scala
@@ -178,7 +178,7 @@ object MurmurHash {
* where the order of appearance of elements does not matter.
* This is useful for hashing sets, for example.
*/
- def symmetricHash[T](xs: collection.TraversableOnce[T], seed: Int) = {
+ def symmetricHash[T](xs: scala.collection.TraversableOnce[T], seed: Int) = {
var a,b,n = 0
var c = 1
xs.seq.foreach(i => {
diff --git a/src/library/scala/util/Random.scala b/src/library/scala/util/Random.scala
index 65a1b8c685..85ac27e95c 100644
--- a/src/library/scala/util/Random.scala
+++ b/src/library/scala/util/Random.scala
@@ -8,10 +8,10 @@
package scala.util
-import collection.mutable.ArrayBuffer
-import collection.generic.CanBuildFrom
+import scala.collection.mutable.ArrayBuffer
+import scala.collection.generic.CanBuildFrom
import scala.collection.immutable.{ List, Stream }
-import language.{implicitConversions, higherKinds}
+import scala.language.{implicitConversions, higherKinds}
/**
* @author Stephane Micheloud
diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala
index 5f0edf964f..276e157f55 100644
--- a/src/library/scala/util/Sorting.scala
+++ b/src/library/scala/util/Sorting.scala
@@ -6,10 +6,11 @@
** |/ **
\* */
-package scala.util
+package scala
+package util
import scala.reflect.{ ClassTag, classTag }
-import scala.math.Ordering
+import scala.math.{ Ordering, max, min }
/** The Sorting object provides functions that can sort various kinds of
* objects. You can provide a comparison function, or you can request a sort
diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala
index f381a18b0c..fe409c2d7a 100644
--- a/src/library/scala/util/Try.scala
+++ b/src/library/scala/util/Try.scala
@@ -8,9 +8,9 @@
package scala.util
-import collection.Seq
+import scala.collection.Seq
import scala.util.control.NonFatal
-import language.implicitConversions
+import scala.language.implicitConversions
/**
* The `Try` type represents a computation that may either result in an exception, or return a
@@ -52,6 +52,8 @@ import language.implicitConversions
* ''Note'': only non-fatal exceptions are caught by the combinators on `Try` (see [[scala.util.control.NonFatal]]).
* Serious system errors, on the other hand, will be thrown.
*
+ * ''Note:'' all Try combinators will catch exceptions and return failure unless otherwise specified in the documentation.
+ *
* `Try` comes to the Scala standard library after years of use as an integral part of Twitter's stack.
*
* @author based on Twitter's original implementation in com.twitter.util.
@@ -68,12 +70,19 @@ sealed abstract class Try[+T] {
def isSuccess: Boolean
/** Returns the value from this `Success` or the given `default` argument if this is a `Failure`.
+ *
+ * ''Note:'' This will throw an exception if it is not a success and default throws an exception.
*/
- def getOrElse[U >: T](default: => U) = if (isSuccess) get else default
+ def getOrElse[U >: T](default: => U): U =
+ if (isSuccess) get else default
/** Returns this `Try` if it's a `Success` or the given `default` argument if this is a `Failure`.
*/
- def orElse[U >: T](default: => Try[U]) = if (isSuccess) this else default
+ def orElse[U >: T](default: => Try[U]): Try[U] =
+ try if (isSuccess) this else default
+ catch {
+ case NonFatal(e) => Failure(e)
+ }
/** Returns the value from this `Success` or throws the exception if this is a `Failure`.
*/
@@ -81,6 +90,8 @@ sealed abstract class Try[+T] {
/**
* Applies the given function `f` if this is a `Success`, otherwise returns `Unit` if this is a `Failure`.
+ *
+ * ''Note:'' If `f` throws, then this method may throw an exception.
*/
def foreach[U](f: T => U): Unit
@@ -114,7 +125,7 @@ sealed abstract class Try[+T] {
/**
* Returns `None` if this is a `Failure` or a `Some` containing the value if this is a `Success`.
*/
- def toOption = if (isSuccess) Some(get) else None
+ def toOption: Option[T] = if (isSuccess) Some(get) else None
/**
* Transforms a nested `Try`, ie, a `Try` of type `Try[Try[T]]`,
@@ -131,20 +142,25 @@ sealed abstract class Try[+T] {
/** Completes this `Try` by applying the function `f` to this if this is of type `Failure`, or conversely, by applying
* `s` if this is a `Success`.
*/
- def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] = this match {
- case Success(v) => s(v)
- case Failure(e) => f(e)
- }
+ def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] =
+ try this match {
+ case Success(v) => s(v)
+ case Failure(e) => f(e)
+ } catch {
+ case NonFatal(e) => Failure(e)
+ }
}
object Try {
-
- def apply[T](r: => T): Try[T] = {
- try { Success(r) } catch {
+ /** Constructs a `Try` using the by-name parameter. This
+ * method will ensure any non-fatal exception is caught and a
+ * `Failure` object is returned.
+ */
+ def apply[T](r: => T): Try[T] =
+ try Success(r) catch {
case NonFatal(e) => Failure(e)
}
- }
}
@@ -152,24 +168,25 @@ final case class Failure[+T](val exception: Throwable) extends Try[T] {
def isFailure: Boolean = true
def isSuccess: Boolean = false
def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] =
- if (f.isDefinedAt(exception)) f(exception) else this
+ try {
+ if (f isDefinedAt exception) f(exception) else this
+ } catch {
+ case NonFatal(e) => Failure(e)
+ }
def get: T = throw exception
- def flatMap[U](f: T => Try[U]): Try[U] = Failure[U](exception)
- def flatten[U](implicit ev: T <:< Try[U]): Try[U] = Failure[U](exception)
- def foreach[U](f: T => U): Unit = {}
- def map[U](f: T => U): Try[U] = Failure[U](exception)
+ def flatMap[U](f: T => Try[U]): Try[U] = this.asInstanceOf[Try[U]]
+ def flatten[U](implicit ev: T <:< Try[U]): Try[U] = this.asInstanceOf[Try[U]]
+ def foreach[U](f: T => U): Unit = ()
+ def map[U](f: T => U): Try[U] = this.asInstanceOf[Try[U]]
def filter(p: T => Boolean): Try[T] = this
- def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] = {
+ def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] =
try {
- if (rescueException.isDefinedAt(exception)) {
+ if (rescueException isDefinedAt exception) {
Try(rescueException(exception))
- } else {
- this
- }
+ } else this
} catch {
case NonFatal(e) => Failure(e)
}
- }
def failed: Try[Throwable] = Success(exception)
}
@@ -177,7 +194,7 @@ final case class Failure[+T](val exception: Throwable) extends Try[T] {
final case class Success[+T](value: T) extends Try[T] {
def isFailure: Boolean = false
def isSuccess: Boolean = true
- def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] = Success(value)
+ def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] = this
def get = value
def flatMap[U](f: T => Try[U]): Try[U] =
try f(value)
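The Try hunks above tighten the contract spelled out in the new scaladoc note: apply, orElse, transform, recover and recoverWith now wrap any non-fatal exception in a Failure instead of letting it escape, and Failure re-types itself via asInstanceOf rather than allocating a fresh instance. A small behavior sketch under that contract:

    import scala.util.Try

    object TryDemo extends App {
      val failed: Try[Int] = Try("nope".toInt)      // Failure(java.lang.NumberFormatException: ...)

      // A non-fatal exception thrown while evaluating the fallback is now caught by orElse:
      val fallback: Try[Int] = failed orElse (throw new IllegalStateException("also broken"))
      println(fallback)                             // Failure(java.lang.IllegalStateException: also broken)

      // Failure.map just re-types the failure without running the function:
      println(failed.map(_ + 1).isFailure)          // true
    }
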
diff --git a/src/library/scala/util/automata/SubsetConstruction.scala b/src/library/scala/util/automata/SubsetConstruction.scala
index 1cdcd734cd..25ac86183c 100644
--- a/src/library/scala/util/automata/SubsetConstruction.scala
+++ b/src/library/scala/util/automata/SubsetConstruction.scala
@@ -19,8 +19,8 @@ class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) {
def determinize: DetWordAutom[T] = {
// for assigning numbers to bitsets
- var indexMap = collection.Map[immutable.BitSet, Int]()
- var invIndexMap = collection.Map[Int, immutable.BitSet]()
+ var indexMap = scala.collection.Map[immutable.BitSet, Int]()
+ var invIndexMap = scala.collection.Map[Int, immutable.BitSet]()
var ix = 0
// we compute the dfa with states = bitsets
diff --git a/src/library/scala/util/control/Exception.scala b/src/library/scala/util/control/Exception.scala
index 1567e06c22..28e4db2038 100644
--- a/src/library/scala/util/control/Exception.scala
+++ b/src/library/scala/util/control/Exception.scala
@@ -9,10 +9,10 @@
package scala.util
package control
-import collection.immutable.List
-import reflect.{ ClassTag, classTag }
+import scala.collection.immutable.List
+import scala.reflect.{ ClassTag, classTag }
import java.lang.reflect.InvocationTargetException
-import language.implicitConversions
+import scala.language.implicitConversions
/** Classes representing the components of exception handling.
@@ -27,7 +27,7 @@ import language.implicitConversions
* }}}
*
* This class differs from `scala.util.Try` in that it focuses on composing exception handlers rather than
- * composing behavior. All behavior should be composed first and fed to a `Catch` object using one of the
+ * composing behavior. All behavior should be composed first and fed to a `Catch` object using one of the
* `opt` or `either` methods.
*
* @author Paul Phillips
@@ -218,7 +218,7 @@ object Exception {
}
/** Private **/
- private def wouldMatch(x: Throwable, classes: collection.Seq[Class[_]]): Boolean =
+ private def wouldMatch(x: Throwable, classes: scala.collection.Seq[Class[_]]): Boolean =
classes exists (_ isAssignableFrom x.getClass)
private def pfFromExceptions(exceptions: Class[_]*): PartialFunction[Throwable, Nothing] =
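The doc tweak above contrasts Catch composition with scala.util.Try and points at the opt and either methods; a brief sketch of the usage it refers to:

    import scala.util.control.Exception._

    object CatchDemo extends App {
      val nfe = catching(classOf[NumberFormatException])

      println(nfe opt "42x".toInt)      // None
      println(nfe either "42".toInt)    // Right(42)
    }
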
diff --git a/src/library/scala/util/control/NoStackTrace.scala b/src/library/scala/util/control/NoStackTrace.scala
index c2b5dbca22..4409358785 100644
--- a/src/library/scala/util/control/NoStackTrace.scala
+++ b/src/library/scala/util/control/NoStackTrace.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.control
+package scala
+package util.control
/** A trait for exceptions which, for efficiency reasons, do not
* fill in the stack trace. Stack trace suppression can be disabled
diff --git a/src/library/scala/util/hashing/Hashing.scala b/src/library/scala/util/hashing/Hashing.scala
index 84b549f35e..97d32af2b0 100644
--- a/src/library/scala/util/hashing/Hashing.scala
+++ b/src/library/scala/util/hashing/Hashing.scala
@@ -8,6 +8,8 @@
package scala.util.hashing
+import scala.annotation.implicitNotFound
+
/** `Hashing` is a trait whose instances each represent a strategy for hashing
* instances of a type.
*
@@ -16,27 +18,22 @@ package scala.util.hashing
*
* Note: when using a custom `Hashing`, make sure to use it with the `Equiv`
* such that if any two objects are equal, then their hash codes must be equal.
- *
+ *
* @since 2.10
*/
-@annotation.implicitNotFound(msg = "No implicit Hashing defined for ${T}.")
+@implicitNotFound(msg = "No implicit Hashing defined for ${T}.")
trait Hashing[T] extends Serializable {
-
def hash(x: T): Int
-
}
-
object Hashing {
-
final class Default[T] extends Hashing[T] {
def hash(x: T) = x.##
}
-
+
implicit def default[T] = new Default[T]
-
+
def fromFunction[T](f: T => Int) = new Hashing[T] {
def hash(x: T) = f(x)
}
-
}
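After the whitespace cleanup, Hashing still offers the implicit Default (which delegates to ##) and the fromFunction factory. A minimal sketch of plugging in a custom strategy; as the note above says, it should agree with a matching Equiv in real use:

    import scala.util.hashing.Hashing

    object HashingDemo extends App {
      val caseInsensitive: Hashing[String] =
        Hashing.fromFunction((s: String) => s.toLowerCase.hashCode)

      println(caseInsensitive.hash("Scala") == caseInsensitive.hash("sCALA"))   // true
      println(implicitly[Hashing[Int]].hash(42))    // Default delegates to ##, so this prints 42
    }
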
diff --git a/src/library/scala/util/hashing/MurmurHash3.scala b/src/library/scala/util/hashing/MurmurHash3.scala
index 1cfb8276fe..8174f09bb2 100644
--- a/src/library/scala/util/hashing/MurmurHash3.scala
+++ b/src/library/scala/util/hashing/MurmurHash3.scala
@@ -158,7 +158,7 @@ private[hashing] class MurmurHash3 {
finalizeHash(h, data.length)
}
- final def listHash(xs: collection.immutable.List[_], seed: Int): Int = {
+ final def listHash(xs: scala.collection.immutable.List[_], seed: Int): Int = {
var n = 0
var h = seed
var elems = xs
@@ -213,45 +213,45 @@ object MurmurHash3 extends MurmurHash3 {
/** To offer some potential for optimization.
*/
- def seqHash(xs: collection.Seq[_]): Int = xs match {
+ def seqHash(xs: scala.collection.Seq[_]): Int = xs match {
case xs: List[_] => listHash(xs, seqSeed)
case xs => orderedHash(xs, seqSeed)
}
- def mapHash(xs: collection.Map[_, _]): Int = unorderedHash(xs, mapSeed)
- def setHash(xs: collection.Set[_]): Int = unorderedHash(xs, setSeed)
+ def mapHash(xs: scala.collection.Map[_, _]): Int = unorderedHash(xs, mapSeed)
+ def setHash(xs: scala.collection.Set[_]): Int = unorderedHash(xs, setSeed)
class ArrayHashing[@specialized T] extends Hashing[Array[T]] {
def hash(a: Array[T]) = arrayHash(a)
}
-
+
def arrayHashing[@specialized T] = new ArrayHashing[T]
-
+
def bytesHashing = new Hashing[Array[Byte]] {
def hash(data: Array[Byte]) = bytesHash(data)
}
-
+
def orderedHashing = new Hashing[TraversableOnce[Any]] {
def hash(xs: TraversableOnce[Any]) = orderedHash(xs)
}
-
+
def productHashing = new Hashing[Product] {
def hash(x: Product) = productHash(x)
}
-
+
def stringHashing = new Hashing[String] {
def hash(x: String) = stringHash(x)
}
-
+
def unorderedHashing = new Hashing[TraversableOnce[Any]] {
def hash(xs: TraversableOnce[Any]) = unorderedHash(xs)
}
-
+
/** All this trouble and foreach still appears faster.
* Leaving in place in case someone would like to investigate further.
*/
/**
- def linearSeqHash(xs: collection.LinearSeq[_], seed: Int): Int = {
+ def linearSeqHash(xs: scala.collection.LinearSeq[_], seed: Int): Int = {
var n = 0
var h = seed
var elems = xs
@@ -263,7 +263,7 @@ object MurmurHash3 extends MurmurHash3 {
finalizeHash(h, n)
}
- def indexedSeqHash(xs: collection.IndexedSeq[_], seed: Int): Int = {
+ def indexedSeqHash(xs: scala.collection.IndexedSeq[_], seed: Int): Int = {
var n = 0
var h = seed
val len = xs.length
@@ -276,10 +276,10 @@ object MurmurHash3 extends MurmurHash3 {
*/
@deprecated("Use unorderedHash", "2.10.0")
- final def symmetricHash[T](xs: collection.GenTraversableOnce[T], seed: Int = symmetricSeed): Int =
+ final def symmetricHash[T](xs: scala.collection.GenTraversableOnce[T], seed: Int = symmetricSeed): Int =
unorderedHash(xs.seq, seed)
@deprecated("Use orderedHash", "2.10.0")
- final def traversableHash[T](xs: collection.GenTraversableOnce[T], seed: Int = traversableSeed): Int =
+ final def traversableHash[T](xs: scala.collection.GenTraversableOnce[T], seed: Int = traversableSeed): Int =
orderedHash(xs.seq, seed)
}
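Besides the fully qualified scala.collection references, the object above exposes Hashing wrappers (arrayHashing, bytesHashing, orderedHashing, ...) next to the existing seqHash/mapHash/setHash entry points. A quick usage sketch:

    import scala.util.hashing.MurmurHash3

    object MurmurDemo extends App {
      println(MurmurHash3.seqHash(List(1, 2, 3)))   // ordered hash, seeded with seqSeed
      println(MurmurHash3.setHash(Set("a", "b")))   // unordered, so element order is irrelevant

      val intArrayHashing = MurmurHash3.arrayHashing[Int]
      println(intArrayHashing.hash(Array(1, 2, 3)))
    }
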
diff --git a/src/library/scala/util/logging/ConsoleLogger.scala b/src/library/scala/util/logging/ConsoleLogger.scala
index 58284797b4..1d9a4deb62 100644
--- a/src/library/scala/util/logging/ConsoleLogger.scala
+++ b/src/library/scala/util/logging/ConsoleLogger.scala
@@ -17,6 +17,7 @@ package scala.util.logging
* @author Burak Emir
* @version 1.0
*/
+@deprecated("This class will be removed.", "2.10.0")
trait ConsoleLogger extends Logged {
/** logs argument to Console using [[scala.Console.println]]
diff --git a/src/library/scala/util/logging/Logged.scala b/src/library/scala/util/logging/Logged.scala
index d23b38c569..1476c8bf08 100644
--- a/src/library/scala/util/logging/Logged.scala
+++ b/src/library/scala/util/logging/Logged.scala
@@ -22,6 +22,7 @@ package scala.util.logging
* }}}
* and the logging is sent to the [[scala.util.logging.ConsoleLogger]] object.
*/
+@deprecated("This class will be removed.", "2.10.0")
trait Logged {
/** This method should log the message given as argument somewhere
* as a side-effect.
diff --git a/src/library/scala/util/parsing/ast/Binders.scala b/src/library/scala/util/parsing/ast/Binders.scala
index b93c24fde4..fc3b36a4e0 100644
--- a/src/library/scala/util/parsing/ast/Binders.scala
+++ b/src/library/scala/util/parsing/ast/Binders.scala
@@ -10,7 +10,7 @@ package scala.util.parsing.ast
import scala.collection.AbstractIterable
import scala.collection.mutable
-import language.implicitConversions
+import scala.language.implicitConversions
//DISCLAIMER: this code is highly experimental!
diff --git a/src/library/scala/util/parsing/combinator/ImplicitConversions.scala b/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
index 270ac680a9..5b616e9e13 100644
--- a/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
+++ b/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
@@ -9,7 +9,7 @@
package scala.util.parsing.combinator
-import language.implicitConversions
+import scala.language.implicitConversions
/** This object contains implicit conversions that come in handy when using the `^^` combinator.
*
diff --git a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala b/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
index 06567ea348..520ac8cc2c 100644
--- a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
@@ -9,7 +9,7 @@
package scala.util.parsing.combinator
-import annotation.migration
+import scala.annotation.migration
/** `JavaTokenParsers` differs from [[scala.util.parsing.combinator.RegexParsers]]
* by adding the following definitions:
diff --git a/src/library/scala/util/parsing/combinator/PackratParsers.scala b/src/library/scala/util/parsing/combinator/PackratParsers.scala
index 9516df0093..91642da229 100644
--- a/src/library/scala/util/parsing/combinator/PackratParsers.scala
+++ b/src/library/scala/util/parsing/combinator/PackratParsers.scala
@@ -11,7 +11,7 @@ package scala.util.parsing.combinator
import scala.util.parsing.combinator._
import scala.util.parsing.input.{ Reader, Position }
import scala.collection.mutable
-import language.implicitConversions
+import scala.language.implicitConversions
/**
* `PackratParsers` is a component that extends the parser combinators
diff --git a/src/library/scala/util/parsing/combinator/Parsers.scala b/src/library/scala/util/parsing/combinator/Parsers.scala
index eaaea583ce..5d990eee78 100644
--- a/src/library/scala/util/parsing/combinator/Parsers.scala
+++ b/src/library/scala/util/parsing/combinator/Parsers.scala
@@ -11,8 +11,8 @@ package scala.util.parsing.combinator
import scala.util.parsing.input._
import scala.collection.mutable.ListBuffer
import scala.annotation.tailrec
-import annotation.migration
-import language.implicitConversions
+import scala.annotation.migration
+import scala.language.implicitConversions
import scala.util.DynamicVariable
// TODO: better error handling (labelling like parsec's <?>)
@@ -178,7 +178,7 @@ trait Parsers {
def filterWithError(p: Nothing => Boolean, error: Nothing => String, position: Input): ParseResult[Nothing] = this
- def get: Nothing = sys.error("No result when parsing failed")
+ def get: Nothing = scala.sys.error("No result when parsing failed")
}
/** An extractor so `NoSuccess(msg, next)` can be used in matches. */
object NoSuccess {
diff --git a/src/library/scala/util/parsing/combinator/RegexParsers.scala b/src/library/scala/util/parsing/combinator/RegexParsers.scala
index d685329ef1..9a2c497eab 100644
--- a/src/library/scala/util/parsing/combinator/RegexParsers.scala
+++ b/src/library/scala/util/parsing/combinator/RegexParsers.scala
@@ -13,7 +13,7 @@ import java.util.regex.Pattern
import scala.util.matching.Regex
import scala.util.parsing.input._
import scala.collection.immutable.PagedSeq
-import language.implicitConversions
+import scala.language.implicitConversions
/** The ''most important'' differences between `RegexParsers` and
* [[scala.util.parsing.combinator.Parsers]] are:
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
index 215b8b792f..03979d43b7 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
@@ -13,7 +13,7 @@ package syntactical
import token._
import lexical.StdLexical
-import language.implicitConversions
+import scala.language.implicitConversions
/** This component provides primitive parsers for the standard tokens defined in `StdTokens`.
*
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
index 7aa6178df9..a3b94e2562 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
@@ -14,7 +14,7 @@ package syntactical
import token._
import scala.collection.mutable
-import language.implicitConversions
+import scala.language.implicitConversions
/** This component provides primitive parsers for the standard tokens defined in `StdTokens`.
*
diff --git a/src/library/scala/util/parsing/combinator/testing/RegexTest.scala b/src/library/scala/util/parsing/combinator/testing/RegexTest.scala
index ff3554a6af..80e9b0df39 100644
--- a/src/library/scala/util/parsing/combinator/testing/RegexTest.scala
+++ b/src/library/scala/util/parsing/combinator/testing/RegexTest.scala
@@ -3,7 +3,7 @@ package scala.util.parsing.combinator.testing
import scala.util.parsing.combinator._
import scala.util.parsing.input._
-import language.postfixOps
+import scala.language.postfixOps
@deprecated("This class will be removed", "2.10.0")
case class Ident(s: String)
diff --git a/src/library/scala/util/parsing/input/OffsetPosition.scala b/src/library/scala/util/parsing/input/OffsetPosition.scala
index 57a2c9c4c2..3366584ab2 100644
--- a/src/library/scala/util/parsing/input/OffsetPosition.scala
+++ b/src/library/scala/util/parsing/input/OffsetPosition.scala
@@ -8,7 +8,7 @@
package scala.util.parsing.input
-import collection.mutable.ArrayBuffer
+import scala.collection.mutable.ArrayBuffer
/** `OffsetPosition` is a standard class for positions
* represented as offsets into a source ``document''.
diff --git a/src/library/scala/volatile.scala b/src/library/scala/volatile.scala
index 88726d9336..1290e54f3a 100644
--- a/src/library/scala/volatile.scala
+++ b/src/library/scala/volatile.scala
@@ -10,7 +10,7 @@
package scala
-import annotation.meta._
+import scala.annotation.meta._
@field
-class volatile extends annotation.StaticAnnotation
+class volatile extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/xml/Elem.scala b/src/library/scala/xml/Elem.scala
index f140fd1e07..2ca1dbfcd0 100755
--- a/src/library/scala/xml/Elem.scala
+++ b/src/library/scala/xml/Elem.scala
@@ -73,7 +73,7 @@ extends Node with Serializable
throw new IllegalArgumentException("prefix of zero length, use null instead")
if (scope == null)
- throw new IllegalArgumentException("scope is null, use xml.TopScope for empty scope")
+ throw new IllegalArgumentException("scope is null, use scala.xml.TopScope for empty scope")
//@todo: copy the children,
// setting namespace scope if necessary
diff --git a/src/library/scala/xml/Equality.scala b/src/library/scala/xml/Equality.scala
index 07651adb90..0efbb4c511 100644
--- a/src/library/scala/xml/Equality.scala
+++ b/src/library/scala/xml/Equality.scala
@@ -13,7 +13,7 @@ package scala.xml
* all the `xml` classes go through the `xml.Equality trait`. There are two
* forms of `xml` comparison.
*
- * 1. `'''def''' strict_==(other: xml.Equality)`
+ * 1. `'''def''' strict_==(other: scala.xml.Equality)`
*
* This one tries to honor the little things like symmetry and hashCode
* contracts. The `equals` method routes all comparisons through this.
diff --git a/src/library/scala/xml/MetaData.scala b/src/library/scala/xml/MetaData.scala
index e98ec90aca..15b3cb6d4a 100644
--- a/src/library/scala/xml/MetaData.scala
+++ b/src/library/scala/xml/MetaData.scala
@@ -9,7 +9,7 @@
package scala.xml
import Utility.sbToString
-import annotation.tailrec
+import scala.annotation.tailrec
import scala.collection.{ AbstractIterable, Iterator }
/**
diff --git a/src/library/scala/xml/NodeSeq.scala b/src/library/scala/xml/NodeSeq.scala
index 40ddc7d85c..e50e68d4fd 100644
--- a/src/library/scala/xml/NodeSeq.scala
+++ b/src/library/scala/xml/NodeSeq.scala
@@ -8,10 +8,10 @@
package scala.xml
-import collection.{ mutable, immutable, generic, SeqLike, AbstractSeq }
+import scala.collection.{ mutable, immutable, generic, SeqLike, AbstractSeq }
import mutable.{ Builder, ListBuffer }
import generic.{ CanBuildFrom }
-import language.implicitConversions
+import scala.language.implicitConversions
/** This object ...
*
diff --git a/src/library/scala/xml/Utility.scala b/src/library/scala/xml/Utility.scala
index bae529c85c..50a284d7cd 100755
--- a/src/library/scala/xml/Utility.scala
+++ b/src/library/scala/xml/Utility.scala
@@ -10,7 +10,7 @@ package scala.xml
import scala.collection.mutable
import parsing.XhtmlEntities
-import language.implicitConversions
+import scala.language.implicitConversions
/**
* The `Utility` object provides utility functions for processing instances
diff --git a/src/library/scala/xml/dtd/ContentModel.scala b/src/library/scala/xml/dtd/ContentModel.scala
index a5d2a6bd7e..f98aff5709 100644
--- a/src/library/scala/xml/dtd/ContentModel.scala
+++ b/src/library/scala/xml/dtd/ContentModel.scala
@@ -11,9 +11,9 @@
package scala.xml
package dtd
-import util.regexp.WordExp
-import util.automata._
-import Utility.sbToString
+import scala.util.regexp.WordExp
+import scala.util.automata._
+import scala.xml.Utility.sbToString
import PartialFunction._
object ContentModel extends WordExp {
diff --git a/src/library/scala/xml/dtd/ContentModelParser.scala b/src/library/scala/xml/dtd/ContentModelParser.scala
index 2d87bc0764..5d183df04b 100644
--- a/src/library/scala/xml/dtd/ContentModelParser.scala
+++ b/src/library/scala/xml/dtd/ContentModelParser.scala
@@ -6,7 +6,6 @@
** |/ **
\* */
-
package scala.xml
package dtd
@@ -21,10 +20,10 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
def accept(tok: Int) = {
if (token != tok) {
if ((tok == STAR) && (token == END)) // common mistake
- sys.error("in DTDs, \n"+
+ scala.sys.error("in DTDs, \n"+
"mixed content models must be like (#PCDATA|Name|Name|...)*");
else
- sys.error("expected "+token2string(tok)+
+ scala.sys.error("expected "+token2string(tok)+
", got unexpected token:"+token2string(token));
}
nextToken
@@ -45,7 +44,7 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
case NAME => value match {
case "ANY" => ANY
case "EMPTY" => EMPTY
- case _ => sys.error("expected ANY, EMPTY or '(' instead of " + value );
+ case _ => scala.sys.error("expected ANY, EMPTY or '(' instead of " + value );
}
case LPAREN =>
@@ -65,12 +64,12 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
accept( STAR );
res
case _ =>
- sys.error("unexpected token:" + token2string(token) );
+ scala.sys.error("unexpected token:" + token2string(token) );
}
}
case _ =>
- sys.error("unexpected token:" + token2string(token) );
+ scala.sys.error("unexpected token:" + token2string(token) );
}
// sopt ::= S?
def sOpt() = if( token == S ) nextToken;
@@ -118,12 +117,12 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
def particle = token match {
case LPAREN => nextToken; sOpt; regexp;
case NAME => val a = Letter(ElemName(value)); nextToken; maybeSuffix(a)
- case _ => sys.error("expected '(' or Name, got:"+token2string(token));
+ case _ => scala.sys.error("expected '(' or Name, got:"+token2string(token));
}
// atom ::= name
def atom = token match {
case NAME => val a = Letter(ElemName(value)); nextToken; a
- case _ => sys.error("expected Name, got:"+token2string(token));
+ case _ => scala.sys.error("expected Name, got:"+token2string(token));
}
}
diff --git a/src/library/scala/xml/dtd/Scanner.scala b/src/library/scala/xml/dtd/Scanner.scala
index 82a8d1af2f..2e753a7590 100644
--- a/src/library/scala/xml/dtd/Scanner.scala
+++ b/src/library/scala/xml/dtd/Scanner.scala
@@ -44,7 +44,7 @@ class Scanner extends Tokens with parsing.TokenTests {
final def next() = if (it.hasNext) c = it.next else c = ENDCH
final def acc(d: Char) {
- if (c == d) next else sys.error("expected '"+d+"' found '"+c+"' !");
+ if (c == d) next else scala.sys.error("expected '"+d+"' found '"+c+"' !");
}
final def accS(ds: Seq[Char]) { ds foreach acc }
@@ -65,7 +65,7 @@ class Scanner extends Tokens with parsing.TokenTests {
case ENDCH => END
case _ =>
if (isNameStart(c)) name; // NAME
- else sys.error("unexpected character:" + c)
+ else scala.sys.error("unexpected character:" + c)
}
final def name = {
diff --git a/src/library/scala/xml/factory/NodeFactory.scala b/src/library/scala/xml/factory/NodeFactory.scala
index 61d4855b2e..c543b8751b 100644
--- a/src/library/scala/xml/factory/NodeFactory.scala
+++ b/src/library/scala/xml/factory/NodeFactory.scala
@@ -18,7 +18,7 @@ trait NodeFactory[A <: Node] {
val ignoreProcInstr = false
/* default behaviour is to use hash-consing */
- val cache = new collection.mutable.HashMap[Int, List[A]]
+ val cache = new scala.collection.mutable.HashMap[Int, List[A]]
protected def create(pre: String, name: String, attrs: MetaData, scope: NamespaceBinding, children:Seq[Node]): A
diff --git a/src/library/scala/xml/include/sax/XIncluder.scala b/src/library/scala/xml/include/sax/XIncluder.scala
index f4d69ffe44..2af66f4f16 100644
--- a/src/library/scala/xml/include/sax/XIncluder.scala
+++ b/src/library/scala/xml/include/sax/XIncluder.scala
@@ -62,7 +62,7 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit
val value = atts.getValue(i);
// @todo Need to use character references if the encoding
// can't support the character
- out.write(xml.Utility.escape(value))
+ out.write(scala.xml.Utility.escape(value))
out.write("'");
i += 1
}
diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala
index af9b5f47cf..d4dc6da14d 100755
--- a/src/library/scala/xml/parsing/MarkupParser.scala
+++ b/src/library/scala/xml/parsing/MarkupParser.scala
@@ -56,7 +56,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
// See ticket #3720 for motivations.
private class WithLookAhead(underlying: Source) extends Source {
- private val queue = collection.mutable.Queue[Char]()
+ private val queue = scala.collection.mutable.Queue[Char]()
def lookahead(): BufferedIterator[Char] = {
val iter = queue.iterator ++ new Iterator[Char] {
def hasNext = underlying.hasNext
@@ -897,7 +897,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
new PublicID(pubID, sysID)
} else {
reportSyntaxError("PUBLIC or SYSTEM expected");
- sys.error("died parsing notationdecl")
+ scala.sys.error("died parsing notationdecl")
}
xSpaceOpt
xToken('>')
diff --git a/src/library/scala/xml/parsing/MarkupParserCommon.scala b/src/library/scala/xml/parsing/MarkupParserCommon.scala
index 096f8a8f38..219c3d6679 100644
--- a/src/library/scala/xml/parsing/MarkupParserCommon.scala
+++ b/src/library/scala/xml/parsing/MarkupParserCommon.scala
@@ -21,7 +21,7 @@ import Utility.SU
* All members should be accessed through those.
*/
private[scala] trait MarkupParserCommon extends TokenTests {
- protected def unreachable = sys.error("Cannot be reached.")
+ protected def unreachable = scala.sys.error("Cannot be reached.")
// type HandleType // MarkupHandler, SymbolicXMLBuilder
type InputType // Source, CharArrayReader
@@ -82,7 +82,7 @@ private[scala] trait MarkupParserCommon extends TokenTests {
case `end` => return buf.toString
case ch => buf append ch
}
- sys.error("Expected '%s'".format(end))
+ scala.sys.error("Expected '%s'".format(end))
}
/** [42] '<' xmlEndTag ::= '<' '/' Name S? '>'
diff --git a/src/library/scala/xml/pull/XMLEventReader.scala b/src/library/scala/xml/pull/XMLEventReader.scala
index c764d042c8..07fab27957 100755
--- a/src/library/scala/xml/pull/XMLEventReader.scala
+++ b/src/library/scala/xml/pull/XMLEventReader.scala
@@ -24,7 +24,7 @@ import scala.xml.parsing.{ ExternalSources, MarkupHandler, MarkupParser }
* @author Paul Phillips
*/
class XMLEventReader(src: Source)
-extends collection.AbstractIterator[XMLEvent]
+extends scala.collection.AbstractIterator[XMLEvent]
with ProducerConsumerIterator[XMLEvent] {
// We implement a pull parser as an iterator, but since we may be operating on
diff --git a/src/partest/scala/tools/partest/PartestDefaults.scala b/src/partest/scala/tools/partest/PartestDefaults.scala
index 73a7b92778..b27ce6ff75 100644
--- a/src/partest/scala/tools/partest/PartestDefaults.scala
+++ b/src/partest/scala/tools/partest/PartestDefaults.scala
@@ -2,7 +2,7 @@ package scala.tools
package partest
import nsc.io.{ File, Path, Directory }
-import util.{ PathResolver }
+import scala.tools.util.PathResolver
import nsc.Properties.{ propOrElse, propOrNone, propOrEmpty }
import java.lang.Runtime.getRuntime
diff --git a/src/partest/scala/tools/partest/TestUtil.scala b/src/partest/scala/tools/partest/TestUtil.scala
index b86a8e2c7f..9bfd444180 100644
--- a/src/partest/scala/tools/partest/TestUtil.scala
+++ b/src/partest/scala/tools/partest/TestUtil.scala
@@ -1,5 +1,7 @@
package scala.tools.partest
+import scala.reflect.{ classTag, ClassTag }
+
trait TestUtil {
/** Given function and block of code, evaluates code block,
* calls function with nanoseconds elapsed, and returns block result.
@@ -29,8 +31,16 @@ trait TestUtil {
assert(mult <= acceptableMultiple, "Performance difference too great: multiple = " + mult)
}
+
+ def intercept[T <: Exception : ClassTag](code: => Unit): Unit =
+ try {
+ code
+ assert(false, "did not throw " + classTag[T])
+ } catch {
+ case ex: Exception if classTag[T].runtimeClass isInstance ex =>
+ }
}
object TestUtil extends TestUtil {
-}
\ No newline at end of file
+}
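The new intercept helper asserts that a block throws the expected exception type, using the ClassTag import added at the top of the file. A hypothetical test mixing it in (the object and the failing expressions are made up):

    import scala.tools.partest.TestUtil

    object Test extends App with TestUtil {
      intercept[IllegalArgumentException] {
        require(1 > 2, "impossible")      // require throws IllegalArgumentException on failure
      }
      intercept[NumberFormatException] {
        "not a number".toInt
      }
    }
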
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
index 064b82da85..512c718040 100644
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ b/src/partest/scala/tools/partest/nest/FileManager.scala
@@ -13,7 +13,7 @@ import java.io.{File, FilenameFilter, IOException, StringWriter,
FileReader, PrintWriter, FileWriter}
import java.net.URI
import scala.tools.nsc.io.{ Path, Directory, File => SFile }
-import sys.process._
+import scala.sys.process._
import scala.collection.mutable
trait FileUtil {
diff --git a/src/partest/scala/tools/partest/nest/RunnerManager.scala b/src/partest/scala/tools/partest/nest/RunnerManager.scala
index 20d61d0831..4961424e1b 100644
--- a/src/partest/scala/tools/partest/nest/RunnerManager.scala
+++ b/src/partest/scala/tools/partest/nest/RunnerManager.scala
@@ -217,6 +217,7 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP
"-Dpartest.output="+outDir.getAbsolutePath,
"-Dpartest.lib="+LATEST_LIB,
"-Dpartest.reflect="+LATEST_REFLECT,
+ "-Dpartest.comp="+LATEST_COMP,
"-Dpartest.cwd="+outDir.getParent,
"-Dpartest.test-path="+testFullPath,
"-Dpartest.testname="+fileBase,
@@ -312,8 +313,8 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP
val testFiles = dir.listFiles.toList filter isJavaOrScala
def isInGroup(f: File, num: Int) = SFile(f).stripExtension endsWith ("_" + num)
- val groups = (0 to 9).toList map (num => testFiles filter (f => isInGroup(f, num)))
- val noGroupSuffix = testFiles filterNot (groups.flatten contains)
+ val groups = (0 to 9).toList map (num => (testFiles filter (f => isInGroup(f, num))).sorted)
+ val noGroupSuffix = (testFiles filterNot (groups.flatten contains)).sorted
noGroupSuffix :: groups filterNot (_.isEmpty)
}
diff --git a/src/partest/scala/tools/partest/nest/SBTRunner.scala b/src/partest/scala/tools/partest/nest/SBTRunner.scala
index 266153d9d3..206ee19c76 100644
--- a/src/partest/scala/tools/partest/nest/SBTRunner.scala
+++ b/src/partest/scala/tools/partest/nest/SBTRunner.scala
@@ -4,7 +4,7 @@ package nest
import java.io.File
import scala.tools.nsc.io.{ Directory }
import scala.util.Properties.setProp
-import collection.JavaConverters._
+import scala.collection.JavaConverters._
object SBTRunner extends DirectRunner {
@@ -34,7 +34,7 @@ object SBTRunner extends DirectRunner {
scalacOptions: Seq[String] = Seq(),
justFailedTests: Boolean = false)
- def mainReflect(args: Array[String]): java.util.Map[String, TestState] = {
+ def mainReflect(args: Array[String]): java.util.Map[String, String] = {
setProp("partest.debug", "true")
val Argument = new scala.util.matching.Regex("-(.*)")
@@ -73,9 +73,13 @@ object SBTRunner extends DirectRunner {
(for {
(testType, files) <- runs
(path, result) <- reflectiveRunTestsForFiles(files,testType).asScala
- } yield (path, result)).seq.asJava
+ } yield (path, fixResult(result))).seq.asJava
+ }
+ def fixResult(result: TestState): String = result match {
+ case TestState.Ok => "OK"
+ case TestState.Fail => "FAIL"
+ case TestState.Timeout => "TIMEOUT"
}
-
def main(args: Array[String]): Unit = {
val failures = (
for ((path, result) <- mainReflect(args).asScala ; if result != TestState.Ok) yield
diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala
index 08934ef143..ebd3e46b7c 100644
--- a/src/partest/scala/tools/partest/package.scala
+++ b/src/partest/scala/tools/partest/package.scala
@@ -6,7 +6,7 @@ package scala.tools
import java.io.{ FileNotFoundException, File => JFile }
import nsc.io.{ Path, Directory, File => SFile }
-import util.{ PathResolver }
+import scala.tools.util.PathResolver
import nsc.Properties.{ propOrElse, propOrNone, propOrEmpty }
import scala.sys.process.javaVmArguments
import java.util.concurrent.Callable
@@ -30,8 +30,8 @@ package object partest {
implicit private[partest] def temporaryPath2File(x: Path): JFile = x.jfile
implicit private[partest] def temporaryFile2Path(x: JFile): Path = Path(x)
- implicit lazy val postfixOps = language.postfixOps
- implicit lazy val implicitConversions = language.implicitConversions
+ implicit lazy val postfixOps = scala.language.postfixOps
+ implicit lazy val implicitConversions = scala.language.implicitConversions
def timed[T](body: => T): (T, Long) = {
val t1 = System.currentTimeMillis
@@ -62,7 +62,7 @@ package object partest {
)
def allPropertiesString = {
- import collection.JavaConversions._
+ import scala.collection.JavaConversions._
System.getProperties.toList.sorted map { case (k, v) => "%s -> %s\n".format(k, v) } mkString ""
}
@@ -73,4 +73,31 @@ package object partest {
def isPartestDebug: Boolean =
propOrEmpty("partest.debug") == "true"
+
+
+ import scala.language.experimental.macros
+
+ /**
+ * `trace("".isEmpty)` will return `true` and as a side effect print the following to standard out.
+ * {{{
+ * trace> "".isEmpty
+ * res: Boolean = true
+ *
+ * }}}
+ *
+ * An alternative to [[scala.tools.partest.ReplTest]] that avoids the inconvenience of embedding
+ * test code in a string.
+ */
+ def trace[A](a: A) = macro traceImpl[A]
+
+ import scala.reflect.macros.Context
+ def traceImpl[A: c.WeakTypeTag](c: Context)(a: c.Expr[A]): c.Expr[A] = {
+ import c.universe._
+ val exprCode = c.literal(show(a.tree))
+ val exprType = c.literal(show(a.actualType))
+ reify {
+ println(s"trace> ${exprCode.splice}\nres: ${exprType.splice} = ${a.splice}\n")
+ a.splice
+ }
+ }
}
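The trace macro added above splices both a printed rendering of the argument's tree and its runtime value, so a test can log an expression without embedding it in a REPL transcript string. A rough usage sketch; the printed tree form is approximate, since show renders the typechecked tree:

    import scala.tools.partest._

    object Test extends App {
      val n = trace(List(1, 2, 3).sum)
      // prints something along the lines of:
      //   trace> immutable.this.List.apply[Int](1, 2, 3).sum[Int](math.this.Numeric.IntIsIntegral)
      //   res: Int = 6
      assert(n == 6)
    }
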
diff --git a/src/reflect/scala/reflect/api/AnnotationInfos.scala b/src/reflect/scala/reflect/api/AnnotationInfos.scala
deleted file mode 100644
index d9f35024d9..0000000000
--- a/src/reflect/scala/reflect/api/AnnotationInfos.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-package scala.reflect
-package api
-
-trait AnnotationInfos extends base.AnnotationInfos { self: Universe =>
-
- override type AnnotationInfo >: Null <: AnyRef with AnnotationInfoApi
- trait AnnotationInfoApi {
- def atp: Type
- def args: List[Tree]
- def assocs: List[(Name, ClassfileAnnotArg)]
- }
-
- override type LiteralAnnotArg >: Null <: ClassfileAnnotArg with LiteralAnnotArgApi
- trait LiteralAnnotArgApi {
- def const: Constant
- }
-
- override type ArrayAnnotArg >: Null <: ClassfileAnnotArg with ArrayAnnotArgApi
- trait ArrayAnnotArgApi {
- def args: Array[ClassfileAnnotArg]
- }
-
- override type NestedAnnotArg >: Null <: ClassfileAnnotArg with NestedAnnotArgApi
- trait NestedAnnotArgApi {
- def annInfo: AnnotationInfo
- }
-}
\ No newline at end of file
diff --git a/src/reflect/scala/reflect/api/Annotations.scala b/src/reflect/scala/reflect/api/Annotations.scala
new file mode 100644
index 0000000000..43e95f9902
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Annotations.scala
@@ -0,0 +1,29 @@
+package scala.reflect
+package api
+
+import scala.collection.immutable.ListMap
+
+trait Annotations extends base.Annotations { self: Universe =>
+
+ override type Annotation >: Null <: AnyRef with AnnotationApi
+ trait AnnotationApi {
+ def tpe: Type
+ def scalaArgs: List[Tree]
+ def javaArgs: ListMap[Name, JavaArgument]
+ }
+
+ override type LiteralArgument >: Null <: JavaArgument with LiteralArgumentApi
+ trait LiteralArgumentApi {
+ def value: Constant
+ }
+
+ override type ArrayArgument >: Null <: JavaArgument with ArrayArgumentApi
+ trait ArrayArgumentApi {
+ def args: Array[JavaArgument]
+ }
+
+ override type NestedArgument >: Null <: JavaArgument with NestedArgumentApi
+ trait NestedArgumentApi {
+ def annotation: Annotation
+ }
+}
\ No newline at end of file
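The new Annotations API replaces AnnotationInfo's atp/args/assocs with tpe, scalaArgs and javaArgs (a ListMap of Java annotation arguments). A hedged runtime-reflection sketch of reading them; the annotated class is made up, and the accessor used is the getAnnotations declared in the Symbols change further down in this patch:

    import scala.reflect.runtime.universe._

    @deprecated("use NewThing instead", "1.0")
    class OldThing

    object AnnotationsDemo extends App {
      val anns = typeOf[OldThing].typeSymbol.getAnnotations
      anns foreach { ann =>
        println(ann.tpe)         // the annotation's type, e.g. deprecated
        println(ann.scalaArgs)   // the argument trees: "use NewThing instead", "1.0"
        println(ann.javaArgs)    // empty here; populated only for Java annotations
      }
    }
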
diff --git a/src/reflect/scala/reflect/api/FlagSets.scala b/src/reflect/scala/reflect/api/FlagSets.scala
index 36836e84a9..fdd43f1883 100644
--- a/src/reflect/scala/reflect/api/FlagSets.scala
+++ b/src/reflect/scala/reflect/api/FlagSets.scala
@@ -3,7 +3,7 @@ package api
import scala.language.implicitConversions
-trait FlagSets { self: Universe =>
+trait FlagSets extends base.FlagSets { self: Universe =>
type FlagSet
diff --git a/src/reflect/scala/reflect/api/FrontEnds.scala b/src/reflect/scala/reflect/api/FrontEnds.scala
index a27450d49d..61ea227c47 100644
--- a/src/reflect/scala/reflect/api/FrontEnds.scala
+++ b/src/reflect/scala/reflect/api/FrontEnds.scala
@@ -24,7 +24,7 @@ trait FrontEnds {
def hasWarnings = WARNING.count > 0
case class Info(val pos: Position, val msg: String, val severity: Severity)
- val infos = new collection.mutable.LinkedHashSet[Info]
+ val infos = new scala.collection.mutable.LinkedHashSet[Info]
/** Handles incoming info */
def log(pos: Position, msg: String, severity: Severity) {
@@ -67,4 +67,4 @@ trait FrontEnds {
*/
// todo. untangle warningsAsErrors from Reporters. I don't feel like moving this flag here!
def mkConsoleFrontEnd(minSeverity: Int = 1): FrontEnd
-}
\ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/api/Mirrors.scala b/src/reflect/scala/reflect/api/Mirrors.scala
index 7d185d9879..8c4c423221 100644
--- a/src/reflect/scala/reflect/api/Mirrors.scala
+++ b/src/reflect/scala/reflect/api/Mirrors.scala
@@ -1,7 +1,7 @@
package scala.reflect
package api
-trait Mirrors { self: Universe =>
+trait Mirrors extends base.Mirrors { self: Universe =>
type RuntimeClass >: Null
diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala
index 27d3b8ba7d..65ff2ed9fa 100644
--- a/src/reflect/scala/reflect/api/Printers.scala
+++ b/src/reflect/scala/reflect/api/Printers.scala
@@ -23,7 +23,7 @@ trait Printers { self: Universe =>
case class BooleanFlag(val value: Option[Boolean])
object BooleanFlag {
- import language.implicitConversions
+ import scala.language.implicitConversions
implicit def booleanToBooleanFlag(value: Boolean): BooleanFlag = BooleanFlag(Some(value))
implicit def optionToBooleanFlag(value: Option[Boolean]): BooleanFlag = BooleanFlag(value)
}
diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala
index 0c4f143d57..0c4be4f7e1 100644
--- a/src/reflect/scala/reflect/api/Symbols.scala
+++ b/src/reflect/scala/reflect/api/Symbols.scala
@@ -15,9 +15,10 @@ trait Symbols extends base.Symbols { self: Universe =>
/** The API of symbols */
trait SymbolApi extends SymbolBase { this: Symbol =>
- /** The position of this symbol
+ /** Source file if this symbol is created during this compilation run,
+ * or a class file if this symbol is loaded from a *.class or *.jar.
*/
- def pos: Position
+ def associatedFile: scala.tools.nsc.io.AbstractFile
/** A list of annotations attached to this Symbol.
*/
@@ -35,13 +36,8 @@ trait Symbols extends base.Symbols { self: Universe =>
// at scala.reflect.internal.pickling.UnPickler$Scan.run(UnPickler.scala:88)
// at scala.reflect.internal.pickling.UnPickler.unpickle(UnPickler.scala:37)
// at scala.reflect.runtime.JavaMirrors$JavaMirror.unpickleClass(JavaMirrors.scala:253) // unpickle from within a reflexive mirror
- // def annotations: List[AnnotationInfo]
- def getAnnotations: List[AnnotationInfo]
-
- /** Whether this symbol carries an annotation for which the given
- * symbol is its typeSymbol.
- */
- def hasAnnotation(sym: Symbol): Boolean
+ // def annotations: List[Annotation]
+ def getAnnotations: List[Annotation]
/** For a class: the module or case class factory with the same name in the same package.
* For a module: the class with the same name in the same package.
@@ -139,18 +135,6 @@ trait Symbols extends base.Symbols { self: Universe =>
*/
def isErroneous : Boolean
- /** Can this symbol be loaded by a reflective mirror?
- *
- * Scalac relies on `ScalaSignature' annotation to retain symbols across compilation runs.
- * Such annotations (also called "pickles") are applied on top-level classes and include information
- * about all symbols reachable from the annotee. However, local symbols (e.g. classes or definitions local to a block)
- * are typically unreachable and information about them gets lost.
- *
- * This method is useful for macro writers who wish to save certain ASTs to be used at runtime.
- * With `isLocatable' it's possible to check whether a tree can be retained as is, or it needs special treatment.
- */
- def isLocatable: Boolean
-
/** Is this symbol static (i.e. with no outer instance)?
* Q: When exactly is a sym marked as STATIC?
* A: If it's a member of a toplevel object, or of an object contained in a toplevel object, or any number of levels deep.
@@ -182,6 +166,14 @@ trait Symbols extends base.Symbols { self: Universe =>
*/
def isSpecialized: Boolean
+ /** Is this symbol defined by Java?
+ */
+ def isJava: Boolean
+
+ /** Does this symbol represent an implicit value, definition, class or parameter?
+ */
+ def isImplicit: Boolean
+
/******************* helpers *******************/
/** ...
@@ -200,23 +192,20 @@ trait Symbols extends base.Symbols { self: Universe =>
/** ...
*/
def suchThat(cond: Symbol => Boolean): Symbol
-
- /** The string discriminator of this symbol; useful for debugging */
- def kind: String
}
/** The API of term symbols */
trait TermSymbolApi extends SymbolApi with TermSymbolBase { this: TermSymbol =>
- /** Does this symbol represent a value, i.e. not a module and not a method?
+ /** Is this symbol introduced as `val`?
*/
- def isValue: Boolean
+ def isVal: Boolean
/** Does this symbol denote a stable value? */
def isStable: Boolean
- /** Does this symbol represent a mutable value?
+ /** Is this symbol introduced as `var`?
*/
- def isVariable: Boolean
+ def isVar: Boolean
/** Does this symbol represent a getter or a setter?
*/
@@ -237,10 +226,6 @@ trait Symbols extends base.Symbols { self: Universe =>
*/
def isOverloaded : Boolean
- /** Does this symbol represent an implicit value, definition or parameter?
- */
- def isImplicit: Boolean
-
/** Does this symbol represent a lazy value?
*/
def isLazy: Boolean
@@ -323,6 +308,9 @@ trait Symbols extends base.Symbols { self: Universe =>
*/
def isConstructor: Boolean
+ /** Does this symbol denote the primary constructor of its enclosing class? */
+ def isPrimaryConstructor: Boolean
+
/** For a polymorphic method, its type parameters, the empty list for all other methods */
def typeParams: List[Symbol]
@@ -381,6 +369,22 @@ trait Symbols extends base.Symbols { self: Universe =>
*/
def isSealed: Boolean
+ /** If this is a sealed class, its known direct subclasses.
+ * Otherwise, the empty set.
+ */
+ def knownDirectSubclasses: Set[Symbol]
+
+ /** The list of all base classes of this type (including its own typeSymbol)
+ * in reverse linearization order, starting with the class itself and ending
+ * in class Any.
+ */
+ def baseClasses: List[Symbol]
+
+ /** The module corresponding to this module class,
+ * or NoSymbol if this symbol is not a module class.
+ */
+ def module: Symbol
+
/** If this symbol is a class or trait, its self type, otherwise the type
* of the symbol itself.
*/
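
Taken together, the reworked symbol API replaces `isValue`/`isVariable` with `isVal`/`isVar`, hoists `isImplicit` (and adds `isJava`) to `SymbolApi`, and adds `isPrimaryConstructor`, `knownDirectSubclasses`, `baseClasses` and `module`. A rough classifier over these predicates, assuming a universe `u` with this API in scope (illustrative sketch only):

    def describe(u: scala.reflect.api.Universe)(sym: u.Symbol): String = sym match {
      case m: u.MethodSymbol if m.isPrimaryConstructor => "primary constructor"
      case c: u.ClassSymbol  if c.isSealed             =>
        "sealed class, direct subclasses: " + c.knownDirectSubclasses.mkString(", ")
      case t: u.TermSymbol   if t.isVal                => "val"
      case t: u.TermSymbol   if t.isVar                => "var"
      case s if s.isImplicit                           => "implicit definition"
      case s if s.isJava                               => "defined in Java"
      case _                                           => "something else"
    }
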
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
index 5522693b29..e46a977be8 100644
--- a/src/reflect/scala/reflect/api/Trees.scala
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -424,14 +424,6 @@ trait Trees extends base.Trees { self: Universe =>
trait ApplyApi extends GenericApplyApi { this: Apply =>
}
- override type ApplyDynamic >: Null <: TermTree with SymTree with ApplyDynamicApi
-
- /** The API that all apply dynamics support */
- trait ApplyDynamicApi extends TermTreeApi with SymTreeApi { this: ApplyDynamic =>
- val qual: Tree
- val args: List[Tree]
- }
-
override type Super >: Null <: TermTree with SuperApi
/** The API that all supers support */
@@ -586,7 +578,6 @@ trait Trees extends base.Trees { self: Universe =>
def Typed(tree: Tree, expr: Tree, tpt: Tree): Typed
def TypeApply(tree: Tree, fun: Tree, args: List[Tree]): TypeApply
def Apply(tree: Tree, fun: Tree, args: List[Tree]): Apply
- def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]): ApplyDynamic
def Super(tree: Tree, qual: Tree, mix: TypeName): Super
def This(tree: Tree, qual: Name): This
def Select(tree: Tree, qualifier: Tree, selector: Name): Select
diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala
index bdcaadfbda..1c79de02c3 100644
--- a/src/reflect/scala/reflect/api/Types.scala
+++ b/src/reflect/scala/reflect/api/Types.scala
@@ -121,15 +121,6 @@ trait Types extends base.Types { self: Universe =>
*/
def widen: Type
- /** Map to a singleton type which is a subtype of this type.
- * The fallback implemented here gives:
- * {{{
- * T.narrow = (T {}).this.type
- * }}}
- * Overridden where we know more about where types come from.
- */
- def narrow: Type
-
/******************* helpers *******************/
/** Substitute symbols in `to` for corresponding occurrences of references to
@@ -159,9 +150,6 @@ trait Types extends base.Types { self: Universe =>
/** Does this type contain a reference to given symbol? */
def contains(sym: Symbol): Boolean
-
- /** The string discriminator of this type; useful for debugging */
- def kind: String
}
/** .. */
@@ -267,7 +255,7 @@ trait Types extends base.Types { self: Universe =>
/** The API that all annotated types support */
trait AnnotatedTypeApi extends TypeApi { this: AnnotatedType =>
- val annotations: List[AnnotationInfo]
+ val annotations: List[Annotation]
val underlying: Type
val selfsym: Symbol
}
diff --git a/src/reflect/scala/reflect/api/Universe.scala b/src/reflect/scala/reflect/api/Universe.scala
index 3dce0f218e..3165f9abcd 100644
--- a/src/reflect/scala/reflect/api/Universe.scala
+++ b/src/reflect/scala/reflect/api/Universe.scala
@@ -14,4 +14,4 @@ abstract class Universe extends base.Universe
with StandardDefinitions
with StandardNames
with Importers
- with AnnotationInfos
+ with Annotations
diff --git a/src/reflect/scala/reflect/internal/AbstractFileApi.scala b/src/reflect/scala/reflect/internal/AbstractFileApi.scala
deleted file mode 100644
index 9f37f4536f..0000000000
--- a/src/reflect/scala/reflect/internal/AbstractFileApi.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package scala.reflect
-package internal
-
-trait AbstractFileApi {
- def path: String
- def canonicalPath: String
-}
diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
index a444c786f7..3bd7f4f4fa 100644
--- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
@@ -9,9 +9,10 @@ package internal
import util._
import pickling.ByteCodecs
import scala.annotation.tailrec
+import scala.collection.immutable.ListMap
/** AnnotationInfo and its helpers */
-trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
+trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
import definitions.{ ThrowsClass, StaticAnnotationClass, isMetaAnnotation }
// Common annotation code between Symbol and Type.
@@ -32,7 +33,7 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
case AnnotationInfo(tp, Literal(Constant(tpe: Type)) :: Nil, _) if tp.typeSymbol == ThrowsClass => tpe.typeSymbol
}
- /** Tests for, get, or remove an annotation */
+ /** Tests for, get, or remove an annotation */
def hasAnnotation(cls: Symbol): Boolean =
//OPT inlined from exists to save on #closures; was: annotations exists (_ matches cls)
dropOtherAnnotations(annotations, cls).nonEmpty
@@ -43,12 +44,12 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
case ann :: _ => Some(ann)
case _ => None
}
-
+
def removeAnnotation(cls: Symbol): Self = filterAnnotations(ann => !(ann matches cls))
-
+
final def withAnnotation(annot: AnnotationInfo): Self = withAnnotations(List(annot))
- @tailrec private
+ @tailrec private
def dropOtherAnnotations(anns: List[AnnotationInfo], cls: Symbol): List[AnnotationInfo] = anns match {
case ann :: rest => if (ann matches cls) anns else dropOtherAnnotations(rest, cls)
case Nil => Nil
@@ -63,28 +64,47 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
* - or nested classfile annotations
*/
abstract class ClassfileAnnotArg extends Product
- implicit val ClassfileAnnotArgTag = ClassTag[ClassfileAnnotArg](classOf[ClassfileAnnotArg])
+ implicit val JavaArgumentTag = ClassTag[ClassfileAnnotArg](classOf[ClassfileAnnotArg])
+ case object UnmappableAnnotArg extends ClassfileAnnotArg
/** Represents a compile-time Constant (`Boolean`, `Byte`, `Short`,
* `Char`, `Int`, `Long`, `Float`, `Double`, `String`, `java.lang.Class` or
* an instance of a Java enumeration value).
*/
case class LiteralAnnotArg(const: Constant)
- extends ClassfileAnnotArg with LiteralAnnotArgApi {
+ extends ClassfileAnnotArg with LiteralArgumentApi {
+ def value = const
override def toString = const.escapedStringValue
}
- implicit val LiteralAnnotArgTag = ClassTag[LiteralAnnotArg](classOf[LiteralAnnotArg])
-
- object LiteralAnnotArg extends LiteralAnnotArgExtractor
+ object LiteralAnnotArg extends LiteralArgumentExtractor
/** Represents an array of classfile annotation arguments */
case class ArrayAnnotArg(args: Array[ClassfileAnnotArg])
- extends ClassfileAnnotArg with ArrayAnnotArgApi {
+ extends ClassfileAnnotArg with ArrayArgumentApi {
override def toString = args.mkString("[", ", ", "]")
}
- implicit val ArrayAnnotArgTag = ClassTag[ArrayAnnotArg](classOf[ArrayAnnotArg])
+ object ArrayAnnotArg extends ArrayArgumentExtractor
- object ArrayAnnotArg extends ArrayAnnotArgExtractor
+ /** Represents a nested classfile annotation */
+ case class NestedAnnotArg(annInfo: AnnotationInfo)
+ extends ClassfileAnnotArg with NestedArgumentApi {
+ // The nested annotation should not have any Scala annotation arguments
+ assert(annInfo.args.isEmpty, annInfo.args)
+ def annotation = annInfo
+ override def toString = annInfo.toString
+ }
+ object NestedAnnotArg extends NestedArgumentExtractor
+
+ type JavaArgument = ClassfileAnnotArg
+ type LiteralArgument = LiteralAnnotArg
+ val LiteralArgument = LiteralAnnotArg
+ implicit val LiteralArgumentTag = ClassTag[LiteralAnnotArg](classOf[LiteralAnnotArg])
+ type ArrayArgument = ArrayAnnotArg
+ val ArrayArgument = ArrayAnnotArg
+ implicit val ArrayArgumentTag = ClassTag[ArrayAnnotArg](classOf[ArrayAnnotArg])
+ type NestedArgument = NestedAnnotArg
+ val NestedArgument = NestedAnnotArg
+ implicit val NestedArgumentTag = ClassTag[NestedAnnotArg](classOf[NestedAnnotArg])
/** A specific annotation argument that encodes an array of bytes as an
* array of `Long`. The type of the argument declared in the annotation
@@ -121,20 +141,9 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
}
src
}
-
- }
-
- /** Represents a nested classfile annotation */
- case class NestedAnnotArg(annInfo: AnnotationInfo) extends ClassfileAnnotArg with NestedAnnotArgApi {
- // The nested annotation should not have any Scala annotation arguments
- assert(annInfo.args.isEmpty, annInfo.args)
- override def toString = annInfo.toString
}
- implicit val NestedAnnotArgTag = ClassTag[NestedAnnotArg](classOf[NestedAnnotArg])
- object NestedAnnotArg extends NestedAnnotArgExtractor
-
- object AnnotationInfo extends AnnotationInfoExtractor {
+ object AnnotationInfo {
def marker(atp: Type): AnnotationInfo =
apply(atp, Nil, Nil)
@@ -165,11 +174,14 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
this
}
- override def toString = (
- atp +
- (if (!args.isEmpty) args.mkString("(", ", ", ")") else "") +
- (if (!assocs.isEmpty) (assocs map { case (x, y) => x+" = "+y } mkString ("(", ", ", ")")) else "")
- )
+ override def toString = completeAnnotationToString(this)
+ }
+
+ private[scala] def completeAnnotationToString(annInfo: AnnotationInfo) = {
+ import annInfo._
+ val s_args = if (!args.isEmpty) args.mkString("(", ", ", ")") else ""
+ val s_assocs = if (!assocs.isEmpty) (assocs map { case (x, y) => x+" = "+y } mkString ("(", ", ", ")")) else ""
+ s"${atp}${s_args}${s_assocs}"
}
/** Symbol annotations parsed in `Namer` (typeCompleter of
@@ -207,11 +219,15 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
*
* `assocs` stores arguments to classfile annotations as name-value pairs.
*/
- sealed abstract class AnnotationInfo extends AnnotationInfoApi {
+ abstract class AnnotationInfo extends AnnotationApi {
def atp: Type
def args: List[Tree]
def assocs: List[(Name, ClassfileAnnotArg)]
+ def tpe = atp
+ def scalaArgs = args
+ def javaArgs = ListMap(assocs: _*)
+
// necessary for reification, see Reifiers.scala for more info
def original: Tree
def setOriginal(t: Tree): this.type
@@ -299,7 +315,14 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
}
}
- implicit val AnnotationInfoTag = ClassTag[AnnotationInfo](classOf[AnnotationInfo])
+ type Annotation = AnnotationInfo
+ object Annotation extends AnnotationExtractor {
+ def apply(tpe: Type, scalaArgs: List[Tree], javaArgs: ListMap[Name, ClassfileAnnotArg]): Annotation =
+ AnnotationInfo(tpe, scalaArgs, javaArgs.toList)
+ def unapply(annotation: Annotation): Option[(Type, List[Tree], ListMap[Name, ClassfileAnnotArg])] =
+ Some((annotation.tpe, annotation.scalaArgs, annotation.javaArgs))
+ }
+ implicit val AnnotationTag = ClassTag[AnnotationInfo](classOf[AnnotationInfo])
object UnmappableAnnotation extends CompleteAnnotationInfo(NoType, Nil, Nil)
}
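
Internally the arguments of a classfile annotation remain an association list (`assocs: List[(Name, ClassfileAnnotArg)]`); the `Annotation` extractor above merely converts to and from the `ListMap` form that the public `javaArgs` promises. The conversion itself is plain collections code, sketched here with stand-in key/value types (hypothetical names, not compiler code):

    import scala.collection.immutable.ListMap

    object AssocsDemo {
      // Stand-ins for (Name, ClassfileAnnotArg) pairs.
      val assocs: List[(String, String)]    = List("value" -> "1", "names" -> "[a, b]")
      val javaArgs: ListMap[String, String] = ListMap(assocs: _*) // the shape `javaArgs` hands out
      val byName = javaArgs.get("value")                          // named lookup the assoc list lacks
      val back   = javaArgs.toList                                // what Annotation.apply feeds back to AnnotationInfo
    }
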
diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
index 554b3bfca6..539984c67f 100644
--- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
@@ -7,7 +7,7 @@ package internal
// todo implement in terms of BitSet
import scala.collection.{ mutable, immutable }
-import math.max
+import scala.math.max
import util.Statistics
/** A base type sequence (BaseTypeSeq) is an ordered sequence spanning all the base types
@@ -99,7 +99,7 @@ trait BaseTypeSeqs {
def copy(head: Type, offset: Int): BaseTypeSeq = {
val arr = new Array[Type](elems.length + offset)
- compat.Platform.arraycopy(elems, 0, arr, offset, elems.length)
+ scala.compat.Platform.arraycopy(elems, 0, arr, offset, elems.length)
arr(0) = head
newBaseTypeSeq(parents, arr)
}
diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala
index 74b9442076..f7371f4180 100644
--- a/src/reflect/scala/reflect/internal/BuildUtils.scala
+++ b/src/reflect/scala/reflect/internal/BuildUtils.scala
@@ -30,14 +30,11 @@ trait BuildUtils extends base.BuildUtils { self: SymbolTable =>
else MissingRequirementError.notFound("overloaded method %s #%d in %s".format(name, index, owner.fullName))
}
- def newFreeTerm(name: String, info: Type, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
- newFreeTermSymbol(newTermName(name), info, value, flags, origin)
+ def newFreeTerm(name: String, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
+ newFreeTermSymbol(newTermName(name), value, flags, origin)
- def newFreeType(name: String, info: Type, value: => Any, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
- newFreeTypeSymbol(newTypeName(name), info, value, (if (flags == 0L) PARAM else flags) | DEFERRED, origin)
-
- def newFreeExistential(name: String, info: Type, value: => Any, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
- newFreeTypeSymbol(newTypeName(name), info, value, (if (flags == 0L) EXISTENTIAL else flags) | DEFERRED, origin)
+ def newFreeType(name: String, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
+ newFreeTypeSymbol(newTypeName(name), flags, origin)
def newNestedSymbol(owner: Symbol, name: Name, pos: Position, flags: Long, isClass: Boolean): Symbol =
owner.newNestedSymbol(name, pos, flags, isClass)
diff --git a/src/reflect/scala/reflect/internal/Chars.scala b/src/reflect/scala/reflect/internal/Chars.scala
index 6ece733b06..e5e5325b93 100644
--- a/src/reflect/scala/reflect/internal/Chars.scala
+++ b/src/reflect/scala/reflect/internal/Chars.scala
@@ -5,9 +5,9 @@
package scala.reflect
package internal
-import annotation.{ tailrec, switch }
+import scala.annotation.{ tailrec, switch }
import java.lang.{ Character => JCharacter }
-import language.postfixOps
+import scala.language.postfixOps
/** Contains constants and classifier methods for characters */
trait Chars {
diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
index 124f1f881d..62ed130232 100644
--- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala
+++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
@@ -6,7 +6,7 @@
package scala.reflect
package internal
-import annotation.switch
+import scala.annotation.switch
object ClassfileConstants {
diff --git a/src/reflect/scala/reflect/internal/Constants.scala b/src/reflect/scala/reflect/internal/Constants.scala
index e5a543da46..61fa553484 100644
--- a/src/reflect/scala/reflect/internal/Constants.scala
+++ b/src/reflect/scala/reflect/internal/Constants.scala
@@ -7,7 +7,7 @@ package scala.reflect
package internal
import java.lang.Integer.toOctalString
-import annotation.switch
+import scala.annotation.switch
trait Constants extends api.Constants {
self: SymbolTable =>
@@ -31,6 +31,9 @@ trait Constants extends api.Constants {
final val EnumTag = 13
case class Constant(value: Any) extends ConstantApi {
+ import java.lang.Double.doubleToRawLongBits
+ import java.lang.Float.floatToRawIntBits
+
val tag: Int = value match {
case null => NullTag
case x: Unit => UnitTag
@@ -81,10 +84,10 @@ trait Constants extends api.Constants {
/** We need the equals method to take account of tags as well as values.
*/
+ // !!! In what circumstance could `equalHashValue == that.equalHashValue && tag != that.tag` be true?
override def equals(other: Any): Boolean = other match {
case that: Constant =>
- this.tag == that.tag &&
- (this.value == that.value || this.isNaN && that.isNaN)
+ this.tag == that.tag && equalHashValue == that.equalHashValue
case _ => false
}
@@ -236,7 +239,30 @@ trait Constants extends api.Constants {
def typeValue: Type = value.asInstanceOf[Type]
def symbolValue: Symbol = value.asInstanceOf[Symbol]
- override def hashCode: Int = value.## * 41 + 17
+ /**
+ * Consider two `NaN`s to be identical, despite non-equality.
+ * Consider -0d to be distinct from 0d, despite equality.
+ *
+ * We use the raw versions (i.e. `floatToRawIntBits` rather than `floatToIntBits`)
+ * to avoid treating different encodings of `NaN` as the same constant.
+ * You probably can't express different `NaN` varieties as compile time
+ * constants in regular Scala code, but it is conceivable that you could
+ * conjure them with a macro.
+ */
+ private def equalHashValue: Any = value match {
+ case f: Float => floatToRawIntBits(f)
+ case d: Double => doubleToRawLongBits(d)
+ case v => v
+ }
+
+ override def hashCode: Int = {
+ import scala.util.hashing.MurmurHash3._
+ val seed = 17
+ var h = seed
+ h = mix(h, tag.##) // include tag in the hash, otherwise 0, 0d, 0L, 0f collide.
+ h = mix(h, equalHashValue.##)
+ finalizeHash(h, length = 2)
+ }
}
object Constant extends ConstantExtractor
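
The new `equalHashValue`/`hashCode` pair leans on raw bit patterns so that a `NaN` constant equals an identically-encoded `NaN` (even though the values compare unequal) while `-0.0` and `0.0` stay distinct (even though the values compare equal), and mixes the tag into the hash so that `0`, `0L`, `0f` and `0d` no longer collide. The underlying primitive behaviour, restated in plain Scala (illustrative only):

    import java.lang.Double.{ doubleToRawLongBits => rawBits, longBitsToDouble }

    object ConstantBitsDemo extends App {
      val nan1 = longBitsToDouble(0x7ff8000000000000L) // the canonical NaN encoding
      val nan2 = longBitsToDouble(0x7ff8000000000000L)

      assert(nan1 != nan2)                     // NaN is unequal to everything, itself included
      assert(rawBits(nan1) == rawBits(nan2))   // ...but identical encodings are identified here

      assert(-0.0 == 0.0)                      // IEEE equality conflates the two zeros
      assert(rawBits(-0.0) != rawBits(0.0))    // the sign bit differs, so the constants stay distinct
    }
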
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 98d42b724c..48a658192b 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -6,7 +6,7 @@
package scala.reflect
package internal
-import annotation.{ switch, meta }
+import scala.annotation.{ switch, meta }
import scala.collection.{ mutable, immutable }
import Flags._
import PartialFunction._
@@ -218,6 +218,32 @@ trait Definitions extends api.StandardDefinitions {
case _ => null
}
+ /** Fully initialize the symbol, type, or scope.
+ */
+ def fullyInitializeSymbol(sym: Symbol): Symbol = {
+ sym.initialize
+ fullyInitializeType(sym.info)
+ fullyInitializeType(sym.tpe)
+ sym
+ }
+ def fullyInitializeType(tp: Type): Type = {
+ tp.typeParams foreach fullyInitializeSymbol
+ tp.paramss.flatten foreach fullyInitializeSymbol
+ tp
+ }
+ def fullyInitializeScope(scope: Scope): Scope = {
+ scope.sorted foreach fullyInitializeSymbol
+ scope
+ }
+ /** Is this type equivalent to Any, AnyVal, or AnyRef? */
+ def isTrivialTopType(tp: Type) = (
+ tp =:= AnyClass.tpe
+ || tp =:= AnyValClass.tpe
+ || tp =:= AnyRefClass.tpe
+ )
+ /** Does this type have a parent which is none of Any, AnyVal, or AnyRef? */
+ def hasNonTrivialParent(tp: Type) = tp.parents exists (t => !isTrivialTopType(t))
+
private def fixupAsAnyTrait(tpe: Type): Type = tpe match {
case ClassInfoType(parents, decls, clazz) =>
if (parents.head.typeSymbol == AnyClass) tpe
@@ -251,6 +277,7 @@ trait Definitions extends api.StandardDefinitions {
anyval
}).asInstanceOf[ClassSymbol]
lazy val AnyValTpe = definitions.AnyValClass.toTypeConstructor
+ def AnyVal_getClass = getMemberMethod(AnyValClass, nme.getClass_)
// bottom types
lazy val RuntimeNothingClass = getClassByName(fulltpnme.RuntimeNothing)
@@ -383,7 +410,7 @@ trait Definitions extends api.StandardDefinitions {
def isRepeated(param: Symbol) = isRepeatedParamType(param.tpe)
def isCastSymbol(sym: Symbol) = sym == Any_asInstanceOf || sym == Object_asInstanceOf
- def isJavaVarArgsMethod(m: Symbol) = m.isMethod && isJavaVarArgs(m.info.params)
+ def isJavaVarArgsMethod(m: Symbol) = m.isMethod && isJavaVarArgs(m.info.params)
def isJavaVarArgs(params: Seq[Symbol]) = params.nonEmpty && isJavaRepeatedParamType(params.last.tpe)
def isScalaVarArgs(params: Seq[Symbol]) = params.nonEmpty && isScalaRepeatedParamType(params.last.tpe)
def isVarArgsList(params: Seq[Symbol]) = params.nonEmpty && isRepeatedParamType(params.last.tpe)
@@ -478,8 +505,8 @@ trait Definitions extends api.StandardDefinitions {
lazy val ClassTagModule = requiredModule[scala.reflect.ClassTag[_]]
lazy val ClassTagClass = requiredClass[scala.reflect.ClassTag[_]]
lazy val TypeTagsClass = requiredClass[scala.reflect.base.TypeTags]
- lazy val AbsTypeTagClass = getMemberClass(TypeTagsClass, tpnme.AbsTypeTag)
- lazy val AbsTypeTagModule = getMemberModule(TypeTagsClass, nme.AbsTypeTag)
+ lazy val WeakTypeTagClass = getMemberClass(TypeTagsClass, tpnme.WeakTypeTag)
+ lazy val WeakTypeTagModule = getMemberModule(TypeTagsClass, nme.WeakTypeTag)
lazy val TypeTagClass = getMemberClass(TypeTagsClass, tpnme.TypeTag)
lazy val TypeTagModule = getMemberModule(TypeTagsClass, nme.TypeTag)
@@ -500,7 +527,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val MacroImplAnnotation = requiredClass[scala.reflect.macros.internal.macroImpl]
lazy val MacroInternalPackage = getPackageObject("scala.reflect.macros.internal")
def MacroInternal_materializeClassTag = getMemberMethod(MacroInternalPackage, nme.materializeClassTag)
- def MacroInternal_materializeAbsTypeTag = getMemberMethod(MacroInternalPackage, nme.materializeAbsTypeTag)
+ def MacroInternal_materializeWeakTypeTag = getMemberMethod(MacroInternalPackage, nme.materializeWeakTypeTag)
def MacroInternal_materializeTypeTag = getMemberMethod(MacroInternalPackage, nme.materializeTypeTag)
lazy val StringContextClass = requiredClass[scala.StringContext]
@@ -515,8 +542,8 @@ trait Definitions extends api.StandardDefinitions {
lazy val NoneModule: ModuleSymbol = requiredModule[scala.None.type]
lazy val SomeModule: ModuleSymbol = requiredModule[scala.Some.type]
- def compilerTypeFromTag(tt: BaseUniverse # AbsTypeTag[_]): Type = tt.in(rootMirror).tpe
- def compilerSymbolFromTag(tt: BaseUniverse # AbsTypeTag[_]): Symbol = tt.in(rootMirror).tpe.typeSymbol
+ def compilerTypeFromTag(tt: BaseUniverse # WeakTypeTag[_]): Type = tt.in(rootMirror).tpe
+ def compilerSymbolFromTag(tt: BaseUniverse # WeakTypeTag[_]): Symbol = tt.in(rootMirror).tpe.typeSymbol
// The given symbol represents either String.+ or StringAdd.+
def isStringAddition(sym: Symbol) = sym == String_+ || sym == StringAdd_+
@@ -905,7 +932,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val SwitchClass = requiredClass[scala.annotation.switch]
lazy val TailrecClass = requiredClass[scala.annotation.tailrec]
lazy val VarargsClass = requiredClass[scala.annotation.varargs]
- lazy val StaticClass = requiredClass[scala.annotation.static]
lazy val uncheckedStableClass = requiredClass[scala.annotation.unchecked.uncheckedStable]
lazy val uncheckedVarianceClass = requiredClass[scala.annotation.unchecked.uncheckedVariance]
@@ -914,6 +940,8 @@ trait Definitions extends api.StandardDefinitions {
lazy val CloneableAttr = requiredClass[scala.annotation.cloneable]
lazy val DeprecatedAttr = requiredClass[scala.deprecated]
lazy val DeprecatedNameAttr = requiredClass[scala.deprecatedName]
+ lazy val DeprecatedInheritanceAttr = requiredClass[scala.deprecatedInheritance]
+ lazy val DeprecatedOverridingAttr = requiredClass[scala.deprecatedOverriding]
lazy val NativeAttr = requiredClass[scala.native]
lazy val RemoteAttr = requiredClass[scala.remote]
lazy val ScalaInlineClass = requiredClass[scala.inline]
@@ -1087,7 +1115,8 @@ trait Definitions extends api.StandardDefinitions {
/** Is symbol a phantom class for which no runtime representation exists? */
lazy val isPhantomClass = Set[Symbol](AnyClass, AnyValClass, NullClass, NothingClass)
- lazy val magicSymbols = List(
+ /** Lists core classes that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */
+ lazy val syntheticCoreClasses = List(
AnnotationDefaultAttr, // #2264
RepeatedParamClass,
JavaRepeatedParamClass,
@@ -1098,7 +1127,10 @@ trait Definitions extends api.StandardDefinitions {
NullClass,
NothingClass,
SingletonClass,
- EqualsPatternClass,
+ EqualsPatternClass
+ )
+ /** Lists core methods that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */
+ lazy val syntheticCoreMethods = List(
Any_==,
Any_!=,
Any_equals,
@@ -1116,10 +1148,19 @@ trait Definitions extends api.StandardDefinitions {
Object_synchronized,
Object_isInstanceOf,
Object_asInstanceOf,
- String_+,
+ String_+
+ )
+ /** Lists core classes that do have underlying bytecode, but are adjusted on-the-fly in every reflection universe */
+ lazy val hijackedCoreClasses = List(
ComparableClass,
JavaSerializableClass
)
+ /** Lists symbols that are synthesized or hijacked by the compiler.
+ *
+ * Such symbols either don't have any underlying bytecode at all ("synthesized")
+ * or get loaded from bytecode but have their metadata adjusted ("hijacked").
+ */
+ lazy val symbolsNotPresentInBytecode = syntheticCoreClasses ++ syntheticCoreMethods ++ hijackedCoreClasses
/** Is the symbol that of a parent which is added during parsing? */
lazy val isPossibleSyntheticParent = ProductClass.toSet[Symbol] + ProductRootClass + SerializableClass
@@ -1183,7 +1224,8 @@ trait Definitions extends api.StandardDefinitions {
def init() {
if (isInitialized) return
- val forced = magicSymbols // force initialization of every symbol that is entered as a side effect
+ // force initialization of every symbol that is synthesized or hijacked by the compiler
+ val forced = symbolsNotPresentInBytecode
isInitialized = true
} //init
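
Besides reorganising `magicSymbols` into `syntheticCoreClasses`, `syntheticCoreMethods` and `hijackedCoreClasses`, the Definitions changes above add the `fullyInitialize*` helpers and the `isTrivialTopType`/`hasNonTrivialParent` predicates. The predicate is simple enough to restate against the public reflection API, assuming `scala.reflect.runtime.universe` is available (a sketch, not the compiler's own code):

    import scala.reflect.runtime.{ universe => u }

    // A type is a "trivial top type" if it is Any, AnyVal or AnyRef.
    def isTrivialTopType(tp: u.Type): Boolean =
      tp =:= u.typeOf[Any] || tp =:= u.typeOf[AnyVal] || tp =:= u.typeOf[AnyRef]
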
diff --git a/src/reflect/scala/reflect/internal/FlagSets.scala b/src/reflect/scala/reflect/internal/FlagSets.scala
index b03d01c944..6270416d4f 100644
--- a/src/reflect/scala/reflect/internal/FlagSets.scala
+++ b/src/reflect/scala/reflect/internal/FlagSets.scala
@@ -1,7 +1,7 @@
package scala.reflect
package internal
-import language.implicitConversions
+import scala.language.implicitConversions
trait FlagSets extends api.FlagSets { self: SymbolTable =>
diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala
index d5baad8ab1..c116928d37 100644
--- a/src/reflect/scala/reflect/internal/Importers.scala
+++ b/src/reflect/scala/reflect/internal/Importers.scala
@@ -3,7 +3,7 @@ package internal
import scala.collection.mutable.WeakHashMap
// SI-6241: move importers to a mirror
-trait Importers { self: SymbolTable =>
+trait Importers extends api.Importers { self: SymbolTable =>
def mkImporter(from0: api.Universe): Importer { val from: from0.type } = (
if (self eq from0) {
@@ -32,7 +32,7 @@ trait Importers { self: SymbolTable =>
// fixups and maps prevent stackoverflows in importer
var pendingSyms = 0
var pendingTpes = 0
- lazy val fixups = collection.mutable.MutableList[Function0[Unit]]()
+ lazy val fixups = scala.collection.mutable.MutableList[Function0[Unit]]()
def addFixup(fixup: => Unit): Unit = fixups += (() => fixup)
def tryFixup(): Unit = {
if (pendingSyms == 0 && pendingTpes == 0) {
@@ -72,9 +72,9 @@ trait Importers { self: SymbolTable =>
case x: from.ModuleSymbol =>
linkReferenced(myowner.newModuleSymbol(myname, mypos, myflags), x, importSymbol)
case x: from.FreeTermSymbol =>
- newFreeTermSymbol(importName(x.name).toTermName, importType(x.info), x.value, x.flags, x.origin)
+ newFreeTermSymbol(importName(x.name).toTermName, x.value, x.flags, x.origin) setInfo importType(x.info)
case x: from.FreeTypeSymbol =>
- newFreeTypeSymbol(importName(x.name).toTypeName, importType(x.info), x.value, x.flags, x.origin)
+ newFreeTypeSymbol(importName(x.name).toTypeName, x.flags, x.origin)
case x: from.TermSymbol =>
linkReferenced(myowner.newValue(myname, mypos, myflags), x, importSymbol)
case x: from.TypeSkolem =>
diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala
index 2fdf27d847..0f42db95e3 100644
--- a/src/reflect/scala/reflect/internal/Names.scala
+++ b/src/reflect/scala/reflect/internal/Names.scala
@@ -8,7 +8,7 @@ package internal
import scala.io.Codec
import java.security.MessageDigest
-import language.implicitConversions
+import scala.language.implicitConversions
trait LowPriorityNames {
self: Names =>
@@ -67,7 +67,7 @@ trait Names extends api.Names with LowPriorityNames {
while (i < len) {
if (nc + i == chrs.length) {
val newchrs = new Array[Char](chrs.length * 2)
- compat.Platform.arraycopy(chrs, 0, newchrs, 0, chrs.length)
+ scala.compat.Platform.arraycopy(chrs, 0, newchrs, 0, chrs.length)
chrs = newchrs
}
chrs(nc + i) = cs(offset + i)
@@ -149,11 +149,15 @@ trait Names extends api.Names with LowPriorityNames {
type ThisNameType >: Null <: Name
protected[this] def thisName: ThisNameType
+ // Note that "Name with ThisNameType" should be redundant
+ // because ThisNameType <: Name, but due to SI-6161 the
+ // compiler loses track of this fact.
+
/** Index into name table */
def start: Int = index
/** The next name in the same hash bucket. */
- def next: ThisNameType
+ def next: Name with ThisNameType
/** The length of this name. */
final def length: Int = len
@@ -169,17 +173,17 @@ trait Names extends api.Names with LowPriorityNames {
def bothNames: List[Name] = List(toTermName, toTypeName)
/** Return the subname with characters from from to to-1. */
- def subName(from: Int, to: Int): ThisNameType
+ def subName(from: Int, to: Int): Name with ThisNameType
/** Return a new name of the same variety. */
- def newName(str: String): ThisNameType
+ def newName(str: String): Name with ThisNameType
/** Return a new name based on string transformation. */
- def mapName(f: String => String): ThisNameType = newName(f(toString))
+ def mapName(f: String => String): Name with ThisNameType = newName(f(toString))
/** Copy bytes of this name to buffer cs, starting at position `offset`. */
final def copyChars(cs: Array[Char], offset: Int) =
- compat.Platform.arraycopy(chrs, index, cs, offset, len)
+ scala.compat.Platform.arraycopy(chrs, index, cs, offset, len)
/** @return the ascii representation of this name */
final def toChars: Array[Char] = {
@@ -195,7 +199,7 @@ trait Names extends api.Names with LowPriorityNames {
*/
final def copyUTF8(bs: Array[Byte], offset: Int): Int = {
val bytes = Codec.toUTF8(chrs, index, len)
- compat.Platform.arraycopy(bytes, 0, bs, offset, bytes.length)
+ scala.compat.Platform.arraycopy(bytes, 0, bs, offset, bytes.length)
offset + bytes.length
}
diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala
index 9580ed1f72..4c423e0bc2 100644
--- a/src/reflect/scala/reflect/internal/Printers.scala
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -10,7 +10,7 @@ package internal
import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
import Flags._
-import compat.Platform.EOL
+import scala.compat.Platform.EOL
trait Printers extends api.Printers { self: SymbolTable =>
@@ -664,7 +664,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
def show(flags: FlagSet): String = {
if (flags == NoFlags) nme.NoFlags.toString
else {
- val s_flags = new collection.mutable.ListBuffer[String]
+ val s_flags = new scala.collection.mutable.ListBuffer[String]
def hasFlag(left: Long, right: Long): Boolean = (left & right) != 0
for (i <- 0 to 63 if hasFlag(flags, 1L << i))
s_flags += flagToString(1L << i).replace("<", "").replace(">", "").toUpperCase
diff --git a/src/reflect/scala/reflect/internal/Required.scala b/src/reflect/scala/reflect/internal/Required.scala
index abbe8fbfb7..842491d56d 100644
--- a/src/reflect/scala/reflect/internal/Required.scala
+++ b/src/reflect/scala/reflect/internal/Required.scala
@@ -5,8 +5,6 @@ import settings.MutableSettings
trait Required { self: SymbolTable =>
- type AbstractFileType >: Null <: AbstractFileApi
-
def picklerPhase: Phase
def settings: MutableSettings
diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala
index 60b3a6f436..5f6a3bf777 100644
--- a/src/reflect/scala/reflect/internal/StdAttachments.scala
+++ b/src/reflect/scala/reflect/internal/StdAttachments.scala
@@ -10,7 +10,7 @@ trait StdAttachments {
trait Attachable {
protected var rawatt: base.Attachments { type Pos = Position } = NoPosition
def attachments = rawatt
- def addAttachment(attachment: Any): this.type = { rawatt = rawatt.add(attachment); this }
+ def updateAttachment[T: ClassTag](attachment: T): this.type = { rawatt = rawatt.update(attachment); this }
def removeAttachment[T: ClassTag]: this.type = { rawatt = rawatt.remove[T]; this }
// cannot be final due to SynchronizedSymbols
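
`updateAttachment` differs from the old `addAttachment` in that attachments are keyed by their type: updating with a second attachment of the same type replaces the first rather than accumulating. The semantics, sketched with a toy type-indexed store (hypothetical names, not the real `base.Attachments`):

    import scala.reflect.ClassTag

    final class TypeIndexed private (store: Map[Class[_], Any]) {
      def update[T: ClassTag](att: T): TypeIndexed =
        new TypeIndexed(store.updated(implicitly[ClassTag[T]].runtimeClass, att))
      def remove[T: ClassTag]: TypeIndexed =
        new TypeIndexed(store - implicitly[ClassTag[T]].runtimeClass)
      def get[T: ClassTag]: Option[T] =
        store.get(implicitly[ClassTag[T]].runtimeClass).map(_.asInstanceOf[T])
    }
    object TypeIndexed { val empty = new TypeIndexed(Map.empty) }

    object TypeIndexedDemo extends App {
      case class Origin(file: String)
      val atts = TypeIndexed.empty.update(Origin("A.scala")).update(Origin("B.scala"))
      assert(atts.get[Origin] == Some(Origin("B.scala"))) // the second update replaced the first
    }
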
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index f63e2602b1..2f305296f5 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -8,8 +8,8 @@ package internal
import java.security.MessageDigest
import Chars.isOperatorPart
-import annotation.switch
-import language.implicitConversions
+import scala.annotation.switch
+import scala.language.implicitConversions
import scala.collection.immutable
import scala.io.Codec
@@ -131,7 +131,7 @@ trait StdNames {
final val Seq: NameType = "Seq"
final val Symbol: NameType = "Symbol"
final val ClassTag: NameType = "ClassTag"
- final val AbsTypeTag: NameType = "AbsTypeTag"
+ final val WeakTypeTag: NameType = "WeakTypeTag"
final val TypeTag : NameType = "TypeTag"
final val Expr: NameType = "Expr"
final val String: NameType = "String"
@@ -246,7 +246,6 @@ trait StdNames {
final val BeanPropertyAnnot: NameType = "BeanProperty"
final val BooleanBeanPropertyAnnot: NameType = "BooleanBeanProperty"
final val bridgeAnnot: NameType = "bridge"
- final val staticAnnot: NameType = "static"
// Classfile Attributes
final val AnnotationDefaultATTR: NameType = "AnnotationDefault"
@@ -571,7 +570,7 @@ trait StdNames {
// Compiler utilized names
val AnnotatedType: NameType = "AnnotatedType"
- val AnnotationInfo: NameType = "AnnotationInfo"
+ val Annotation: NameType = "Annotation"
val Any: NameType = "Any"
val AnyVal: NameType = "AnyVal"
val AppliedTypeTree: NameType = "AppliedTypeTree"
@@ -704,14 +703,13 @@ trait StdNames {
val manifestToTypeTag: NameType = "manifestToTypeTag"
val map: NameType = "map"
val materializeClassTag: NameType = "materializeClassTag"
- val materializeAbsTypeTag: NameType = "materializeAbsTypeTag"
+ val materializeWeakTypeTag: NameType = "materializeWeakTypeTag"
val materializeTypeTag: NameType = "materializeTypeTag"
val mirror : NameType = "mirror"
val moduleClass : NameType = "moduleClass"
val name: NameType = "name"
val ne: NameType = "ne"
val newArray: NameType = "newArray"
- val newFreeExistential: NameType = "newFreeExistential"
val newFreeTerm: NameType = "newFreeTerm"
val newFreeType: NameType = "newFreeType"
val newNestedSymbol: NameType = "newNestedSymbol"
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index 4100e97cdd..2424e75949 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -47,6 +47,8 @@ abstract class SymbolTable extends macros.Universe
def globalError(msg: String): Unit = abort(msg)
def abort(msg: String): Nothing = throw new FatalError(supplementErrorMessage(msg))
+ def shouldLogAtThisPhase = false
+
@deprecated("Give us a reason", "2.10.0")
def abort(): Nothing = abort("unknown error")
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 09ac3e5f6f..74f4769fec 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -12,6 +12,7 @@ import util.Statistics
import Flags._
import base.Attachments
import scala.annotation.tailrec
+import scala.tools.nsc.io.AbstractFile
trait Symbols extends api.Symbols { self: SymbolTable =>
import definitions._
@@ -27,7 +28,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
//protected var activeLocks = 0
/** Used for debugging only */
- //protected var lockedSyms = collection.immutable.Set[Symbol]()
+ //protected var lockedSyms = scala.collection.immutable.Set[Symbol]()
/** Used to keep track of the recursion depth on locked symbols */
private var recursionTable = immutable.Map.empty[Symbol, Int]
@@ -47,13 +48,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Create a new free term. Its owner is NoSymbol.
*/
- def newFreeTermSymbol(name: TermName, info: Type, value: => Any, flags: Long = 0L, origin: String): FreeTermSymbol =
- new FreeTermSymbol(name, value, origin) initFlags flags setInfo info
+ def newFreeTermSymbol(name: TermName, value: => Any, flags: Long = 0L, origin: String): FreeTermSymbol =
+ new FreeTermSymbol(name, value, origin) initFlags flags
/** Create a new free type. Its owner is NoSymbol.
*/
- def newFreeTypeSymbol(name: TypeName, info: Type, value: => Any, flags: Long = 0L, origin: String): FreeTypeSymbol =
- new FreeTypeSymbol(name, value, origin) initFlags flags setInfo info
+ def newFreeTypeSymbol(name: TypeName, flags: Long = 0L, origin: String): FreeTypeSymbol =
+ new FreeTypeSymbol(name, origin) initFlags flags
/** The original owner of a class. Used by the backend to generate
* EnclosingMethod attributes.
@@ -63,17 +64,22 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
abstract class SymbolContextApiImpl extends SymbolContextApi {
this: Symbol =>
- def kind: String = kindString
def isExistential: Boolean = this.isExistentiallyBound
def isParamWithDefault: Boolean = this.hasDefault
def isByNameParam: Boolean = this.isValueParameter && (this hasFlag BYNAMEPARAM)
def isImplementationArtifact: Boolean = (this hasFlag BRIDGE) || (this hasFlag VBRIDGE) || (this hasFlag ARTIFACT)
+ def isJava: Boolean = this hasFlag JAVA
+ def isVal: Boolean = isTerm && !isModule && !isMethod && !isMutable
+ def isVar: Boolean = isTerm && !isModule && !isMethod && isMutable
def newNestedSymbol(name: Name, pos: Position, newFlags: Long, isClass: Boolean): Symbol = name match {
case n: TermName => newTermSymbol(n, pos, newFlags)
case n: TypeName => if (isClass) newClassSymbol(n, pos, newFlags) else newNonClassSymbol(n, pos, newFlags)
}
+ def knownDirectSubclasses = children
+ def baseClasses = info.baseClasses
+ def module = sourceModule
def thisPrefix: Type = thisType
def selfType: Type = typeOfThis
def typeSignature: Type = info
@@ -108,7 +114,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// with the proper specific type.
def rawname: NameType
def name: NameType
- def name_=(n: Name): Unit
+ def name_=(n: Name): Unit = {
+ if (shouldLogAtThisPhase) {
+ val msg = s"Renaming $fullLocationString to $n"
+ if (isSpecialized) debuglog(msg) else log(msg)
+ }
+ }
def asNameType(n: Name): NameType
private[this] var _rawowner = initOwner // Syncnote: need not be protected, as only assignment happens in owner_=, which is not exposed to api
@@ -517,6 +528,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def isTypeParameterOrSkolem = false
def isTypeSkolem = false
def isTypeMacro = false
+ def isInvariant = !isCovariant && !isContravariant
/** Qualities of Terms, always false for TypeSymbols.
*/
@@ -685,11 +697,22 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
|| hasAnnotation(SerializableAttr) // last part can be removed, @serializable annotation is deprecated
)
def hasBridgeAnnotation = hasAnnotation(BridgeClass)
- def hasStaticAnnotation = hasAnnotation(StaticClass)
def isDeprecated = hasAnnotation(DeprecatedAttr)
def deprecationMessage = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 0)
def deprecationVersion = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 1)
def deprecatedParamName = getAnnotation(DeprecatedNameAttr) flatMap (_ symbolArg 0)
+ def hasDeprecatedInheritanceAnnotation
+ = hasAnnotation(DeprecatedInheritanceAttr)
+ def deprecatedInheritanceMessage
+ = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 0)
+ def deprecatedInheritanceVersion
+ = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 1)
+ def hasDeprecatedOverridingAnnotation
+ = hasAnnotation(DeprecatedOverridingAttr)
+ def deprecatedOverridingMessage
+ = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 0)
+ def deprecatedOverridingVersion
+ = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 1)
// !!! when annotation arguments are not literal strings, but any sort of
// assembly of strings, there is a fair chance they will turn up here not as
@@ -846,7 +869,16 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isInitialized: Boolean =
validTo != NoPeriod
- /** Determines whether this symbol can be loaded by subsequent reflective compilation */
+ /** Can this symbol be loaded by a reflective mirror?
+ *
+ * Scalac relies on `ScalaSignature' annotation to retain symbols across compilation runs.
+ * Such annotations (also called "pickles") are applied on top-level classes and include information
+ * about all symbols reachable from the annotee. However, local symbols (e.g. classes or definitions local to a block)
+ * are typically unreachable and information about them gets lost.
+ *
+ * This method is useful for macro writers who wish to save certain ASTs to be used at runtime.
+ * With `isLocatable' it's possible to check whether a tree can be retained as is, or it needs special treatment.
+ */
final def isLocatable: Boolean = {
if (this == NoSymbol) return false
if (isRoot || isRootPackage) return true
@@ -938,7 +970,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** If this symbol has an expanded name, its original name, otherwise its name itself.
* @see expandName
*/
- def originalName: Name = nme.originalName(name)
+ def originalName: Name = nme.originalName(nme.dropLocalSuffix(name))
/** The name of the symbol before decoding, e.g. `\$eq\$eq` instead of `==`.
*/
@@ -983,7 +1015,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private def fullNameInternal(separator: Char): Name = (
if (isRoot || isRootPackage || this == NoSymbol) name
else if (owner.isEffectiveRoot) name
- else effectiveOwner.enclClass.fullNameAsName(separator) append separator append name
+ else ((effectiveOwner.enclClass.fullNameAsName(separator) append separator): Name) append name
)
def fullNameAsName(separator: Char): Name = nme.dropLocalSuffix(fullNameInternal(separator))
@@ -1567,7 +1599,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
setInfo (this.info cloneInfo clone)
setAnnotations this.annotations
)
- this.attachments.all.foreach(clone.addAttachment)
+ this.attachments.all.foreach(clone.updateAttachment)
if (clone.thisSym != clone)
clone.typeOfThis = (clone.typeOfThis cloneInfo clone)
@@ -1783,26 +1815,16 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
} else owner.enclosingTopLevelClass
/** Is this symbol defined in the same scope and compilation unit as `that` symbol? */
- def isCoDefinedWith(that: Symbol) = {
- (this.rawInfo ne NoType) &&
- (this.effectiveOwner == that.effectiveOwner) && {
- !this.effectiveOwner.isPackageClass ||
- (this.sourceFile eq null) ||
- (that.sourceFile eq null) ||
- (this.sourceFile == that.sourceFile) || {
- // recognize companion object in separate file and fail, else compilation
- // appears to succeed but highly opaque errors come later: see bug #1286
- if (this.sourceFile.path != that.sourceFile.path) {
- // The cheaper check can be wrong: do the expensive normalization
- // before failing.
- if (this.sourceFile.canonicalPath != that.sourceFile.canonicalPath)
- throw InvalidCompanions(this, that)
- }
-
- false
- }
- }
- }
+ def isCoDefinedWith(that: Symbol) = (
+ (this.rawInfo ne NoType)
+ && (this.effectiveOwner == that.effectiveOwner)
+ && ( !this.effectiveOwner.isPackageClass
+ || (this.sourceFile eq null)
+ || (that.sourceFile eq null)
+ || (this.sourceFile.path == that.sourceFile.path) // Cheap possibly wrong check, then expensive normalization
+ || (this.sourceFile.canonicalPath == that.sourceFile.canonicalPath)
+ )
+ )
/** The internal representation of classes and objects:
*
@@ -2058,21 +2080,21 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* of sourceFile (which is expected at least in the IDE only to
* return actual source code.) So sourceFile has classfiles filtered out.
*/
- private def sourceFileOnly(file: AbstractFileType): AbstractFileType =
+ private def sourceFileOnly(file: AbstractFile): AbstractFile =
if ((file eq null) || (file.path endsWith ".class")) null else file
- private def binaryFileOnly(file: AbstractFileType): AbstractFileType =
+ private def binaryFileOnly(file: AbstractFile): AbstractFile =
if ((file eq null) || !(file.path endsWith ".class")) null else file
- final def binaryFile: AbstractFileType = binaryFileOnly(associatedFile)
- final def sourceFile: AbstractFileType = sourceFileOnly(associatedFile)
+ final def binaryFile: AbstractFile = binaryFileOnly(associatedFile)
+ final def sourceFile: AbstractFile = sourceFileOnly(associatedFile)
/** Overridden in ModuleSymbols to delegate to the module class. */
- def associatedFile: AbstractFileType = enclosingTopLevelClass.associatedFile
- def associatedFile_=(f: AbstractFileType) { abort("associatedFile_= inapplicable for " + this) }
+ def associatedFile: AbstractFile = enclosingTopLevelClass.associatedFile
+ def associatedFile_=(f: AbstractFile) { abort("associatedFile_= inapplicable for " + this) }
@deprecated("Use associatedFile_= instead", "2.10.0")
- def sourceFile_=(f: AbstractFileType): Unit = associatedFile_=(f)
+ def sourceFile_=(f: AbstractFile): Unit = associatedFile_=(f)
/** If this is a sealed class, its known direct subclasses.
* Otherwise, the empty set.
@@ -2181,10 +2203,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* If settings.Yshowsymkinds, adds abbreviated symbol kind.
*/
def nameString: String = (
- if (!settings.uniqid.value && !settings.Yshowsymkinds.value) "" + decodedName
- else if (settings.uniqid.value && !settings.Yshowsymkinds.value) decodedName + "#" + id
- else if (!settings.uniqid.value && settings.Yshowsymkinds.value) decodedName + "#" + abbreviatedKindString
- else decodedName + "#" + id + "#" + abbreviatedKindString
+ if (!settings.uniqid.value && !settings.Yshowsymkinds.value) "" + originalName.decode
+ else if (settings.uniqid.value && !settings.Yshowsymkinds.value) originalName.decode + "#" + id
+ else if (!settings.uniqid.value && settings.Yshowsymkinds.value) originalName.decode + "#" + abbreviatedKindString
+ else originalName.decode + "#" + id + "#" + abbreviatedKindString
)
def fullNameString: String = {
@@ -2294,9 +2316,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (Statistics.hotEnabled) Statistics.incCounter(nameCount)
_rawname
}
- def name_=(name: Name) {
+ override def name_=(name: Name) {
if (name != rawname) {
- log("Renaming %s %s %s to %s".format(shortSymbolClass, debugFlagString, rawname, name))
+ super.name_=(name) // logging
changeNameInOwners(name)
_rawname = name.toTermName
}
@@ -2456,7 +2478,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private var flatname: TermName = null
override def associatedFile = moduleClass.associatedFile
- override def associatedFile_=(f: AbstractFileType) { moduleClass.associatedFile = f }
+ override def associatedFile_=(f: AbstractFile) { moduleClass.associatedFile = f }
override def moduleClass = referenced
override def companionClass =
@@ -2597,9 +2619,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// TODO - don't allow names to be renamed in this unstructured a fashion.
// Rename as little as possible. Enforce invariants on all renames.
- def name_=(name: Name) {
+ override def name_=(name: Name) {
if (name != rawname) {
- log("Renaming %s %s %s to %s".format(shortSymbolClass, debugFlagString, rawname, name))
+ super.name_=(name) // logging
changeNameInOwners(name)
_rawname = name.toTypeName
}
@@ -2760,9 +2782,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
extends TypeSymbol(initOwner, initPos, initName) with ClassSymbolApi {
type TypeOfClonedSymbol = ClassSymbol
- private[this] var flatname: TypeName = _
- private[this] var _associatedFile: AbstractFileType = _
- private[this] var thissym: Symbol = this
+ private[this] var flatname: TypeName = _
+ private[this] var _associatedFile: AbstractFile = _
+ private[this] var thissym: Symbol = this
private[this] var thisTypeCache: Type = _
private[this] var thisTypePeriod = NoPeriod
@@ -2860,7 +2882,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
override def associatedFile = if (owner.isPackageClass) _associatedFile else super.associatedFile
- override def associatedFile_=(f: AbstractFileType) { _associatedFile = f }
+ override def associatedFile_=(f: AbstractFile) { _associatedFile = f }
override def reset(completer: Type): this.type = {
super.reset(completer)
@@ -3057,9 +3079,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
implicit val FreeTermSymbolTag = ClassTag[FreeTermSymbol](classOf[FreeTermSymbol])
- class FreeTypeSymbol(name0: TypeName, value0: => Any, val origin: String) extends TypeSkolem(NoSymbol, NoPosition, name0, NoSymbol) with FreeSymbol with FreeTypeSymbolApi {
- def value = value0
- }
+ class FreeTypeSymbol(name0: TypeName, val origin: String) extends TypeSkolem(NoSymbol, NoPosition, name0, NoSymbol) with FreeSymbol with FreeTypeSymbolApi
implicit val FreeTypeSymbolTag = ClassTag[FreeTypeSymbol](classOf[FreeTypeSymbol])
/** An object representing a missing symbol */
@@ -3070,7 +3090,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def asNameType(n: Name) = n.toTermName
def rawname = nme.NO_NAME
def name = nme.NO_NAME
- def name_=(n: Name) = abort("Cannot set NoSymbol's name to " + n)
+ override def name_=(n: Name) = abort("Cannot set NoSymbol's name to " + n)
synchronized {
setInfo(NoType)
@@ -3195,13 +3215,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (settings.debug.value) printStackTrace()
}
- case class InvalidCompanions(sym1: Symbol, sym2: Symbol) extends Throwable({
- "Companions '" + sym1 + "' and '" + sym2 + "' must be defined in same file:\n" +
- " Found in " + sym1.sourceFile.canonicalPath + " and " + sym2.sourceFile.canonicalPath
- }) {
- override def toString = getMessage
- }
-
/** A class for type histories */
private sealed case class TypeHistory(var validFrom: Period, info: Type, prev: TypeHistory) {
assert((prev eq null) || phaseId(validFrom) > phaseId(prev.validFrom), this)
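
Two things worth noting in this hunk: `associatedFile` now traffics in `scala.tools.nsc.io.AbstractFile` directly instead of the removed `AbstractFileType` abstraction, and `isCoDefinedWith` no longer throws `InvalidCompanions`; it applies a cheap path comparison first and falls back to the expensive canonical-path normalization. That comparison pattern, restated over plain `java.io.File` (illustrative only):

    import java.io.File

    // Cheap possibly-wrong check first, expensive normalization only if it fails.
    def sameSourceFile(a: File, b: File): Boolean =
      a.getPath == b.getPath || a.getCanonicalPath == b.getCanonicalPath
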
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index 3a930a195b..6ef4c3f660 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -67,7 +67,7 @@ abstract class TreeInfo {
/** Is tree an expression which can be inlined without affecting program semantics?
*
- * Note that this is not called "isExprSafeToInline" since purity (lack of side-effects)
+ * Note that this is not called "isExprPure" since purity (lack of side-effects)
* is not the litmus test. References to modules and lazy vals are side-effecting,
* both because side-effecting code may be executed and because the first reference
* takes a different code path than all to follow; but they are safe to inline
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 0180ed4c4f..5a6d6ce7c7 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -8,7 +8,7 @@ package internal
import Flags._
import base.Attachments
-import collection.mutable.{ListBuffer, LinkedHashSet}
+import scala.collection.mutable.{ListBuffer, LinkedHashSet}
import util.Statistics
trait Trees extends api.Trees { self: SymbolTable =>
@@ -137,7 +137,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
override def freeTypes: List[FreeTypeSymbol] = freeSyms[FreeTypeSymbol](_.isFreeType, _.typeSymbol)
private def freeSyms[S <: Symbol](isFree: Symbol => Boolean, symOfType: Type => Symbol): List[S] = {
- val s = collection.mutable.LinkedHashSet[S]()
+ val s = scala.collection.mutable.LinkedHashSet[S]()
def addIfFree(sym: Symbol): Unit = if (sym != null && isFree(sym)) s += sym.asInstanceOf[S]
for (t <- this) {
addIfFree(t.symbol)
@@ -400,9 +400,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
def ApplyConstructor(tpt: Tree, args: List[Tree]) = Apply(Select(New(tpt), nme.CONSTRUCTOR), args)
- case class ApplyDynamic(qual: Tree, args: List[Tree])
- extends SymTree with TermTree with ApplyDynamicApi
- object ApplyDynamic extends ApplyDynamicExtractor
+ case class ApplyDynamic(qual: Tree, args: List[Tree]) extends SymTree with TermTree
case class Super(qual: Tree, mix: TypeName) extends TermTree with SuperApi {
override def symbol: Symbol = qual.symbol
@@ -496,7 +494,12 @@ trait Trees extends api.Trees { self: SymbolTable =>
def TypeTree(tp: Type): TypeTree = TypeTree() setType tp
- class StrictTreeCopier extends TreeCopierOps {
+ override type TreeCopier <: InternalTreeCopierOps
+ abstract class InternalTreeCopierOps extends TreeCopierOps {
+ def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]): ApplyDynamic
+ }
+
+ class StrictTreeCopier extends InternalTreeCopierOps {
def ClassDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], impl: Template) =
new ClassDef(mods, name.toTypeName, tparams, impl).copyAttrs(tree)
def PackageDef(tree: Tree, pid: RefTree, stats: List[Tree]) =
@@ -590,7 +593,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
new ExistentialTypeTree(tpt, whereClauses).copyAttrs(tree)
}
- class LazyTreeCopier extends TreeCopierOps {
+ class LazyTreeCopier extends InternalTreeCopierOps {
val treeCopy: TreeCopier = newStrictTreeCopier
def ClassDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], impl: Template) = tree match {
case t @ ClassDef(mods0, name0, tparams0, impl0)
@@ -1585,7 +1588,6 @@ trait Trees extends api.Trees { self: SymbolTable =>
implicit val GenericApplyTag = ClassTag[GenericApply](classOf[GenericApply])
implicit val TypeApplyTag = ClassTag[TypeApply](classOf[TypeApply])
implicit val ApplyTag = ClassTag[Apply](classOf[Apply])
- implicit val ApplyDynamicTag = ClassTag[ApplyDynamic](classOf[ApplyDynamic])
implicit val SuperTag = ClassTag[Super](classOf[Super])
implicit val ThisTag = ClassTag[This](classOf[This])
implicit val SelectTag = ClassTag[Select](classOf[Select])
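
[editor's note] The hunk above threads a new InternalTreeCopierOps through both copiers. As a rough, hypothetical illustration (made-up node types, not the compiler's Tree hierarchy), the difference between the strict and the lazy copier is that the former always allocates while the latter reuses the original node when no child changed.

    sealed trait Node
    case class Leaf(value: Int) extends Node
    case class Pair(left: Node, right: Node) extends Node

    trait CopierOps { def Pair(orig: Pair, left: Node, right: Node): Pair }

    object StrictCopier extends CopierOps {
      // always builds a fresh node
      def Pair(orig: Pair, left: Node, right: Node) = new Pair(left, right)
    }

    object LazyCopier extends CopierOps {
      // reuses the original node when nothing changed
      def Pair(orig: Pair, left: Node, right: Node) =
        if ((left eq orig.left) && (right eq orig.right)) orig
        else StrictCopier.Pair(orig, left, right)
    }

    object CopierDemo extends App {
      val p = Pair(Leaf(1), Leaf(2))
      assert(LazyCopier.Pair(p, p.left, p.right) eq p)       // reused
      assert(!(StrictCopier.Pair(p, p.left, p.right) eq p))  // fresh copy
    }
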
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 01f615c5cc..cda8a382de 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -317,25 +317,6 @@ trait Types extends api.Types { self: SymbolTable =>
def substituteSymbols(from: List[Symbol], to: List[Symbol]): Type = substSym(from, to)
def substituteTypes(from: List[Symbol], to: List[Type]): Type = subst(from, to)
- def isConcrete = {
- def notConcreteSym(sym: Symbol) =
- sym.isAbstractType && !sym.isExistential
-
- def notConcreteTpe(tpe: Type): Boolean = tpe match {
- case ThisType(_) => false
- case SuperType(_, _) => false
- case SingleType(pre, sym) => notConcreteSym(sym)
- case ConstantType(_) => false
- case TypeRef(_, sym, args) => notConcreteSym(sym) || (args exists notConcreteTpe)
- case RefinedType(_, _) => false
- case ExistentialType(_, _) => false
- case AnnotatedType(_, tp, _) => notConcreteTpe(tp)
- case _ => true
- }
-
- !notConcreteTpe(this)
- }
-
// the only thingies that we want to splice are: 1) type parameters, 2) abstract type members
// the thingies that we don't want to splice are: 1) concrete types (obviously), 2) existential skolems
def isSpliceable = {
@@ -1010,7 +991,11 @@ trait Types extends api.Types { self: SymbolTable =>
def toLongString = {
val str = toString
if (str == "type") widen.toString
- else if ((str endsWith ".type") && !typeSymbol.isModuleClass) str + " (with underlying type " + widen + ")"
+ else if ((str endsWith ".type") && !typeSymbol.isModuleClass)
+ widen match {
+ case RefinedType(_, _) => "" + widen
+ case _ => s"$str (with underlying type $widen)"
+ }
else str
}
@@ -1632,7 +1617,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def safeToString: String = parentsString(parents) + (
(if (settings.debug.value || parents.isEmpty || (decls.elems ne null))
- decls.mkString("{", "; ", "}") else "")
+ fullyInitializeScope(decls).mkString("{", "; ", "}") else "")
)
}
@@ -1819,7 +1804,6 @@ trait Types extends api.Types { self: SymbolTable =>
false
}))
}
-
override def kind = "RefinedType"
}
@@ -2005,9 +1989,11 @@ trait Types extends api.Types { self: SymbolTable =>
/** A nicely formatted string with newlines and such.
*/
def formattedToString: String =
- parents.mkString("\n with ") +
- (if (settings.debug.value || parents.isEmpty || (decls.elems ne null))
- decls.mkString(" {\n ", "\n ", "\n}") else "")
+ parents.mkString("\n with ") + (
+ if (settings.debug.value || parents.isEmpty || (decls.elems ne null))
+ fullyInitializeScope(decls).mkString(" {\n ", "\n ", "\n}")
+ else ""
+ )
}
object ClassInfoType extends ClassInfoTypeExtractor
@@ -2466,7 +2452,7 @@ trait Types extends api.Types { self: SymbolTable =>
def refinementString = (
if (sym.isStructuralRefinement) (
- decls filter (sym => sym.isPossibleInRefinement && sym.isPublic)
+ fullyInitializeScope(decls) filter (sym => sym.isPossibleInRefinement && sym.isPublic)
map (_.defString)
mkString("{", "; ", "}")
)
@@ -3468,9 +3454,9 @@ trait Types extends api.Types { self: SymbolTable =>
}
/** A temporary type representing the erasure of a user-defined value type.
- * Created during phase reasure, eliminated again in posterasure.
- * @param sym The value class symbol
- * @param underlying The underlying type before erasure
+ * Created during phase erasure, eliminated again in posterasure.
+ *
+ * @param original The underlying type before erasure
*/
abstract case class ErasedValueType(original: TypeRef) extends UniqueType {
override def safeToString = "ErasedValueType("+original+")"
@@ -3712,10 +3698,15 @@ trait Types extends api.Types { self: SymbolTable =>
* may or may not be poly? (It filched the standard "canonical creator" name.)
*/
object GenPolyType {
- def apply(tparams: List[Symbol], tpe: Type): Type = (
+ def apply(tparams: List[Symbol], tpe: Type): Type = {
+ tpe match {
+ case MethodType(_, _) =>
+ assert(tparams forall (_.isInvariant), "Trying to create a method with variant type parameters: " + ((tparams, tpe)))
+ case _ =>
+ }
if (tparams.nonEmpty) typeFun(tparams, tpe)
else tpe // it's okay to be forgiving here
- )
+ }
def unapply(tpe: Type): Option[(List[Symbol], Type)] = tpe match {
case PolyType(tparams, restpe) => Some((tparams, restpe))
case _ => Some((Nil, tpe))
@@ -3741,12 +3732,12 @@ trait Types extends api.Types { self: SymbolTable =>
*
* tpe1 where { tparams }
*
- * where `tpe1` is the result of extrapolating `tpe` wrt to `tparams`.
+ * where `tpe1` is the result of extrapolating `tpe` with respect to `tparams`.
* Extrapolating means that type variables in `tparams` occurring
* in covariant positions are replaced by upper bounds, (minus any
* SingletonClass markers), type variables in `tparams` occurring in
* contravariant positions are replaced by lower bounds, provided the
- * resulting type is legal wrt to stability, and does not contain any type
+ * resulting type is legal with regard to stability, and does not contain any type
* variable in `tparams`.
*
* The abstraction drops all type parameters that are not directly or
@@ -3944,13 +3935,15 @@ trait Types extends api.Types { self: SymbolTable =>
def avoidWiden: Boolean = avoidWidening
def addLoBound(tp: Type, isNumericBound: Boolean = false) {
- if (isNumericBound && isNumericValueType(tp)) {
- if (numlo == NoType || isNumericSubType(numlo, tp))
- numlo = tp
- else if (!isNumericSubType(tp, numlo))
- numlo = numericLoBound
+ if (!lobounds.contains(tp)) {
+ if (isNumericBound && isNumericValueType(tp)) {
+ if (numlo == NoType || isNumericSubType(numlo, tp))
+ numlo = tp
+ else if (!isNumericSubType(tp, numlo))
+ numlo = numericLoBound
+ }
+ else lobounds ::= tp
}
- else lobounds ::= tp
}
def checkWidening(tp: Type) {
@@ -3962,14 +3955,16 @@ trait Types extends api.Types { self: SymbolTable =>
}
def addHiBound(tp: Type, isNumericBound: Boolean = false) {
- checkWidening(tp)
- if (isNumericBound && isNumericValueType(tp)) {
- if (numhi == NoType || isNumericSubType(tp, numhi))
- numhi = tp
- else if (!isNumericSubType(numhi, tp))
- numhi = numericHiBound
+ if (!hibounds.contains(tp)) {
+ checkWidening(tp)
+ if (isNumericBound && isNumericValueType(tp)) {
+ if (numhi == NoType || isNumericSubType(tp, numhi))
+ numhi = tp
+ else if (!isNumericSubType(numhi, tp))
+ numhi = numericHiBound
+ }
+ else hibounds ::= tp
}
- else hibounds ::= tp
}
def isWithinBounds(tp: Type): Boolean =
@@ -5724,8 +5719,8 @@ trait Types extends api.Types { self: SymbolTable =>
/** Does this type have a prefix that begins with a type variable,
* or is it a refinement type? For type prefixes that fulfil this condition,
- * type selections with the same name of equal (wrt) =:= prefixes are
- * considered equal wrt =:=
+ * type selections with the same name on prefixes that are equal (as determined by `=:=`)
+ * are considered equal with respect to `=:=`.
*/
def beginsWithTypeVarOrIsRefined(tp: Type): Boolean = tp match {
case SingleType(pre, sym) =>
@@ -6372,7 +6367,7 @@ trait Types extends api.Types { self: SymbolTable =>
// Produce a single type for this frontier by merging the prefixes and arguments of those
// typerefs that share the same symbol: that symbol is the current maximal symbol for which
- // the invariant holds, i.e., the one that conveys most information wrt subtyping. Before
+ // the invariant holds, i.e., the one that conveys most information regarding subtyping. Before
// merging, strip targs that refer to bound tparams (when we're computing the lub of type
// constructors.) Also filter out all types that are a subtype of some other type.
if (isUniformFrontier) {
@@ -6421,7 +6416,7 @@ trait Types extends api.Types { self: SymbolTable =>
loop(initialBTSes)
}
- /** The minimal symbol (wrt Symbol.isLess) of a list of types */
+ /** The minimal symbol of a list of types (as determined by `Symbol.isLess`). */
private def minSym(tps: List[Type]): Symbol =
(tps.head.typeSymbol /: tps.tail) {
(sym1, tp2) => if (tp2.typeSymbol isLess sym1) tp2.typeSymbol else sym1
@@ -6547,7 +6542,7 @@ trait Types extends api.Types { self: SymbolTable =>
* test/continuations-neg/function3.scala goes into an infinite loop.
* (Even if the calls are to typeSymbolDirect.)
*/
- def isNumericSubType(tp1: Type, tp2: Type) = (
+ def isNumericSubType(tp1: Type, tp2: Type): Boolean = (
isNumericValueType(tp1)
&& isNumericValueType(tp2)
&& isNumericSubClass(tp1.typeSymbol, tp2.typeSymbol)
@@ -6703,7 +6698,7 @@ trait Types extends api.Types { self: SymbolTable =>
private var globalGlbDepth = 0
private final val globalGlbLimit = 2
- /** The greatest lower bound wrt <:< of a list of types */
+ /** The greatest lower bound of a list of types (as determined by `<:<`). */
def glb(ts: List[Type]): Type = elimSuper(ts) match {
case List() => AnyClass.tpe
case List(t) => t
@@ -6725,8 +6720,8 @@ trait Types extends api.Types { self: SymbolTable =>
case ts0 => glbNorm(ts0, depth)
}
- /** The greatest lower bound wrt <:< of a list of types, which have been normalized
- * wrt elimSuper */
+ /** The greatest lower bound of a list of types (as determined by `<:<`), which have been normalized
+ * with regard to `elimSuper`. */
protected def glbNorm(ts: List[Type], depth: Int): Type = {
def glb0(ts0: List[Type]): Type = ts0 match {
case List() => AnyClass.tpe
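
[editor's note] Among the changes above, the guard added to addLoBound/addHiBound keeps a TypeConstraint from accumulating the same bound twice. A simplified sketch of that pattern, with made-up names outside the compiler:

    // Hypothetical accumulator illustrating the `!lobounds.contains(tp)` guard.
    class BoundsAccumulator[T] {
      private var lobounds: List[T] = Nil

      def addLoBound(tp: T): Unit =
        if (!lobounds.contains(tp)) lobounds ::= tp   // record each bound at most once

      def bounds: List[T] = lobounds
    }

    object BoundsDemo extends App {
      val acc = new BoundsAccumulator[String]
      acc.addLoBound("Int"); acc.addLoBound("Int"); acc.addLoBound("AnyVal")
      println(acc.bounds)   // List(AnyVal, Int) -- the duplicate "Int" was dropped
    }
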
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
index 2e00316d5b..a9994a037f 100644
--- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -14,13 +14,13 @@ import java.lang.Double.longBitsToDouble
import Flags._
import PickleFormat._
import scala.collection.{ mutable, immutable }
-import collection.mutable.ListBuffer
-import annotation.switch
+import scala.collection.mutable.ListBuffer
+import scala.annotation.switch
/** @author Martin Odersky
* @version 1.0
*/
-abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
+abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ {
val global: SymbolTable
import global._
@@ -446,7 +446,7 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
private def readArrayAnnot() = {
readByte() // skip the `annotargarray` tag
val end = readNat() + readIndex
- until(end, () => readClassfileAnnotArg(readNat())).toArray(ClassfileAnnotArgTag)
+ until(end, () => readClassfileAnnotArg(readNat())).toArray(JavaArgumentTag)
}
protected def readClassfileAnnotArg(i: Int): ClassfileAnnotArg = bytes(index(i)) match {
case ANNOTINFO => NestedAnnotArg(at(i, readAnnotation))
diff --git a/src/reflect/scala/reflect/internal/transform/Transforms.scala b/src/reflect/scala/reflect/internal/transform/Transforms.scala
index c4c5dc3a1c..71cc80895d 100644
--- a/src/reflect/scala/reflect/internal/transform/Transforms.scala
+++ b/src/reflect/scala/reflect/internal/transform/Transforms.scala
@@ -2,7 +2,7 @@ package scala.reflect
package internal
package transform
-import language.existentials
+import scala.language.existentials
trait Transforms { self: SymbolTable =>
@@ -38,4 +38,4 @@ trait Transforms { self: SymbolTable =>
def transformedType(tpe: Type) =
erasure.scalaErasure(uncurry.uncurry(tpe))
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/internal/util/HashSet.scala b/src/reflect/scala/reflect/internal/util/HashSet.scala
index 51e540e235..0d0f16372c 100644
--- a/src/reflect/scala/reflect/internal/util/HashSet.scala
+++ b/src/reflect/scala/reflect/internal/util/HashSet.scala
@@ -13,7 +13,7 @@ object HashSet {
new HashSet[T](label, initialCapacity)
}
-class HashSet[T >: Null <: AnyRef](val label: String, initialCapacity: Int) extends Set[T] with collection.generic.Clearable {
+class HashSet[T >: Null <: AnyRef](val label: String, initialCapacity: Int) extends Set[T] with scala.collection.generic.Clearable {
private var used = 0
private var table = new Array[AnyRef](initialCapacity)
private def index(x: Int): Int = math.abs(x % table.length)
diff --git a/src/reflect/scala/reflect/internal/util/Position.scala b/src/reflect/scala/reflect/internal/util/Position.scala
index e4c6e4aca1..0268881be7 100644
--- a/src/reflect/scala/reflect/internal/util/Position.scala
+++ b/src/reflect/scala/reflect/internal/util/Position.scala
@@ -6,9 +6,9 @@
package scala.reflect.internal.util
-import reflect.ClassTag
-import reflect.base.Attachments
-import reflect.api.PositionApi
+import scala.reflect.ClassTag
+import scala.reflect.base.Attachments
+import scala.reflect.api.PositionApi
object Position {
val tabInc = 8
@@ -274,4 +274,4 @@ class TransparentPosition(source: SourceFile, start: Int, point: Int, end: Int)
override def isTransparent = true
override def makeTransparent = this
override def show = "<"+start+":"+end+">"
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/internal/util/SourceFile.scala b/src/reflect/scala/reflect/internal/util/SourceFile.scala
index 747c1ad298..9a71e02e08 100644
--- a/src/reflect/scala/reflect/internal/util/SourceFile.scala
+++ b/src/reflect/scala/reflect/internal/util/SourceFile.scala
@@ -8,7 +8,7 @@ package scala.reflect.internal.util
import scala.tools.nsc.io.{ AbstractFile, VirtualFile }
import scala.collection.mutable.ArrayBuffer
-import annotation.tailrec
+import scala.annotation.tailrec
import java.util.regex.Pattern
import java.io.IOException
import scala.reflect.internal.Chars._
@@ -107,15 +107,15 @@ class BatchSourceFile(val file : AbstractFile, val content0: Array[Char]) extend
def this(sourceName: String, cs: Seq[Char]) = this(new VirtualFile(sourceName), cs.toArray)
def this(file: AbstractFile, cs: Seq[Char]) = this(file, cs.toArray)
- // If non-whitespace tokens run all the way up to EOF,
- // positions go wrong because the correct end of the last
- // token cannot be used as an index into the char array.
- // The least painful way to address this was to add a
- // newline to the array.
- val content = (
- if (content0.length == 0 || !content0.last.isWhitespace)
- content0 :+ '\n'
- else content0
+ // If non-whitespace tokens run all the way up to EOF,
+ // positions go wrong because the correct end of the last
+ // token cannot be used as an index into the char array.
+ // The least painful way to address this was to add a
+ // newline to the array.
+ val content = (
+ if (content0.length == 0 || !content0.last.isWhitespace)
+ content0 :+ '\n'
+ else content0
)
val length = content.length
def start = 0
diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala
index f7b81ca252..2c90d2d525 100644
--- a/src/reflect/scala/reflect/internal/util/Statistics.scala
+++ b/src/reflect/scala/reflect/internal/util/Statistics.scala
@@ -1,6 +1,6 @@
package scala.reflect.internal.util
-import collection.mutable
+import scala.collection.mutable
object Statistics {
diff --git a/src/reflect/scala/reflect/internal/util/TableDef.scala b/src/reflect/scala/reflect/internal/util/TableDef.scala
index 2e60ce3bcc..8e2bcc2ff7 100644
--- a/src/reflect/scala/reflect/internal/util/TableDef.scala
+++ b/src/reflect/scala/reflect/internal/util/TableDef.scala
@@ -1,7 +1,7 @@
package scala.reflect.internal.util
import TableDef._
-import language.postfixOps
+import scala.language.postfixOps
/** A class for representing tabular data in a way that preserves
* its inner beauty. See Exceptional for an example usage.
diff --git a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
index cecf8e4658..fa83f70f3a 100644
--- a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
+++ b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
@@ -2,7 +2,7 @@ package scala.reflect.internal
package util
import scala.collection.{ mutable, immutable }
-import language.postfixOps
+import scala.language.postfixOps
trait TraceSymbolActivity {
val global: SymbolTable
@@ -108,12 +108,12 @@ trait TraceSymbolActivity {
sym.name.decode + "#" + sym.id
}
- private def freq[T, U](xs: collection.Traversable[T])(fn: T => U): List[(U, Int)] = {
+ private def freq[T, U](xs: scala.collection.Traversable[T])(fn: T => U): List[(U, Int)] = {
val ys = xs groupBy fn mapValues (_.size)
ys.toList sortBy (-_._2)
}
- private def showMapFreq[T](xs: collection.Map[T, Traversable[_]])(showFn: T => String) {
+ private def showMapFreq[T](xs: scala.collection.Map[T, Traversable[_]])(showFn: T => String) {
xs.mapValues(_.size).toList.sortBy(-_._2) take 100 foreach { case (k, size) =>
show(size, showFn(k))
}
diff --git a/src/reflect/scala/reflect/macros/Aliases.scala b/src/reflect/scala/reflect/macros/Aliases.scala
index ad100d7e89..eff7f34b02 100644
--- a/src/reflect/scala/reflect/macros/Aliases.scala
+++ b/src/reflect/scala/reflect/macros/Aliases.scala
@@ -17,12 +17,12 @@ trait Aliases {
type Expr[+T] = universe.Expr[T]
val Expr = universe.Expr
- type AbsTypeTag[T] = universe.AbsTypeTag[T]
+ type WeakTypeTag[T] = universe.WeakTypeTag[T]
type TypeTag[T] = universe.TypeTag[T]
- val AbsTypeTag = universe.AbsTypeTag
+ val WeakTypeTag = universe.WeakTypeTag
val TypeTag = universe.TypeTag
- def absTypeTag[T](implicit attag: AbsTypeTag[T]) = attag
+ def weakTypeTag[T](implicit attag: WeakTypeTag[T]) = attag
def typeTag[T](implicit ttag: TypeTag[T]) = ttag
- def absTypeOf[T](implicit attag: AbsTypeTag[T]): Type = attag.tpe
+ def weakTypeOf[T](implicit attag: WeakTypeTag[T]): Type = attag.tpe
def typeOf[T](implicit ttag: TypeTag[T]): Type = ttag.tpe
}
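
[editor's note] A hedged sketch of how the renamed aliases are typically used from a macro implementation (Scala 2.10-era macro API); `describe`/`describeImpl` are made-up names, not part of this patch.

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object TagDemo {
      def describe[T]: String = macro describeImpl[T]

      def describeImpl[T: c.WeakTypeTag](c: Context): c.Expr[String] = {
        import c.universe._
        // weakTypeOf succeeds even when T is not fully known at the call site,
        // which is what the Weak- prefix is meant to signal
        val tpe = c.weakTypeOf[T]
        c.Expr[String](Literal(Constant(tpe.toString)))
      }
    }
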
diff --git a/src/reflect/scala/reflect/macros/Exprs.scala b/src/reflect/scala/reflect/macros/Exprs.scala
index ceaab06d12..280d5508c8 100644
--- a/src/reflect/scala/reflect/macros/Exprs.scala
+++ b/src/reflect/scala/reflect/macros/Exprs.scala
@@ -4,5 +4,5 @@ package macros
trait Exprs {
self: Context =>
- def Expr[T: AbsTypeTag](tree: Tree): Expr[T]
+ def Expr[T: WeakTypeTag](tree: Tree): Expr[T]
}
diff --git a/src/reflect/scala/reflect/macros/Infrastructure.scala b/src/reflect/scala/reflect/macros/Infrastructure.scala
index a8a8b814b1..80153ff257 100644
--- a/src/reflect/scala/reflect/macros/Infrastructure.scala
+++ b/src/reflect/scala/reflect/macros/Infrastructure.scala
@@ -43,7 +43,7 @@ trait Infrastructure {
* val importer = ru.mkImporter(c.universe).asInstanceOf[ru.Importer { val from: c.universe.type }]
* val tree = c.resetAllAttrs(x.tree.duplicate)
* val imported = importer.importTree(tree)
- * val valueOfX = toolBox.runExpr(imported).asInstanceOf[T]
+ * val valueOfX = toolBox.eval(imported).asInstanceOf[T]
* ...
* }
*/
@@ -77,23 +77,4 @@ trait Infrastructure {
/** Returns a macro definition which triggered this macro expansion.
*/
val currentMacro: Symbol
-
- // todo. redo caches as discussed on Reflecting Meeting 2012/03/29
- // https://docs.google.com/document/d/1oUZGQpdt2qwioTlJcSt8ZFQwVLTvpxn8xa67P8OGVpU/edit
-
- /** A cache shared by all invocations of all macros across all compilation runs.
- *
- * Needs to be used with extreme care, since memory leaks here will swiftly crash the presentation compiler.
- * For example, Scala IDE typically launches a compiler run on every edit action so there might be hundreds of runs per minute.
- */
- val globalCache: collection.mutable.Map[Any, Any]
-
- /** A cache shared by all invocations of the same macro within a single compilation run.
- *
- * This cache is cleared automatically after a compilation run is completed or abandoned.
- * It is also specific to a particular macro definition.
- *
- * To share data between different macros and/or different compilation runs, use ``globalCache''.
- */
- val cache: collection.mutable.Map[Any, Any]
}
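
[editor's note] The doc comment above now refers to ToolBox.eval (the successor of runExpr). A minimal runtime sketch of that API using the post-rename method names, assuming scala-compiler is on the classpath:

    import scala.reflect.runtime.{currentMirror => cm}
    import scala.tools.reflect.ToolBox   // enables cm.mkToolBox()

    object ToolBoxDemo extends App {
      val tb   = cm.mkToolBox()
      val tree = tb.parse("List(1, 2, 3).map(_ * 2)")
      println(tb.eval(tree))   // List(2, 4, 6)
    }
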
diff --git a/src/reflect/scala/reflect/macros/Parsers.scala b/src/reflect/scala/reflect/macros/Parsers.scala
index ea87c5842e..1742d07b60 100644
--- a/src/reflect/scala/reflect/macros/Parsers.scala
+++ b/src/reflect/scala/reflect/macros/Parsers.scala
@@ -5,7 +5,7 @@ trait Parsers {
self: Context =>
/** .. */
- // todo. distinguish between `parseExpr` and `parse`
+ // todo. distinguish between `parse` and `parse`
def parse(code: String): Tree
/** Represents an error during parsing
diff --git a/src/reflect/scala/reflect/macros/Reifiers.scala b/src/reflect/scala/reflect/macros/Reifiers.scala
index 1bee17d548..bdc6687edc 100644
--- a/src/reflect/scala/reflect/macros/Reifiers.scala
+++ b/src/reflect/scala/reflect/macros/Reifiers.scala
@@ -1,6 +1,8 @@
package scala.reflect
package macros
+import scala.reflect.api.PositionApi
+
trait Reifiers {
self: Context =>
@@ -86,6 +88,6 @@ trait Reifiers {
// made these guys non path-dependent, otherwise exception handling quickly becomes a mess
-case class ReificationError(val pos: reflect.api.PositionApi, val msg: String) extends Throwable(msg)
+case class ReificationError(val pos: PositionApi, val msg: String) extends Throwable(msg)
-case class UnexpectedReificationError(val pos: reflect.api.PositionApi, val msg: String, val cause: Throwable = null) extends Throwable(msg, cause)
+case class UnexpectedReificationError(val pos: PositionApi, val msg: String, val cause: Throwable = null) extends Throwable(msg, cause)
diff --git a/src/reflect/scala/reflect/macros/TypeTags.scala b/src/reflect/scala/reflect/macros/TypeTags.scala
index 8f590d1de4..2f15e37f6a 100644
--- a/src/reflect/scala/reflect/macros/TypeTags.scala
+++ b/src/reflect/scala/reflect/macros/TypeTags.scala
@@ -4,6 +4,6 @@ package macros
trait TypeTags {
self: Context =>
- def AbsTypeTag[T](tpe: Type): AbsTypeTag[T]
+ def WeakTypeTag[T](tpe: Type): WeakTypeTag[T]
def TypeTag[T](tpe: Type): TypeTag[T]
}
diff --git a/src/reflect/scala/reflect/macros/Universe.scala b/src/reflect/scala/reflect/macros/Universe.scala
index 8d9711dedd..7fa2e7cbae 100644
--- a/src/reflect/scala/reflect/macros/Universe.scala
+++ b/src/reflect/scala/reflect/macros/Universe.scala
@@ -10,7 +10,7 @@ abstract class Universe extends scala.reflect.api.Universe {
def attachments: base.Attachments { type Pos = Position }
/** ... */
- def addAttachment(attachment: Any): AttachableApi.this.type
+ def updateAttachment[T: ClassTag](attachment: T): AttachableApi.this.type
/** ... */
def removeAttachment[T: ClassTag]: AttachableApi.this.type
@@ -24,9 +24,15 @@ abstract class Universe extends scala.reflect.api.Universe {
*/
trait SymbolContextApi extends SymbolApi with AttachableApi { self: Symbol =>
+ def deSkolemize: Symbol
+
+ /** The position of this symbol
+ */
+ def pos: Position
+
def setTypeSignature(tpe: Type): Symbol
- def setAnnotations(annots: AnnotationInfo*): Symbol
+ def setAnnotations(annots: Annotation*): Symbol
def setName(name: Name): Symbol
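
[editor's note] A hedged sketch of the attachment API shown above, written from inside a macro implementation and assuming trees in the macro universe expose the same AttachableApi as symbols; `Visited`, `tag` and `tagImpl` are made-up names. updateAttachment keeps at most one attachment per type, and removeAttachment[T] drops it again.

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    case class Visited(count: Int)   // hypothetical payload type

    object AttachDemo {
      def tag(x: Int): Int = macro tagImpl

      def tagImpl(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
        import c.universe._
        // store compiler-internal metadata on the argument's tree ...
        val marked = x.tree.updateAttachment(Visited(1))
        // ... and read it back; get is keyed by the attachment's class
        println(marked.attachments.get[Visited])
        c.Expr[Int](marked)
      }
    }
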
diff --git a/src/reflect/scala/reflect/runtime/AbstractFile.scala b/src/reflect/scala/reflect/runtime/AbstractFile.scala
deleted file mode 100644
index 0f88af1b0a..0000000000
--- a/src/reflect/scala/reflect/runtime/AbstractFile.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package scala.reflect
-package runtime
-
-class AbstractFile(val jfile: java.io.File) extends internal.AbstractFileApi {
- def path: String = jfile.getPath()
- def canonicalPath: String = jfile.getCanonicalPath()
-}
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index 967ac69148..0d9e90d3a6 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -9,21 +9,22 @@ import java.lang.reflect.{
Method => jMethod, Constructor => jConstructor, Modifier => jModifier, Field => jField,
Member => jMember, Type => jType, TypeVariable => jTypeVariable, Array => jArray,
GenericDeclaration, GenericArrayType, ParameterizedType, WildcardType, AnnotatedElement }
+import java.lang.annotation.{Annotation => jAnnotation}
import java.io.IOException
import internal.MissingRequirementError
import internal.pickling.ByteCodecs
import internal.ClassfileConstants._
import internal.pickling.UnPickler
-import collection.mutable.{ HashMap, ListBuffer }
+import scala.collection.mutable.{ HashMap, ListBuffer }
import internal.Flags._
//import scala.tools.nsc.util.ScalaClassLoader
//import scala.tools.nsc.util.ScalaClassLoader._
import ReflectionUtils.{staticSingletonInstance, innerSingletonInstance}
-import language.existentials
+import scala.language.existentials
import scala.runtime.{ScalaRunTime, BoxesRunTime}
import scala.reflect.internal.util.Collections._
-trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: SymbolTable =>
+trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { thisUniverse: SymbolTable =>
private lazy val mirrors = new WeakHashMap[ClassLoader, WeakReference[JavaMirror]]()
@@ -62,9 +63,9 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
class JavaMirror(owner: Symbol,
/** Class loader that is a mastermind behind the reflexive mirror */
val classLoader: ClassLoader
- ) extends Roots(owner) with super.JavaMirror { wholemirror =>
+ ) extends Roots(owner) with super.JavaMirror { thisMirror =>
- val universe: self.type = self
+ val universe: thisUniverse.type = thisUniverse
import definitions._
@@ -120,19 +121,69 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
// ----------- Implementations of mirror operations and classes -------------------
- private def ErrorInnerClass(wannabe: Symbol) = throw new ScalaReflectionException(s"$wannabe is an inner class, use reflectClass on an InstanceMirror to obtain its ClassMirror")
- private def ErrorInnerModule(wannabe: Symbol) = throw new ScalaReflectionException(s"$wannabe is an inner module, use reflectModule on an InstanceMirror to obtain its ModuleMirror")
- private def ErrorStaticClass(wannabe: Symbol) = throw new ScalaReflectionException(s"$wannabe is a static class, use reflectClass on a RuntimeMirror to obtain its ClassMirror")
- private def ErrorStaticModule(wannabe: Symbol) = throw new ScalaReflectionException(s"$wannabe is a static module, use reflectModule on a RuntimeMirror to obtain its ModuleMirror")
- private def ErrorNotMember(wannabe: Symbol, owner: Symbol) = throw new ScalaReflectionException(s"expected a member of $owner, you provided ${wannabe.kind} ${wannabe.fullName}")
- private def ErrorNotField(wannabe: Symbol) = throw new ScalaReflectionException(s"expected a field or an accessor method symbol, you provided $wannabe")
- private def ErrorNonExistentField(wannabe: Symbol) = throw new ScalaReflectionException(s"""
- |Scala field ${wannabe.name} isn't represented as a Java field, neither it has a Java accessor method
+ private def ErrorInnerClass(sym: Symbol) = throw new ScalaReflectionException(s"$sym is an inner class, use reflectClass on an InstanceMirror to obtain its ClassMirror")
+ private def ErrorInnerModule(sym: Symbol) = throw new ScalaReflectionException(s"$sym is an inner module, use reflectModule on an InstanceMirror to obtain its ModuleMirror")
+ private def ErrorStaticClass(sym: Symbol) = throw new ScalaReflectionException(s"$sym is a static class, use reflectClass on a RuntimeMirror to obtain its ClassMirror")
+ private def ErrorStaticModule(sym: Symbol) = throw new ScalaReflectionException(s"$sym is a static module, use reflectModule on a RuntimeMirror to obtain its ModuleMirror")
+ private def ErrorNotMember(sym: Symbol, owner: Symbol) = throw new ScalaReflectionException(s"expected a member of $owner, you provided ${sym.kindString} ${sym.fullName}")
+ private def ErrorNotField(sym: Symbol) = throw new ScalaReflectionException(s"expected a field or an accessor method symbol, you provided $sym")
+ private def ErrorNonExistentField(sym: Symbol) = throw new ScalaReflectionException(s"""
+ |Scala field ${sym.name} isn't represented as a Java field, nor does it have a Java accessor method
|note that private parameters of class constructors don't get mapped onto fields and/or accessors,
|unless they are used outside of their declaring constructors.
""".trim.stripMargin)
- private def ErrorSetImmutableField(wannabe: Symbol) = throw new ScalaReflectionException(s"cannot set an immutable field ${wannabe.name}")
- private def ErrorNotConstructor(wannabe: Symbol, owner: Symbol) = throw new ScalaReflectionException(s"expected a constructor of $owner, you provided $wannabe")
+ private def ErrorSetImmutableField(sym: Symbol) = throw new ScalaReflectionException(s"cannot set an immutable field ${sym.name}")
+ private def ErrorNotConstructor(sym: Symbol, owner: Symbol) = throw new ScalaReflectionException(s"expected a constructor of $owner, you provided $sym")
+ private def ErrorFree(member: Symbol, freeType: Symbol) = throw new ScalaReflectionException(s"cannot reflect ${member.kindString} ${member.name}, because it's a member of a weak type ${freeType.name}")
+
+ /** Helper functions for extracting typed values from a (Class[_], Any)
+ * representing an annotation argument.
+ */
+ private object toAnnotArg {
+ val StringClass = classOf[String]
+ val ClassClass = classOf[jClass[_]]
+ object PrimitiveClass { def unapply(x: jClass[_]) = x.isPrimitive }
+ object EnumClass { def unapply(x: jClass[_]) = x.isEnum }
+ object ArrayClass { def unapply(x: jClass[_]) = x.isArray }
+ object AnnotationClass { def unapply(x: jClass[_]) = x.isAnnotation }
+
+ object ConstantArg {
+ def enumToSymbol(enum: Enum[_]): Symbol = {
+ val staticPartOfEnum = classToScala(enum.getClass).companionSymbol
+ staticPartOfEnum.typeSignature.declaration(enum.name: TermName)
+ }
+
+ def unapply(schemaAndValue: (jClass[_], Any)): Option[Any] = schemaAndValue match {
+ case (StringClass | PrimitiveClass(), value) => Some(value)
+ case (ClassClass, value: jClass[_]) => Some(classToScala(value).toType)
+ case (EnumClass(), value: Enum[_]) => Some(enumToSymbol(value))
+ case _ => None
+ }
+ }
+ def apply(schemaAndValue: (jClass[_], Any)): ClassfileAnnotArg = schemaAndValue match {
+ case ConstantArg(value) => LiteralAnnotArg(Constant(value))
+ case (clazz @ ArrayClass(), value: Array[_]) => ArrayAnnotArg(value map (x => apply(ScalaRunTime.arrayElementClass(clazz) -> x)))
+ case (AnnotationClass(), value: jAnnotation) => NestedAnnotArg(JavaAnnotationProxy(value))
+ case _ => UnmappableAnnotArg
+ }
+ }
+ private case class JavaAnnotationProxy(jann: jAnnotation) extends AnnotationInfo {
+ override val atp: Type = classToScala(jann.annotationType).toType
+ override val args: List[Tree] = Nil
+ override def original: Tree = EmptyTree
+ override def setOriginal(t: Tree): this.type = throw new Exception("setOriginal inapplicable for " + this)
+ override def pos: Position = NoPosition
+ override def setPos(pos: Position): this.type = throw new Exception("setPos inapplicable for " + this)
+ override def toString = completeAnnotationToString(this)
+
+ // todo. find out the exact order of assocs as they are written in the class file
+ // currently I'm simply sorting the methods to guarantee stability of the output
+ override lazy val assocs: List[(Name, ClassfileAnnotArg)] = (
+ jann.annotationType.getDeclaredMethods.sortBy(_.getName).toList map (m =>
+ (m.getName: TermName) -> toAnnotArg(m.getReturnType -> m.invoke(jann))
+ )
+ )
+ }
def reflect[T: ClassTag](obj: T): InstanceMirror = new JavaInstanceMirror(obj)
@@ -154,13 +205,30 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
def moduleSymbol(rtcls: RuntimeClass): ModuleSymbol = classToScala(rtcls).companionModule.asModule
- private def checkMemberOf(wannabe: Symbol, owner: ClassSymbol) {
- if (wannabe.owner == AnyClass || wannabe.owner == AnyRefClass || wannabe.owner == ObjectClass) {
+ private def ensuringNotFree(sym: Symbol)(body: => Any) {
+ val freeType = sym.ownerChain find (_.isFreeType)
+ freeType match {
+ case Some(freeType) => ErrorFree(sym, freeType)
+ case _ => body
+ }
+ }
+
+ private def checkMemberOf(sym: Symbol, owner: ClassSymbol) {
+ if (sym.owner == AnyClass || sym.owner == AnyRefClass || sym.owner == ObjectClass) {
// do nothing
- } else if (wannabe.owner == AnyValClass) {
- if (!owner.isPrimitiveValueClass && !owner.isDerivedValueClass) ErrorNotMember(wannabe, owner)
+ } else if (sym.owner == AnyValClass) {
+ if (!owner.isPrimitiveValueClass && !owner.isDerivedValueClass) ErrorNotMember(sym, owner)
} else {
- if (!(owner.info.baseClasses contains wannabe.owner)) ErrorNotMember(wannabe, owner)
+ ensuringNotFree(sym) {
+ if (!(owner.info.baseClasses contains sym.owner)) ErrorNotMember(sym, owner)
+ }
+ }
+ }
+
+ private def checkConstructorOf(sym: Symbol, owner: ClassSymbol) {
+ if (!sym.isClassConstructor) ErrorNotConstructor(sym, owner)
+ ensuringNotFree(sym) {
+ if (!owner.info.decls.toList.contains(sym)) ErrorNotConstructor(sym, owner)
}
}
@@ -172,7 +240,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
private class JavaInstanceMirror[T: ClassTag](val instance: T)
extends InstanceMirror {
- def symbol = wholemirror.classSymbol(preciseClass(instance))
+ def symbol = thisMirror.classSymbol(preciseClass(instance))
def reflectField(field: TermSymbol): FieldMirror = {
checkMemberOf(field, symbol)
if ((field.isMethod && !field.isAccessor) || field.isModule) ErrorNotField(field)
@@ -247,14 +315,13 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
// the "symbol == Any_getClass || symbol == Object_getClass" test doesn't cut it
// because both AnyVal and its primitive descendants define their own getClass methods
private def isGetClass(meth: MethodSymbol) = meth.name.toString == "getClass" && meth.params.flatten.isEmpty
- private def isMagicPrimitiveMethod(meth: MethodSymbol) = meth.owner.isPrimitiveValueClass
- private def isStringConcat(meth: MethodSymbol) = meth == String_+ || (isMagicPrimitiveMethod(meth) && meth.returnType =:= StringClass.toType)
- lazy val magicMethodOwners = Set[Symbol](AnyClass, AnyValClass, AnyRefClass, ObjectClass, ArrayClass) ++ ScalaPrimitiveValueClasses
- lazy val nonMagicObjectMethods = Set[Symbol](Object_clone, Object_equals, Object_finalize, Object_hashCode, Object_toString,
- Object_notify, Object_notifyAll) ++ ObjectClass.info.member(nme.wait_).asTerm.alternatives.map(_.asMethod)
- private def isMagicMethod(meth: MethodSymbol): Boolean = {
- if (isGetClass(meth) || isStringConcat(meth) || isMagicPrimitiveMethod(meth) || meth == Predef_classOf || meth.isTermMacro) return true
- magicMethodOwners(meth.owner) && !nonMagicObjectMethods(meth)
+ private def isStringConcat(meth: MethodSymbol) = meth == String_+ || (meth.owner.isPrimitiveValueClass && meth.returnType =:= StringClass.toType)
+ lazy val bytecodelessMethodOwners = Set[Symbol](AnyClass, AnyValClass, AnyRefClass, ObjectClass, ArrayClass) ++ ScalaPrimitiveValueClasses
+ lazy val bytecodefulObjectMethods = Set[Symbol](Object_clone, Object_equals, Object_finalize, Object_hashCode, Object_toString,
+ Object_notify, Object_notifyAll) ++ ObjectClass.info.member(nme.wait_).asTerm.alternatives.map(_.asMethod)
+ private def isBytecodelessMethod(meth: MethodSymbol): Boolean = {
+ if (isGetClass(meth) || isStringConcat(meth) || meth.owner.isPrimitiveValueClass || meth == Predef_classOf || meth.isTermMacro) return true
+ bytecodelessMethodOwners(meth.owner) && !bytecodefulObjectMethods(meth)
}
// unlike other mirrors, method mirrors are created by a factory
@@ -262,7 +329,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
// therefore we move special cases into separate subclasses
// rather than have them on a hot path them in a unified implementation of the `apply` method
private def mkJavaMethodMirror[T: ClassTag](receiver: T, symbol: MethodSymbol): JavaMethodMirror = {
- if (isMagicMethod(symbol)) new JavaMagicMethodMirror(receiver, symbol)
+ if (isBytecodelessMethod(symbol)) new JavaBytecodelessMethodMirror(receiver, symbol)
else if (symbol.params.flatten exists (p => isByNameParamType(p.info))) new JavaByNameMethodMirror(receiver, symbol)
else new JavaVanillaMethodMirror(receiver, symbol)
}
@@ -297,11 +364,11 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
}
}
- private class JavaMagicMethodMirror[T: ClassTag](val receiver: T, symbol: MethodSymbol)
+ private class JavaBytecodelessMethodMirror[T: ClassTag](val receiver: T, symbol: MethodSymbol)
extends JavaMethodMirror(symbol) {
def apply(args: Any*): Any = {
// checking type conformance is too much of a hassle, so we don't do it here
- // actually it's not even necessary, because we manually dispatch arguments to magic methods below
+ // actually it's not even necessary, because we manually dispatch arguments below
val params = symbol.paramss.flatten
val perfectMatch = args.length == params.length
// todo. this doesn't account for multiple vararg parameter lists
@@ -319,36 +386,36 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
def objArgs = args.asInstanceOf[Seq[AnyRef]]
def fail(msg: String) = throw new ScalaReflectionException(msg + ", it cannot be invoked with mirrors")
- def invokeMagicPrimitiveMethod = {
+ def invokePrimitiveMethod = {
val jmeths = classOf[BoxesRunTime].getDeclaredMethods.filter(_.getName == nme.primitiveMethodName(symbol.name).toString)
assert(jmeths.length == 1, jmeths.toList)
jinvoke(jmeths.head, null, objReceiver +: objArgs)
}
symbol match {
- case Any_== | Object_== => ScalaRunTime.inlinedEquals(objReceiver, objArg0)
- case Any_!= | Object_!= => !ScalaRunTime.inlinedEquals(objReceiver, objArg0)
- case Any_## | Object_## => ScalaRunTime.hash(objReceiver)
- case Any_equals => receiver.equals(objArg0)
- case Any_hashCode => receiver.hashCode
- case Any_toString => receiver.toString
- case Object_eq => objReceiver eq objArg0
- case Object_ne => objReceiver ne objArg0
- case Object_synchronized => objReceiver.synchronized(objArg0)
- case sym if isGetClass(sym) => preciseClass(receiver)
- case Any_asInstanceOf => fail("Any.asInstanceOf requires a type argument")
- case Any_isInstanceOf => fail("Any.isInstanceOf requires a type argument")
- case Object_asInstanceOf => fail("AnyRef.$asInstanceOf is an internal method")
- case Object_isInstanceOf => fail("AnyRef.$isInstanceOf is an internal method")
- case Array_length => ScalaRunTime.array_length(objReceiver)
- case Array_apply => ScalaRunTime.array_apply(objReceiver, args(0).asInstanceOf[Int])
- case Array_update => ScalaRunTime.array_update(objReceiver, args(0).asInstanceOf[Int], args(1))
- case Array_clone => ScalaRunTime.array_clone(objReceiver)
- case sym if isStringConcat(sym) => receiver.toString + objArg0
- case sym if isMagicPrimitiveMethod(sym) => invokeMagicPrimitiveMethod
- case sym if sym == Predef_classOf => fail("Predef.classOf is a compile-time function")
- case sym if sym.isTermMacro => fail(s"${symbol.fullName} is a macro, i.e. a compile-time function")
- case _ => assert(false, this)
+ case Any_== | Object_== => ScalaRunTime.inlinedEquals(objReceiver, objArg0)
+ case Any_!= | Object_!= => !ScalaRunTime.inlinedEquals(objReceiver, objArg0)
+ case Any_## | Object_## => ScalaRunTime.hash(objReceiver)
+ case Any_equals => receiver.equals(objArg0)
+ case Any_hashCode => receiver.hashCode
+ case Any_toString => receiver.toString
+ case Object_eq => objReceiver eq objArg0
+ case Object_ne => objReceiver ne objArg0
+ case Object_synchronized => objReceiver.synchronized(objArg0)
+ case sym if isGetClass(sym) => preciseClass(receiver)
+ case Any_asInstanceOf => fail("Any.asInstanceOf requires a type argument")
+ case Any_isInstanceOf => fail("Any.isInstanceOf requires a type argument")
+ case Object_asInstanceOf => fail("AnyRef.$asInstanceOf is an internal method")
+ case Object_isInstanceOf => fail("AnyRef.$isInstanceOf is an internal method")
+ case Array_length => ScalaRunTime.array_length(objReceiver)
+ case Array_apply => ScalaRunTime.array_apply(objReceiver, args(0).asInstanceOf[Int])
+ case Array_update => ScalaRunTime.array_update(objReceiver, args(0).asInstanceOf[Int], args(1))
+ case Array_clone => ScalaRunTime.array_clone(objReceiver)
+ case sym if isStringConcat(sym) => receiver.toString + objArg0
+ case sym if sym.owner.isPrimitiveValueClass => invokePrimitiveMethod
+ case sym if sym == Predef_classOf => fail("Predef.classOf is a compile-time function")
+ case sym if sym.isTermMacro => fail(s"${symbol.fullName} is a macro, i.e. a compile-time function")
+ case _ => assert(false, this)
}
}
}
@@ -386,8 +453,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
def erasure = symbol
def isStatic = false
def reflectConstructor(constructor: MethodSymbol) = {
- if (!constructor.isClassConstructor) ErrorNotConstructor(constructor, symbol)
- if (!symbol.info.decls.toList.contains(constructor)) ErrorNotConstructor(constructor, symbol)
+ checkConstructorOf(constructor, symbol)
new JavaConstructorMirror(outer, constructor)
}
def companion: Option[ModuleMirror] = symbol.companionModule match {
@@ -452,7 +518,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
}
private object unpickler extends UnPickler {
- val global: self.type = self
+ val global: thisUniverse.type = thisUniverse
}
/** how connected????
@@ -556,7 +622,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
* Note: If `sym` is a method or constructor, its parameter annotations are copied as well.
*/
private def copyAnnotations(sym: Symbol, jann: AnnotatedElement) {
- // to do: implement
+ sym setAnnotations (jann.getAnnotations map JavaAnnotationProxy).toList
}
/**
@@ -599,7 +665,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
completeRest()
}
- def completeRest(): Unit = self.synchronized {
+ def completeRest(): Unit = thisUniverse.synchronized {
val tparams = clazz.rawInfo.typeParams
val parents = try {
@@ -959,13 +1025,12 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
rawToExistential(typeRef(clazz.owner.thisType, clazz, List()))
}
case japplied: ParameterizedType =>
- val (pre, sym) = typeToScala(japplied.getRawType) match {
- case ExistentialType(tparams, TypeRef(pre, sym, _)) => (pre, sym)
- case TypeRef(pre, sym, _) => (pre, sym)
- }
+ // http://stackoverflow.com/questions/5767122/parameterizedtype-getrawtype-returns-j-l-r-type-not-class
+ val sym = classToScala(japplied.getRawType.asInstanceOf[jClass[_]])
+ val pre = sym.owner.thisType
val args0 = japplied.getActualTypeArguments
val (args, bounds) = targsToScala(pre.typeSymbol, args0.toList)
- ExistentialType(bounds, typeRef(pre, sym, args))
+ newExistentialType(bounds, typeRef(pre, sym, args))
case jarr: GenericArrayType =>
arrayType(typeToScala(jarr.getGenericComponentType))
case jtvar: jTypeVariable[_] =>
@@ -1183,9 +1248,9 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
mirrors(rootToLoader getOrElseUpdate(root, findLoader)).get.get
}
- private lazy val magicClasses: Map[(String, Name), Symbol] = {
+ private lazy val syntheticCoreClasses: Map[(String, Name), Symbol] = {
def mapEntry(sym: Symbol): ((String, Name), Symbol) = (sym.owner.fullName, sym.name) -> sym
- Map() ++ (definitions.magicSymbols filter (_.isType) map mapEntry)
+ Map() ++ (definitions.syntheticCoreClasses map mapEntry)
}
/** 1. If `owner` is a package class (but not the empty package) and `name` is a term name, make a new package
@@ -1204,9 +1269,12 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
if (name.isTermName && !owner.isEmptyPackageClass)
return mirror.makeScalaPackage(
if (owner.isRootSymbol) name.toString else owner.fullName+"."+name)
- magicClasses get (owner.fullName, name) match {
+ syntheticCoreClasses get (owner.fullName, name) match {
case Some(tsym) =>
- owner.info.decls enter tsym
+ // synthetic core classes are only present in root mirrors
+ // because Definitions.scala, which initializes and enters them, only affects rootMirror
+ // therefore we need to enter them manually for non-root mirrors
+ if (mirror ne thisUniverse.rootMirror) owner.info.decls enter tsym
return tsym
case None =>
}
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
index 629df76178..1d875b10f1 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -8,8 +8,6 @@ import internal.{SomePhase, NoPhase, Phase, TreeGen}
*/
class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.SymbolTable { self =>
- type AbstractFileType = AbstractFile
-
def picklerPhase = SomePhase
lazy val settings = new Settings
@@ -18,7 +16,7 @@ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.S
def log(msg: => AnyRef): Unit = println(" [] "+msg)
- type TreeCopier = TreeCopierOps
+ type TreeCopier = InternalTreeCopierOps
def newStrictTreeCopier: TreeCopier = new StrictTreeCopier
def newLazyTreeCopier: TreeCopier = new LazyTreeCopier
diff --git a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
index e87c6b339b..eaf7d8326f 100644
--- a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
+++ b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
@@ -38,7 +38,7 @@ object ReflectionUtils {
)
def show(cl: ClassLoader): String = {
- import language.reflectiveCalls
+ import scala.language.reflectiveCalls
def isAbstractFileClassLoader(clazz: Class[_]): Boolean = {
if (clazz == null) return false
@@ -49,7 +49,7 @@ object ReflectionUtils {
case cl: java.net.URLClassLoader =>
(cl.getURLs mkString ",")
case cl if cl != null && isAbstractFileClassLoader(cl.getClass) =>
- cl.asInstanceOf[{val root: scala.reflect.internal.AbstractFileApi}].root.canonicalPath
+ cl.asInstanceOf[{val root: scala.tools.nsc.io.AbstractFile}].root.canonicalPath
case null =>
inferBootClasspath
case _ =>
diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
index 583b9d93f3..f3473d46a7 100644
--- a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
@@ -3,7 +3,7 @@ package runtime
import internal.Flags
import java.lang.{Class => jClass, Package => jPackage}
-import collection.mutable
+import scala.collection.mutable
trait SymbolLoaders { self: SymbolTable =>
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
index 12db7a7bf9..7705610efb 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
@@ -1,7 +1,7 @@
package scala.reflect
package runtime
-import internal.Flags.DEFERRED
+import scala.tools.nsc.io.AbstractFile
trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
@@ -14,11 +14,11 @@ trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
override def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): ModuleSymbol =
synchronized { super.connectModuleToClass(m, moduleClass) }
- override def newFreeTermSymbol(name: TermName, info: Type, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
- new FreeTermSymbol(name, value, origin) with SynchronizedTermSymbol initFlags flags setInfo info
+ override def newFreeTermSymbol(name: TermName, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
+ new FreeTermSymbol(name, value, origin) with SynchronizedTermSymbol initFlags flags
- override def newFreeTypeSymbol(name: TypeName, info: Type, value: => Any, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
- new FreeTypeSymbol(name, value, origin) with SynchronizedTypeSymbol initFlags flags setInfo info
+ override def newFreeTypeSymbol(name: TypeName, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
+ new FreeTypeSymbol(name, origin) with SynchronizedTypeSymbol initFlags flags
override protected def makeNoSymbol: NoSymbol = new NoSymbol with SynchronizedSymbol
@@ -123,7 +123,7 @@ trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
trait SynchronizedClassSymbol extends ClassSymbol with SynchronizedTypeSymbol {
override def associatedFile = synchronized { super.associatedFile }
- override def associatedFile_=(f: AbstractFileType) = synchronized { super.associatedFile_=(f) }
+ override def associatedFile_=(f: AbstractFile) = synchronized { super.associatedFile_=(f) }
override def thisSym: Symbol = synchronized { super.thisSym }
override def thisType: Type = synchronized { super.thisType }
override def typeOfThis: Type = synchronized { super.typeOfThis }
diff --git a/src/reflect/scala/reflect/runtime/TwoWayCache.scala b/src/reflect/scala/reflect/runtime/TwoWayCache.scala
index e2bf5773d2..05debcba65 100644
--- a/src/reflect/scala/reflect/runtime/TwoWayCache.scala
+++ b/src/reflect/scala/reflect/runtime/TwoWayCache.scala
@@ -1,7 +1,7 @@
package scala.reflect
package runtime
-import collection.mutable.WeakHashMap
+import scala.collection.mutable.WeakHashMap
import java.lang.ref.WeakReference
/** A cache that maintains a bijection between Java reflection type `J`
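
[editor's note] A simplified, hypothetical sketch of the idea behind TwoWayCache (the real class additionally wraps values in WeakReferences): two weak maps that together maintain the J <-> S bijection the comment describes.

    import scala.collection.mutable.WeakHashMap

    class TwoWayLookup[J, S] {
      private val toScala = new WeakHashMap[J, S]
      private val toJava  = new WeakHashMap[S, J]

      def enter(j: J, s: S): Unit = synchronized {
        toScala(j) = s
        toJava(s)  = j
      }

      def scalaSide(j: J): Option[S] = synchronized { toScala.get(j) }
      def javaSide(s: S): Option[J]  = synchronized { toJava.get(s) }
    }
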
diff --git a/src/reflect/scala/reflect/runtime/package.scala b/src/reflect/scala/reflect/runtime/package.scala
index ccdea3e82d..7b9f69e657 100644
--- a/src/reflect/scala/reflect/runtime/package.scala
+++ b/src/reflect/scala/reflect/runtime/package.scala
@@ -5,7 +5,8 @@ package object runtime {
// type is api.JavaUniverse because we only want to expose the `scala.reflect.api.*` subset of reflection
lazy val universe: api.JavaUniverse = new runtime.JavaUniverse
- // implementation magically hardwired to the `currentMirror` method below
+ // implementation hardwired to the `currentMirror` method below
+ // using the mechanism implemented in `scala.tools.reflect.FastTrack`
def currentMirror: universe.Mirror = ??? // macro
}
@@ -17,7 +18,7 @@ package runtime {
if (runtimeClass.isEmpty) c.abort(c.enclosingPosition, "call site does not have an enclosing class")
val runtimeUniverse = Select(Select(Select(Ident(newTermName("scala")), newTermName("reflect")), newTermName("runtime")), newTermName("universe"))
val currentMirror = Apply(Select(runtimeUniverse, newTermName("runtimeMirror")), List(Select(runtimeClass, newTermName("getClassLoader"))))
- c.Expr[Nothing](currentMirror)(c.AbsTypeTag.Nothing)
+ c.Expr[Nothing](currentMirror)(c.WeakTypeTag.Nothing)
}
}
}
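
[editor's note] A hedged usage sketch of the entry points this package object wires up: currentMirror expands (via the FastTrack mechanism mentioned above) to roughly the runtimeMirror(...getClassLoader) expression built in the macro implementation, so the two values below should describe the same classloader.

    import scala.reflect.runtime.{universe => ru, currentMirror}

    object MirrorEntryPoints extends App {
      val viaMacro    = currentMirror
      val viaExplicit = ru.runtimeMirror(getClass.getClassLoader)
      // mirrors are cached per classloader, so these typically coincide
      println(viaMacro == viaExplicit)
    }
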
diff --git a/src/reflect/scala/tools/nsc/io/AbstractFile.scala b/src/reflect/scala/tools/nsc/io/AbstractFile.scala
index 8d55b708b1..018a017c6d 100644
--- a/src/reflect/scala/tools/nsc/io/AbstractFile.scala
+++ b/src/reflect/scala/tools/nsc/io/AbstractFile.scala
@@ -82,7 +82,7 @@ object AbstractFile {
* <code>global.settings.encoding.value</code>.
* </p>
*/
-abstract class AbstractFile extends reflect.internal.AbstractFileApi with Iterable[AbstractFile] {
+abstract class AbstractFile extends Iterable[AbstractFile] {
/** Returns the name of this abstract file. */
def name: String
diff --git a/src/reflect/scala/tools/nsc/io/File.scala b/src/reflect/scala/tools/nsc/io/File.scala
index 1f3cac7ee1..fce0e339e0 100644
--- a/src/reflect/scala/tools/nsc/io/File.scala
+++ b/src/reflect/scala/tools/nsc/io/File.scala
@@ -16,7 +16,7 @@ import java.io.{
import java.io.{ File => JFile }
import java.nio.channels.{ Channel, FileChannel }
import scala.io.Codec
-import language.{reflectiveCalls, implicitConversions}
+import scala.language.{reflectiveCalls, implicitConversions}
object File {
def pathSeparator = java.io.File.pathSeparator
diff --git a/src/reflect/scala/tools/nsc/io/Path.scala b/src/reflect/scala/tools/nsc/io/Path.scala
index e965c70111..0a27e49686 100644
--- a/src/reflect/scala/tools/nsc/io/Path.scala
+++ b/src/reflect/scala/tools/nsc/io/Path.scala
@@ -12,7 +12,7 @@ import java.io.{
import java.io.{ File => JFile }
import java.net.{ URI, URL }
import scala.util.Random.alphanumeric
-import language.implicitConversions
+import scala.language.implicitConversions
/** An abstraction for filesystem paths. The differences between
* Path, File, and Directory are primarily to communicate intent.
diff --git a/src/reflect/scala/tools/nsc/io/Streamable.scala b/src/reflect/scala/tools/nsc/io/Streamable.scala
index ff770bd396..625429bdb3 100644
--- a/src/reflect/scala/tools/nsc/io/Streamable.scala
+++ b/src/reflect/scala/tools/nsc/io/Streamable.scala
@@ -10,7 +10,7 @@ import java.net.{ URI, URL }
import java.io.{ BufferedInputStream, InputStream, PrintStream }
import java.io.{ BufferedReader, InputStreamReader, Closeable => JCloseable }
import scala.io.{ Codec, BufferedSource, Source }
-import collection.mutable.ArrayBuffer
+import scala.collection.mutable.ArrayBuffer
import Path.fail
/** Traits for objects which can be represented as Streams.
diff --git a/src/reflect/scala/tools/nsc/io/VirtualFile.scala b/src/reflect/scala/tools/nsc/io/VirtualFile.scala
index be888e92e6..8a5114bfe7 100644
--- a/src/reflect/scala/tools/nsc/io/VirtualFile.scala
+++ b/src/reflect/scala/tools/nsc/io/VirtualFile.scala
@@ -33,7 +33,7 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF
//########################################################################
// Private data
- private var content = new Array[Byte](0)
+ private var content = Array.emptyByteArray
//########################################################################
// Public Methods
diff --git a/src/reflect/scala/tools/nsc/io/ZipArchive.scala b/src/reflect/scala/tools/nsc/io/ZipArchive.scala
index d1a91294a5..49d2200895 100644
--- a/src/reflect/scala/tools/nsc/io/ZipArchive.scala
+++ b/src/reflect/scala/tools/nsc/io/ZipArchive.scala
@@ -11,7 +11,7 @@ import java.io.{ IOException, InputStream, ByteArrayInputStream }
import java.io.{ File => JFile }
import java.util.zip.{ ZipEntry, ZipFile, ZipInputStream }
import scala.collection.{ immutable, mutable }
-import annotation.tailrec
+import scala.annotation.tailrec
/** An abstraction for zip files and streams. Everything is written the way
* it is for performance: we come through here a lot on every run. Be careful
@@ -107,14 +107,14 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq
// })
dirs get path match {
case Some(v) => v
- case None =>
+ case None =>
val parent = ensureDir(dirs, dirName(path), null)
val dir = new DirEntry(path)
parent.entries(baseName(path)) = dir
dirs(path) = dir
dir
}
-
+
protected def getDir(dirs: mutable.Map[String, DirEntry], entry: ZipEntry): DirEntry = {
if (entry.isDirectory) ensureDir(dirs, entry.getName, entry)
else ensureDir(dirs, dirName(entry.getName), null)
@@ -177,7 +177,7 @@ final class URLZipArchive(val url: URL) extends ZipArchive(null) {
class FileEntry() extends Entry(zipEntry.getName) {
override val toByteArray: Array[Byte] = {
val len = zipEntry.getSize().toInt
- val arr = new Array[Byte](len)
+ val arr = if (len == 0) Array.emptyByteArray else new Array[Byte](len)
var offset = 0
def loop() {
diff --git a/src/scalacheck/org/scalacheck/Commands.scala b/src/scalacheck/org/scalacheck/Commands.scala
index 5ad82c513d..88ef8ae2a1 100644
--- a/src/scalacheck/org/scalacheck/Commands.scala
+++ b/src/scalacheck/org/scalacheck/Commands.scala
@@ -53,7 +53,7 @@ trait Commands extends Prop {
* takes the current abstract state as parameter and returns a boolean
* that says if the precondition is fulfilled or not. You can add several
* conditions to the precondition list */
- val preConditions = new collection.mutable.ListBuffer[State => Boolean]
+ val preConditions = new scala.collection.mutable.ListBuffer[State => Boolean]
/** Returns all postconditions merged into a single function */
def postCondition: (State,State,Any) => Prop = (s0,s1,r) => all(postConditions.map(_.apply(s0,s1,r)): _*)
@@ -65,7 +65,7 @@ trait Commands extends Prop {
* method. The postcondition function should return a Boolean (or
* a Prop instance) that says if the condition holds or not. You can add several
* conditions to the postConditions list. */
- val postConditions = new collection.mutable.ListBuffer[(State,State,Any) => Prop]
+ val postConditions = new scala.collection.mutable.ListBuffer[(State,State,Any) => Prop]
}
/** A command that binds its result for later use */
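
Note: the preConditions / postConditions buffers follow an accumulate-then-merge pattern: commands append predicates, and the merged condition folds them into one check. A standalone mirror of the pattern, independent of ScalaCheck's actual API (names hypothetical):

    import scala.collection.mutable.ListBuffer

    class Checks[State] {
      val preConditions = new ListBuffer[State => Boolean]
      // The merged precondition holds only if every accumulated predicate does.
      def preCondition(s: State): Boolean = preConditions.forall(_(s))
    }

    object ChecksDemo {
      val checks = new Checks[Int]
      checks.preConditions += (_ >= 0)
      checks.preConditions += (_ < 100)
      assert(checks.preCondition(42))
    }
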
diff --git a/src/scalacheck/org/scalacheck/Pretty.scala b/src/scalacheck/org/scalacheck/Pretty.scala
index c40e4aa718..eeb5936086 100644
--- a/src/scalacheck/org/scalacheck/Pretty.scala
+++ b/src/scalacheck/org/scalacheck/Pretty.scala
@@ -96,7 +96,7 @@ object Pretty {
}
implicit def prettyTestRes(res: Test.Result) = Pretty { prms =>
- def labels(ls: collection.immutable.Set[String]) =
+ def labels(ls: scala.collection.immutable.Set[String]) =
if(ls.isEmpty) ""
else "> Labels of failing property: " / ls.mkString("\n")
val s = res.status match {
diff --git a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
index 16ac1940b2..4683c34a65 100644
--- a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
+++ b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
@@ -30,7 +30,7 @@ trait CmdLineParser extends Parsers {
trait StrOpt extends Opt[String]
class OptMap {
- private val opts = new collection.mutable.HashMap[Opt[_], Any]
+ private val opts = new scala.collection.mutable.HashMap[Opt[_], Any]
def apply(flag: Flag): Boolean = opts.contains(flag)
def apply[T](opt: Opt[T]): T = opts.get(opt) match {
case None => opt.default
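
Note: OptMap is a small heterogeneous map: keys are typed option descriptors, values are stored as Any, and the typed apply falls back to the option's default and recovers the type with a cast. A standalone sketch of the pattern (the update method is added here for illustration; the cast is only safe because values are always stored under their own key):

    import scala.collection.mutable

    trait Opt[T] { def default: T }

    class OptMap {
      private val opts = new mutable.HashMap[Opt[_], Any]
      def update[T](opt: Opt[T], value: T): Unit = opts(opt) = value
      def apply[T](opt: Opt[T]): T = opts.get(opt) match {
        case None    => opt.default
        case Some(v) => v.asInstanceOf[T]
      }
    }
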
diff --git a/src/scalap/scala/tools/scalap/CodeWriter.scala b/src/scalap/scala/tools/scalap/CodeWriter.scala
index f5db183abb..35de796727 100644
--- a/src/scalap/scala/tools/scalap/CodeWriter.scala
+++ b/src/scalap/scala/tools/scalap/CodeWriter.scala
@@ -13,7 +13,7 @@ import java.io._
class CodeWriter(writer: Writer) {
- private val nl = compat.Platform.EOL
+ private val nl = scala.compat.Platform.EOL
private var step = " "
private var level = 0
private var align = false
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala b/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
index 34f52a1e19..51a789e041 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
@@ -82,7 +82,7 @@ class SeqRule[S, +A, +X](rule : Rule[S, S, A, X]) {
/** Repeats this rule num times */
def times(num : Int) : Rule[S, S, Seq[A], X] = from[S] {
- val result = new collection.mutable.ArraySeq[A](num)
+ val result = new scala.collection.mutable.ArraySeq[A](num)
// more compact using HoF but written this way so it's tail-recursive
def rep(i : Int, in : S) : Result[S, Seq[A], X] = {
if (i == num) Success(in, result)
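
Note: the times combinator fills a pre-sized ArraySeq with an explicit tail-recursive loop, as the comment in the hunk says, instead of a higher-order fold. A simplified stand-in for the rule machinery, with a hypothetical `step` function in place of the real Rule type (mirrors the 2.10-era mutable.ArraySeq constructor used above):

    import scala.annotation.tailrec
    import scala.collection.mutable.ArraySeq

    object Times {
      // Apply `step` num times, threading the state and collecting each result.
      def times[S, A](num: Int)(step: S => Option[(S, A)]): S => Option[(S, Seq[A])] = { start =>
        val result = new ArraySeq[A](num)
        @tailrec def rep(i: Int, in: S): Option[(S, Seq[A])] =
          if (i == num) Some((in, result))
          else step(in) match {
            case Some((next, a)) =>
              result(i) = a
              rep(i + 1, next)
            case None => None
          }
        rep(0, start)
      }
    }
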
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/package.scala b/src/scalap/scala/tools/scalap/scalax/rules/package.scala
index 324e87435e..b1cc18f90b 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/package.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/package.scala
@@ -2,8 +2,8 @@ package scala.tools.scalap
package scalax
package object rules {
- implicit lazy val higherKinds = language.higherKinds
- implicit lazy val postfixOps = language.postfixOps
- implicit lazy val implicitConversions = language.implicitConversions
- implicit lazy val reflectiveCalls = language.reflectiveCalls
+ implicit lazy val higherKinds = scala.language.higherKinds
+ implicit lazy val postfixOps = scala.language.postfixOps
+ implicit lazy val implicitConversions = scala.language.implicitConversions
+ implicit lazy val reflectiveCalls = scala.language.reflectiveCalls
}
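
Note: these implicit lazy vals are the SIP-18 language-feature flags; having the corresponding implicit value in scope enables the feature for all code in the package, without a per-file import of scala.language. As with the other hunks, the bare name `language` is resolved through the enclosing packages and can be shadowed, hence the explicit scala. prefix. The same pattern in a hypothetical package of one's own:

    package com.example

    package object dsl {
      // Enables postfix operator syntax for everything under com.example.dsl,
      // without each file importing scala.language.postfixOps.
      implicit lazy val postfixOps = scala.language.postfixOps
    }
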
diff --git a/src/swing/scala/swing/package.scala b/src/swing/scala/swing/package.scala
index 96530e2e94..45497665d7 100644
--- a/src/swing/scala/swing/package.scala
+++ b/src/swing/scala/swing/package.scala
@@ -14,8 +14,8 @@ package object swing {
type Image = java.awt.Image
type Font = java.awt.Font
- implicit lazy val reflectiveCalls = language.reflectiveCalls
- implicit lazy val implicitConversions = language.implicitConversions
+ implicit lazy val reflectiveCalls = scala.language.reflectiveCalls
+ implicit lazy val implicitConversions = scala.language.implicitConversions
private[swing] def ifNull[A](o: Object, a: A): A = if(o eq null) a else o.asInstanceOf[A]
private[swing] def toOption[A](o: Object): Option[A] = if(o eq null) None else Some(o.asInstanceOf[A])
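
Note: ifNull and toOption bridge Java APIs that signal absence with null into a default value or an Option; they are private[swing], so the sketch below mirrors the pattern standalone (hypothetical usage with a system property that may be unset):

    object NullBridge {
      def toOption[A](o: Object): Option[A] =
        if (o eq null) None else Some(o.asInstanceOf[A])

      // None when the property is not defined, Some(value) otherwise.
      val maybe: Option[String] = toOption[String](System.getProperty("no.such.property"))
    }
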