-rw-r--r--.gitattributes26
-rw-r--r--build.xml196
-rw-r--r--docs/LICENSE2
-rw-r--r--gitconfig.SAMPLE8
-rw-r--r--lib/scala-compiler-src.jar.desired.sha12
-rw-r--r--lib/scala-compiler.jar.desired.sha12
-rw-r--r--lib/scala-library-src.jar.desired.sha12
-rw-r--r--lib/scala-library.jar.desired.sha12
-rw-r--r--lib/scala-reflect-src.jar.desired.sha12
-rw-r--r--lib/scala-reflect.jar.desired.sha12
-rw-r--r--project/Build.scala2
-rw-r--r--project/Partest.scala10
-rw-r--r--project/Versions.scala2
-rw-r--r--project/plugins.sbt2
-rw-r--r--project/project/Build.scala2
-rw-r--r--src/actors-migration/scala/actors/migration/MigrationSystem.scala (renamed from src/actors-migration/scala/actors/MigrationSystem.scala)5
-rw-r--r--src/actors-migration/scala/actors/migration/Pattern.scala (renamed from src/actors-migration/scala/actors/Pattern.scala)13
-rw-r--r--src/actors-migration/scala/actors/migration/Props.scala (renamed from src/actors-migration/scala/actors/Props.scala)7
-rw-r--r--src/actors-migration/scala/actors/migration/StashingActor.scala (renamed from src/actors-migration/scala/actors/StashingActor.scala)13
-rw-r--r--src/actors-migration/scala/actors/migration/Timeout.scala (renamed from src/actors-migration/scala/actors/Timeout.scala)6
-rw-r--r--src/actors/scala/actors/AbstractActor.scala2
-rw-r--r--src/actors/scala/actors/Actor.scala2
-rw-r--r--src/actors/scala/actors/ActorRef.scala25
-rw-r--r--src/actors/scala/actors/CanReply.scala2
-rw-r--r--src/actors/scala/actors/Combinators.scala2
-rw-r--r--src/actors/scala/actors/Future.scala2
-rw-r--r--src/actors/scala/actors/Reactor.scala2
-rw-r--r--src/actors/scala/actors/scheduler/ThreadPoolConfig.scala2
-rw-r--r--src/build/InnerObjectTestGen.scala2
-rw-r--r--src/build/bnd/continuations.bnd5
-rw-r--r--src/build/bnd/scala-actors-migration.bnd5
-rw-r--r--src/build/bnd/scala-actors.bnd5
-rw-r--r--src/build/bnd/scala-compiler.bnd8
-rw-r--r--src/build/bnd/scala-library.bnd6
-rw-r--r--src/build/bnd/scala-reflect.bnd6
-rw-r--r--src/build/bnd/scala-swing.bnd5
-rw-r--r--src/build/genprod.scala2
-rw-r--r--src/build/pack.xml39
-rw-r--r--src/compiler/scala/reflect/macros/runtime/AbortMacroException.scala3
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Aliases.scala11
-rw-r--r--src/compiler/scala/reflect/macros/runtime/CapturedVariables.scala15
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Context.scala8
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Enclosures.scala5
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Evals.scala4
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Exprs.scala8
-rw-r--r--src/compiler/scala/reflect/macros/runtime/FrontEnds.scala33
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Infrastructure.scala50
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Parsers.scala11
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Reifiers.scala6
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Settings.scala36
-rw-r--r--src/compiler/scala/reflect/macros/runtime/TypeTags.scala9
-rw-r--r--src/compiler/scala/reflect/macros/util/Traces.scala9
-rw-r--r--src/compiler/scala/reflect/reify/Errors.scala4
-rw-r--r--src/compiler/scala/reflect/reify/Reifier.scala12
-rw-r--r--src/compiler/scala/reflect/reify/States.scala8
-rw-r--r--src/compiler/scala/reflect/reify/Taggers.scala16
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala2
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenSymbols.scala66
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenTrees.scala2
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenTypes.scala5
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenUtils.scala5
-rw-r--r--src/compiler/scala/reflect/reify/package.scala25
-rw-r--r--src/compiler/scala/reflect/reify/phases/Calculate.scala2
-rw-r--r--src/compiler/scala/reflect/reify/phases/Metalevels.scala4
-rw-r--r--src/compiler/scala/reflect/reify/phases/Reshape.scala15
-rw-r--r--src/compiler/scala/reflect/reify/utils/Extractors.scala48
-rw-r--r--src/compiler/scala/reflect/reify/utils/NodePrinters.scala11
-rw-r--r--src/compiler/scala/reflect/reify/utils/StdAttachments.scala10
-rw-r--r--src/compiler/scala/reflect/reify/utils/SymbolTables.scala58
-rw-r--r--src/compiler/scala/tools/ant/ClassloadVerify.scala2
-rw-r--r--src/compiler/scala/tools/ant/Scaladoc.scala13
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Compilers.scala4
-rw-r--r--src/compiler/scala/tools/ant/templates/tool-windows.tmpl182
-rw-r--r--src/compiler/scala/tools/cmd/Property.scala2
-rw-r--r--src/compiler/scala/tools/cmd/Reference.scala2
-rw-r--r--src/compiler/scala/tools/cmd/gen/AnyVals.scala11
-rw-r--r--src/compiler/scala/tools/cmd/gen/Codegen.scala2
-rw-r--r--src/compiler/scala/tools/cmd/package.scala4
-rw-r--r--src/compiler/scala/tools/nsc/CompilationUnits.scala2
-rw-r--r--src/compiler/scala/tools/nsc/EvalLoop.scala2
-rw-r--r--src/compiler/scala/tools/nsc/Global.scala70
-rw-r--r--src/compiler/scala/tools/nsc/PhaseAssembly.scala2
-rw-r--r--src/compiler/scala/tools/nsc/Phases.scala4
-rw-r--r--src/compiler/scala/tools/nsc/ast/NodePrinters.scala7
-rw-r--r--src/compiler/scala/tools/nsc/ast/Positions.scala7
-rw-r--r--src/compiler/scala/tools/nsc/ast/Printers.scala3
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeDSL.scala6
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeGen.scala12
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeInfo.scala6
-rw-r--r--src/compiler/scala/tools/nsc/ast/Trees.scala21
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala213
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Scanners.scala175
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala10
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Tokens.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala18
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala44
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/GenICode.scala849
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ICodes.scala5
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala161
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala8
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala6
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala739
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala26
-rw-r--r--src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala27
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala21
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala5
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/Inliners.scala249
-rw-r--r--src/compiler/scala/tools/nsc/dependencies/Changes.scala2
-rw-r--r--src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala2
-rw-r--r--src/compiler/scala/tools/nsc/doc/Settings.scala60
-rw-r--r--src/compiler/scala/tools/nsc/doc/Uncompilable.scala4
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala23
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala6
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala2
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Source.scala2
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Template.scala129
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala4
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js158
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js5442
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js42
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/Entity.scala9
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/LinkTo.scala8
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala133
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala25
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala8
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala54
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala4
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interactive/BuildManager.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/CompilerControl.scala16
-rw-r--r--src/compiler/scala/tools/nsc/interactive/ContextTrees.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Global.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Picklers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/REPL.scala46
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala80
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/Tester.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala6
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ILoop.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala5
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/IMain.scala7
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Imports.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala6
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/NamedParam.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Phased.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Power.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ReplVals.scala6
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala6
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/package.scala6
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/session/package.scala2
-rw-r--r--src/compiler/scala/tools/nsc/io/Jar.scala4
-rw-r--r--src/compiler/scala/tools/nsc/io/Pickler.scala6
-rw-r--r--src/compiler/scala/tools/nsc/io/package.scala21
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaParsers.scala8
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaScanners.scala2
-rw-r--r--src/compiler/scala/tools/nsc/matching/MatchSupport.scala4
-rw-r--r--src/compiler/scala/tools/nsc/matching/Matrix.scala2
-rw-r--r--src/compiler/scala/tools/nsc/matching/ParallelMatching.scala4
-rw-r--r--src/compiler/scala/tools/nsc/matching/PatternBindings.scala2
-rw-r--r--src/compiler/scala/tools/nsc/package.scala2
-rw-r--r--src/compiler/scala/tools/nsc/scratchpad/CommentOutputStream.scala18
-rw-r--r--src/compiler/scala/tools/nsc/scratchpad/CommentWriter.scala42
-rw-r--r--src/compiler/scala/tools/nsc/scratchpad/Mixer.scala2
-rw-r--r--src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala92
-rw-r--r--src/compiler/scala/tools/nsc/settings/AbsSettings.scala2
-rw-r--r--src/compiler/scala/tools/nsc/settings/MutableSettings.scala2
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaSettings.scala4
-rw-r--r--src/compiler/scala/tools/nsc/settings/Warnings.scala3
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala22
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolTable.scala2
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala4
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala14
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala13
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala8
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/package.scala2
-rw-r--r--src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala4
-rw-r--r--src/compiler/scala/tools/nsc/symtab/package.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/AddInterfaces.scala24
-rw-r--r--src/compiler/scala/tools/nsc/transform/CleanUp.scala239
-rw-r--r--src/compiler/scala/tools/nsc/transform/Constructors.scala11
-rw-r--r--src/compiler/scala/tools/nsc/transform/Erasure.scala474
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala16
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala13
-rw-r--r--src/compiler/scala/tools/nsc/transform/Flatten.scala16
-rw-r--r--src/compiler/scala/tools/nsc/transform/LambdaLift.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/LazyVals.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/Mixin.scala42
-rw-r--r--src/compiler/scala/tools/nsc/transform/OverridingPairs.scala41
-rw-r--r--src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala136
-rw-r--r--src/compiler/scala/tools/nsc/transform/TailCalls.scala4
-rw-r--r--src/compiler/scala/tools/nsc/transform/TypingTransformers.scala7
-rw-r--r--src/compiler/scala/tools/nsc/transform/UnCurry.scala104
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Analyzer.scala8
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala410
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Contexts.scala20
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala4
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Duplicators.scala7
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala8
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala126
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Infer.scala83
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Macros.scala1417
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala24
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala84
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala570
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala66
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala183
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala4
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Tags.scala23
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala36
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala1590
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Unapplies.scala4
-rw-r--r--src/compiler/scala/tools/nsc/util/ClassPath.scala6
-rw-r--r--src/compiler/scala/tools/nsc/util/Exceptional.scala2
-rw-r--r--src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala2
-rw-r--r--src/compiler/scala/tools/nsc/util/StatisticsInfo.scala6
-rw-r--r--src/compiler/scala/tools/nsc/util/package.scala2
-rw-r--r--src/compiler/scala/tools/reflect/FastTrack.scala25
-rw-r--r--src/compiler/scala/tools/reflect/FrontEnd.scala50
-rw-r--r--src/compiler/scala/tools/reflect/FrontEnds.scala78
-rw-r--r--src/compiler/scala/tools/reflect/MacroImplementations.scala7
-rw-r--r--src/compiler/scala/tools/reflect/StdTags.scala10
-rw-r--r--src/compiler/scala/tools/reflect/ToolBox.scala23
-rw-r--r--src/compiler/scala/tools/reflect/ToolBoxFactory.scala104
-rw-r--r--src/compiler/scala/tools/reflect/package.scala94
-rw-r--r--src/compiler/scala/tools/util/Javap.scala2
-rw-r--r--src/compiler/scala/tools/util/PathResolver.scala2
-rw-r--r--src/compiler/scala/tools/util/VerifyClass.scala2
-rw-r--r--src/continuations/library/scala/util/continuations/ControlContext.scala4
-rw-r--r--src/detach/library/scala/remoting/Channel.scala8
-rw-r--r--src/ensime/.ensime.SAMPLE17
-rw-r--r--src/ensime/README.md11
-rw-r--r--src/library/scala/App.scala10
-rw-r--r--src/library/scala/Array.scala14
-rw-r--r--src/library/scala/Boolean.scala2
-rw-r--r--src/library/scala/BoxingConversions.scala5
-rw-r--r--src/library/scala/Byte.scala2
-rw-r--r--src/library/scala/Char.scala2
-rw-r--r--src/library/scala/Double.scala2
-rw-r--r--src/library/scala/Dynamic.scala10
-rw-r--r--src/library/scala/Float.scala2
-rw-r--r--src/library/scala/Function.scala4
-rw-r--r--src/library/scala/Int.scala2
-rw-r--r--src/library/scala/Long.scala2
-rw-r--r--src/library/scala/LowPriorityImplicits.scala23
-rw-r--r--src/library/scala/Option.scala8
-rw-r--r--src/library/scala/PartialFunction.scala145
-rw-r--r--src/library/scala/Predef.scala8
-rw-r--r--src/library/scala/Product.scala2
-rw-r--r--src/library/scala/SerialVersionUID.scala2
-rw-r--r--src/library/scala/Short.scala2
-rw-r--r--src/library/scala/StringContext.scala74
-rw-r--r--src/library/scala/Unit.scala2
-rw-r--r--src/library/scala/annotation/bridge.scala2
-rw-r--r--src/library/scala/annotation/cloneable.scala2
-rw-r--r--src/library/scala/annotation/elidable.scala8
-rw-r--r--src/library/scala/annotation/implicitNotFound.scala2
-rw-r--r--src/library/scala/annotation/meta/beanGetter.scala2
-rw-r--r--src/library/scala/annotation/meta/beanSetter.scala2
-rw-r--r--src/library/scala/annotation/meta/companionClass.scala4
-rw-r--r--src/library/scala/annotation/meta/companionMethod.scala4
-rw-r--r--src/library/scala/annotation/meta/companionObject.scala2
-rw-r--r--src/library/scala/annotation/meta/field.scala2
-rw-r--r--src/library/scala/annotation/meta/getter.scala2
-rw-r--r--src/library/scala/annotation/meta/languageFeature.scala2
-rw-r--r--src/library/scala/annotation/meta/param.scala2
-rw-r--r--src/library/scala/annotation/meta/setter.scala2
-rw-r--r--src/library/scala/annotation/migration.scala4
-rw-r--r--src/library/scala/annotation/serializable.scala2
-rw-r--r--src/library/scala/annotation/static.scala20
-rw-r--r--src/library/scala/annotation/strictfp.scala2
-rw-r--r--src/library/scala/annotation/switch.scala2
-rw-r--r--src/library/scala/annotation/tailrec.scala2
-rw-r--r--src/library/scala/annotation/unchecked/uncheckedStable.scala2
-rw-r--r--src/library/scala/annotation/unchecked/uncheckedVariance.scala2
-rw-r--r--src/library/scala/annotation/unspecialized.scala2
-rw-r--r--src/library/scala/annotation/varargs.scala2
-rw-r--r--src/library/scala/beans/BeanDescription.scala2
-rw-r--r--src/library/scala/beans/BeanDisplayName.scala2
-rw-r--r--src/library/scala/beans/BeanInfo.scala2
-rw-r--r--src/library/scala/beans/BeanInfoSkip.scala2
-rw-r--r--src/library/scala/beans/BeanProperty.scala2
-rw-r--r--src/library/scala/beans/BooleanBeanProperty.scala2
-rw-r--r--src/library/scala/collection/CustomParallelizable.scala2
-rw-r--r--src/library/scala/collection/GenIterableViewLike.scala1
-rw-r--r--src/library/scala/collection/GenMapLike.scala2
-rw-r--r--src/library/scala/collection/GenSeqLike.scala12
-rw-r--r--src/library/scala/collection/GenSetLike.scala2
-rw-r--r--src/library/scala/collection/GenTraversableLike.scala16
-rw-r--r--src/library/scala/collection/GenTraversableOnce.scala6
-rw-r--r--src/library/scala/collection/IndexedSeqLike.scala4
-rwxr-xr-xsrc/library/scala/collection/IndexedSeqOptimized.scala5
-rw-r--r--src/library/scala/collection/IterableLike.scala9
-rw-r--r--src/library/scala/collection/IterableViewLike.scala2
-rw-r--r--src/library/scala/collection/Iterator.scala11
-rw-r--r--src/library/scala/collection/JavaConversions.scala3
-rw-r--r--src/library/scala/collection/LinearSeqLike.scala4
-rwxr-xr-xsrc/library/scala/collection/LinearSeqOptimized.scala13
-rw-r--r--src/library/scala/collection/MapLike.scala12
-rw-r--r--src/library/scala/collection/SeqExtractors.scala2
-rw-r--r--src/library/scala/collection/SeqLike.scala12
-rw-r--r--src/library/scala/collection/SetLike.scala4
-rw-r--r--src/library/scala/collection/TraversableLike.scala33
-rw-r--r--src/library/scala/collection/TraversableOnce.scala16
-rw-r--r--src/library/scala/collection/TraversableProxyLike.scala2
-rw-r--r--src/library/scala/collection/TraversableViewLike.scala17
-rw-r--r--src/library/scala/collection/concurrent/TrieMap.scala146
-rw-r--r--src/library/scala/collection/convert/DecorateAsJava.scala6
-rw-r--r--src/library/scala/collection/convert/DecorateAsScala.scala2
-rw-r--r--src/library/scala/collection/convert/WrapAsJava.scala6
-rw-r--r--src/library/scala/collection/convert/WrapAsScala.scala24
-rw-r--r--src/library/scala/collection/convert/Wrappers.scala11
-rw-r--r--src/library/scala/collection/generic/ClassTagTraversableFactory.scala4
-rwxr-xr-xsrc/library/scala/collection/generic/FilterMonadic.scala2
-rw-r--r--src/library/scala/collection/generic/GenMapFactory.scala2
-rw-r--r--src/library/scala/collection/generic/GenSeqFactory.scala2
-rw-r--r--src/library/scala/collection/generic/GenSetFactory.scala2
-rw-r--r--src/library/scala/collection/generic/GenTraversableFactory.scala2
-rw-r--r--src/library/scala/collection/generic/GenericClassTagCompanion.scala4
-rw-r--r--src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala6
-rw-r--r--src/library/scala/collection/generic/GenericCompanion.scala2
-rw-r--r--src/library/scala/collection/generic/GenericOrderedCompanion.scala2
-rw-r--r--src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala4
-rw-r--r--src/library/scala/collection/generic/GenericParCompanion.scala2
-rw-r--r--src/library/scala/collection/generic/GenericParTemplate.scala6
-rw-r--r--src/library/scala/collection/generic/GenericSeqCompanion.scala4
-rw-r--r--src/library/scala/collection/generic/GenericSetTemplate.scala2
-rw-r--r--src/library/scala/collection/generic/GenericTraversableTemplate.scala10
-rw-r--r--src/library/scala/collection/generic/ImmutableMapFactory.scala2
-rw-r--r--src/library/scala/collection/generic/ImmutableSetFactory.scala2
-rw-r--r--src/library/scala/collection/generic/ImmutableSortedMapFactory.scala2
-rw-r--r--src/library/scala/collection/generic/ImmutableSortedSetFactory.scala2
-rw-r--r--src/library/scala/collection/generic/IsTraversableLike.scala4
-rw-r--r--src/library/scala/collection/generic/IsTraversableOnce.scala4
-rw-r--r--src/library/scala/collection/generic/IterableForwarder.scala2
-rw-r--r--src/library/scala/collection/generic/MapFactory.scala2
-rw-r--r--src/library/scala/collection/generic/MutableMapFactory.scala2
-rw-r--r--src/library/scala/collection/generic/MutableSetFactory.scala2
-rw-r--r--src/library/scala/collection/generic/MutableSortedSetFactory.scala2
-rw-r--r--src/library/scala/collection/generic/OrderedTraversableFactory.scala2
-rw-r--r--src/library/scala/collection/generic/ParFactory.scala4
-rw-r--r--src/library/scala/collection/generic/ParMapFactory.scala2
-rw-r--r--src/library/scala/collection/generic/ParSetFactory.scala10
-rw-r--r--src/library/scala/collection/generic/SeqFactory.scala2
-rw-r--r--src/library/scala/collection/generic/SetFactory.scala4
-rw-r--r--src/library/scala/collection/generic/SliceInterval.scala2
-rw-r--r--src/library/scala/collection/generic/SortedMapFactory.scala2
-rw-r--r--src/library/scala/collection/generic/SortedSetFactory.scala2
-rw-r--r--src/library/scala/collection/generic/TraversableFactory.scala2
-rw-r--r--src/library/scala/collection/generic/TraversableForwarder.scala2
-rw-r--r--src/library/scala/collection/generic/package.scala2
-rw-r--r--src/library/scala/collection/immutable/HashMap.scala135
-rw-r--r--src/library/scala/collection/immutable/HashSet.scala61
-rw-r--r--src/library/scala/collection/immutable/IntMap.scala3
-rw-r--r--src/library/scala/collection/immutable/List.scala16
-rw-r--r--src/library/scala/collection/immutable/ListMap.scala16
-rw-r--r--src/library/scala/collection/immutable/ListSet.scala20
-rw-r--r--src/library/scala/collection/immutable/LongMap.scala5
-rw-r--r--src/library/scala/collection/immutable/Map.scala2
-rw-r--r--src/library/scala/collection/immutable/PagedSeq.scala4
-rw-r--r--src/library/scala/collection/immutable/Queue.scala2
-rw-r--r--src/library/scala/collection/immutable/Range.scala18
-rw-r--r--src/library/scala/collection/immutable/RedBlack.scala3
-rw-r--r--src/library/scala/collection/immutable/RedBlackTree.scala32
-rw-r--r--src/library/scala/collection/immutable/SortedMap.scala14
-rw-r--r--src/library/scala/collection/immutable/Stream.scala50
-rw-r--r--src/library/scala/collection/immutable/StreamViewLike.scala12
-rw-r--r--src/library/scala/collection/immutable/StringLike.scala4
-rw-r--r--src/library/scala/collection/immutable/TrieIterator.scala2
-rw-r--r--src/library/scala/collection/immutable/Vector.scala34
-rw-r--r--src/library/scala/collection/immutable/package.scala4
-rw-r--r--src/library/scala/collection/mutable/AVLTree.scala3
-rw-r--r--src/library/scala/collection/mutable/ArrayBuffer.scala10
-rw-r--r--src/library/scala/collection/mutable/ArrayLike.scala2
-rw-r--r--src/library/scala/collection/mutable/ArrayOps.scala35
-rw-r--r--src/library/scala/collection/mutable/ArraySeq.scala7
-rw-r--r--src/library/scala/collection/mutable/ArrayStack.scala5
-rw-r--r--src/library/scala/collection/mutable/BufferLike.scala14
-rw-r--r--src/library/scala/collection/mutable/Builder.scala23
-rw-r--r--src/library/scala/collection/mutable/Cloneable.scala2
-rw-r--r--src/library/scala/collection/mutable/DoubleLinkedList.scala7
-rw-r--r--src/library/scala/collection/mutable/DoubleLinkedListLike.scala2
-rw-r--r--src/library/scala/collection/mutable/FlatHashTable.scala20
-rw-r--r--src/library/scala/collection/mutable/HashMap.scala33
-rw-r--r--src/library/scala/collection/mutable/HashSet.scala8
-rw-r--r--src/library/scala/collection/mutable/HashTable.scala62
-rw-r--r--src/library/scala/collection/mutable/ImmutableMapAdaptor.scala8
-rw-r--r--src/library/scala/collection/mutable/IndexedSeqLike.scala3
-rwxr-xr-xsrc/library/scala/collection/mutable/IndexedSeqOptimized.scala2
-rw-r--r--src/library/scala/collection/mutable/IndexedSeqView.scala2
-rw-r--r--src/library/scala/collection/mutable/LinkedHashMap.scala43
-rw-r--r--src/library/scala/collection/mutable/LinkedHashSet.scala87
-rw-r--r--src/library/scala/collection/mutable/LinkedListLike.scala12
-rw-r--r--src/library/scala/collection/mutable/Map.scala2
-rw-r--r--src/library/scala/collection/mutable/MapLike.scala2
-rw-r--r--src/library/scala/collection/mutable/MutableList.scala7
-rw-r--r--src/library/scala/collection/mutable/ObservableBuffer.scala6
-rw-r--r--src/library/scala/collection/mutable/OpenHashMap.scala5
-rw-r--r--src/library/scala/collection/mutable/PriorityQueue.scala22
-rw-r--r--src/library/scala/collection/mutable/Queue.scala18
-rw-r--r--src/library/scala/collection/mutable/ResizableArray.scala7
-rw-r--r--src/library/scala/collection/mutable/SetBuilder.scala2
-rw-r--r--src/library/scala/collection/mutable/SetLike.scala2
-rw-r--r--src/library/scala/collection/mutable/SortedSet.scala2
-rw-r--r--src/library/scala/collection/mutable/Stack.scala6
-rw-r--r--src/library/scala/collection/mutable/StringBuilder.scala2
-rw-r--r--src/library/scala/collection/mutable/Subscriber.scala2
-rw-r--r--src/library/scala/collection/mutable/SynchronizedMap.scala8
-rw-r--r--src/library/scala/collection/mutable/TreeSet.scala2
-rw-r--r--src/library/scala/collection/mutable/UnrolledBuffer.scala24
-rw-r--r--src/library/scala/collection/mutable/WrappedArray.scala2
-rw-r--r--src/library/scala/collection/parallel/ParIterableLike.scala49
-rw-r--r--src/library/scala/collection/parallel/ParIterableViewLike.scala2
-rw-r--r--src/library/scala/collection/parallel/ParMap.scala37
-rw-r--r--src/library/scala/collection/parallel/ParMapLike.scala2
-rw-r--r--src/library/scala/collection/parallel/ParSeqLike.scala3
-rw-r--r--src/library/scala/collection/parallel/RemainsIterator.scala2
-rw-r--r--src/library/scala/collection/parallel/Tasks.scala18
-rw-r--r--src/library/scala/collection/parallel/immutable/ParHashMap.scala17
-rw-r--r--src/library/scala/collection/parallel/immutable/ParHashSet.scala6
-rw-r--r--src/library/scala/collection/parallel/immutable/ParIterable.scala6
-rw-r--r--src/library/scala/collection/parallel/immutable/ParMap.scala10
-rw-r--r--src/library/scala/collection/parallel/immutable/ParSeq.scala6
-rw-r--r--src/library/scala/collection/parallel/immutable/ParSet.scala4
-rw-r--r--src/library/scala/collection/parallel/mutable/LazyCombiner.scala2
-rw-r--r--src/library/scala/collection/parallel/mutable/ParArray.scala5
-rw-r--r--src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala6
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashMap.scala51
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashSet.scala32
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashTable.scala8
-rw-r--r--src/library/scala/collection/parallel/mutable/ParIterable.scala6
-rw-r--r--src/library/scala/collection/parallel/mutable/ParMap.scala18
-rw-r--r--src/library/scala/collection/parallel/mutable/ParMapLike.scala16
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSeq.scala8
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSet.scala8
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSetLike.scala4
-rw-r--r--src/library/scala/collection/parallel/mutable/ParTrieMap.scala4
-rw-r--r--src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala2
-rw-r--r--src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala14
-rw-r--r--src/library/scala/collection/parallel/package.scala19
-rw-r--r--src/library/scala/compat/Platform.scala2
-rw-r--r--src/library/scala/concurrent/Awaitable.scala25
-rw-r--r--src/library/scala/concurrent/BlockContext.scala3
-rw-r--r--src/library/scala/concurrent/ExecutionContext.scala1
-rw-r--r--src/library/scala/concurrent/Future.scala17
-rw-r--r--src/library/scala/concurrent/FutureTaskRunner.scala2
-rw-r--r--src/library/scala/concurrent/JavaConversions.scala2
-rw-r--r--src/library/scala/concurrent/TaskRunner.scala2
-rw-r--r--src/library/scala/concurrent/ThreadPoolRunner.scala2
-rw-r--r--src/library/scala/concurrent/ThreadRunner.scala2
-rw-r--r--src/library/scala/concurrent/duration/Deadline.scala81
-rw-r--r--src/library/scala/concurrent/duration/Duration.scala698
-rw-r--r--src/library/scala/concurrent/duration/DurationConversions.scala92
-rw-r--r--src/library/scala/concurrent/duration/package.scala75
-rw-r--r--src/library/scala/concurrent/impl/ExecutionContextImpl.scala1
-rw-r--r--src/library/scala/concurrent/impl/Promise.scala56
-rw-r--r--src/library/scala/concurrent/package.scala35
-rw-r--r--src/library/scala/concurrent/util/Duration.scala537
-rw-r--r--src/library/scala/concurrent/util/duration/Classifier.scala9
-rw-r--r--src/library/scala/concurrent/util/duration/IntMult.scala18
-rw-r--r--src/library/scala/concurrent/util/duration/package.scala31
-rw-r--r--src/library/scala/deprecated.scala4
-rw-r--r--src/library/scala/deprecatedInheritance.scala22
-rw-r--r--src/library/scala/deprecatedName.scala4
-rw-r--r--src/library/scala/deprecatedOverriding.scala21
-rw-r--r--src/library/scala/inline.scala2
-rw-r--r--src/library/scala/io/BytePickle.scala1
-rw-r--r--src/library/scala/io/Codec.scala6
-rw-r--r--src/library/scala/io/Position.scala3
-rw-r--r--src/library/scala/io/UTF8Codec.scala1
-rw-r--r--src/library/scala/language.scala22
-rw-r--r--src/library/scala/languageFeature.scala9
-rw-r--r--src/library/scala/math/BigDecimal.scala5
-rw-r--r--src/library/scala/math/BigInt.scala7
-rw-r--r--src/library/scala/math/Fractional.scala4
-rw-r--r--src/library/scala/math/Integral.scala4
-rw-r--r--src/library/scala/math/Numeric.scala2
-rw-r--r--src/library/scala/math/Ordered.scala2
-rw-r--r--src/library/scala/math/Ordering.scala7
-rw-r--r--src/library/scala/math/ScalaNumericConversions.scala12
-rw-r--r--src/library/scala/native.scala2
-rw-r--r--src/library/scala/noinline.scala2
-rw-r--r--src/library/scala/reflect/ClassTag.scala105
-rw-r--r--src/library/scala/reflect/Manifest.scala23
-rwxr-xr-xsrc/library/scala/reflect/NameTransformer.scala3
-rw-r--r--src/library/scala/reflect/ScalaLongSignature.java3
-rw-r--r--src/library/scala/reflect/ScalaSignature.java2
-rw-r--r--src/library/scala/reflect/base/AnnotationInfos.scala44
-rw-r--r--src/library/scala/reflect/base/Attachments.scala42
-rw-r--r--src/library/scala/reflect/base/Base.scala778
-rw-r--r--src/library/scala/reflect/base/BuildUtils.scala90
-rw-r--r--src/library/scala/reflect/base/Constants.scala20
-rw-r--r--src/library/scala/reflect/base/Exprs.scala79
-rw-r--r--src/library/scala/reflect/base/FlagSets.scala23
-rw-r--r--src/library/scala/reflect/base/Mirrors.scala12
-rw-r--r--src/library/scala/reflect/base/Names.scala58
-rw-r--r--src/library/scala/reflect/base/Positions.scala22
-rw-r--r--src/library/scala/reflect/base/StandardDefinitions.scala74
-rw-r--r--src/library/scala/reflect/base/StandardNames.scala34
-rw-r--r--src/library/scala/reflect/base/Symbols.scala292
-rw-r--r--src/library/scala/reflect/base/TagInterop.scala18
-rw-r--r--src/library/scala/reflect/base/TreeCreator.scala6
-rw-r--r--src/library/scala/reflect/base/Trees.scala1459
-rw-r--r--src/library/scala/reflect/base/TypeCreator.scala6
-rw-r--r--src/library/scala/reflect/base/TypeTags.scala274
-rw-r--r--src/library/scala/reflect/base/Types.scala426
-rw-r--r--src/library/scala/reflect/base/Universe.scala66
-rw-r--r--src/library/scala/reflect/macros/internal/macroImpl.scala2
-rw-r--r--src/library/scala/reflect/macros/internal/package.scala15
-rw-r--r--src/library/scala/reflect/package.scala13
-rw-r--r--src/library/scala/remote.scala2
-rw-r--r--src/library/scala/runtime/AbstractPartialFunction.scala40
-rw-r--r--src/library/scala/runtime/RichBoolean.scala4
-rw-r--r--src/library/scala/runtime/RichByte.scala5
-rw-r--r--src/library/scala/runtime/RichChar.scala5
-rw-r--r--src/library/scala/runtime/RichDouble.scala9
-rw-r--r--src/library/scala/runtime/RichException.scala2
-rw-r--r--src/library/scala/runtime/RichFloat.scala9
-rw-r--r--src/library/scala/runtime/RichInt.scala4
-rw-r--r--src/library/scala/runtime/RichLong.scala5
-rw-r--r--src/library/scala/runtime/RichShort.scala5
-rw-r--r--src/library/scala/runtime/ScalaNumberProxy.scala21
-rw-r--r--src/library/scala/runtime/ScalaRunTime.scala78
-rw-r--r--src/library/scala/runtime/SeqCharSequence.scala5
-rw-r--r--src/library/scala/runtime/StringAdd.scala10
-rw-r--r--src/library/scala/runtime/StringFormat.scala10
-rw-r--r--src/library/scala/runtime/Tuple2Zipped.scala51
-rw-r--r--src/library/scala/runtime/Tuple3Zipped.scala64
-rw-r--r--src/library/scala/runtime/WorksheetSupport.scala6
-rw-r--r--src/library/scala/specialized.scala4
-rw-r--r--src/library/scala/sys/BooleanProp.scala2
-rw-r--r--src/library/scala/sys/Prop.scala3
-rw-r--r--src/library/scala/sys/SystemProperties.scala2
-rw-r--r--src/library/scala/sys/package.scala4
-rw-r--r--src/library/scala/sys/process/BasicIO.scala2
-rw-r--r--src/library/scala/sys/process/Process.scala2
-rw-r--r--src/library/scala/sys/process/ProcessBuilderImpl.scala4
-rw-r--r--src/library/scala/sys/process/ProcessImpl.scala2
-rw-r--r--src/library/scala/sys/process/package.scala2
-rw-r--r--src/library/scala/testing/Benchmark.scala3
-rw-r--r--src/library/scala/testing/Show.scala1
-rw-r--r--src/library/scala/throws.scala2
-rw-r--r--src/library/scala/transient.scala4
-rw-r--r--src/library/scala/unchecked.scala2
-rw-r--r--src/library/scala/util/Either.scala2
-rw-r--r--src/library/scala/util/Marshal.scala2
-rw-r--r--src/library/scala/util/MurmurHash.scala2
-rw-r--r--src/library/scala/util/Random.scala6
-rw-r--r--src/library/scala/util/Sorting.scala5
-rw-r--r--src/library/scala/util/Try.scala67
-rw-r--r--src/library/scala/util/automata/SubsetConstruction.scala4
-rw-r--r--src/library/scala/util/control/Exception.scala51
-rw-r--r--src/library/scala/util/control/NoStackTrace.scala3
-rw-r--r--src/library/scala/util/hashing/Hashing.scala15
-rw-r--r--src/library/scala/util/hashing/MurmurHash3.scala46
-rw-r--r--src/library/scala/util/logging/ConsoleLogger.scala1
-rw-r--r--src/library/scala/util/logging/Logged.scala1
-rw-r--r--src/library/scala/util/parsing/ast/Binders.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/ImplicitConversions.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/JavaTokenParsers.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/PackratParsers.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/Parsers.scala24
-rw-r--r--src/library/scala/util/parsing/combinator/RegexParsers.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/testing/RegexTest.scala2
-rw-r--r--src/library/scala/util/parsing/input/OffsetPosition.scala2
-rw-r--r--src/library/scala/volatile.scala4
-rwxr-xr-xsrc/library/scala/xml/Elem.scala2
-rw-r--r--src/library/scala/xml/Equality.scala2
-rw-r--r--src/library/scala/xml/MetaData.scala2
-rw-r--r--src/library/scala/xml/NodeSeq.scala4
-rwxr-xr-xsrc/library/scala/xml/Utility.scala2
-rw-r--r--src/library/scala/xml/dtd/ContentModel.scala6
-rw-r--r--src/library/scala/xml/dtd/ContentModelParser.scala15
-rw-r--r--src/library/scala/xml/dtd/Scanner.scala4
-rw-r--r--src/library/scala/xml/factory/NodeFactory.scala2
-rw-r--r--src/library/scala/xml/include/sax/XIncluder.scala2
-rwxr-xr-xsrc/library/scala/xml/parsing/MarkupParser.scala4
-rw-r--r--src/library/scala/xml/parsing/MarkupParserCommon.scala4
-rwxr-xr-xsrc/library/scala/xml/pull/XMLEventReader.scala2
-rw-r--r--src/partest/scala/tools/partest/CompilerTest.scala4
-rw-r--r--src/partest/scala/tools/partest/DirectTest.scala2
-rw-r--r--src/partest/scala/tools/partest/MemoryTest.scala38
-rw-r--r--src/partest/scala/tools/partest/PartestDefaults.scala2
-rw-r--r--src/partest/scala/tools/partest/TestUtil.scala12
-rw-r--r--src/partest/scala/tools/partest/nest/CompileManager.scala13
-rw-r--r--src/partest/scala/tools/partest/nest/DirectRunner.scala2
-rw-r--r--src/partest/scala/tools/partest/nest/FileManager.scala2
-rw-r--r--src/partest/scala/tools/partest/nest/RunnerManager.scala30
-rw-r--r--src/partest/scala/tools/partest/nest/SBTRunner.scala12
-rw-r--r--src/partest/scala/tools/partest/package.scala58
-rw-r--r--src/reflect/scala/reflect/api/AnnotationInfos.scala27
-rw-r--r--src/reflect/scala/reflect/api/Annotations.scala124
-rw-r--r--src/reflect/scala/reflect/api/Attachments.scala50
-rw-r--r--src/reflect/scala/reflect/api/BuildUtils.scala75
-rw-r--r--src/reflect/scala/reflect/api/Constants.scala27
-rw-r--r--src/reflect/scala/reflect/api/Exprs.scala143
-rw-r--r--src/reflect/scala/reflect/api/FlagSets.scala84
-rw-r--r--src/reflect/scala/reflect/api/FrontEnds.scala72
-rw-r--r--src/reflect/scala/reflect/api/Importers.scala2
-rw-r--r--src/reflect/scala/reflect/api/JavaUniverse.scala31
-rw-r--r--src/reflect/scala/reflect/api/Mirror.scala (renamed from src/library/scala/reflect/base/MirrorOf.scala)29
-rw-r--r--src/reflect/scala/reflect/api/Mirrors.scala42
-rw-r--r--src/reflect/scala/reflect/api/Names.scala65
-rw-r--r--src/reflect/scala/reflect/api/Position.scala167
-rw-r--r--src/reflect/scala/reflect/api/Positions.scala194
-rw-r--r--src/reflect/scala/reflect/api/Printers.scala2
-rw-r--r--src/reflect/scala/reflect/api/Scopes.scala (renamed from src/library/scala/reflect/base/Scopes.scala)31
-rw-r--r--src/reflect/scala/reflect/api/StandardDefinitions.scala99
-rw-r--r--src/reflect/scala/reflect/api/StandardNames.scala26
-rw-r--r--src/reflect/scala/reflect/api/Symbols.scala392
-rw-r--r--src/reflect/scala/reflect/api/TagInterop.scala53
-rw-r--r--src/reflect/scala/reflect/api/TreeCreator.scala26
-rw-r--r--src/reflect/scala/reflect/api/Trees.scala1428
-rw-r--r--src/reflect/scala/reflect/api/TypeCreator.scala26
-rw-r--r--src/reflect/scala/reflect/api/TypeTags.scala352
-rw-r--r--src/reflect/scala/reflect/api/Types.scala470
-rw-r--r--src/reflect/scala/reflect/api/Universe.scala75
-rw-r--r--src/reflect/scala/reflect/api/package.scala84
-rw-r--r--src/reflect/scala/reflect/internal/AbstractFileApi.scala7
-rw-r--r--src/reflect/scala/reflect/internal/AnnotationInfos.scala102
-rw-r--r--src/reflect/scala/reflect/internal/BaseTypeSeqs.scala8
-rw-r--r--src/reflect/scala/reflect/internal/BuildUtils.scala17
-rw-r--r--src/reflect/scala/reflect/internal/Chars.scala6
-rw-r--r--src/reflect/scala/reflect/internal/ClassfileConstants.scala2
-rw-r--r--src/reflect/scala/reflect/internal/Constants.scala43
-rw-r--r--src/reflect/scala/reflect/internal/Definitions.scala219
-rw-r--r--src/reflect/scala/reflect/internal/FlagSets.scala13
-rw-r--r--src/reflect/scala/reflect/internal/Flags.scala28
-rw-r--r--src/reflect/scala/reflect/internal/HasFlags.scala10
-rw-r--r--src/reflect/scala/reflect/internal/Importers.scala193
-rw-r--r--src/reflect/scala/reflect/internal/Mirrors.scala17
-rw-r--r--src/reflect/scala/reflect/internal/Names.scala25
-rw-r--r--src/reflect/scala/reflect/internal/Printers.scala20
-rw-r--r--src/reflect/scala/reflect/internal/Required.scala2
-rw-r--r--src/reflect/scala/reflect/internal/Scopes.scala11
-rw-r--r--src/reflect/scala/reflect/internal/StdAttachments.scala4
-rw-r--r--src/reflect/scala/reflect/internal/StdCreators.scala12
-rw-r--r--src/reflect/scala/reflect/internal/StdNames.scala17
-rw-r--r--src/reflect/scala/reflect/internal/SymbolTable.scala12
-rw-r--r--src/reflect/scala/reflect/internal/Symbols.scala374
-rw-r--r--src/reflect/scala/reflect/internal/TreeGen.scala5
-rw-r--r--src/reflect/scala/reflect/internal/TreeInfo.scala45
-rw-r--r--src/reflect/scala/reflect/internal/Trees.scala219
-rw-r--r--src/reflect/scala/reflect/internal/Types.scala785
-rw-r--r--src/reflect/scala/reflect/internal/package.scala6
-rw-r--r--src/reflect/scala/reflect/internal/pickling/UnPickler.scala33
-rw-r--r--src/reflect/scala/reflect/internal/transform/Erasure.scala24
-rw-r--r--src/reflect/scala/reflect/internal/transform/Transforms.scala4
-rw-r--r--src/reflect/scala/reflect/internal/util/Collections.scala4
-rw-r--r--src/reflect/scala/reflect/internal/util/HashSet.scala2
-rw-r--r--src/reflect/scala/reflect/internal/util/Position.scala19
-rw-r--r--src/reflect/scala/reflect/internal/util/SourceFile.scala22
-rw-r--r--src/reflect/scala/reflect/internal/util/Statistics.scala24
-rw-r--r--src/reflect/scala/reflect/internal/util/TableDef.scala2
-rw-r--r--src/reflect/scala/reflect/internal/util/ThreeValues.scala14
-rw-r--r--src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala6
-rw-r--r--src/reflect/scala/reflect/io/AbstractFile.scala (renamed from src/reflect/scala/tools/nsc/io/AbstractFile.scala)10
-rw-r--r--src/reflect/scala/reflect/io/Directory.scala (renamed from src/reflect/scala/tools/nsc/io/Directory.scala)8
-rw-r--r--src/reflect/scala/reflect/io/File.scala (renamed from src/reflect/scala/tools/nsc/io/File.scala)10
-rw-r--r--src/reflect/scala/reflect/io/FileOperationException.scala (renamed from src/reflect/scala/tools/nsc/io/FileOperationException.scala)4
-rw-r--r--src/reflect/scala/reflect/io/NoAbstractFile.scala (renamed from src/reflect/scala/tools/nsc/io/NoAbstractFile.scala)4
-rw-r--r--src/reflect/scala/reflect/io/Path.scala (renamed from src/reflect/scala/tools/nsc/io/Path.scala)11
-rw-r--r--src/reflect/scala/reflect/io/PlainFile.scala (renamed from src/reflect/scala/tools/nsc/io/PlainFile.scala)8
-rw-r--r--src/reflect/scala/reflect/io/Streamable.scala (renamed from src/reflect/scala/tools/nsc/io/Streamable.scala)11
-rw-r--r--src/reflect/scala/reflect/io/VirtualDirectory.scala (renamed from src/reflect/scala/tools/nsc/io/VirtualDirectory.scala)4
-rw-r--r--src/reflect/scala/reflect/io/VirtualFile.scala (renamed from src/reflect/scala/tools/nsc/io/VirtualFile.scala)6
-rw-r--r--src/reflect/scala/reflect/io/ZipArchive.scala (renamed from src/reflect/scala/tools/nsc/io/ZipArchive.scala)44
-rw-r--r--src/reflect/scala/reflect/macros/Aliases.scala13
-rw-r--r--src/reflect/scala/reflect/macros/CapturedVariables.scala21
-rw-r--r--src/reflect/scala/reflect/macros/Context.scala8
-rw-r--r--src/reflect/scala/reflect/macros/Enclosures.scala4
-rw-r--r--src/reflect/scala/reflect/macros/Exprs.scala8
-rw-r--r--src/reflect/scala/reflect/macros/FrontEnds.scala14
-rw-r--r--src/reflect/scala/reflect/macros/Infrastructure.scala102
-rw-r--r--src/reflect/scala/reflect/macros/Parsers.scala14
-rw-r--r--src/reflect/scala/reflect/macros/Reifiers.scala16
-rw-r--r--src/reflect/scala/reflect/macros/Settings.scala40
-rw-r--r--src/reflect/scala/reflect/macros/TreeBuilder.scala7
-rw-r--r--src/reflect/scala/reflect/macros/TypeTags.scala9
-rw-r--r--src/reflect/scala/reflect/macros/Universe.scala78
-rw-r--r--src/reflect/scala/reflect/macros/package.scala6
-rw-r--r--src/reflect/scala/reflect/runtime/AbstractFile.scala7
-rw-r--r--src/reflect/scala/reflect/runtime/JavaMirrors.scala290
-rw-r--r--src/reflect/scala/reflect/runtime/JavaUniverse.scala4
-rw-r--r--src/reflect/scala/reflect/runtime/ReflectionUtils.scala4
-rw-r--r--src/reflect/scala/reflect/runtime/SymbolLoaders.scala12
-rw-r--r--src/reflect/scala/reflect/runtime/SymbolTable.scala28
-rw-r--r--src/reflect/scala/reflect/runtime/SynchronizedOps.scala1
-rw-r--r--src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala14
-rw-r--r--src/reflect/scala/reflect/runtime/SynchronizedTypes.scala35
-rw-r--r--src/reflect/scala/reflect/runtime/TwoWayCache.scala2
-rw-r--r--src/reflect/scala/reflect/runtime/package.scala7
-rw-r--r--src/scalacheck/org/scalacheck/Commands.scala4
-rw-r--r--src/scalacheck/org/scalacheck/Pretty.scala2
-rw-r--r--src/scalacheck/org/scalacheck/util/CmdLineParser.scala2
-rw-r--r--src/scalap/scala/tools/scalap/CodeWriter.scala2
-rw-r--r--src/scalap/scala/tools/scalap/Main.scala17
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala2
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/package.scala8
-rw-r--r--src/swing/scala/swing/package.scala4
-rw-r--r--test/ant/test-basic/build.xml33
-rw-r--r--test/ant/test-basic/src/test-1.scala0
-rwxr-xr-xtest/disabled-windows/script/utf8.bat22
-rwxr-xr-xtest/disabled/script/fact.bat34
-rwxr-xr-xtest/disabled/script/second.bat6
-rwxr-xr-xtest/disabled/script/t1015.bat24
-rwxr-xr-xtest/disabled/script/t1017.bat30
-rw-r--r--test/files/buildmanager/t2650_3/t2650_3.check1
-rw-r--r--test/files/buildmanager/t2650_4/t2650_4.check1
-rw-r--r--test/files/disabled/A.scala (renamed from test/files/buildmanager/overloaded_1/A.scala)2
-rw-r--r--test/files/disabled/overloaded_1.check (renamed from test/files/buildmanager/overloaded_1/overloaded_1.check)0
-rw-r--r--test/files/disabled/overloaded_1.test (renamed from test/files/buildmanager/overloaded_1/overloaded_1.test)0
-rw-r--r--test/files/disabled/t4245/A.scala (renamed from test/files/buildmanager/t4245/A.scala)0
-rw-r--r--test/files/disabled/t4245/t4245.check (renamed from test/files/buildmanager/t4245/t4245.check)0
-rw-r--r--test/files/disabled/t4245/t4245.test (renamed from test/files/buildmanager/t4245/t4245.test)0
-rw-r--r--test/files/jvm/actmig-PinS.scala6
-rw-r--r--test/files/jvm/actmig-PinS_1.scala7
-rw-r--r--test/files/jvm/actmig-PinS_2.scala9
-rw-r--r--test/files/jvm/actmig-PinS_3.scala11
-rw-r--r--test/files/jvm/actmig-hierarchy.scala47
-rw-r--r--test/files/jvm/actmig-hierarchy_1.scala45
-rw-r--r--test/files/jvm/actmig-instantiation.scala96
-rw-r--r--test/files/jvm/actmig-loop-react.check1
-rw-r--r--test/files/jvm/actmig-loop-react.scala196
-rw-r--r--test/files/jvm/actmig-public-methods.check4
-rw-r--r--test/files/jvm/actmig-public-methods.scala74
-rw-r--r--test/files/jvm/actmig-public-methods_1.check4
-rw-r--r--test/files/jvm/actmig-public-methods_1.scala90
-rw-r--r--test/files/jvm/actmig-react-receive.scala11
-rw-r--r--test/files/jvm/actmig-react-within.check2
-rw-r--r--test/files/jvm/actmig-react-within.scala48
-rw-r--r--test/files/jvm/actmig-receive.check27
-rw-r--r--test/files/jvm/actmig-receive.scala120
-rw-r--r--test/files/jvm/duration-java.check364
-rw-r--r--test/files/jvm/duration-java/Test.java46
-rw-r--r--test/files/jvm/duration-tck.scala194
-rw-r--r--test/files/jvm/future-spec/FutureTests.scala4
-rw-r--r--test/files/jvm/future-spec/PromiseTests.scala4
-rw-r--r--test/files/jvm/future-spec/TryTests.scala14
-rw-r--r--test/files/jvm/future-spec/main.scala2
-rw-r--r--test/files/jvm/manifests-new.check116
-rw-r--r--test/files/jvm/manifests-new.scala2
-rwxr-xr-xtest/files/jvm/mkLibNatives.bat140
-rw-r--r--test/files/jvm/scala-concurrent-tck.scala31
-rw-r--r--test/files/jvm/serialization-new.check24
-rw-r--r--test/files/jvm/serialization-new.scala24
-rw-r--r--test/files/jvm/serialization.check24
-rw-r--r--test/files/jvm/serialization.scala24
-rw-r--r--test/files/lib/javac-artifacts.jar.desired.sha11
-rw-r--r--test/files/neg/any-vs-anyref.check64
-rw-r--r--test/files/neg/any-vs-anyref.scala29
-rw-r--r--test/files/neg/applydynamic_sip.check50
-rw-r--r--test/files/neg/applydynamic_sip.flags1
-rw-r--r--test/files/neg/applydynamic_sip.scala25
-rw-r--r--test/files/neg/checksensible.check200
-rw-r--r--test/files/neg/classmanifests_new_deprecations.check122
-rw-r--r--test/files/neg/classtags_contextbound_a.check8
-rw-r--r--test/files/neg/classtags_contextbound_b.check8
-rw-r--r--test/files/neg/classtags_contextbound_c.check8
-rw-r--r--test/files/neg/classtags_dont_use_typetags.check8
-rw-r--r--test/files/neg/deadline-inf-illegal.check15
-rw-r--r--test/files/neg/deadline-inf-illegal.scala8
-rw-r--r--test/files/neg/implicit-shadow.check11
-rw-r--r--test/files/neg/implicit-shadow.flags1
-rw-r--r--test/files/neg/implicit-shadow.scala13
-rw-r--r--test/files/neg/interop_abstypetags_arenot_classmanifests.check8
-rw-r--r--test/files/neg/interop_abstypetags_arenot_classmanifests.scala8
-rw-r--r--test/files/neg/interop_abstypetags_arenot_classtags.check8
-rw-r--r--test/files/neg/interop_abstypetags_arenot_classtags.scala8
-rw-r--r--test/files/neg/interop_abstypetags_arenot_manifests.check8
-rw-r--r--test/files/neg/interop_abstypetags_arenot_manifests.scala8
-rw-r--r--test/files/neg/interop_classmanifests_arenot_typetags.check8
-rw-r--r--test/files/neg/interop_classtags_arenot_manifests.check8
-rw-r--r--test/files/neg/interop_typetags_arenot_classmanifests.check8
-rw-r--r--test/files/neg/interop_typetags_arenot_classtags.check8
-rw-r--r--test/files/neg/interop_typetags_without_classtags_arenot_manifests.check12
-rw-r--r--test/files/neg/javaConversions-2.10-ambiguity.check6
-rw-r--r--test/files/neg/javaConversions-2.10-ambiguity.scala10
-rw-r--r--test/files/neg/macro-basic-mamdmi.check8
-rw-r--r--test/files/neg/macro-deprecate-idents.check104
-rw-r--r--test/files/neg/macro-invalidimpl-a.check8
-rw-r--r--test/files/neg/macro-invalidimpl-b.check8
-rw-r--r--test/files/neg/macro-invalidimpl-c.check8
-rw-r--r--test/files/neg/macro-invalidimpl-d.check8
-rw-r--r--test/files/neg/macro-invalidimpl-e.check26
-rw-r--r--test/files/neg/macro-invalidimpl-f.check14
-rw-r--r--test/files/neg/macro-invalidimpl-g.check14
-rw-r--r--test/files/neg/macro-invalidimpl-h.check8
-rw-r--r--test/files/neg/macro-invalidimpl-i.check4
-rw-r--r--test/files/neg/macro-invalidimpl-i.flags (renamed from test/files/neg/macro-invalidusage-badbounds.flags)0
-rw-r--r--test/files/neg/macro-invalidimpl-i/Impls_1.scala7
-rw-r--r--test/files/neg/macro-invalidimpl-i/Macros_Test_2.scala5
-rw-r--r--test/files/neg/macro-invalidret-nontree.check14
-rw-r--r--test/files/neg/macro-invalidret-nonuniversetree.check14
-rw-r--r--test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala3
-rw-r--r--test/files/neg/macro-invalidshape-a.check11
-rw-r--r--test/files/neg/macro-invalidshape-b.check11
-rw-r--r--test/files/neg/macro-invalidshape-c.check15
-rw-r--r--test/files/neg/macro-invalidshape-d.check16
-rw-r--r--test/files/neg/macro-invalidsig-context-bounds.check11
-rw-r--r--test/files/neg/macro-invalidsig-context-bounds/Impls_1.scala2
-rw-r--r--test/files/neg/macro-invalidsig-ctx-badargc.check14
-rw-r--r--test/files/neg/macro-invalidsig-ctx-badtype.check14
-rw-r--r--test/files/neg/macro-invalidsig-ctx-badvarargs.check14
-rw-r--r--test/files/neg/macro-invalidsig-ctx-noctx.check14
-rw-r--r--test/files/neg/macro-invalidsig-implicit-params.check11
-rw-r--r--test/files/neg/macro-invalidsig-implicit-params/Impls_Macros_1.scala4
-rw-r--r--test/files/neg/macro-invalidsig-params-badargc.check14
-rw-r--r--test/files/neg/macro-invalidsig-params-badtype.check14
-rw-r--r--test/files/neg/macro-invalidsig-params-badvarargs.check14
-rw-r--r--test/files/neg/macro-invalidsig-params-namemismatch.check14
-rw-r--r--test/files/neg/macro-invalidsig-tparams-badtype.check14
-rw-r--r--test/files/neg/macro-invalidsig-tparams-bounds-a.check8
-rw-r--r--test/files/neg/macro-invalidsig-tparams-bounds-b.check8
-rw-r--r--test/files/neg/macro-invalidsig-tparams-notparams-a.check8
-rw-r--r--test/files/neg/macro-invalidsig-tparams-notparams-a/Impls_1.scala2
-rw-r--r--test/files/neg/macro-invalidsig-tparams-notparams-b.check8
-rw-r--r--test/files/neg/macro-invalidsig-tparams-notparams-b/Impls_1.scala6
-rw-r--r--test/files/neg/macro-invalidsig-tparams-notparams-c.check8
-rw-r--r--test/files/neg/macro-invalidsig-tparams-notparams-c/Impls_1.scala6
-rw-r--r--test/files/neg/macro-invalidusage-badargs.check12
-rw-r--r--test/files/neg/macro-invalidusage-badbounds-a.check (renamed from test/files/neg/macro-invalidusage-badbounds.check)8
-rw-r--r--test/files/neg/macro-invalidusage-badbounds-a.flags (renamed from test/files/run/macro-def-path-dependent-d.flags)0
-rw-r--r--test/files/neg/macro-invalidusage-badbounds-a/Impls_1.scala5
-rw-r--r--test/files/neg/macro-invalidusage-badbounds-a/Macros_Test_2.scala (renamed from test/files/neg/macro-invalidusage-badbounds/Macros_Test_2.scala)0
-rw-r--r--test/files/neg/macro-invalidusage-badtargs.check8
-rw-r--r--test/files/neg/macro-invalidusage-methodvaluesyntax.check8
-rw-r--r--test/files/neg/macro-noexpand.check8
-rw-r--r--test/files/neg/macro-nontypeablebody.check8
-rw-r--r--test/files/neg/macro-override-macro-overrides-abstract-method-a.check10
-rw-r--r--test/files/neg/macro-override-macro-overrides-abstract-method-b.check10
-rw-r--r--test/files/neg/macro-override-method-overrides-macro.check10
-rw-r--r--test/files/neg/macro-reify-typetag-hktypeparams-notags.check14
-rw-r--r--test/files/neg/macro-reify-typetag-typeparams-notags.check14
-rw-r--r--test/files/neg/macro-reify-typetag-useabstypetag.check14
-rw-r--r--test/files/neg/macro-reify-typetag-useabstypetag/Test.scala2
-rw-r--r--test/files/neg/macro-without-xmacros-a.check34
-rw-r--r--test/files/neg/macro-without-xmacros-b.check34
-rw-r--r--test/files/neg/names-defaults-neg.check2
-rw-r--r--test/files/neg/newpat_unreachable.check27
-rw-r--r--test/files/neg/newpat_unreachable.flags1
-rw-r--r--test/files/neg/newpat_unreachable.scala29
-rw-r--r--test/files/neg/no-implicit-to-anyref.check28
-rw-r--r--test/files/neg/no-implicit-to-anyref.scala29
-rw-r--r--test/files/neg/not-possible-cause.check9
-rw-r--r--test/files/neg/not-possible-cause.scala3
-rw-r--r--test/files/neg/override.check2
-rwxr-xr-xtest/files/neg/override.scala2
-rw-r--r--test/files/neg/pat_unreachable.check8
-rw-r--r--test/files/neg/pat_unreachable.scala8
-rw-r--r--test/files/neg/reify_ann2b.check8
-rw-r--r--test/files/neg/reify_ann2b.scala2
-rw-r--r--test/files/neg/reify_metalevel_breach_+0_refers_to_1.check14
-rw-r--r--test/files/neg/reify_metalevel_breach_+0_refers_to_1.scala2
-rw-r--r--test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.check14
-rw-r--r--test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.scala2
-rw-r--r--test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.check14
-rw-r--r--test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.scala2
-rw-r--r--test/files/neg/reify_metalevel_breach_-1_refers_to_1.check14
-rw-r--r--test/files/neg/reify_metalevel_breach_-1_refers_to_1.scala2
-rw-r--r--test/files/neg/reify_nested_inner_refers_to_local.check14
-rw-r--r--test/files/neg/reify_nested_inner_refers_to_local.scala2
-rw-r--r--test/files/neg/static-annot.check19
-rw-r--r--test/files/neg/static-annot.scala47
-rw-r--r--test/files/neg/structural.check18
-rw-r--r--test/files/neg/t0565.check2
-rw-r--r--test/files/neg/t2144.check2
-rw-r--r--test/files/neg/t2296a.check5
-rw-r--r--test/files/neg/t2296a/J.java (renamed from test/files/run/t2296a/J.java)0
-rw-r--r--test/files/neg/t2296a/S.scala (renamed from test/files/run/t2296a/S.scala)0
-rw-r--r--test/files/neg/t2296b.check5
-rw-r--r--test/files/neg/t2296b/J_1.java (renamed from test/files/run/t2296b/J_1.java)0
-rw-r--r--test/files/neg/t2296b/S_2.scala (renamed from test/files/run/t2296b/S_2.scala)0
-rw-r--r--test/files/neg/t2775.check8
-rw-r--r--test/files/neg/t3507-old.check8
-rw-r--r--test/files/neg/t3614.check4
-rw-r--r--test/files/neg/t4425.check5
-rw-r--r--test/files/neg/t4425.flags1
-rw-r--r--test/files/neg/t5031.check6
-rw-r--r--test/files/neg/t5031b.check5
-rw-r--r--test/files/neg/t5031b/a.scala3
-rw-r--r--test/files/neg/t5031b/b.scala3
-rw-r--r--test/files/neg/t5148.check5
-rw-r--r--test/files/neg/t5452-new.check16
-rw-r--r--test/files/neg/t5452-old.check16
-rw-r--r--test/files/neg/t5510.check2
-rw-r--r--test/files/neg/t5687.check8
-rw-r--r--test/files/neg/t5687.scala55
-rw-r--r--test/files/neg/t5689.check14
-rw-r--r--test/files/neg/t5692a.check4
-rw-r--r--test/files/neg/t5692a.flags1
-rw-r--r--test/files/neg/t5692a/Macros_1.scala6
-rw-r--r--test/files/neg/t5692a/Test_2.scala3
-rw-r--r--test/files/neg/t5692b.check4
-rw-r--r--test/files/neg/t5692b.flags1
-rw-r--r--test/files/neg/t5692b/Macros_1.scala6
-rw-r--r--test/files/neg/t5692b/Test_2.scala3
-rw-r--r--test/files/neg/t5878.check16
-rw-r--r--test/files/neg/t6040.check2
-rw-r--r--test/files/neg/t6048.check7
-rw-r--r--test/files/neg/t6162-inheritance.check10
-rw-r--r--test/files/neg/t6162-inheritance.flags1
-rw-r--r--test/files/neg/t6162-inheritance.scala19
-rw-r--r--test/files/neg/t6162-overriding.check7
-rw-r--r--test/files/neg/t6162-overriding.flags1
-rw-r--r--test/files/neg/t6162-overriding.scala17
-rw-r--r--test/files/neg/t6214.check4
-rw-r--r--test/files/neg/t6214.scala7
-rw-r--r--test/files/neg/t6227.check4
-rw-r--r--test/files/neg/t6227.scala6
-rw-r--r--test/files/neg/t6258.check16
-rw-r--r--test/files/neg/t6258.scala25
-rw-r--r--test/files/neg/t6260.check13
-rw-r--r--test/files/neg/t6260.scala17
-rw-r--r--test/files/neg/t6263.check9
-rw-r--r--test/files/neg/t6263.scala6
-rw-r--r--test/files/neg/t6264.check4
-rw-r--r--test/files/neg/t6264.flags1
-rw-r--r--test/files/neg/t6264.scala6
-rw-r--r--test/files/neg/t6276.check19
-rw-r--r--test/files/neg/t6276.flags1
-rw-r--r--test/files/neg/t6276.scala44
-rw-r--r--test/files/neg/t6283.check4
-rw-r--r--test/files/neg/t6283.scala1
-rw-r--r--test/files/neg/t6323a.check9
-rw-r--r--test/files/neg/t6323a.flags1
-rw-r--r--test/files/neg/t6323a.scala21
-rw-r--r--test/files/neg/t6335.check9
-rw-r--r--test/files/neg/t6335.scala7
-rw-r--r--test/files/neg/t6336.check7
-rw-r--r--test/files/neg/t6336.scala12
-rw-r--r--test/files/neg/t6337.check7
-rw-r--r--test/files/neg/t6337.scala21
-rw-r--r--test/files/neg/t6340.check10
-rw-r--r--test/files/neg/t6340.scala21
-rw-r--r--test/files/neg/t6359.check7
-rw-r--r--test/files/neg/t6359.scala8
-rw-r--r--test/files/neg/t6385.check7
-rw-r--r--test/files/neg/t6385.scala13
-rw-r--r--test/files/neg/t900.check7
-rw-r--r--test/files/neg/unit2anyref.check2
-rw-r--r--test/files/neg/valueclasses-pavlov.check7
-rw-r--r--test/files/neg/valueclasses-pavlov.scala23
-rw-r--r--test/files/neg/wrong-args-for-none.check4
-rw-r--r--test/files/neg/wrong-args-for-none.scala6
-rw-r--r--test/files/pos/SI-5788.scala5
-rw-r--r--test/files/pos/hk-match/a.scala5
-rw-r--r--test/files/pos/hk-match/b.scala1
-rw-r--r--test/files/pos/inline-access-levels.flags1
-rw-r--r--test/files/pos/inline-access-levels/A_1.scala10
-rw-r--r--test/files/pos/inline-access-levels/Test_2.scala11
-rw-r--r--test/files/pos/javaConversions-2.10-regression.scala17
-rw-r--r--test/files/pos/specializes-sym-crash.scala26
-rw-r--r--test/files/pos/t4579.scala16
-rw-r--r--test/files/pos/t5245.scala2
-rw-r--r--test/files/pos/t5667.scala2
-rw-r--r--test/files/pos/t5756.scala6
-rw-r--r--test/files/pos/t6022b.scala20
-rw-r--r--test/files/pos/t6034.scala1
-rw-r--r--test/files/pos/t6047.scala6
-rw-r--r--test/files/pos/t6145.scala11
-rw-r--r--test/files/pos/t6184.scala7
-rw-r--r--test/files/pos/t6201.scala13
-rw-r--r--test/files/pos/t6204-a.scala9
-rw-r--r--test/files/pos/t6204-b.scala10
-rw-r--r--test/files/pos/t6205.scala18
-rw-r--r--test/files/pos/t6208.scala4
-rw-r--r--test/files/pos/t6245/Base.java5
-rw-r--r--test/files/pos/t6245/Foo.scala9
-rw-r--r--test/files/pos/t6245/Vis.java3
-rw-r--r--test/files/pos/t6259.scala47
-rw-r--r--test/files/pos/t6274.scala13
-rw-r--r--test/files/pos/t6278-synth-def.scala30
-rw-r--r--test/files/pos/t6335.scala25
-rw-r--r--test/files/pos/t6367.scala34
-rw-r--r--test/files/pos/typetags.scala26
-rw-r--r--test/files/pos/z1720.scala16
-rw-r--r--test/files/presentation/recursive-ask.check4
-rw-r--r--test/files/presentation/recursive-ask/RecursiveAsk.scala20
-rw-r--r--test/files/run/Meter.scala11
-rw-r--r--test/files/run/MeterCaseClass.scala11
-rw-r--r--test/files/run/abstypetags_core.check60
-rw-r--r--test/files/run/abstypetags_core.scala60
-rw-r--r--test/files/run/abstypetags_serialize.check4
-rw-r--r--test/files/run/abstypetags_serialize.scala9
-rw-r--r--test/files/run/applydynamic_sip.check7
-rw-r--r--test/files/run/applydynamic_sip.scala10
-rw-r--r--test/files/run/classmanifests_new_alias.check4
-rw-r--r--test/files/run/classmanifests_new_core.check4
-rw-r--r--test/files/run/classtags_contextbound.check2
-rw-r--r--test/files/run/classtags_core.check30
-rw-r--r--test/files/run/classtags_multi.check10
-rw-r--r--test/files/run/collections.check8
-rw-r--r--test/files/run/collections.scala2
-rw-r--r--test/files/run/colltest.check1
-rw-r--r--test/files/run/colltest.scala3
-rw-r--r--test/files/run/colltest1.check2
-rw-r--r--test/files/run/colltest1.scala2
-rw-r--r--test/files/run/compiler-asSeenFrom.check12
-rw-r--r--test/files/run/constant-type.check30
-rw-r--r--test/files/run/constant-type.scala17
-rw-r--r--test/files/run/dead-code-elimination.check0
-rw-r--r--test/files/run/dead-code-elimination.flags1
-rw-r--r--test/files/run/dead-code-elimination.scala33
-rw-r--r--test/files/run/dynamic-anyval.check4
-rw-r--r--test/files/run/dynamic-anyval.scala22
-rw-r--r--test/files/run/empty-array.check3
-rw-r--r--test/files/run/empty-array.scala8
-rw-r--r--test/files/run/existentials3-new.check48
-rw-r--r--test/files/run/existentials3-new.scala20
-rw-r--r--test/files/run/exprs_serialize.check4
-rw-r--r--test/files/run/exprs_serialize.scala3
-rw-r--r--test/files/run/freetypes_false_alarm1.check2
-rw-r--r--test/files/run/getClassTest-valueClass.check2
-rw-r--r--test/files/run/getClassTest-valueClass.scala10
-rw-r--r--test/files/run/inline-ex-handlers.check187
-rw-r--r--test/files/run/interop_classtags_are_classmanifests.check6
-rw-r--r--test/files/run/interop_manifests_are_abstypetags.check6
-rw-r--r--test/files/run/interop_manifests_are_abstypetags.scala10
-rw-r--r--test/files/run/interop_manifests_are_classtags.check36
-rw-r--r--test/files/run/interop_manifests_are_typetags.check6
-rw-r--r--test/files/run/interop_typetags_are_manifests.check6
-rw-r--r--test/files/run/macro-abort-fresh.check12
-rw-r--r--test/files/run/macro-abort-fresh/Test_2.scala2
-rw-r--r--test/files/run/macro-declared-in-annotation.check2
-rw-r--r--test/files/run/macro-declared-in-block.check4
-rw-r--r--test/files/run/macro-declared-in-class-class.check4
-rw-r--r--test/files/run/macro-declared-in-class-object.check4
-rw-r--r--test/files/run/macro-declared-in-class.check4
-rw-r--r--test/files/run/macro-declared-in-default-param.check10
-rw-r--r--test/files/run/macro-declared-in-implicit-class.check4
-rw-r--r--test/files/run/macro-declared-in-method.check4
-rw-r--r--test/files/run/macro-declared-in-object-class.check4
-rw-r--r--test/files/run/macro-declared-in-object-object.check4
-rw-r--r--test/files/run/macro-declared-in-object.check4
-rw-r--r--test/files/run/macro-declared-in-package-object.check4
-rw-r--r--test/files/run/macro-declared-in-refinement.check4
-rw-r--r--test/files/run/macro-declared-in-trait.check30
-rw-r--r--test/files/run/macro-def-infer-return-type-b.check12
-rw-r--r--test/files/run/macro-def-infer-return-type-b/Test_2.scala2
-rw-r--r--test/files/run/macro-def-path-dependent-a.check2
-rw-r--r--test/files/run/macro-def-path-dependent-b.check2
-rw-r--r--test/files/run/macro-def-path-dependent-c.check2
-rw-r--r--test/files/run/macro-def-path-dependent-d1.check (renamed from test/files/run/macro-def-path-dependent-d.check)2
-rw-r--r--test/files/run/macro-def-path-dependent-d1.flags1
-rw-r--r--test/files/run/macro-def-path-dependent-d1/Impls_Macros_1.scala (renamed from test/files/run/macro-def-path-dependent-d/Impls_Macros_1.scala)2
-rw-r--r--test/files/run/macro-def-path-dependent-d1/Test_2.scala (renamed from test/files/run/macro-def-path-dependent-d/Test_2.scala)0
-rw-r--r--test/files/run/macro-def-path-dependent-d2.check1
-rw-r--r--test/files/run/macro-def-path-dependent-d2.flags1
-rw-r--r--test/files/run/macro-def-path-dependent-d2/Impls_1.scala7
-rw-r--r--test/files/run/macro-def-path-dependent-d2/Macros_2.scala7
-rw-r--r--test/files/run/macro-def-path-dependent-d2/Test_3.scala3
-rw-r--r--test/files/run/macro-expand-implicit-argument.check1
-rw-r--r--test/files/run/macro-expand-implicit-argument.flags1
-rw-r--r--test/files/run/macro-expand-implicit-argument/Macros_1.scala59
-rw-r--r--test/files/run/macro-expand-implicit-argument/Test_2.scala4
-rw-r--r--test/files/run/macro-expand-implicit-macro-has-implicit.check2
-rw-r--r--test/files/run/macro-expand-implicit-macro-is-implicit.check4
-rw-r--r--test/files/run/macro-expand-implicit-macro-is-val.check2
-rw-r--r--test/files/run/macro-expand-implicit-macro-is-view.check2
-rw-r--r--test/files/run/macro-expand-nullary-generic.check12
-rw-r--r--test/files/run/macro-expand-nullary-generic/Impls_1.scala12
-rw-r--r--test/files/run/macro-expand-overload.check12
-rw-r--r--test/files/run/macro-expand-override.check30
-rw-r--r--test/files/run/macro-expand-recursive.check2
-rw-r--r--test/files/run/macro-expand-tparams-explicit.check2
-rw-r--r--test/files/run/macro-expand-tparams-explicit/Impls_1.scala4
-rw-r--r--test/files/run/macro-expand-tparams-implicit.check4
-rw-r--r--test/files/run/macro-expand-tparams-implicit/Impls_1.scala4
-rw-r--r--test/files/run/macro-expand-tparams-optional.check2
-rw-r--r--test/files/run/macro-expand-tparams-prefix-a.check8
-rw-r--r--test/files/run/macro-expand-tparams-prefix-a/Impls_1.scala4
-rw-r--r--test/files/run/macro-expand-tparams-prefix-b.check4
-rw-r--r--test/files/run/macro-expand-tparams-prefix-b/Impls_1.scala6
-rw-r--r--test/files/run/macro-expand-tparams-prefix-c1.check6
-rw-r--r--test/files/run/macro-expand-tparams-prefix-c1/Impls_1.scala4
-rw-r--r--test/files/run/macro-expand-tparams-prefix-c2.check6
-rw-r--r--test/files/run/macro-expand-tparams-prefix-c2/Impls_Macros_1.scala4
-rw-r--r--test/files/run/macro-expand-tparams-prefix-d1.check6
-rw-r--r--test/files/run/macro-expand-tparams-prefix-d1/Impls_1.scala4
-rw-r--r--test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad.check6
-rw-r--r--test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Macros_Test_2.scala2
-rw-r--r--test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good.check2
-rw-r--r--test/files/run/macro-expand-varargs-explicit-over-varargs.check2
-rw-r--r--test/files/run/macro-expand-varargs-implicit-over-nonvarargs.check2
-rw-r--r--test/files/run/macro-expand-varargs-implicit-over-varargs.check2
-rw-r--r--test/files/run/macro-impl-default-params.check10
-rw-r--r--test/files/run/macro-impl-default-params/Impls_Macros_1.scala4
-rw-r--r--test/files/run/macro-impl-rename-context.check4
-rw-r--r--test/files/run/macro-invalidret-doesnt-conform-to-def-rettype.check6
-rw-r--r--test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Test_2.scala2
-rw-r--r--test/files/run/macro-invalidret-nontypeable.check6
-rw-r--r--test/files/run/macro-invalidret-nontypeable/Test_2.scala2
-rw-r--r--test/files/run/macro-invalidusage-badret.check6
-rw-r--r--test/files/run/macro-invalidusage-badret/Test_2.scala2
-rw-r--r--test/files/run/macro-invalidusage-partialapplication-with-tparams.check6
-rw-r--r--test/files/run/macro-invalidusage-partialapplication-with-tparams/Impls_Macros_1.scala2
-rw-r--r--test/files/run/macro-invalidusage-partialapplication-with-tparams/Test_2.scala2
-rw-r--r--test/files/run/macro-invalidusage-partialapplication.check6
-rw-r--r--test/files/run/macro-invalidusage-partialapplication/Test_2.scala2
-rw-r--r--test/files/run/macro-openmacros.check6
-rw-r--r--test/files/run/macro-openmacros/Impls_Macros_1.scala3
-rw-r--r--test/files/run/macro-reflective-ma-normal-mdmi/Test_2.scala2
-rw-r--r--test/files/run/macro-reflective-mamd-normal-mi/Macros_Test_2.scala2
-rw-r--r--test/files/run/macro-reify-abstypetag-notypeparams.check4
-rw-r--r--test/files/run/macro-reify-abstypetag-notypeparams/Test.scala4
-rw-r--r--test/files/run/macro-reify-abstypetag-typeparams-notags.check4
-rw-r--r--test/files/run/macro-reify-abstypetag-typeparams-notags/Test.scala4
-rw-r--r--test/files/run/macro-reify-abstypetag-typeparams-tags.check4
-rw-r--r--test/files/run/macro-reify-abstypetag-typeparams-tags/Test.scala6
-rw-r--r--test/files/run/macro-reify-abstypetag-usetypetag.check4
-rw-r--r--test/files/run/macro-reify-abstypetag-usetypetag/Test.scala4
-rw-r--r--test/files/run/macro-reify-basic.check2
-rw-r--r--test/files/run/macro-reify-freevars.check6
-rw-r--r--test/files/run/macro-reify-freevars/Macros_1.scala2
-rw-r--r--test/files/run/macro-reify-freevars/Test_2.scala2
-rw-r--r--test/files/run/macro-reify-nested-a/Impls_Macros_1.scala9
-rw-r--r--test/files/run/macro-reify-nested-b/Impls_Macros_1.scala9
-rw-r--r--test/files/run/macro-reify-ref-to-packageless.check2
-rw-r--r--test/files/run/macro-reify-splice-outside-reify.check2
-rw-r--r--test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala13
-rw-r--r--test/files/run/macro-reify-splice-outside-reify/Test_2.scala2
-rw-r--r--test/files/run/macro-reify-splice-splice.check2
-rw-r--r--test/files/run/macro-reify-tagful-a.check2
-rw-r--r--test/files/run/macro-reify-tagful-a/Macros_1.scala2
-rw-r--r--test/files/run/macro-reify-tagless-a.check6
-rw-r--r--test/files/run/macro-reify-tagless-a/Test_2.scala2
-rw-r--r--test/files/run/macro-reify-typetag-notypeparams.check4
-rw-r--r--test/files/run/macro-reify-typetag-typeparams-tags.check4
-rw-r--r--test/files/run/macro-reify-unreify.check2
-rw-r--r--test/files/run/macro-reify-unreify/Macros_1.scala5
-rw-r--r--test/files/run/macro-repl-basic.check108
-rw-r--r--test/files/run/macro-repl-dontexpand.check24
-rw-r--r--test/files/run/macro-settings.check2
-rw-r--r--test/files/run/macro-sip19-revised.check10
-rw-r--r--test/files/run/macro-sip19-revised/Impls_Macros_1.scala2
-rw-r--r--test/files/run/macro-sip19.check10
-rw-r--r--test/files/run/macro-sip19/Impls_Macros_1.scala2
-rw-r--r--test/files/run/macro-typecheck-implicitsdisabled.check4
-rw-r--r--test/files/run/macro-typecheck-macrosdisabled.check64
-rw-r--r--test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala3
-rw-r--r--test/files/run/macro-typecheck-macrosdisabled2.check64
-rw-r--r--test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala3
-rw-r--r--test/files/run/macro-undetparams-consfromsls.check10
-rw-r--r--test/files/run/macro-undetparams-consfromsls/Impls_Macros_1.scala8
-rw-r--r--test/files/run/macro-undetparams-implicitval.check2
-rw-r--r--test/files/run/macro-undetparams-macroitself.check4
-rw-r--r--test/files/run/macro-undetparams-macroitself/Impls_Macros_1.scala2
-rw-r--r--test/files/run/newTags.check8
-rw-r--r--test/files/run/newTags.scala9
-rw-r--r--test/files/run/partialfun.check6
-rw-r--r--test/files/run/partialfun.scala86
-rw-r--r--test/files/run/primitive-sigs-2-new.check14
-rw-r--r--test/files/run/primitive-sigs-2-old.check14
-rw-r--r--test/files/run/pure-args-byname-noinline.check12
-rw-r--r--test/files/run/pure-args-byname-noinline.scala33
-rw-r--r--test/files/run/reflection-allmirrors-tostring.check28
-rw-r--r--test/files/run/reflection-constructormirror-inner-badpath.check4
-rw-r--r--test/files/run/reflection-constructormirror-inner-good.check2
-rw-r--r--test/files/run/reflection-constructormirror-nested-badpath.check4
-rw-r--r--test/files/run/reflection-constructormirror-nested-good.check2
-rw-r--r--test/files/run/reflection-constructormirror-toplevel-badpath.check4
-rw-r--r--test/files/run/reflection-constructormirror-toplevel-good.check2
-rw-r--r--test/files/run/reflection-enclosed-basic.check36
-rw-r--r--test/files/run/reflection-enclosed-inner-basic.check40
-rw-r--r--test/files/run/reflection-enclosed-inner-inner-basic.check40
-rw-r--r--test/files/run/reflection-equality.check106
-rw-r--r--test/files/run/reflection-fieldmirror-accessorsareokay.check12
-rw-r--r--test/files/run/reflection-fieldmirror-accessorsareokay.scala2
-rw-r--r--test/files/run/reflection-fieldmirror-ctorparam.check2
-rw-r--r--test/files/run/reflection-fieldmirror-getsetval.check4
-rw-r--r--test/files/run/reflection-fieldmirror-getsetvar.check4
-rw-r--r--test/files/run/reflection-fieldmirror-nmelocalsuffixstring.check2
-rw-r--r--test/files/run/reflection-fieldmirror-nmelocalsuffixstring.scala2
-rw-r--r--test/files/run/reflection-fieldmirror-privatethis.check6
-rw-r--r--test/files/run/reflection-fieldmirror-privatethis.scala2
-rw-r--r--test/files/run/reflection-fieldsymbol-navigation.check12
-rw-r--r--test/files/run/reflection-fieldsymbol-navigation.scala4
-rw-r--r--test/files/run/reflection-implicit.check2
-rw-r--r--test/files/run/reflection-implicit.scala15
-rw-r--r--test/files/run/reflection-java-annotations.check1
-rw-r--r--test/files/run/reflection-java-annotations.scala7
-rw-r--r--test/files/run/reflection-java-crtp.check1
-rw-r--r--test/files/run/reflection-java-crtp.scala8
-rw-r--r--test/files/run/reflection-magicsymbols-invoke.check236
-rw-r--r--test/files/run/reflection-magicsymbols-invoke.scala2
-rw-r--r--test/files/run/reflection-magicsymbols-repl.check78
-rw-r--r--test/files/run/reflection-magicsymbols-vanilla.check16
-rw-r--r--test/files/run/reflection-mem-glbs.scala13
-rw-r--r--test/files/run/reflection-mem-tags.scala17
-rw-r--r--test/files/run/reflection-mem-typecheck.scala26
-rw-r--r--test/files/run/reflection-methodsymbol-params.check16
-rw-r--r--test/files/run/reflection-methodsymbol-returntype.check16
-rw-r--r--test/files/run/reflection-methodsymbol-typeparams.check16
-rw-r--r--test/files/run/reflection-modulemirror-inner-badpath.check4
-rw-r--r--test/files/run/reflection-modulemirror-inner-good.check2
-rw-r--r--test/files/run/reflection-modulemirror-nested-badpath.check4
-rw-r--r--test/files/run/reflection-modulemirror-nested-good.check2
-rw-r--r--test/files/run/reflection-modulemirror-toplevel-badpath.check4
-rw-r--r--test/files/run/reflection-repl-classes.check70
-rw-r--r--test/files/run/reflection-repl-elementary.check18
-rw-r--r--test/files/run/reflection-sorted-decls.check10
-rw-r--r--test/files/run/reflection-sorted-decls.scala2
-rw-r--r--test/files/run/reflection-sorted-members.check38
-rw-r--r--test/files/run/reflection-sorted-members.scala2
-rw-r--r--test/files/run/reflection-valueclasses-derived.check6
-rw-r--r--test/files/run/reflection-valueclasses-standard.check54
-rw-r--r--test/files/run/reify-aliases.check2
-rw-r--r--test/files/run/reify-repl-fail-gracefully.check42
-rw-r--r--test/files/run/reify-staticXXX.check48
-rw-r--r--test/files/run/reify_ann1a.check60
-rw-r--r--test/files/run/reify_ann1a.scala2
-rw-r--r--test/files/run/reify_ann1b.check60
-rw-r--r--test/files/run/reify_ann1b.scala2
-rw-r--r--test/files/run/reify_ann2a.check88
-rw-r--r--test/files/run/reify_ann2a.scala2
-rw-r--r--test/files/run/reify_ann3.check42
-rw-r--r--test/files/run/reify_ann3.scala2
-rw-r--r--test/files/run/reify_ann4.check64
-rw-r--r--test/files/run/reify_ann4.scala2
-rw-r--r--test/files/run/reify_ann5.check44
-rw-r--r--test/files/run/reify_ann5.scala2
-rw-r--r--test/files/run/reify_classfileann_a.check36
-rw-r--r--test/files/run/reify_classfileann_a.scala2
-rw-r--r--test/files/run/reify_classfileann_b.check40
-rw-r--r--test/files/run/reify_classfileann_b.scala2
-rw-r--r--test/files/run/reify_closure1.scala2
-rw-r--r--test/files/run/reify_closure2a.scala2
-rw-r--r--test/files/run/reify_closure3a.scala2
-rw-r--r--test/files/run/reify_closure4a.scala2
-rw-r--r--test/files/run/reify_closure5a.scala2
-rw-r--r--test/files/run/reify_closure6.scala2
-rw-r--r--test/files/run/reify_closure7.scala2
-rw-r--r--test/files/run/reify_closure8a.scala2
-rw-r--r--test/files/run/reify_closure8b.check6
-rw-r--r--test/files/run/reify_closure8b.scala2
-rw-r--r--test/files/run/reify_closures10.scala2
-rw-r--r--test/files/run/reify_copypaste1.check4
-rw-r--r--test/files/run/reify_copypaste1.scala6
-rw-r--r--test/files/run/reify_getter.scala2
-rw-r--r--test/files/run/reify_metalevel_breach_+0_refers_to_1.scala2
-rw-r--r--test/files/run/reify_metalevel_breach_-1_refers_to_0_a.scala2
-rw-r--r--test/files/run/reify_metalevel_breach_-1_refers_to_0_b.scala2
-rw-r--r--test/files/run/reify_metalevel_breach_-1_refers_to_1.scala2
-rw-r--r--test/files/run/reify_nested_inner_refers_to_global.check2
-rw-r--r--test/files/run/reify_nested_inner_refers_to_global.scala2
-rw-r--r--test/files/run/reify_nested_inner_refers_to_local.scala2
-rw-r--r--test/files/run/reify_nested_outer_refers_to_global.check2
-rw-r--r--test/files/run/reify_nested_outer_refers_to_global.scala2
-rw-r--r--test/files/run/reify_nested_outer_refers_to_local.check2
-rw-r--r--test/files/run/reify_nested_outer_refers_to_local.scala2
-rw-r--r--test/files/run/reify_newimpl_11.check4
-rw-r--r--test/files/run/reify_newimpl_13.check4
-rw-r--r--test/files/run/reify_newimpl_18.check2
-rw-r--r--test/files/run/reify_newimpl_19.check4
-rw-r--r--test/files/run/reify_newimpl_20.check2
-rw-r--r--test/files/run/reify_newimpl_21.check2
-rw-r--r--test/files/run/reify_newimpl_22.check58
-rw-r--r--test/files/run/reify_newimpl_23.check56
-rw-r--r--test/files/run/reify_newimpl_25.check38
-rw-r--r--test/files/run/reify_newimpl_26.check42
-rw-r--r--test/files/run/reify_newimpl_26.scala2
-rw-r--r--test/files/run/reify_newimpl_27.check2
-rw-r--r--test/files/run/reify_newimpl_29.check2
-rw-r--r--test/files/run/reify_newimpl_30.check2
-rw-r--r--test/files/run/reify_newimpl_31.check2
-rw-r--r--test/files/run/reify_newimpl_33.check2
-rw-r--r--test/files/run/reify_newimpl_35.check34
-rw-r--r--test/files/run/reify_newimpl_36.check2
-rw-r--r--test/files/run/reify_newimpl_37.check2
-rw-r--r--test/files/run/reify_newimpl_38.check2
-rw-r--r--test/files/run/reify_newimpl_39.check2
-rw-r--r--test/files/run/reify_newimpl_40.check2
-rw-r--r--test/files/run/reify_newimpl_45.scala2
-rw-r--r--test/files/run/reify_printf.scala2
-rw-r--r--test/files/run/reify_typerefs_1a.scala2
-rw-r--r--test/files/run/reify_typerefs_1b.scala2
-rw-r--r--test/files/run/reify_typerefs_2a.scala2
-rw-r--r--test/files/run/reify_typerefs_2b.scala2
-rw-r--r--test/files/run/reify_typerefs_3a.scala2
-rw-r--r--test/files/run/reify_typerefs_3b.scala2
-rw-r--r--test/files/run/repl-power.check64
-rw-r--r--test/files/run/richWrapperEquals.scala4
-rw-r--r--test/files/run/runtimeEval2.check2
-rw-r--r--test/files/run/showraw_mods.check2
-rw-r--r--test/files/run/showraw_tree.check4
-rw-r--r--test/files/run/showraw_tree_ids.check4
-rw-r--r--test/files/run/showraw_tree_kinds.check4
-rw-r--r--test/files/run/showraw_tree_types_ids.check20
-rw-r--r--test/files/run/showraw_tree_types_typed.check20
-rw-r--r--test/files/run/showraw_tree_types_untyped.check4
-rw-r--r--test/files/run/showraw_tree_ultimate.check20
-rw-r--r--test/files/run/static-annot/field.scala243
-rw-r--r--test/files/run/stream-stack-overflow-filter-map.scala44
-rw-r--r--test/files/run/t1195-new.check12
-rw-r--r--test/files/run/t1195-new.scala2
-rw-r--r--test/files/run/t1195-old.check12
-rw-r--r--test/files/run/t1987b.check1
-rw-r--r--test/files/run/t1987b/PullIteratees.scala17
-rw-r--r--test/files/run/t1987b/a.scala6
-rw-r--r--test/files/run/t1987b/cce_test.scala15
-rw-r--r--test/files/run/t1987b/pkg1.scala4
-rw-r--r--test/files/run/t1987b/pkg2.scala3
-rw-r--r--test/files/run/t2296a.check2
-rw-r--r--test/files/run/t2296b.check2
-rw-r--r--test/files/run/t2386-new.check4
-rw-r--r--test/files/run/t2886.check10
-rw-r--r--test/files/run/t3507-new.check2
-rw-r--r--test/files/run/t4110-new.check4
-rw-r--r--test/files/run/t4216.check74
-rw-r--r--test/files/run/t4813.scala37
-rw-r--r--test/files/run/t4835.check14
-rw-r--r--test/files/run/t4835.scala76
-rw-r--r--test/files/run/t4935.check2
-rw-r--r--test/files/run/t4935.scala18
-rw-r--r--test/files/run/t5064.check25
-rw-r--r--test/files/run/t5064.scala23
-rw-r--r--test/files/run/t5224.check18
-rw-r--r--test/files/run/t5225_1.check8
-rw-r--r--test/files/run/t5225_2.check8
-rw-r--r--test/files/run/t5229_2.scala2
-rw-r--r--test/files/run/t5230.scala2
-rw-r--r--test/files/run/t5256a.check10
-rw-r--r--test/files/run/t5256b.check10
-rw-r--r--test/files/run/t5256c.check6
-rw-r--r--test/files/run/t5256d.check64
-rw-r--r--test/files/run/t5256e.check4
-rw-r--r--test/files/run/t5256f.check8
-rw-r--r--test/files/run/t5256g.check6
-rw-r--r--test/files/run/t5256h.check6
-rw-r--r--test/files/run/t5266_1.scala2
-rw-r--r--test/files/run/t5266_2.scala2
-rw-r--r--test/files/run/t5271_1.check24
-rw-r--r--test/files/run/t5271_2.check28
-rw-r--r--test/files/run/t5271_3.check42
-rw-r--r--test/files/run/t5334_1.scala2
-rw-r--r--test/files/run/t5334_2.scala2
-rw-r--r--test/files/run/t5356.check8
-rw-r--r--test/files/run/t5356.scala6
-rw-r--r--test/files/run/t5418a.check1
-rw-r--r--test/files/run/t5418a.scala3
-rw-r--r--test/files/run/t5418b.check2
-rw-r--r--test/files/run/t5418b.scala11
-rw-r--r--test/files/run/t5419.check2
-rw-r--r--test/files/run/t5423.scala2
-rw-r--r--test/files/run/t5680.check6
-rw-r--r--test/files/run/t5704.check2
-rw-r--r--test/files/run/t5713.check2
-rw-r--r--test/files/run/t5770.check10
-rw-r--r--test/files/run/t5770.scala25
-rw-r--r--test/files/run/t5816.check2
-rw-r--r--test/files/run/t5881.check4
-rw-r--r--test/files/run/t5940.scala41
-rw-r--r--test/files/run/t5942.check0
-rw-r--r--test/files/run/t5942.scala10
-rw-r--r--test/files/run/t5943a1.check1
-rw-r--r--test/files/run/t5943a1.scala9
-rw-r--r--test/files/run/t5943a2.check1
-rw-r--r--test/files/run/t5943a2.scala9
-rw-r--r--test/files/run/t6052.scala21
-rw-r--r--test/files/run/t6086-repl.check24
-rw-r--r--test/files/run/t6086-vanilla.check2
-rw-r--r--test/files/run/t6114.scala61
-rw-r--r--test/files/run/t6188.check2
-rw-r--r--test/files/run/t6197.check0
-rw-r--r--test/files/run/t6197.scala21
-rw-r--r--test/files/run/t6198.check0
-rw-r--r--test/files/run/t6198.scala24
-rw-r--r--test/files/run/t6199-mirror.check2
-rw-r--r--test/files/run/t6199-toolbox.check2
-rw-r--r--test/files/run/t6199-toolbox.scala2
-rw-r--r--test/files/run/t6220.scala92
-rw-r--r--test/files/run/t6223.check4
-rw-r--r--test/files/run/t6223.scala11
-rw-r--r--test/files/run/t6246.check90
-rw-r--r--test/files/run/t6246.scala26
-rw-r--r--test/files/run/t6260.check1
-rw-r--r--test/files/run/t6260.scala12
-rw-r--r--test/files/run/t6261.scala130
-rw-r--r--test/files/run/t6271.scala32
-rw-r--r--test/files/run/t6272.check10
-rw-r--r--test/files/run/t6272.scala62
-rw-r--r--test/files/run/t6273.check19
-rw-r--r--test/files/run/t6273.scala11
-rw-r--r--test/files/run/t6277.check1
-rw-r--r--test/files/run/t6277.scala9
-rw-r--r--test/files/run/t6287.check3
-rw-r--r--test/files/run/t6287.scala11
-rw-r--r--test/files/run/t6290.scala4
-rw-r--r--test/files/run/t6292.scala18
-rw-r--r--test/files/run/t6318_derived.check3
-rw-r--r--test/files/run/t6318_derived.scala15
-rw-r--r--test/files/run/t6318_primitives.check36
-rw-r--r--test/files/run/t6318_primitives.scala71
-rw-r--r--test/files/run/t6323b.check1
-rw-r--r--test/files/run/t6323b.scala21
-rw-r--r--test/files/run/t6327.check4
-rw-r--r--test/files/run/t6327.scala22
-rw-r--r--test/files/run/t6329_repl.check13
-rw-r--r--test/files/run/t6329_repl.scala8
-rw-r--r--test/files/run/t6329_repl_bug.check13
-rw-r--r--test/files/run/t6329_repl_bug.pending10
-rw-r--r--test/files/run/t6329_vanilla.check2
-rw-r--r--test/files/run/t6329_vanilla.scala4
-rw-r--r--test/files/run/t6329_vanilla_bug.check2
-rw-r--r--test/files/run/t6329_vanilla_bug.pending7
-rw-r--r--test/files/run/t6331.check23
-rw-r--r--test/files/run/t6331.scala71
-rw-r--r--test/files/run/t6331b.check30
-rw-r--r--test/files/run/t6331b.scala20
-rw-r--r--test/files/run/t6333.scala29
-rw-r--r--test/files/run/t6337a.scala16
-rw-r--r--test/files/run/t6344.check132
-rw-r--r--test/files/run/t6344.scala106
-rw-r--r--test/files/run/t6353.check1
-rw-r--r--test/files/run/t6353.scala12
-rw-r--r--test/files/run/t6392a.check1
-rw-r--r--test/files/run/t6392a.scala9
-rw-r--r--test/files/run/t6392b.check1
-rw-r--r--test/files/run/t6392b.scala9
-rw-r--r--test/files/run/t6394a.check1
-rw-r--r--test/files/run/t6394a.flags1
-rw-r--r--test/files/run/t6394a/Macros_1.scala12
-rw-r--r--test/files/run/t6394a/Test_2.scala4
-rw-r--r--test/files/run/t6394b.check1
-rw-r--r--test/files/run/t6394b.flags1
-rw-r--r--test/files/run/t6394b/Macros_1.scala12
-rw-r--r--test/files/run/t6394b/Test_2.scala4
-rw-r--r--test/files/run/t6410.check2
-rw-r--r--test/files/run/t6410.scala9
-rw-r--r--test/files/run/test-cpp.check130
-rw-r--r--test/files/run/toolbox_console_reporter.check8
-rw-r--r--test/files/run/toolbox_console_reporter.scala35
-rw-r--r--test/files/run/toolbox_default_reporter_is_silent.check2
-rw-r--r--test/files/run/toolbox_default_reporter_is_silent.scala2
-rw-r--r--test/files/run/toolbox_silent_reporter.scala6
-rw-r--r--test/files/run/toolbox_typecheck_implicitsdisabled.check10
-rw-r--r--test/files/run/toolbox_typecheck_inferimplicitvalue.check2
-rw-r--r--test/files/run/toolbox_typecheck_macrosdisabled.check73
-rw-r--r--test/files/run/toolbox_typecheck_macrosdisabled.scala9
-rw-r--r--test/files/run/toolbox_typecheck_macrosdisabled2.check73
-rw-r--r--test/files/run/toolbox_typecheck_macrosdisabled2.scala7
-rw-r--r--test/files/run/try-catch-unify.check4
-rw-r--r--test/files/run/try-catch-unify.scala16
-rw-r--r--test/files/run/typetags_core.check60
-rw-r--r--test/files/run/typetags_serialize.check4
-rw-r--r--test/files/run/typetags_serialize.scala3
-rw-r--r--test/files/run/typetags_without_scala_reflect_manifest_lookup.check0
-rw-r--r--test/files/run/typetags_without_scala_reflect_manifest_lookup.scala29
-rw-r--r--test/files/run/typetags_without_scala_reflect_typetag_lookup.check3
-rw-r--r--test/files/run/typetags_without_scala_reflect_typetag_lookup.scala45
-rw-r--r--test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check3
-rw-r--r--test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala46
-rw-r--r--test/files/run/valueclasses-classmanifest-basic.check2
-rw-r--r--test/files/run/valueclasses-classmanifest-existential.check2
-rw-r--r--test/files/run/valueclasses-classmanifest-generic.check2
-rw-r--r--test/files/run/valueclasses-classtag-basic.check2
-rw-r--r--test/files/run/valueclasses-classtag-existential.check2
-rw-r--r--test/files/run/valueclasses-classtag-generic.check2
-rw-r--r--test/files/run/valueclasses-manifest-basic.check2
-rw-r--r--test/files/run/valueclasses-manifest-existential.check2
-rw-r--r--test/files/run/valueclasses-manifest-generic.check2
-rw-r--r--test/files/run/valueclasses-pavlov.check2
-rw-r--r--test/files/run/valueclasses-pavlov.scala26
-rw-r--r--test/files/run/valueclasses-typetag-basic.check2
-rw-r--r--test/files/run/valueclasses-typetag-existential.check2
-rw-r--r--test/files/run/valueclasses-typetag-generic.check2
-rw-r--r--test/files/run/virtpatmat_typetag.check20
-rw-r--r--test/files/scalacheck/duration.scala69
-rw-r--r--test/files/scalacheck/redblacktree.scala42
-rw-r--r--test/flaky/pos/t2868.cmds (renamed from test/files/pos/t2868.cmds)0
-rw-r--r--test/flaky/pos/t2868/Jann.java (renamed from test/files/pos/t2868/Jann.java)0
-rw-r--r--test/flaky/pos/t2868/Nest.java (renamed from test/files/pos/t2868/Nest.java)0
-rw-r--r--test/flaky/pos/t2868/pick_1.scala (renamed from test/files/pos/t2868/pick_1.scala)0
-rw-r--r--test/flaky/pos/t2868/t2868_src_2.scala (renamed from test/files/pos/t2868/t2868_src_2.scala)0
-rw-r--r--test/osgi/src/BasicLibrary.scala37
-rw-r--r--test/osgi/src/BasicReflection.scala66
-rw-r--r--test/osgi/src/BasicTest.scala33
-rw-r--r--test/osgi/src/ReflectionToolboxTest.scala49
-rw-r--r--test/osgi/src/ScalaOsgiHelper.scala36
-rwxr-xr-xtest/partest.bat208
-rw-r--r--test/pending/neg/macro-invalidusage-badbounds-b.check4
-rw-r--r--test/pending/neg/macro-invalidusage-badbounds-b.flags1
-rw-r--r--test/pending/neg/macro-invalidusage-badbounds-b/Impls_1.scala (renamed from test/files/neg/macro-invalidusage-badbounds/Impls_1.scala)0
-rw-r--r--test/pending/neg/macro-invalidusage-badbounds-b/Macros_Test_2.scala8
-rw-r--r--test/pending/neg/reify_packed.check8
-rw-r--r--test/pending/neg/reify_packed.scala2
-rw-r--r--test/pending/pos/t3943/Outer_1.java14
-rw-r--r--test/pending/pos/t3943/test_2.scala8
-rw-r--r--test/pending/run/macro-expand-implicit-macro-defeats-type-inference.check12
-rw-r--r--test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Impls_1.scala2
-rw-r--r--test/pending/run/macro-expand-tparams-prefix-e1/Impls_1.scala4
-rw-r--r--test/pending/run/macro-expand-tparams-prefix-f1/Impls_1.scala4
-rw-r--r--test/pending/run/macro-reify-array/Macros_1.scala2
-rw-r--r--test/pending/run/macro-reify-tagful-b/Macros_1.scala2
-rw-r--r--test/pending/run/macro-reify-tagless-b.check4
-rw-r--r--test/pending/run/macro-reify-tagless-b/Test_2.scala2
-rw-r--r--test/pending/run/macro-reify-typetag-hktypeparams-notags.check4
-rw-r--r--test/pending/run/reflection-mem-eval.scala26
-rw-r--r--test/pending/run/reify_closure2b.scala2
-rw-r--r--test/pending/run/reify_closure3b.scala2
-rw-r--r--test/pending/run/reify_closure4b.scala2
-rw-r--r--test/pending/run/reify_closure5b.scala2
-rw-r--r--test/pending/run/reify_closure9a.scala2
-rw-r--r--test/pending/run/reify_closure9b.scala2
-rw-r--r--test/pending/run/reify_closures11.scala2
-rw-r--r--test/pending/run/reify_newimpl_09c.scala2
-rw-r--r--test/pending/run/reify_newimpl_46.scala2
-rw-r--r--test/pending/run/reify_newimpl_53.scala2
-rw-r--r--test/pending/run/t5943b1.scala10
-rw-r--r--test/pending/run/t5943b2.scala10
-rw-r--r--test/scaladoc/resources/implicits-known-type-classes-res.scala3
-rw-r--r--test/scaladoc/resources/links.scala37
-rw-r--r--test/scaladoc/run/implicits-known-type-classes.scala2
-rw-r--r--test/scaladoc/run/links.scala4
-rwxr-xr-xtools/binary-repo-lib.sh15
-rwxr-xr-xtools/class-dump6
-rw-r--r--tools/get-scala-commit-date.bat16
-rw-r--r--tools/get-scala-commit-sha.bat16
-rwxr-xr-xtools/jar-dump4
-rw-r--r--tools/push.jar.desired.sha12
1535 files changed, 23464 insertions, 22203 deletions
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000000..958b0b9f28
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,26 @@
+# These files are text and should be normalized (convert crlf => lf)
+*.c text
+*.check text
+*.css text
+*.html text
+*.java text
+*.js text
+*.sbt text
+*.scala text
+*.sh text
+*.txt text
+*.xml text
+
+# Windows-specific files get windows endings
+*.bat eol=crlf
+*.cmd eol=crlf
+*-windows.tmpl eol=crlf
+
+# Some binary file types for completeness
+# (binary is a macro for -text -diff)
+*.dll binary
+*.gif binary
+*.jpg binary
+*.png binary
+*.class -text diff=class
+*.jar -text diff=jar
diff --git a/build.xml b/build.xml
index de73ffc51c..67d715bfe9 100644
--- a/build.xml
+++ b/build.xml
@@ -22,7 +22,7 @@ END-USER TARGETS
<target name="clean" depends="quick.clean"
description="Removes binaries of compiler and library. Distributions are untouched."/>
- <target name="test" depends="test.done"
+ <target name="test" depends="test.done, osgi.test"
description="Runs test suite and bootstrapping test on Scala compiler and library."/>
<target name="test-opt"
@@ -217,6 +217,7 @@ PROPERTIES
<property name="build-palo.dir" value="${build.dir}/palo"/>
<property name="build-quick.dir" value="${build.dir}/quick"/>
<property name="build-pack.dir" value="${build.dir}/pack"/>
+ <property name="build-osgi.dir" value="${build.dir}/osgi"/>
<property name="build-strap.dir" value="${build.dir}/strap"/>
<property name="build-docs.dir" value="${build.dir}/scaladoc"/>
<property name="build-libs.dir" value="${build.dir}/libs"/>
@@ -225,7 +226,7 @@ PROPERTIES
<property name="dists.dir" value="${basedir}/dists"/>
- <property name="copyright.string" value="Copyright 2002-2011, LAMP/EPFL"/>
+ <property name="copyright.string" value="Copyright 2002-2012, LAMP/EPFL"/>
<property name="partest.version.number" value="0.9.2"/>
<!-- These are NOT the flags used to run SuperSabbus, but the ones written
@@ -279,8 +280,32 @@ INITIALISATION
<property name="init.maven.tasks.finished" value="true" />
</target>
+ <target name="init.extra.tasks" depends="init.maven.tasks" unless="init.extra.tasks.finished">
+ <artifact:dependencies pathId="extra.tasks.classpath" filesetId="extra.tasks.fileset">
+ <dependency groupId="biz.aQute" artifactId="bnd" version="1.50.0"/>
+ </artifact:dependencies>
+ <!-- Pax runner -->
+ <property name="pax.exam.version" value="2.5.0"/>
+ <artifact:dependencies pathId="pax.exam.classpath" filesetId="pax.exam.fileset">
+ <dependency groupId="org.ops4j.pax.exam" artifactId="pax-exam-container-native" version="${pax.exam.version}"/>
+ <dependency groupId="org.ops4j.pax.exam" artifactId="pax-exam-junit4" version="${pax.exam.version}"/>
+ <dependency groupId="org.ops4j.pax.exam" artifactId="pax-exam-link-assembly" version="${pax.exam.version}"/>
+ <dependency groupId="org.ops4j.pax.url" artifactId="pax-url-aether" version="1.4.0"/>
+ <dependency groupId="org.ops4j.pax.swissbox" artifactId="pax-swissbox-framework" version="1.5.1"/>
+ <dependency groupId="ch.qos.logback" artifactId="logback-core" version="0.9.20"/>
+ <dependency groupId="ch.qos.logback" artifactId="logback-classic" version="0.9.20"/>
+ <dependency groupId="junit" artifactId="junit" version="4.10"/>
+ <dependency groupId="org.apache.felix" artifactId="org.apache.felix.framework" version="3.2.2"/>
+ </artifact:dependencies>
+ <!-- BND support -->
+ <typedef resource="aQute/bnd/ant/taskdef.properties" classpathref="extra.tasks.classpath" />
+ <property name="init.maven.tasks.finished" value="true" />
+ </target>
+
<!-- Resolve maven dependencies -->
<target name="init.maven.jars" depends="init.maven.tasks">
+ <!-- This target has an issue where if the user directory does not exist, we BOMB. ugh. -->
+ <mkdir dir="${user.home}/.m2/repository"/>
<artifact:dependencies pathId="dependency.classpath" filesetId="dependency.fileset">
<!--<dependency groupId="com.typesafe" artifactId="config" version="0.4.0"/>-->
</artifact:dependencies>
@@ -398,7 +423,7 @@ INITIALISATION
</echo>
</target>
- <target name="init" depends="init.jars, init.maven.jars, init.version.done, init.fail.bad.jdk, init.warn.jdk7">
+ <target name="init" depends="init.jars, init.maven.jars, init.version.done, init.fail.bad.jdk, init.warn.jdk7, init.extra.tasks">
<property name="scalac.args.always" value="-Yreify-copypaste" />
<!-- scalac.args.optimise is selectively overridden in certain antcall tasks. -->
<property name="scalac.args.optimise" value=""/>
@@ -1671,6 +1696,139 @@ PACKED QUICK BUILD (PACK)
</target>
<!-- ===========================================================================
+OSGi Artifacts
+============================================================================ -->
+
+ <target name="osgi.init" depends="pack.done">
+ <mkdir dir="${build-osgi.dir}"/>
+
+ <property name="osgi.test.src" value="${partest.dir}/osgi/src"/>
+ <property name="osgi.test.classes" value="${build-osgi.dir}/classes"/>
+
+ <!-- simplify fixing pom versions -->
+ <macrodef name="make-bundle">
+ <attribute name="name" />
+ <attribute name="version" />
+ <sequential>
+ <copy file="${src.dir}/build/bnd/@{name}.bnd" tofile="${build-osgi.dir}/@{name}.bnd" overwrite="true">
+ <filterset>
+ <filter token="VERSION" value="@{version}" />
+ </filterset>
+ </copy>
+ <bnd classpath="${build-pack.dir}/lib/@{name}.jar"
+ eclipse="false"
+ failok="false"
+ exceptions="true"
+ files="${build-osgi.dir}/@{name}.bnd"
+ output="${build-osgi.dir}"/>
+ </sequential>
+ </macrodef>
+ <macrodef name="make-plugin-bundle">
+ <attribute name="name" />
+ <attribute name="version" />
+ <sequential>
+ <copy file="${src.dir}/build/bnd/@{name}.bnd" tofile="${build-osgi.dir}/@{name}.bnd" overwrite="true">
+ <filterset>
+ <filter token="VERSION" value="@{version}" />
+ </filterset>
+ </copy>
+ <bnd classpath="${build-pack.dir}/misc/scala-devel/plugins/@{name}.jar"
+ eclipse="false"
+ failok="false"
+ exceptions="true"
+ files="${build-osgi.dir}/@{name}.bnd"
+ output="${build-osgi.dir}"/>
+ </sequential>
+ </macrodef>
+ <uptodate property="osgi.bundles.available" targetfile="${build-osgi.dir}/bundles.complete">
+ <srcfiles dir="${basedir}">
+ <include name="build.xml"/>
+ <include name="src/build/bnd/*.bnd"/>
+ </srcfiles>
+ </uptodate>
+ </target>
+
+ <target name="osgi.bundles" depends="osgi.init" unless="osgi.bundles.available">
+ <stopwatch name="osgi.bundle.timer"/>
+ <make-bundle name="scala-library" version="${osgi.version.number}" />
+ <make-bundle name="scala-actors" version="${osgi.version.number}" />
+ <make-bundle name="scala-actors-migration" version="${osgi.version.number}" />
+ <make-bundle name="scala-reflect" version="${osgi.version.number}" />
+ <make-bundle name="scala-compiler" version="${osgi.version.number}" />
+ <make-plugin-bundle name="continuations" version="${osgi.version.number}" />
+ <touch file="${build-osgi.dir}/bundles.complete" verbose="no"/>
+ <stopwatch name="osgi.bundle.timer" action="total"/>
+ </target>
+
+ <target name="osgi.bundles.swing" depends="osgi.init" if="has.java6" unless="osgi.bundles.available">
+ <!-- TODO - only if JDK6 -->
+ <make-bundle name="scala-swing" version="${osgi.version.number}"/>
+ </target>
+
+ <target name="osgi.done" depends="osgi.bundles, osgi.bundles.swing"/>
+
+ <target name="osgi.test.init" depends="osgi.done">
+ <path id="osgi.bundle.classpath">
+ <pathelement location="${build-osgi.dir}/org.scala-lang.scala-library.jar"/>
+ <pathelement location="${build-osgi.dir}/org.scala-lang.scala-reflect.jar"/>
+ <pathelement location="${build-osgi.dir}/org.scala-lang.scala-compiler.jar"/>
+ <pathelement location="${build-osgi.dir}/org.scala-lang.scala-actors.jar"/>
+ <pathelement location="${build-osgi.dir}/org.scala-lang.scala-actors-migration.jar"/>
+ </path>
+
+ <uptodate property="osgi.test.available" targetfile="${build-osgi.dir}/test-compile.complete">
+ <srcfiles dir="${osgi.test.src}">
+ <include name="**/*.scala"/>
+ </srcfiles>
+ </uptodate>
+ </target>
+
+ <target name="osgi.test.comp" depends="osgi.test.init, quick.done" unless="osgi.test.available">
+ <stopwatch name="osgi.test.comp.timer"/>
+ <mkdir dir="${osgi.test.classes}"/>
+ <scalacfork
+ destdir="${osgi.test.classes}"
+ compilerpathref="quick.classpath"
+ params="${scalac.args.quick}"
+ srcdir="${osgi.test.src}"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath>
+ <pathelement location="${osgi.test.classes}"/>
+ <path refid="osgi.bundle.classpath"/>
+ <path refid="pax.exam.classpath"/>
+ <path refid="forkjoin.classpath"/>
+ </compilationpath>
+ </scalacfork>
+ <touch file="${build-osgi.dir}/test-compile.complete" verbose="no"/>
+ <stopwatch name="osgi.test.comp.timer" action="total"/>
+ </target>
+
+ <target name="osgi.test" depends="osgi.test.comp">
+ <stopwatch name="osgi.test.timer"/>
+ <mkdir dir="${osgi.test.classes}"/>
+ <junit fork="yes" haltonfailure="yes">
+ <classpath>
+ <pathelement location="${osgi.test.classes}"/>
+ <path refid="osgi.bundle.classpath"/>
+ <path refid="pax.exam.classpath"/>
+ <path refid="forkjoin.classpath"/>
+ </classpath>
+ <batchtest fork="yes" todir="${build-osgi.dir}">
+ <fileset dir="${osgi.test.classes}">
+ <include name="**/*Test.class"/>
+ </fileset>
+ </batchtest>
+ <formatter type="brief" usefile="false" />
+ </junit>
+ <stopwatch name="osgi.test.timer" action="total"/>
+ </target>
+
+ <target name="osgi.clean">
+ <delete dir="${build-osgi.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
+ </target>
+
+<!-- ===========================================================================
BOOTSTRAPPING BUILD (STRAP)
============================================================================ -->
@@ -2164,6 +2322,7 @@ DOCUMENTATION
<include name="swing/**"/>
<include name="actors/**"/>
<include name="actors-migration/**"/>
+ <include name="reflect/**"/>
</source-includes>
</doc-uptodate-check>
</target>
@@ -2178,6 +2337,7 @@ DOCUMENTATION
docfooter="epfl"
docsourceurl="${scaladoc.url}€{FILE_PATH}.scala#L1"
docUncompilable="${src.dir}/library-aux"
+ skipPackages="scala.reflect.macros.internal:scala.reflect.internal:scala.reflect.runtime:scala.reflect.io"
sourcepath="${src.dir}"
classpathref="pack.classpath"
addparams="${scalac.args.all}"
@@ -2550,9 +2710,24 @@ BOOTRAPING TEST AND TEST SUITE
DISTRIBUTION
============================================================================ -->
- <target name="dist.start" depends="pack.done">
+ <target name="dist.start" depends="pack.done, osgi.done">
<property name="dist.name" value="scala-${version.number}"/>
<property name="dist.dir" value="${dists.dir}/${dist.name}"/>
+
+ <macrodef name="copy-bundle">
+ <attribute name="name" />
+ <sequential>
+ <copy file="${build-osgi.dir}/org.scala-lang.@{name}.jar"
+ tofile="${dist.dir}/lib/@{name}.jar"/>
+ </sequential>
+ </macrodef>
+ <macrodef name="copy-plugin-bundle">
+ <attribute name="name" />
+ <sequential>
+ <copy file="${build-osgi.dir}/org.scala-lang.plugins.@{name}.jar"
+ tofile="${dist.dir}/misc/scala-devel/plugins/@{name}.jar"/>
+ </sequential>
+ </macrodef>
</target>
<target name="dist.base" depends="dist.start">
@@ -2561,6 +2736,13 @@ DISTRIBUTION
<fileset dir="${build-pack.dir}/lib"/>
</copy>
<mkdir dir="${dist.dir}/bin"/>
+ <!-- TODO - Stop being inefficient and don't copy OSGi bundles overtop other jars. -->
+ <copy-bundle name="scala-library"/>
+ <copy-bundle name="scala-reflect"/>
+ <copy-bundle name="scala-swing"/>
+ <copy-bundle name="scala-actors"/>
+ <copy-bundle name="scala-actors-migration"/>
+ <copy-bundle name="scala-compiler"/>
<copy toDir="${dist.dir}/bin">
<fileset dir="${build-pack.dir}/bin"/>
</copy>
@@ -2570,9 +2752,7 @@ DISTRIBUTION
<chmod perm="ugo+rx" file="${dist.dir}/bin/fsc"/>
<chmod perm="ugo+rx" file="${dist.dir}/bin/scalap"/>
<mkdir dir="${dist.dir}/misc/scala-devel/plugins"/>
- <copy toDir="${dist.dir}/misc/scala-devel/plugins">
- <fileset dir="${build-pack.dir}/misc/scala-devel/plugins"/>
- </copy>
+ <copy-plugin-bundle name="continuations"/>
</target>
<target name="dist.doc" depends="dist.base, docs.done">
@@ -2660,7 +2840,7 @@ TEST AND DISTRIBUTION BUNDLE (ALL)
<target name="all.done" depends="dist.done, test.done"/>
- <target name="all.clean" depends="locker.clean, docs.clean, dist.clean, sbt.clean"/>
+ <target name="all.clean" depends="locker.clean, docs.clean, dist.clean, sbt.clean, osgi.clean"/>
<!-- ===========================================================================
STABLE REFERENCE (STARR)
diff --git a/docs/LICENSE b/docs/LICENSE
index 38d16361bd..de950bdf57 100644
--- a/docs/LICENSE
+++ b/docs/LICENSE
@@ -1,6 +1,6 @@
SCALA LICENSE
-Copyright (c) 2002-2011 EPFL, Lausanne, unless otherwise specified.
+Copyright (c) 2002-2012 EPFL, Lausanne, unless otherwise specified.
All rights reserved.
This software was developed by the Programming Methods Laboratory of the
diff --git a/gitconfig.SAMPLE b/gitconfig.SAMPLE
new file mode 100644
index 0000000000..d90c3bfb02
--- /dev/null
+++ b/gitconfig.SAMPLE
@@ -0,0 +1,8 @@
+# With something like this in .git/config or ~/.gitconfig
+# you can diff class files and jar files.
+[diff "class"]
+ textconv = tools/class-dump
+ cachetextconv = true
+[diff "jar"]
+ textconv = tools/jar-dump
+ cachetextconv = true
diff --git a/lib/scala-compiler-src.jar.desired.sha1 b/lib/scala-compiler-src.jar.desired.sha1
index 95973402e6..082d86ff67 100644
--- a/lib/scala-compiler-src.jar.desired.sha1
+++ b/lib/scala-compiler-src.jar.desired.sha1
@@ -1 +1 @@
-67257bb7ce010e2ceac800d737e202cfbfc2a1f6 ?scala-compiler-src.jar
+cfa3ee21f76cd5c115bd3bc070a3b401587bafb5 ?scala-compiler-src.jar
diff --git a/lib/scala-compiler.jar.desired.sha1 b/lib/scala-compiler.jar.desired.sha1
index 7f023e00be..bb39b4d6a6 100644
--- a/lib/scala-compiler.jar.desired.sha1
+++ b/lib/scala-compiler.jar.desired.sha1
@@ -1 +1 @@
-c52277de9e76187f34a5ae073e5d2aacc592ac50 ?scala-compiler.jar
+d54b99f215d4d42b3f0b3489fbb1081270700992 ?scala-compiler.jar
diff --git a/lib/scala-library-src.jar.desired.sha1 b/lib/scala-library-src.jar.desired.sha1
index f2d287f120..cd42c23291 100644
--- a/lib/scala-library-src.jar.desired.sha1
+++ b/lib/scala-library-src.jar.desired.sha1
@@ -1 +1 @@
-40b9c97e0c3abea4c460b73ca178f60a3bfea242 ?scala-library-src.jar
+8bdac1cdd60b73ff7e12fd2b556355fa10343e2d ?scala-library-src.jar
diff --git a/lib/scala-library.jar.desired.sha1 b/lib/scala-library.jar.desired.sha1
index 99a06c2024..6bdeaa903b 100644
--- a/lib/scala-library.jar.desired.sha1
+++ b/lib/scala-library.jar.desired.sha1
@@ -1 +1 @@
-0a8e0e39e1e6713323a2e659aab743ccfa57c071 ?scala-library.jar
+1e0e39fae15b42e85998740511ec5a3830e26243 ?scala-library.jar
diff --git a/lib/scala-reflect-src.jar.desired.sha1 b/lib/scala-reflect-src.jar.desired.sha1
index 1752dec7f6..d630c938f2 100644
--- a/lib/scala-reflect-src.jar.desired.sha1
+++ b/lib/scala-reflect-src.jar.desired.sha1
@@ -1 +1 @@
-d1abf389fbf5dfc95889a181d28f94a6779c6165 ?scala-reflect-src.jar
+d229f4c91ea8ab1a81559b5803efd9b0b1632f0b ?scala-reflect-src.jar
diff --git a/lib/scala-reflect.jar.desired.sha1 b/lib/scala-reflect.jar.desired.sha1
index 4cc99695e0..a5d6701749 100644
--- a/lib/scala-reflect.jar.desired.sha1
+++ b/lib/scala-reflect.jar.desired.sha1
@@ -1 +1 @@
-02b44e860b9b9abd1353bbaa1004b3f0004dd0b3 ?scala-reflect.jar
+288f47dbe1002653e030fd25ca500b9ffe1ebd64 ?scala-reflect.jar
diff --git a/project/Build.scala b/project/Build.scala
index 58d322108b..d8468032ef 100644
--- a/project/Build.scala
+++ b/project/Build.scala
@@ -215,7 +215,7 @@ object ScalaBuild extends Build with Layers with Packaging with Testing {
lazy val scalacheck = Project("scalacheck", file(".")) settings(compilerDependentProjectSettings:_*) dependsOn(actors % "provided")
lazy val partestSettings = compilerDependentProjectSettings :+ externalDeps
- lazy val partest = Project("partest", file(".")) settings(partestSettings:_*) dependsOn(actors,forkjoin,scalap)
+ lazy val partest = Project("partest", file(".")) settings(partestSettings:_*) dependsOn(actors,forkjoin,scalap,asm)
lazy val scalapSettings = compilerDependentProjectSettings ++ Seq(
name := "scalap",
exportJars := true
diff --git a/project/Partest.scala b/project/Partest.scala
index bbc160a41d..fbb0a2a980 100644
--- a/project/Partest.scala
+++ b/project/Partest.scala
@@ -58,8 +58,8 @@ object partest {
val results = runner run Array(testArgs ++ extraArgs ++ extras: _*) asScala
// TODO - save results
val failures = results collect {
- case (path, 1) => path + " [FAILED]"
- case (path, 2) => path + " [TIMEOUT]"
+ case (path, "FAIL") => path + " [FAILED]"
+ case (path, "TIMEOUT") => path + " [TIMEOUT]"
}
if (failures.isEmpty)
@@ -115,7 +115,7 @@ object partest {
}
}
- def partestRunnerTask(classpath: ScopedTask[Classpath], javacOptions: SettingKey[Seq[String]]): Project.Initialize[Task[PartestRunner]] =
+ def partestRunnerTask(classpath: ScopedTask[Classpath], javacOptions: TaskKey[Seq[String]]): Project.Initialize[Task[PartestRunner]] =
(classpath, javacOptions) map ((cp, opts) => new PartestRunner(Build.data(cp), opts mkString " "))
}
@@ -128,11 +128,11 @@ class PartestRunner(classpath: Seq[File], javaOpts: String) {
(c,m)
}
lazy val classPathArgs = Seq("-cp", classpath.map(_.getAbsoluteFile).mkString(java.io.File.pathSeparator))
- def run(args: Array[String]): java.util.Map[String,Int] = try {
+ def run(args: Array[String]): java.util.Map[String,String] = try {
// TODO - undo this settings after running. Also globals are bad.
System.setProperty("partest.java_opts", javaOpts)
val allArgs = (classPathArgs ++ args).toArray
- mainMethod.invoke(null, allArgs).asInstanceOf[java.util.Map[String,Int]]
+ mainMethod.invoke(null, allArgs).asInstanceOf[java.util.Map[String,String]]
} catch {
case e =>
//error("Could not run Partest: " + e)
diff --git a/project/Versions.scala b/project/Versions.scala
index b588ec55ac..cc0ab7ff2b 100644
--- a/project/Versions.scala
+++ b/project/Versions.scala
@@ -67,7 +67,7 @@ object Versions {
IO.write(f, "version.number = "+versions.canonical+"\n"+
"osgi.number = "+versions.osgi+"\n"+
"maven.number = "+versions.maven+"\n"+
- "copyright.string = Copyright 2002-2011, LAMP/EPFL")
+ "copyright.string = Copyright 2002-2012, LAMP/EPFL")
def makeCanonicalVersion(isRelease: Boolean, mvnVersion: String, base: BaseBuildNumber, gitDate: String, gitSha: String): String =
if(isRelease) mvnVersion
diff --git a/project/plugins.sbt b/project/plugins.sbt
index b49ece7527..fdf37e31a6 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -4,6 +4,6 @@ resolvers += Resolver.url("scalasbt", new URL("http://scalasbt.artifactoryonline
resolvers += "jgit-repo" at "http://download.eclipse.org/jgit/maven"
-libraryDependencies += "net.databinder" %% "dispatch-http" % "0.8.6"
+libraryDependencies += "net.databinder" % "dispatch-http_2.9.1" % "0.8.6"
diff --git a/project/project/Build.scala b/project/project/Build.scala
index bd1250fc39..902e8b0fb3 100644
--- a/project/project/Build.scala
+++ b/project/project/Build.scala
@@ -2,6 +2,6 @@ import sbt._
object PluginDef extends Build {
override def projects = Seq(root)
lazy val root = Project("plugins", file(".")) dependsOn(proguard, git)
- lazy val proguard = uri("git://github.com/jsuereth/xsbt-proguard-plugin.git#sbt-0.11")
+ lazy val proguard = uri("git://github.com/jsuereth/xsbt-proguard-plugin.git#sbt-0.12")
lazy val git = uri("git://github.com/sbt/sbt-git-plugin.git#scala-build")
}
diff --git a/src/actors-migration/scala/actors/MigrationSystem.scala b/src/actors-migration/scala/actors/migration/MigrationSystem.scala
index ffc93d9c6f..3dcb38e634 100644
--- a/src/actors-migration/scala/actors/MigrationSystem.scala
+++ b/src/actors-migration/scala/actors/migration/MigrationSystem.scala
@@ -1,10 +1,11 @@
-package scala.actors
+package scala.actors.migration
+import scala.actors._
import scala.collection._
object MigrationSystem {
- private[actors] val contextStack = new ThreadLocal[immutable.Stack[Boolean]] {
+ private[migration] val contextStack = new ThreadLocal[immutable.Stack[Boolean]] {
override def initialValue() = immutable.Stack[Boolean]()
}
diff --git a/src/actors-migration/scala/actors/Pattern.scala b/src/actors-migration/scala/actors/migration/Pattern.scala
index 26e9d1bb64..25ba191ce7 100644
--- a/src/actors-migration/scala/actors/Pattern.scala
+++ b/src/actors-migration/scala/actors/migration/Pattern.scala
@@ -1,11 +1,12 @@
-package scala.actors
+package scala.actors.migration
-import scala.concurrent.util.Duration
+import scala.actors._
+import scala.concurrent.duration.Duration
import language.implicitConversions
object pattern {
- implicit def askSupport(ar: ActorRef): AskableActorRef =
+ implicit def ask(ar: ActorRef): AskableActorRef =
new AskableActorRef(ar)
}
@@ -16,11 +17,11 @@ class AskableActorRef(val ar: ActorRef) extends ActorRef {
def !(message: Any)(implicit sender: ActorRef = null): Unit = ar.!(message)(sender)
- def ?(message: Any)(timeout: Timeout): Future[Any] = ar.?(message, timeout.duration)
+ def ?(message: Any)(implicit timeout: Timeout): scala.concurrent.Future[Any] = ar.?(message, timeout.duration)
- private[actors] def ?(message: Any, timeout: Duration): Future[Any] = ar.?(message, timeout)
+ private[actors] def ?(message: Any, timeout: Duration): scala.concurrent.Future[Any] = ar.?(message, timeout)
def forward(message: Any) = ar.forward(message)
private[actors] def localActor: AbstractActor = ar.localActor
-}
\ No newline at end of file
+}
diff --git a/src/actors-migration/scala/actors/Props.scala b/src/actors-migration/scala/actors/migration/Props.scala
index 891e23213a..00bc9d93f8 100644
--- a/src/actors-migration/scala/actors/Props.scala
+++ b/src/actors-migration/scala/actors/migration/Props.scala
@@ -1,4 +1,6 @@
-package scala.actors
+package scala.actors.migration
+
+import scala.actors._
/**
* ActorRef configuration object. It represents the minimal subset of Akka Props class.
@@ -8,6 +10,5 @@ case class Props(creator: () ⇒ InternalActor, dispatcher: String) {
/**
* Returns a new Props with the specified creator set
*/
- def withCreator(c: ⇒ InternalActor) = copy(creator = () ⇒ c)
-
+ final def withCreator(c: ⇒ InternalActor) = copy(creator = () ⇒ c)
}
diff --git a/src/actors-migration/scala/actors/StashingActor.scala b/src/actors-migration/scala/actors/migration/StashingActor.scala
index 8f96e1b002..d0a1432e72 100644
--- a/src/actors-migration/scala/actors/StashingActor.scala
+++ b/src/actors-migration/scala/actors/migration/StashingActor.scala
@@ -1,9 +1,11 @@
-package scala.actors
+package scala.actors.migration
+import scala.actors._
+import scala.actors.Actor._
import scala.collection._
-import scala.concurrent.util.Duration
+import scala.concurrent.duration.Duration
import java.util.concurrent.TimeUnit
-import language.implicitConversions
+import scala.language.implicitConversions
object StashingActor extends Combinators {
implicit def mkBody[A](body: => A) = new InternalActor.Body[A] {
@@ -16,7 +18,7 @@ trait StashingActor extends InternalActor {
type Receive = PartialFunction[Any, Unit]
// checks if StashingActor is created within the actorOf block
- creationCheck;
+ creationCheck()
private[actors] val ref = new InternalActorRef(this)
@@ -110,8 +112,7 @@ trait StashingActor extends InternalActor {
/*
* Checks that StashingActor can be created only by MigrationSystem.actorOf method.
*/
- private[this] def creationCheck = {
-
+ private[this] def creationCheck(): Unit = {
// creation check (see ActorRef)
val context = MigrationSystem.contextStack.get
if (context.isEmpty)
diff --git a/src/actors-migration/scala/actors/Timeout.scala b/src/actors-migration/scala/actors/migration/Timeout.scala
index 7e400ab140..32ea5f20fc 100644
--- a/src/actors-migration/scala/actors/Timeout.scala
+++ b/src/actors-migration/scala/actors/migration/Timeout.scala
@@ -6,11 +6,11 @@
** |/ **
\* */
-package scala.actors
+package scala.actors.migration
-import scala.concurrent.util.Duration
+import scala.concurrent.duration.Duration
import java.util.concurrent.TimeUnit
-import language.implicitConversions
+import scala.language.implicitConversions
case class Timeout(duration: Duration) {
def this(timeout: Long) = this(Duration(timeout, TimeUnit.MILLISECONDS))
diff --git a/src/actors/scala/actors/AbstractActor.scala b/src/actors/scala/actors/AbstractActor.scala
index dec91859bb..fd11b9a0a8 100644
--- a/src/actors/scala/actors/AbstractActor.scala
+++ b/src/actors/scala/actors/AbstractActor.scala
@@ -8,7 +8,7 @@
package scala.actors
-import language.higherKinds
+import scala.language.higherKinds
/**
* @author Philipp Haller
diff --git a/src/actors/scala/actors/Actor.scala b/src/actors/scala/actors/Actor.scala
index 0b1e7fb1e9..8869165062 100644
--- a/src/actors/scala/actors/Actor.scala
+++ b/src/actors/scala/actors/Actor.scala
@@ -10,7 +10,7 @@ package scala.actors
import scala.util.control.ControlThrowable
import java.util.{Timer, TimerTask}
-import language.implicitConversions
+import scala.language.implicitConversions
/**
* Provides functions for the definition of actors, as well as actor
diff --git a/src/actors/scala/actors/ActorRef.scala b/src/actors/scala/actors/ActorRef.scala
index 8f70b13e01..5e0ca1554a 100644
--- a/src/actors/scala/actors/ActorRef.scala
+++ b/src/actors/scala/actors/ActorRef.scala
@@ -1,7 +1,9 @@
package scala.actors
import java.util.concurrent.TimeoutException
-import scala.concurrent.util.Duration
+import scala.concurrent.duration.Duration
+import scala.concurrent.Promise
+import scala.concurrent.ExecutionContext.Implicits.global
/**
* Trait used for migration of Scala actors to Akka.
@@ -28,7 +30,7 @@ trait ActorRef {
/**
* Sends a message asynchronously, returning a future which may eventually hold the reply.
*/
- private[actors] def ?(message: Any, timeout: Duration): Future[Any]
+ private[actors] def ?(message: Any, timeout: Duration): scala.concurrent.Future[Any]
/**
* Forwards the message and passes the original sender actor as the sender.
@@ -43,7 +45,7 @@ trait ActorRef {
private[actors] class OutputChannelRef(val actor: OutputChannel[Any]) extends ActorRef {
- override private[actors] def ?(message: Any, timeout: Duration): Future[Any] =
+ override private[actors] def ?(message: Any, timeout: Duration): scala.concurrent.Future[Any] =
throw new UnsupportedOperationException("Output channel does not support ?")
/**
@@ -88,14 +90,19 @@ private[actors] final class InternalActorRef(override val actor: InternalActor)
/**
* Sends a message asynchronously, returning a future which may eventually hold the reply.
*/
- override private[actors] def ?(message: Any, timeout: Duration): Future[Any] =
- Futures.future {
- val dur = if (timeout.isFinite()) timeout.toMillis else (java.lang.Long.MAX_VALUE >> 2)
- actor !? (dur, message) match {
- case Some(x) => x
- case None => new AskTimeoutException("? operation timed out.")
+ override private[actors] def ?(message: Any, timeout: Duration): scala.concurrent.Future[Any] = {
+ val dur = if (timeout.isFinite()) timeout.toMillis else (java.lang.Long.MAX_VALUE >> 2)
+ val replyPromise = Promise[Any]
+ scala.concurrent.future {
+ scala.concurrent.blocking {
+ actor !? (dur, message)
+ } match {
+ case Some(x) => replyPromise success x
+ case None => replyPromise failure new AskTimeoutException("? operation timed out.")
}
}
+ replyPromise.future
+ }
override def !(message: Any)(implicit sender: ActorRef = null): Unit =
if (message == PoisonPill)
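
The rewritten `?` above is an instance of a general Promise-plus-blocking idiom; a stand-alone sketch of that idiom, with a plain function standing in for the actor and a java.util.concurrent.TimeoutException in place of AskTimeoutException:

    import scala.concurrent.{Future, Promise, blocking, future}
    import scala.concurrent.ExecutionContext.Implicits.global
    import java.util.concurrent.TimeoutException

    object AskSketch {
      // run a potentially blocking computation off-thread and expose it as a Future
      def askBlocking[A](compute: => Option[A]): Future[A] = {
        val p = Promise[A]()
        future {
          blocking(compute) match {           // `blocking` lets the pool compensate for the stalled thread
            case Some(x) => p success x
            case None    => p failure new TimeoutException("no reply")
          }
        }
        p.future
      }
    }
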
diff --git a/src/actors/scala/actors/CanReply.scala b/src/actors/scala/actors/CanReply.scala
index 9bf0022247..92ab23dae1 100644
--- a/src/actors/scala/actors/CanReply.scala
+++ b/src/actors/scala/actors/CanReply.scala
@@ -8,7 +8,7 @@
package scala.actors
-import language.higherKinds
+import scala.language.higherKinds
/**
* Defines result-bearing message send operations.
diff --git a/src/actors/scala/actors/Combinators.scala b/src/actors/scala/actors/Combinators.scala
index dd704436fc..aef01b67a8 100644
--- a/src/actors/scala/actors/Combinators.scala
+++ b/src/actors/scala/actors/Combinators.scala
@@ -10,7 +10,7 @@
package scala.actors
-import language.implicitConversions
+import scala.language.implicitConversions
private[actors] trait Combinators {
diff --git a/src/actors/scala/actors/Future.scala b/src/actors/scala/actors/Future.scala
index 735c13190b..fb7bb488a2 100644
--- a/src/actors/scala/actors/Future.scala
+++ b/src/actors/scala/actors/Future.scala
@@ -174,7 +174,7 @@ object Futures {
* or timeout + `System.currentTimeMillis()` is negative.
*/
def awaitAll(timeout: Long, fts: Future[Any]*): List[Option[Any]] = {
- var resultsMap: collection.mutable.Map[Int, Option[Any]] = new collection.mutable.HashMap[Int, Option[Any]]
+ var resultsMap: scala.collection.mutable.Map[Int, Option[Any]] = new scala.collection.mutable.HashMap[Int, Option[Any]]
var cnt = 0
val mappedFts = fts.map(ft =>
diff --git a/src/actors/scala/actors/Reactor.scala b/src/actors/scala/actors/Reactor.scala
index c962bb9d3d..11c910e577 100644
--- a/src/actors/scala/actors/Reactor.scala
+++ b/src/actors/scala/actors/Reactor.scala
@@ -12,7 +12,7 @@ package scala.actors
import scala.actors.scheduler.{DelegatingScheduler, ExecutorScheduler,
ForkJoinScheduler, ThreadPoolConfig}
import java.util.concurrent.{ThreadPoolExecutor, TimeUnit, LinkedBlockingQueue}
-import language.implicitConversions
+import scala.language.implicitConversions
private[actors] object Reactor {
diff --git a/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala b/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
index a7bf8ec2ba..59f4afccc4 100644
--- a/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
+++ b/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
@@ -10,7 +10,7 @@
package scala.actors
package scheduler
-import util.Properties.{ javaVersion, javaVmVendor, isJavaAtLeast, propIsSetTo, propOrNone }
+import scala.util.Properties.{ javaVersion, javaVmVendor, isJavaAtLeast, propIsSetTo, propOrNone }
/**
* @author Erik Engbrecht
diff --git a/src/build/InnerObjectTestGen.scala b/src/build/InnerObjectTestGen.scala
index 5945bc17c7..b66112609c 100644
--- a/src/build/InnerObjectTestGen.scala
+++ b/src/build/InnerObjectTestGen.scala
@@ -1,4 +1,4 @@
-import collection.mutable
+import scala.collection.mutable
/** All contexts where objects can be embedded. */
object Contexts extends Enumeration {
diff --git a/src/build/bnd/continuations.bnd b/src/build/bnd/continuations.bnd
new file mode 100644
index 0000000000..748502f653
--- /dev/null
+++ b/src/build/bnd/continuations.bnd
@@ -0,0 +1,5 @@
+Bundle-Name: Scala Continuations Plugin
+Bundle-SymbolicName: org.scala-lang.plugins.continuations
+ver: @VERSION@
+Bundle-Version: ${ver}
+Export-Package: *;version=${ver}
diff --git a/src/build/bnd/scala-actors-migration.bnd b/src/build/bnd/scala-actors-migration.bnd
new file mode 100644
index 0000000000..2cddfb620a
--- /dev/null
+++ b/src/build/bnd/scala-actors-migration.bnd
@@ -0,0 +1,5 @@
+Bundle-Name: Scala Actors Migration
+Bundle-SymbolicName: org.scala-lang.scala-actors-migration
+ver: @VERSION@
+Bundle-Version: ${ver}
+Export-Package: *;version=${ver}
diff --git a/src/build/bnd/scala-actors.bnd b/src/build/bnd/scala-actors.bnd
new file mode 100644
index 0000000000..8d0555777f
--- /dev/null
+++ b/src/build/bnd/scala-actors.bnd
@@ -0,0 +1,5 @@
+Bundle-Name: Scala Actors
+Bundle-SymbolicName: org.scala-lang.scala-actors
+ver: @VERSION@
+Bundle-Version: ${ver}
+Export-Package: *;version=${ver}
diff --git a/src/build/bnd/scala-compiler.bnd b/src/build/bnd/scala-compiler.bnd
new file mode 100644
index 0000000000..c289843447
--- /dev/null
+++ b/src/build/bnd/scala-compiler.bnd
@@ -0,0 +1,8 @@
+Bundle-Name: Scala Compiler
+Bundle-SymbolicName: org.scala-lang.scala-compiler
+ver: @VERSION@
+Bundle-Version: ${ver}
+Export-Package: *;version=${ver}
+Import-Package: scala.tools.jline.*;resolution:=optional, \
+ org.apache.tools.ant.*;resolution:=optional, \
+ *
diff --git a/src/build/bnd/scala-library.bnd b/src/build/bnd/scala-library.bnd
new file mode 100644
index 0000000000..03aff45672
--- /dev/null
+++ b/src/build/bnd/scala-library.bnd
@@ -0,0 +1,6 @@
+Bundle-Name: Scala Standard Library
+Bundle-SymbolicName: org.scala-lang.scala-library
+ver: @VERSION@
+Bundle-Version: ${ver}
+Export-Package: *;version=${ver}
+Import-Package: sun.misc;resolution:=optional, *
diff --git a/src/build/bnd/scala-reflect.bnd b/src/build/bnd/scala-reflect.bnd
new file mode 100644
index 0000000000..6cda346d3a
--- /dev/null
+++ b/src/build/bnd/scala-reflect.bnd
@@ -0,0 +1,6 @@
+Bundle-Name: Scala Reflect
+Bundle-SymbolicName: org.scala-lang.scala-reflect
+ver: @VERSION@
+Bundle-Version: ${ver}
+Export-Package: *;version=${ver}
+Import-Package: scala.tools.nsc;resolution:=optional, *
diff --git a/src/build/bnd/scala-swing.bnd b/src/build/bnd/scala-swing.bnd
new file mode 100644
index 0000000000..eeacb9bd3f
--- /dev/null
+++ b/src/build/bnd/scala-swing.bnd
@@ -0,0 +1,5 @@
+Bundle-Name: Scala Swing
+Bundle-SymbolicName: org.scala-lang.scala-swing
+ver: @VERSION@
+Bundle-Version: ${ver}
+Export-Package: *;version=${ver}
diff --git a/src/build/genprod.scala b/src/build/genprod.scala
index 83a65e6876..b9511c1ad2 100644
--- a/src/build/genprod.scala
+++ b/src/build/genprod.scala
@@ -6,7 +6,7 @@
** |/ **
\* */
-import language.postfixOps
+import scala.language.postfixOps
/** This program generates the ProductN, TupleN, FunctionN,
* and AbstractFunctionN, where 0 <= N <= MAX_ARITY.
diff --git a/src/build/pack.xml b/src/build/pack.xml
index 564d290967..1735b93f3f 100644
--- a/src/build/pack.xml
+++ b/src/build/pack.xml
@@ -29,10 +29,15 @@ MAIN DISTRIBUTION PACKAGING
<tarfileset dir="${dist.dir}" prefix="${dist.name}" excludes="bin/**"/>
</tar>
<gzip src="${dists.dir}/archives/${dist.name}.tar" destfile="${dists.dir}/archives/${dist.name}.tgz"/>
- <exec executable="xz" failifexecutionfails="false">
- <arg line="-k -9e -S .xz ${dists.dir}/archives/${dist.name}.tar"/>
- </exec>
- <move file="${dists.dir}/archives/${dist.name}.tar.xz" tofile="${dists.dir}/archives/${dist.name}.txz" failonerror="false"/>
+ <if>
+ <not><equals arg1="${archives.skipxz}" arg2="true" /></not>
+ <then>
+ <exec executable="xz" failifexecutionfails="false">
+ <arg line="-k -9e -S .xz ${dists.dir}/archives/${dist.name}.tar"/>
+ </exec>
+ <move file="${dists.dir}/archives/${dist.name}.tar.xz" tofile="${dists.dir}/archives/${dist.name}.txz" failonerror="false"/>
+ </then>
+ </if>
<delete file="${dists.dir}/archives/${dist.name}.tar" />
<checksum fileext=".md5">
<fileset dir="${dists.dir}/archives">
@@ -54,10 +59,15 @@ MAIN DISTRIBUTION PACKAGING
<tarfileset dir="${dist.dir}/doc/scala-devel-docs" prefix="${dist.name}-devel-docs"/>
</tar>
<gzip src="${dists.dir}/archives/${dist.name}-devel-docs.tar" destfile="${dists.dir}/archives/${dist.name}-devel-docs.tgz"/>
- <exec executable="xz" failifexecutionfails="false">
- <arg line="-k -9e -S .xz ${dists.dir}/archives/${dist.name}-devel-docs.tar"/>
- </exec>
- <move file="${dists.dir}/archives/${dist.name}-devel-docs.tar.xz" tofile="${dists.dir}/archives/${dist.name}-devel-docs.txz" failonerror="false"/>
+ <if>
+ <not><equals arg1="${archives.skipxz}" arg2="true" /></not>
+ <then>
+ <exec executable="xz" failifexecutionfails="false">
+ <arg line="-k -9e -S .xz ${dists.dir}/archives/${dist.name}-devel-docs.tar"/>
+ </exec>
+ <move file="${dists.dir}/archives/${dist.name}-devel-docs.tar.xz" tofile="${dists.dir}/archives/${dist.name}-devel-docs.txz" failonerror="false"/>
+ </then>
+ </if>
<delete file="${dists.dir}/archives/${dist.name}-devel-docs.tar" />
<checksum fileext=".md5">
<fileset dir="${dists.dir}/archives">
@@ -84,10 +94,15 @@ MAIN DISTRIBUTION PACKAGING
</tarfileset>
</tar>
<gzip src="${dists.dir}/archives/${dist.name}-sources.tar" destfile="${dists.dir}/archives/${dist.name}-sources.tgz"/>
- <exec executable="xz" failifexecutionfails="false">
- <arg line="-k -9e -S .xz ${dists.dir}/archives/${dist.name}-sources.tar"/>
- </exec>
- <move file="${dists.dir}/archives/${dist.name}-sources.tar.xz" tofile="${dists.dir}/archives/${dist.name}-sources.txz" failonerror="false"/>
+ <if>
+ <not><equals arg1="${archives.skipxz}" arg2="true" /></not>
+ <then>
+ <exec executable="xz" failifexecutionfails="false">
+ <arg line="-k -9e -S .xz ${dists.dir}/archives/${dist.name}-sources.tar"/>
+ </exec>
+ <move file="${dists.dir}/archives/${dist.name}-sources.tar.xz" tofile="${dists.dir}/archives/${dist.name}-sources.txz" failonerror="false"/>
+ </then>
+ </if>
<delete file="${dists.dir}/archives/${dist.name}-sources.tar" />
<checksum fileext=".md5">
<fileset dir="${dists.dir}/archives">
diff --git a/src/compiler/scala/reflect/macros/runtime/AbortMacroException.scala b/src/compiler/scala/reflect/macros/runtime/AbortMacroException.scala
index f45dde8a85..4e4d88c0be 100644
--- a/src/compiler/scala/reflect/macros/runtime/AbortMacroException.scala
+++ b/src/compiler/scala/reflect/macros/runtime/AbortMacroException.scala
@@ -2,5 +2,6 @@ package scala.reflect.macros
package runtime
import scala.reflect.internal.util.Position
+import scala.util.control.ControlThrowable
-class AbortMacroException(val pos: Position, val msg: String) extends Throwable(msg)
+class AbortMacroException(val pos: Position, val msg: String) extends Throwable(msg) with ControlThrowable \ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/runtime/Aliases.scala b/src/compiler/scala/reflect/macros/runtime/Aliases.scala
index 8b742755cd..ff870e728e 100644
--- a/src/compiler/scala/reflect/macros/runtime/Aliases.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Aliases.scala
@@ -10,17 +10,22 @@ trait Aliases {
override type TermName = universe.TermName
override type TypeName = universe.TypeName
override type Tree = universe.Tree
- // override type Position = universe.Position
+ override type Position = universe.Position
override type Scope = universe.Scope
override type Modifiers = universe.Modifiers
override type Expr[+T] = universe.Expr[T]
override val Expr = universe.Expr
+ def Expr[T: WeakTypeTag](tree: Tree): Expr[T] = universe.Expr[T](mirror, universe.FixedMirrorTreeCreator(mirror, tree))
- override type AbsTypeTag[T] = universe.AbsTypeTag[T]
+ override type WeakTypeTag[T] = universe.WeakTypeTag[T]
override type TypeTag[T] = universe.TypeTag[T]
- override val AbsTypeTag = universe.AbsTypeTag
+ override val WeakTypeTag = universe.WeakTypeTag
override val TypeTag = universe.TypeTag
+ def WeakTypeTag[T](tpe: Type): WeakTypeTag[T] = universe.WeakTypeTag[T](mirror, universe.FixedMirrorTypeCreator(mirror, tpe))
+ def TypeTag[T](tpe: Type): TypeTag[T] = universe.TypeTag[T](mirror, universe.FixedMirrorTypeCreator(mirror, tpe))
+ override def weakTypeTag[T](implicit attag: WeakTypeTag[T]) = attag
override def typeTag[T](implicit ttag: TypeTag[T]) = ttag
+ override def weakTypeOf[T](implicit attag: WeakTypeTag[T]): Type = attag.tpe
override def typeOf[T](implicit ttag: TypeTag[T]): Type = ttag.tpe
} \ No newline at end of file
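
The AbsTypeTag-to-WeakTypeTag rename shows up at call sites roughly as below; a small sketch against the runtime universe rather than a macro Context:

    import scala.reflect.runtime.universe._

    object TagDemo {
      // WeakTypeTag tolerates abstract or unresolved type parameters; TypeTag requires a fully known type
      def weakDescr[T: WeakTypeTag]: Type = weakTypeOf[T]
      def strictDescr[T: TypeTag]: Type   = typeOf[T]

      class Box[T] {
        def tpe: Type = weakDescr[T]  // compiles: only a weak tag can be materialized for the abstract T
      }
    }
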
diff --git a/src/compiler/scala/reflect/macros/runtime/CapturedVariables.scala b/src/compiler/scala/reflect/macros/runtime/CapturedVariables.scala
deleted file mode 100644
index 78fb7100b0..0000000000
--- a/src/compiler/scala/reflect/macros/runtime/CapturedVariables.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-trait CapturedVariables {
- self: Context =>
-
- import mirror._
- import universe._
-
- def captureVariable(vble: Symbol): Unit = universe.captureVariable(vble)
-
- def referenceCapturedVariable(vble: Symbol): Tree = universe.referenceCapturedVariable(vble)
-
- def capturedVariableType(vble: Symbol): Type = universe.capturedVariableType(vble)
-} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/runtime/Context.scala b/src/compiler/scala/reflect/macros/runtime/Context.scala
index 8bbfef44a3..8e8b0fcea1 100644
--- a/src/compiler/scala/reflect/macros/runtime/Context.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Context.scala
@@ -5,24 +5,20 @@ import scala.tools.nsc.Global
abstract class Context extends scala.reflect.macros.Context
with Aliases
- with CapturedVariables
- with Infrastructure
with Enclosures
with Names
with Reifiers
with FrontEnds
- with Settings
+ with Infrastructure
with Typers
with Parsers
- with Exprs
- with TypeTags
with Evals
with ExprUtils
with Traces {
val universe: Global
- val mirror: MirrorOf[universe.type] = universe.rootMirror
+ val mirror: universe.Mirror = universe.rootMirror
val callsiteTyper: universe.analyzer.Typer
diff --git a/src/compiler/scala/reflect/macros/runtime/Enclosures.scala b/src/compiler/scala/reflect/macros/runtime/Enclosures.scala
index ebde4447d7..be5f2dbe83 100644
--- a/src/compiler/scala/reflect/macros/runtime/Enclosures.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Enclosures.scala
@@ -14,10 +14,11 @@ trait Enclosures {
// vals are eager to simplify debugging
// after all we wouldn't save that much time by making them lazy
val macroApplication: Tree = expandee
- val enclosingClass: Tree = site.enclClass.tree
+ val enclosingClass: Tree = enclTrees collectFirst { case x: ImplDef => x } getOrElse EmptyTree
val enclosingImplicits: List[(Type, Tree)] = site.openImplicits
val enclosingMacros: List[Context] = this :: universe.analyzer.openMacros // include self
val enclosingMethod: Tree = site.enclMethod.tree
val enclosingPosition: Position = if (enclPoses.isEmpty) NoPosition else enclPoses.head.pos
- val enclosingUnit: CompilationUnit = currentRun.currentUnit
+ val enclosingUnit: CompilationUnit = universe.currentRun.currentUnit
+ val enclosingRun: Run = universe.currentRun
}
diff --git a/src/compiler/scala/reflect/macros/runtime/Evals.scala b/src/compiler/scala/reflect/macros/runtime/Evals.scala
index 348e29cdd7..1f7b5f2ff1 100644
--- a/src/compiler/scala/reflect/macros/runtime/Evals.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Evals.scala
@@ -7,12 +7,12 @@ import scala.tools.reflect.ToolBox
trait Evals {
self: Context =>
- private lazy val evalMirror = ru.runtimeMirror(libraryClassLoader)
+ private lazy val evalMirror = ru.runtimeMirror(universe.analyzer.macroClassloader)
private lazy val evalToolBox = evalMirror.mkToolBox()
private lazy val evalImporter = ru.mkImporter(universe).asInstanceOf[ru.Importer { val from: universe.type }]
def eval[T](expr: Expr[T]): T = {
val imported = evalImporter.importTree(expr.tree)
- evalToolBox.runExpr(imported).asInstanceOf[T]
+ evalToolBox.eval(imported).asInstanceOf[T]
}
} \ No newline at end of file
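
The runExpr-to-eval rename (and parseExpr-to-parse in Parsers below) belongs to the ToolBox API; a stand-alone sketch of that API outside a macro:

    import scala.reflect.runtime.{universe => ru}
    import scala.tools.reflect.ToolBox

    object ToolBoxDemo extends App {
      val tb   = ru.runtimeMirror(getClass.getClassLoader).mkToolBox()
      val tree = tb.parse("List(1, 2, 3).map(_ * 2)") // formerly parseExpr
      println(tb.eval(tree))                          // formerly runExpr; prints List(2, 4, 6)
    }
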
diff --git a/src/compiler/scala/reflect/macros/runtime/Exprs.scala b/src/compiler/scala/reflect/macros/runtime/Exprs.scala
deleted file mode 100644
index 4217a6a404..0000000000
--- a/src/compiler/scala/reflect/macros/runtime/Exprs.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-trait Exprs {
- self: Context =>
-
- def Expr[T: AbsTypeTag](tree: Tree): Expr[T] = universe.Expr[T](mirror, universe.FixedMirrorTreeCreator(mirror, tree))
-}
diff --git a/src/compiler/scala/reflect/macros/runtime/FrontEnds.scala b/src/compiler/scala/reflect/macros/runtime/FrontEnds.scala
index 69fa416f8f..a6a198e1b4 100644
--- a/src/compiler/scala/reflect/macros/runtime/FrontEnds.scala
+++ b/src/compiler/scala/reflect/macros/runtime/FrontEnds.scala
@@ -1,28 +1,9 @@
package scala.reflect.macros
package runtime
-trait FrontEnds extends scala.tools.reflect.FrontEnds {
+trait FrontEnds {
self: Context =>
- import universe._
- import mirror._
-
- override type Position = universe.Position
-
- def frontEnd: FrontEnd = wrapReporter(universe.reporter)
-
- def setFrontEnd(frontEnd: FrontEnd): this.type = {
- universe.reporter = wrapFrontEnd(frontEnd)
- this
- }
-
- def withFrontEnd[T](frontEnd: FrontEnd)(op: => T): T = {
- val old = universe.reporter
- setFrontEnd(frontEnd)
- try op
- finally universe.reporter = old
- }
-
def echo(pos: Position, msg: String): Unit = universe.reporter.echo(pos, msg)
def info(pos: Position, msg: String, force: Boolean): Unit = universe.reporter.info(pos, msg, force)
@@ -35,13 +16,5 @@ trait FrontEnds extends scala.tools.reflect.FrontEnds {
def error(pos: Position, msg: String): Unit = callsiteTyper.context.error(pos, msg)
- def abort(pos: Position, msg: String): Nothing = {
- callsiteTyper.context.error(pos, msg)
- throw new AbortMacroException(pos, msg)
- }
-
- def interactive(): Unit = universe.reporter match {
- case reporter: tools.nsc.reporters.AbstractReporter => reporter.displayPrompt()
- case _ => ()
- }
-} \ No newline at end of file
+ def abort(pos: Position, msg: String): Nothing = throw new AbortMacroException(pos, msg)
+}
diff --git a/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala b/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala
index 19fb03364e..7781693822 100644
--- a/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala
@@ -1,52 +1,16 @@
package scala.reflect.macros
package runtime
-import scala.tools.nsc.util.ScalaClassLoader
-
trait Infrastructure {
self: Context =>
- val forJVM: Boolean = universe.forJVM
-
- val forMSIL: Boolean = universe.forMSIL
-
- val forInteractive: Boolean = universe.forInteractive
-
- val forScaladoc: Boolean = universe.forScaladoc
-
- val currentRun: Run = universe.currentRun
-
- val libraryClassPath: List[java.net.URL] = universe.classPath.asURLs
-
- lazy val libraryClassLoader: ClassLoader = {
- val classpath = libraryClassPath
- var loader: ClassLoader = ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader)
-
- // [Eugene] a heuristic to detect REPL
- if (universe.settings.exposeEmptyPackage.value) {
- import scala.tools.nsc.interpreter._
- val virtualDirectory = universe.settings.outputDirs.getSingleOutput.get
- loader = new AbstractFileClassLoader(virtualDirectory, loader) {}
- }
-
- loader
- }
-
- type Run = universe.Run
-
- object Run extends RunExtractor {
- def unapply(run: Run): Option[(CompilationUnit, List[CompilationUnit])] = Some(run.currentUnit, run.units.toList)
+ def settings: List[String] = {
+ val us = universe.settings
+ import us._
+ userSetSettings collectFirst { case x: MultiStringSetting if x.name == XmacroSettings.name => x.value } getOrElse Nil
}
- type CompilationUnit = universe.CompilationUnit
-
- object CompilationUnit extends CompilationUnitExtractor {
- def unapply(compilationUnit: CompilationUnit): Option[(java.io.File, Array[Char], Tree)] = Some(compilationUnit.source.file.file, compilationUnit.source.content, compilationUnit.body)
- }
-
- val currentMacro: Symbol = expandee.symbol
-
- val globalCache: collection.mutable.Map[Any, Any] = universe.analyzer.globalMacroCache
+ def compilerSettings: List[String] = universe.settings.recreateArgs
- val cache: collection.mutable.Map[Any, Any] = universe.analyzer.perRunMacroCache.getOrElseUpdate(currentMacro, collection.mutable.Map[Any, Any]())
-} \ No newline at end of file
+ def classPath: List[java.net.URL] = global.classPath.asURLs
+}
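
The surviving settings/compilerSettings members are what a macro author now queries for -Xmacro-settings values; a hedged sketch of such a consumer (the names below are illustrative, not defined by this patch):

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object MacroFlags {
      // expands to true when the compiler was invoked with -Xmacro-settings:verbose
      def verbose: Boolean = macro verboseImpl

      def verboseImpl(c: Context): c.Expr[Boolean] = {
        import c.universe._
        val flag = c.settings contains "verbose" // c.settings: List[String] from -Xmacro-settings
        c.Expr[Boolean](Literal(Constant(flag)))
      }
    }
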
diff --git a/src/compiler/scala/reflect/macros/runtime/Parsers.scala b/src/compiler/scala/reflect/macros/runtime/Parsers.scala
index 6d89b71f39..ab9b94ec9f 100644
--- a/src/compiler/scala/reflect/macros/runtime/Parsers.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Parsers.scala
@@ -1,7 +1,7 @@
package scala.reflect.macros
package runtime
-import language.existentials
+import scala.language.existentials
import scala.tools.reflect.ToolBox
import scala.tools.reflect.ToolBoxError
@@ -10,16 +10,15 @@ trait Parsers {
def parse(code: String): Tree =
// todo. provide decent implementation
+ // see `Typers.typedUseCase` for details
try {
import scala.reflect.runtime.{universe => ru}
- val parsed = ru.rootMirror.mkToolBox().parseExpr(code)
+ val parsed = ru.rootMirror.mkToolBox().parse(code)
val importer = universe.mkImporter(ru)
importer.importTree(parsed)
} catch {
case ToolBoxError(msg, cause) =>
+ // todo. provide a position
throw new ParseError(universe.NoPosition, msg)
}
-
- case class ParseError(val pos: Position, val msg: String) extends Throwable(msg)
- object ParseError extends ParseErrorExtractor
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/macros/runtime/Reifiers.scala b/src/compiler/scala/reflect/macros/runtime/Reifiers.scala
index ab1de4288b..9e11e5e26d 100644
--- a/src/compiler/scala/reflect/macros/runtime/Reifiers.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Reifiers.scala
@@ -13,17 +13,15 @@ trait Reifiers {
import universe._
import definitions._
- lazy val basisUniverse: Tree = gen.mkBasisUniverseRef
-
- lazy val runtimeUniverse: Tree = gen.mkRuntimeUniverseRef
-
def reifyTree(universe: Tree, mirror: Tree, tree: Tree): Tree = {
+ assert(ExprClass != NoSymbol)
val result = scala.reflect.reify.`package`.reifyTree(self.universe)(callsiteTyper, universe, mirror, tree)
logFreeVars(enclosingPosition, result)
result
}
def reifyType(universe: Tree, mirror: Tree, tpe: Type, concrete: Boolean = false): Tree = {
+ assert(TypeTagsClass != NoSymbol)
val result = scala.reflect.reify.`package`.reifyType(self.universe)(callsiteTyper, universe, mirror, tpe, concrete)
logFreeVars(enclosingPosition, result)
result
diff --git a/src/compiler/scala/reflect/macros/runtime/Settings.scala b/src/compiler/scala/reflect/macros/runtime/Settings.scala
deleted file mode 100644
index 9c24273cd7..0000000000
--- a/src/compiler/scala/reflect/macros/runtime/Settings.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-trait Settings {
- self: Context =>
-
- def settings: List[String] = {
- val us = universe.settings
- import us._
- userSetSettings collectFirst { case x: MultiStringSetting if x.name == XmacroSettings.name => x.value } getOrElse Nil
- }
-
- def compilerSettings: List[String] = universe.settings.recreateArgs
-
- def setCompilerSettings(options: String): this.type =
- // todo. is not going to work with quoted arguments with embedded whitespaces
- setCompilerSettings(options.split(" ").toList)
-
- def setCompilerSettings(options: List[String]): this.type = {
- val settings = new tools.nsc.Settings(_ => ())
- // [Eugene] what settings should we exclude?
- settings.copyInto(universe.settings)
- this
- }
-
- def withCompilerSettings[T](options: String)(op: => T): T =
- // todo. is not going to work with quoted arguments with embedded whitespaces
- withCompilerSettings(options.split(" ").toList)(op)
-
- def withCompilerSettings[T](options: List[String])(op: => T): T = {
- val old = options
- setCompilerSettings(options)
- try op
- finally setCompilerSettings(old)
- }
-}
diff --git a/src/compiler/scala/reflect/macros/runtime/TypeTags.scala b/src/compiler/scala/reflect/macros/runtime/TypeTags.scala
deleted file mode 100644
index 2bc2fe6384..0000000000
--- a/src/compiler/scala/reflect/macros/runtime/TypeTags.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-trait TypeTags {
- self: Context =>
-
- def AbsTypeTag[T](tpe: Type): AbsTypeTag[T] = universe.AbsTypeTag[T](mirror, universe.FixedMirrorTypeCreator(mirror, tpe))
- def TypeTag[T](tpe: Type): TypeTag[T] = universe.TypeTag[T](mirror, universe.FixedMirrorTypeCreator(mirror, tpe))
-}
diff --git a/src/compiler/scala/reflect/macros/util/Traces.scala b/src/compiler/scala/reflect/macros/util/Traces.scala
index 6c2f115994..d16916b753 100644
--- a/src/compiler/scala/reflect/macros/util/Traces.scala
+++ b/src/compiler/scala/reflect/macros/util/Traces.scala
@@ -2,17 +2,12 @@ package scala.reflect.macros
package util
trait Traces {
- def globalSettings: tools.nsc.Settings
+ def globalSettings: scala.tools.nsc.Settings
- // [Eugene] lots of ways to log:
- // 1) trace(...)
- // 2) log(...)
- // 3) if (foo) { doStuff(); includingSomeLogs(); }
- // what is the conventional way of unifying this?
val macroDebugLite = globalSettings.YmacrodebugLite.value
val macroDebugVerbose = globalSettings.YmacrodebugVerbose.value
val macroTraceLite = scala.tools.nsc.util.trace when (macroDebugLite || macroDebugVerbose)
val macroTraceVerbose = scala.tools.nsc.util.trace when macroDebugVerbose
@inline final def macroLogLite(msg: => Any) { if (macroDebugLite || macroDebugVerbose) println(msg) }
@inline final def macroLogVerbose(msg: => Any) { if (macroDebugVerbose) println(msg) }
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/Errors.scala b/src/compiler/scala/reflect/reify/Errors.scala
index 5e15c5ad3a..73c13901b6 100644
--- a/src/compiler/scala/reflect/reify/Errors.scala
+++ b/src/compiler/scala/reflect/reify/Errors.scala
@@ -27,8 +27,8 @@ trait Errors {
throw new ReificationError(defaultErrorPosition, msg)
}
- def CannotReifyTypeTagHavingUnresolvedTypeParameters(tpe: Type) = {
- val msg = "cannot reify TypeTag having unresolved type parameter %s".format(tpe)
+ def CannotReifyWeakType(details: Any) = {
+ val msg = "cannot create a TypeTag" + details
throw new ReificationError(defaultErrorPosition, msg)
}
diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala
index 53e01309cb..f48fcd8ada 100644
--- a/src/compiler/scala/reflect/reify/Reifier.scala
+++ b/src/compiler/scala/reflect/reify/Reifier.scala
@@ -52,7 +52,6 @@ abstract class Reifier extends States
*/
lazy val reification: Tree = {
try {
- // [Eugene] conventional way of doing this?
if (universe exists (_.isErroneous)) CannotReifyErroneousPrefix(universe)
if (universe.tpe == null) CannotReifyUntypedPrefix(universe)
@@ -62,7 +61,6 @@ abstract class Reifier extends States
reifyTrace("reifee is located at: ")(tree.pos)
reifyTrace("universe = ")(universe)
reifyTrace("mirror = ")(mirror)
- // [Eugene] conventional way of doing this?
if (tree exists (_.isErroneous)) CannotReifyErroneousReifee(tree)
if (tree.tpe == null) CannotReifyUntypedReifee(tree)
val pipeline = mkReificationPipeline
@@ -108,14 +106,10 @@ abstract class Reifier extends States
//
// todo. this is a common problem with non-trivial macros in our current macro system
// needs to be solved some day
- //
- // list of non-hygienic transformations:
- // todo. to be updated
- // [Eugene++] yeah, ugly and extremely brittle, but we do need to do resetAttrs. will be fixed later
- // todo. maybe try `resetLocalAttrs` once the dust settles
+ // maybe try `resetLocalAttrs` once the dust settles
var importantSymbols = Set[Symbol](
- NothingClass, AnyClass, SingletonClass, PredefModule, ScalaRunTimeModule, TypeCreatorClass, TreeCreatorClass, MirrorOfClass,
- BaseUniverseClass, JavaUniverseClass, ReflectRuntimePackage, ReflectRuntimeCurrentMirror)
+ NothingClass, AnyClass, SingletonClass, PredefModule, ScalaRunTimeModule, TypeCreatorClass, TreeCreatorClass, MirrorClass,
+ ApiUniverseClass, JavaUniverseClass, ReflectRuntimePackage, ReflectRuntimeCurrentMirror)
importantSymbols ++= importantSymbols map (_.companionSymbol)
importantSymbols ++= importantSymbols map (_.moduleClass)
importantSymbols ++= importantSymbols map (_.linkedClassOfClass)
diff --git a/src/compiler/scala/reflect/reify/States.scala b/src/compiler/scala/reflect/reify/States.scala
index a01cfe5d74..58455c9f3c 100644
--- a/src/compiler/scala/reflect/reify/States.scala
+++ b/src/compiler/scala/reflect/reify/States.scala
@@ -34,9 +34,11 @@ trait States {
def reificationIsConcrete_=(value: Boolean): Unit = {
_reificationIsConcrete = value
if (!value && concrete) {
- assert(current.isInstanceOf[Type], current)
- val offender = current.asInstanceOf[Type]
- CannotReifyTypeTagHavingUnresolvedTypeParameters(offender)
+ current match {
+ case tpe: Type => CannotReifyWeakType(s" having unresolved type parameter $tpe")
+ case sym: Symbol => CannotReifyWeakType(s" referring to local ${sym.kindString} ${sym.fullName}")
+ case _ => CannotReifyWeakType("")
+ }
}
}
var reifyStack = reifee :: Nil
diff --git a/src/compiler/scala/reflect/reify/Taggers.scala b/src/compiler/scala/reflect/reify/Taggers.scala
index a8523fe686..7db6394734 100644
--- a/src/compiler/scala/reflect/reify/Taggers.scala
+++ b/src/compiler/scala/reflect/reify/Taggers.scala
@@ -27,9 +27,9 @@ abstract class Taggers {
NothingTpe -> nme.Nothing,
NullTpe -> nme.Null)
- def materializeClassTag(prefix: Tree, tpe: Type): Tree = {
+ def materializeClassTag(tpe: Type): Tree = {
val tagModule = ClassTagModule
- materializeTag(prefix, tpe, tagModule, {
+ materializeTag(EmptyTree, tpe, tagModule, {
val erasure = c.reifyRuntimeClass(tpe, concrete = true)
val factory = TypeApply(Select(Ident(tagModule), nme.apply), List(TypeTree(tpe)))
Apply(factory, List(erasure))
@@ -37,20 +37,20 @@ abstract class Taggers {
}
def materializeTypeTag(universe: Tree, mirror: Tree, tpe: Type, concrete: Boolean): Tree = {
- val tagType = if (concrete) TypeTagClass else AbsTypeTagClass
- // what we need here is to compose a type BaseUniverse # TypeTag[$tpe]
+ val tagType = if (concrete) TypeTagClass else WeakTypeTagClass
+ // what we need here is to compose a type Universe # TypeTag[$tpe]
// to look for an implicit that conforms to this type
// that's why neither appliedType(tagType, List(tpe)) aka TypeRef(TypeTagsClass.thisType, tagType, List(tpe))
- // nor TypeRef(BaseUniverseClass.thisType, tagType, List(tpe)) won't fit here
- // scala> :type -v def foo: scala.reflect.base.Universe#TypeTag[Int] = ???
+ // nor TypeRef(ApiUniverseClass.thisType, tagType, List(tpe)) will fit here
+ // scala> :type -v def foo: scala.reflect.api.Universe#TypeTag[Int] = ???
// NullaryMethodType(TypeRef(pre = TypeRef(TypeSymbol(Universe)), TypeSymbol(TypeTag), args = List($tpe))))
- val unaffiliatedTagTpe = TypeRef(BaseUniverseClass.typeConstructor, tagType, List(tpe))
+ val unaffiliatedTagTpe = TypeRef(ApiUniverseClass.typeConstructor, tagType, List(tpe))
val unaffiliatedTag = c.inferImplicitValue(unaffiliatedTagTpe, silent = true, withMacrosDisabled = true)
unaffiliatedTag match {
case success if !success.isEmpty =>
Apply(Select(success, nme.in), List(mirror orElse mkDefaultMirrorRef(c.universe)(universe, c.callsiteTyper)))
case _ =>
- val tagModule = if (concrete) TypeTagModule else AbsTypeTagModule
+ val tagModule = if (concrete) TypeTagModule else WeakTypeTagModule
materializeTag(universe, tpe, tagModule, c.reifyType(universe, mirror, tpe, concrete = concrete))
}
}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
index 5f4296f54f..dec491aabe 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
@@ -50,6 +50,6 @@ trait GenAnnotationInfos {
// if you reify originals of anns, you get SO when trying to reify AnnotatedTypes, so screw it - after all, it's not that important
val reifiedAssocs = ann.assocs map (assoc => scalaFactoryCall(nme.Tuple2, reify(assoc._1), reifyClassfileAnnotArg(assoc._2)))
- mirrorFactoryCall(nme.AnnotationInfo, reify(ann.atp), mkList(reifiedArgs), mkList(reifiedAssocs))
+ mirrorFactoryCall(nme.Annotation, reify(ann.atp), mkList(reifiedArgs), mkListMap(reifiedAssocs))
}
} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
index 59651bcdf9..22a834d2e4 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
@@ -36,7 +36,8 @@ trait GenSymbols {
else if (sym.isEmptyPackageClass)
mirrorMirrorSelect(nme.EmptyPackageClass)
else if (sym.isModuleClass)
- Select(Select(reify(sym.sourceModule), nme.asModule), nme.moduleClass)
+ if (sym.sourceModule.isLocatable) Select(Select(reify(sym.sourceModule), nme.asModule), nme.moduleClass)
+ else reifySymDef(sym)
else if (sym.isPackage)
mirrorMirrorCall(nme.staticPackage, reify(sym.fullName))
else if (sym.isLocatable) {
@@ -71,6 +72,7 @@ trait GenSymbols {
*/
val hasPackagelessParent = sym.ownerChain.tail.tail exists (_.isEmptyPackageClass)
if (sym.isStatic && (sym.isClass || sym.isModule) && !hasPackagelessParent) {
+ // SI-6238: if applicable, emit references to StandardDefinitions instead of staticClass/staticModule calls
val resolver = if (sym.isType) nme.staticClass else nme.staticModule
mirrorMirrorCall(resolver, reify(sym.fullName))
} else {
@@ -88,49 +90,61 @@ trait GenSymbols {
}
} else {
// todo. make sure that free methods and free local defs work correctly
- if (sym.isTerm) reifyFreeTerm(sym, Ident(sym))
- else reifyFreeType(sym, Ident(sym))
+ if (sym.isExistential) reifySymDef(sym)
+ else if (sym.isTerm) reifyFreeTerm(Ident(sym))
+ else reifyFreeType(Ident(sym))
}
}
- def reifyFreeTerm(sym: Symbol, value: Tree): Tree =
- reifyIntoSymtab(sym) {
+ def reifyFreeTerm(binding: Tree): Tree =
+ reifyIntoSymtab(binding.symbol) { sym =>
if (reifyDebug) println("Free term" + (if (sym.isCapturedVariable) " (captured)" else "") + ": " + sym + "(" + sym.accurateKindString + ")")
- var name = newTermName(nme.REIFY_FREE_PREFIX + sym.name)
- if (sym.isType) name = name.append(nme.REIFY_FREE_THIS_SUFFIX)
+ val name = newTermName(nme.REIFY_FREE_PREFIX + sym.name + (if (sym.isType) nme.REIFY_FREE_THIS_SUFFIX else ""))
if (sym.isCapturedVariable) {
- assert(value.isInstanceOf[Ident], showRaw(value))
- val capturedTpe = capturedVariableType(sym)
- val capturedValue = referenceCapturedVariable(sym)
- (name, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), reify(capturedTpe), capturedValue, mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
+ assert(binding.isInstanceOf[Ident], showRaw(binding))
+ val capturedBinding = referenceCapturedVariable(sym)
+ Reification(name, capturedBinding, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), capturedBinding, mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
} else {
- (name, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), reify(sym.tpe), value, mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
+ Reification(name, binding, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), binding, mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
}
}
- def reifyFreeType(sym: Symbol, value: Tree): Tree =
- reifyIntoSymtab(sym) {
+ def reifyFreeType(binding: Tree): Tree =
+ reifyIntoSymtab(binding.symbol) { sym =>
if (reifyDebug) println("Free type: %s (%s)".format(sym, sym.accurateKindString))
- var name = newTermName(nme.REIFY_FREE_PREFIX + sym.name)
- val phantomTypeTag = Apply(TypeApply(Select(Ident(nme.UNIVERSE_SHORT), nme.TypeTag), List(value)), List(Literal(Constant(null)), Literal(Constant(null))))
- val flavor = if (sym.isExistential) nme.newFreeExistential else nme.newFreeType
- (name, mirrorBuildCall(flavor, reify(sym.name.toString), reify(sym.info), phantomTypeTag, mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
+ state.reificationIsConcrete = false
+ val name = newTermName(nme.REIFY_FREE_PREFIX + sym.name)
+ Reification(name, binding, mirrorBuildCall(nme.newFreeType, reify(sym.name.toString), mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
}
def reifySymDef(sym: Symbol): Tree =
- reifyIntoSymtab(sym) {
+ reifyIntoSymtab(sym) { sym =>
if (reifyDebug) println("Sym def: %s (%s)".format(sym, sym.accurateKindString))
- assert(!sym.isLocatable, sym) // if this assertion fires, then tough type reification needs to be rethought
- sym.owner.ownersIterator find (!_.isLocatable) foreach reifySymDef
- var name = newTermName(nme.REIFY_SYMDEF_PREFIX + sym.name)
- (name, mirrorBuildCall(nme.newNestedSymbol, reify(sym.owner), reify(sym.name), reify(sym.pos), mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(sym.isClass)))
+ val name = newTermName(nme.REIFY_SYMDEF_PREFIX + sym.name)
+ def reifiedOwner = if (sym.owner.isLocatable) reify(sym.owner) else reifySymDef(sym.owner)
+ Reification(name, Ident(sym), mirrorBuildCall(nme.newNestedSymbol, reifiedOwner, reify(sym.name), reify(sym.pos), mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(sym.isClass)))
}
- private def reifyIntoSymtab(sym: Symbol)(reificode: => (TermName, Tree)): Tree ={
+ case class Reification(name: Name, binding: Tree, tree: Tree)
+
+ private def reifyIntoSymtab(sym: Symbol)(reificode: Symbol => Reification): Tree = {
def fromSymtab = symtab symRef sym
if (fromSymtab == EmptyTree) {
- val reification = reificode
- state.symtab += (sym, reification._1, reification._2)
+ // reification is lazy, so that we can carefully choose where to evaluate it
+ // and we choose this place to be exactly here:
+ //
+ // reasons:
+ // 1) reification happens at most once per symbol to prevent repeated reifications
+ // 2) reification happens before putting the symbol itself into the symbol table to ensure correct initialization order:
+ // for example, if reification of symbol A refers to reification of symbol B
+ // (this might happen when we're doing `reifySymDef`, which expands into `newNestedSymbol`, which needs `sym.owner`)
+ // then we have to put reification-B into the symbol table before reification-A
+ // so that subsequent code generation that traverses the symbol table in the first-added first-codegenned order
+ // produces valid Scala code (with vals in a block depending only on lexically preceding vals)
+ val reification = reificode(sym)
+ import reification.{name, binding}
+ val tree = reification.tree updateAttachment ReifyBindingAttachment(binding)
+ state.symtab += (sym, name, tree)
}
fromSymtab
}
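
The ordering argument in the comment above (reify an entry before registering it, so that anything it depends on lands earlier in the table) can be illustrated with a generic sketch; this is not the reifier's actual data structure:

    import scala.collection.mutable

    class EmissionTable[K, V] {
      private val entries = mutable.LinkedHashMap[K, V]()

      def getOrElseReify(key: K)(reify: K => V): V =
        entries.getOrElse(key, {
          val v = reify(key)    // may recursively register dependencies first
          entries += (key -> v) // so insertion (= emission) order stays dependency-correct
          v
        })

      def inEmissionOrder: List[(K, V)] = entries.toList
    }
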
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
index f48df8df65..bdcc7383b0 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
@@ -124,7 +124,7 @@ trait GenTrees {
val sym = tree.symbol
if (reifyDebug) println("This for %s, reified as freeVar".format(sym))
if (reifyDebug) println("Free: " + sym)
- mirrorBuildCall(nme.Ident, reifyFreeTerm(sym, This(sym)))
+ mirrorBuildCall(nme.Ident, reifyFreeTerm(This(sym)))
case tree @ This(_) if !tree.symbol.isLocalToReifee =>
if (reifyDebug) println("This for %s, reified as This".format(tree.symbol))
mirrorBuildCall(nme.This, reify(tree.symbol))
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
index c762a28f99..7aa87dc2f8 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
@@ -23,7 +23,7 @@ trait GenTypes {
if (isSemiConcreteTypeMember(tpe))
return reifySemiConcreteTypeMember(tpe)
- // [Eugene] how do I check that the substitution is legal w.r.t tpe.info?
+ // SI-6242: splicing might violate type bounds
val spliced = spliceType(tpe)
if (spliced != EmptyTree)
return spliced
@@ -69,12 +69,11 @@ trait GenTypes {
def reificationIsConcrete: Boolean = state.reificationIsConcrete
def spliceType(tpe: Type): Tree = {
- // [Eugene] it seems that depending on the context the very same symbol can be either a spliceable tparam or a quantified existential. very weird!
val quantified = currentQuantified
if (tpe.isSpliceable && !(quantified contains tpe.typeSymbol)) {
if (reifyDebug) println("splicing " + tpe)
- val tagFlavor = if (concrete) tpnme.TypeTag.toString else tpnme.AbsTypeTag.toString
+ val tagFlavor = if (concrete) tpnme.TypeTag.toString else tpnme.WeakTypeTag.toString
val key = (tagFlavor, tpe.typeSymbol)
// if this fails, it might produce the dreaded "erroneous or inaccessible type" error
// to find out the whereabouts of the error run scalac with -Ydebug
diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
index 2b7733fb6c..49877b4286 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
@@ -70,6 +70,9 @@ trait GenUtils {
def mkList(args: List[Tree]): Tree =
scalaFactoryCall("collection.immutable.List", args: _*)
+ def mkListMap(args: List[Tree]): Tree =
+ scalaFactoryCall("collection.immutable.ListMap", args: _*)
+
/**
* An (unreified) path that refers to definition with given fully qualified name
* @param mkName Creator for last portion of name (either TermName or TypeName)
@@ -131,7 +134,7 @@ trait GenUtils {
def isCrossStageTypeBearer(tree: Tree): Boolean = tree match {
case TypeApply(hk, _) => isCrossStageTypeBearer(hk)
- case Select(sym @ Select(_, ctor), nme.apply) if ctor == nme.AbsTypeTag || ctor == nme.TypeTag || ctor == nme.Expr => true
+ case Select(sym @ Select(_, ctor), nme.apply) if ctor == nme.WeakTypeTag || ctor == nme.TypeTag || ctor == nme.Expr => true
case _ => false
}
diff --git a/src/compiler/scala/reflect/reify/package.scala b/src/compiler/scala/reflect/reify/package.scala
index a253effc1c..5a23ab7214 100644
--- a/src/compiler/scala/reflect/reify/package.scala
+++ b/src/compiler/scala/reflect/reify/package.scala
@@ -1,7 +1,6 @@
package scala.reflect
-import language.implicitConversions
-import scala.reflect.base.{Universe => BaseUniverse}
+import scala.language.implicitConversions
import scala.reflect.macros.{Context, ReificationError, UnexpectedReificationError}
import scala.tools.nsc.Global
@@ -26,7 +25,14 @@ package object reify {
private[reify] def mkDefaultMirrorRef(global: Global)(universe: global.Tree, typer0: global.analyzer.Typer): global.Tree = {
import global._
import definitions._
- val enclosingErasure = reifyEnclosingRuntimeClass(global)(typer0)
+ val enclosingErasure = {
+ val rClassTree = reifyEnclosingRuntimeClass(global)(typer0)
+ // HACK around SI-6259
+ // If we're in the constructor of an object, or otherwise don't have easy access to `this`, we have no good way to grab
+ // the class of that object. Instead, we construct an anonymous class and grab its class file, assuming
+ // this is enough to get the correct class loader for the class we *want* a mirror for, the object itself.
+ rClassTree orElse Apply(Select(treeBuilder.makeAnonymousNew(Nil), sn.GetClass), Nil)
+ }
// JavaUniverse is defined in scala-reflect.jar, so we must be very careful in case someone reifies stuff having only scala-library.jar on the classpath
val isJavaUniverse = JavaUniverseClass != NoSymbol && universe.tpe <:< JavaUniverseClass.toTypeConstructor
if (isJavaUniverse && !enclosingErasure.isEmpty) Apply(Select(universe, nme.runtimeMirror), List(Select(enclosingErasure, sn.GetClassLoader)))
@@ -61,15 +67,24 @@ package object reify {
}
}
+ // Note: If the current context is inside the constructor of an object, or otherwise not inside
+ // a class/object body, this will return EmptyTree.
def reifyEnclosingRuntimeClass(global: Global)(typer0: global.analyzer.Typer): global.Tree = {
import global._
import definitions._
- def isThisInScope = typer0.context.enclosingContextChain exists (_.tree.isInstanceOf[Template])
+ def isThisInScope = typer0.context.enclosingContextChain exists (_.tree.isInstanceOf[ImplDef])
if (isThisInScope) {
val enclosingClasses = typer0.context.enclosingContextChain map (_.tree) collect { case classDef: ClassDef => classDef }
val classInScope = enclosingClasses.headOption getOrElse EmptyTree
+ def isUnsafeToUseThis = {
+ val isInsideConstructorSuper = typer0.context.enclosingContextChain exists (_.inSelfSuperCall)
+ // Note: It's ok to check for any object here, because if we were in an enclosing class, we'd already have returned its classOf
+ val isInsideObject = typer0.context.enclosingContextChain map (_.tree) exists { case _: ModuleDef => true; case _ => false }
+ isInsideConstructorSuper && isInsideObject
+ }
if (!classInScope.isEmpty) reifyRuntimeClass(global)(typer0, classInScope.symbol.toTypeConstructor, concrete = true)
- else Select(This(tpnme.EMPTY), sn.GetClass)
+ else if(!isUnsafeToUseThis) Select(This(tpnme.EMPTY), sn.GetClass)
+ else EmptyTree
} else EmptyTree
}
}
diff --git a/src/compiler/scala/reflect/reify/phases/Calculate.scala b/src/compiler/scala/reflect/reify/phases/Calculate.scala
index 41cf6c066a..4d1e22abe7 100644
--- a/src/compiler/scala/reflect/reify/phases/Calculate.scala
+++ b/src/compiler/scala/reflect/reify/phases/Calculate.scala
@@ -9,7 +9,7 @@ trait Calculate {
implicit class RichCalculateSymbol(sym: Symbol) {
def metalevel: Int = { assert(sym != null && sym != NoSymbol); localSymbols.getOrElse(sym, 0) }
- def isLocalToReifee = (localSymbols contains sym) // [Eugene] how do I account for local skolems?
+ def isLocalToReifee = (localSymbols contains sym) // todo. how do I account for local skolems?
}
implicit class RichCalculateType(tpe: Type) {
diff --git a/src/compiler/scala/reflect/reify/phases/Metalevels.scala b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
index 1624bbe951..fbbd12a42f 100644
--- a/src/compiler/scala/reflect/reify/phases/Metalevels.scala
+++ b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
@@ -102,7 +102,7 @@ trait Metalevels {
*/
val metalevels = new Transformer {
var insideSplice = false
- var inlineableBindings = collection.mutable.Map[TermName, Tree]()
+ var inlineableBindings = scala.collection.mutable.Map[TermName, Tree]()
def withinSplice[T](op: => T) = {
val old = insideSplice
@@ -147,4 +147,4 @@ trait Metalevels {
super.transform(tree)
}
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala
index e26dd7e227..b5894e8eb6 100644
--- a/src/compiler/scala/reflect/reify/phases/Reshape.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala
@@ -101,11 +101,11 @@ trait Reshape {
// hence we cannot reify references to them, because noone will be able to see them later
// when implicit macros are fixed, these sneaky macros will move to corresponding companion objects
// of, say, ClassTag or TypeTag
- case Apply(TypeApply(_, List(tt)), _) if original.symbol == MacroInternal_materializeClassTag =>
+ case Apply(TypeApply(_, List(tt)), _) if original.symbol == materializeClassTag =>
gen.mkNullaryCall(Predef_implicitly, List(appliedType(ClassTagClass, tt.tpe)))
- case Apply(TypeApply(_, List(tt)), List(pre)) if original.symbol == MacroInternal_materializeAbsTypeTag =>
- gen.mkNullaryCall(Predef_implicitly, List(typeRef(pre.tpe, AbsTypeTagClass, List(tt.tpe))))
- case Apply(TypeApply(_, List(tt)), List(pre)) if original.symbol == MacroInternal_materializeTypeTag =>
+ case Apply(TypeApply(_, List(tt)), List(pre)) if original.symbol == materializeWeakTypeTag =>
+ gen.mkNullaryCall(Predef_implicitly, List(typeRef(pre.tpe, WeakTypeTagClass, List(tt.tpe))))
+ case Apply(TypeApply(_, List(tt)), List(pre)) if original.symbol == materializeTypeTag =>
gen.mkNullaryCall(Predef_implicitly, List(typeRef(pre.tpe, TypeTagClass, List(tt.tpe))))
case _ =>
original
@@ -248,10 +248,9 @@ trait Reshape {
New(TypeTree(ann.atp) setOriginal extractOriginal(ann.original), List(args))
}
- // [Eugene] is this implemented correctly?
private def trimAccessors(deff: Tree, stats: List[Tree]): List[Tree] = {
val symdefs = (stats collect { case vodef: ValOrDefDef => vodef } map (vodeff => vodeff.symbol -> vodeff)).toMap
- val accessors = collection.mutable.Map[ValDef, List[DefDef]]()
+ val accessors = scala.collection.mutable.Map[ValDef, List[DefDef]]()
stats collect { case ddef: DefDef => ddef } foreach (defdef => {
val valdef = symdefs get defdef.symbol.accessedOrSelf collect { case vdef: ValDef => vdef } getOrElse null
if (valdef != null) accessors(valdef) = accessors.getOrElse(valdef, Nil) :+ defdef
@@ -287,7 +286,7 @@ trait Reshape {
val name1 = nme.dropLocalSuffix(name)
val vdef1 = ValDef(mods2, name1, tpt, rhs)
if (reifyDebug) println("resetting visibility of field: %s => %s".format(vdef, vdef1))
- Some(vdef1) // no copyAttrs here, because new ValDef and old symbols are not out of sync
+ Some(vdef1) // no copyAttrs here, because new ValDef and old symbols are now out of sync
case ddef @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
if (accessors.values.exists(_.contains(ddef))) {
if (reifyDebug) println("discarding accessor method: " + ddef)
@@ -324,4 +323,4 @@ trait Reshape {
isSynthetic && isCaseCompanion
}))
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/utils/Extractors.scala b/src/compiler/scala/reflect/reify/utils/Extractors.scala
index ebe3957e69..b7206eda0e 100644
--- a/src/compiler/scala/reflect/reify/utils/Extractors.scala
+++ b/src/compiler/scala/reflect/reify/utils/Extractors.scala
@@ -14,12 +14,12 @@ trait Extractors {
// val $u: reflect.runtime.universe.type = scala.reflect.runtime.`package`.universe;
// val $m: $u.Mirror = $u.runtimeMirror(Test.this.getClass().getClassLoader());
// $u.Expr[List[Int]]($m, {
- // final class $treecreator1 extends scala.reflect.base.TreeCreator {
+ // final class $treecreator1 extends scala.reflect.api.TreeCreator {
// def <init>(): $treecreator1 = {
// $treecreator1.super.<init>();
// ()
// };
- // def apply[U >: Nothing <: scala.reflect.base.Universe with Singleton]($m$untyped: scala.reflect.base.MirrorOf[U]): U#Tree = {
+ // def apply[U >: Nothing <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Tree = {
// val $u: U = $m$untyped.universe;
// val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
// $u.Apply($u.Select($u.Select($u.build.This($m.staticPackage("scala.collection.immutable").moduleClass), $u.newTermName("List")), $u.newTermName("apply")), List($u.Literal($u.Constant(1)), $u.Literal($u.Constant(2))))
@@ -27,12 +27,12 @@ trait Extractors {
// };
// new $treecreator1()
// })($u.TypeTag[List[Int]]($m, {
- // final class $typecreator1 extends scala.reflect.base.TypeCreator {
+ // final class $typecreator1 extends scala.reflect.api.TypeCreator {
// def <init>(): $typecreator1 = {
// $typecreator1.super.<init>();
// ()
// };
- // def apply[U >: Nothing <: scala.reflect.base.Universe with Singleton]($m$untyped: scala.reflect.base.MirrorOf[U]): U#Type = {
+ // def apply[U >: Nothing <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = {
// val $u: U = $m$untyped.universe;
// val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
// $u.TypeRef($u.ThisType($m.staticPackage("scala.collection.immutable").moduleClass), $m.staticClass("scala.collection.immutable.List"), List($m.staticClass("scala.Int").toTypeConstructor))
@@ -45,8 +45,8 @@ trait Extractors {
private def mkCreator(flavor: TypeName, symtab: SymbolTable, rtree: Tree): Tree = {
val tparamu = newTypeName("U")
val (reifierBase, reifierName, reifierTpt, reifierUniverse) = flavor match {
- case tpnme.REIFY_TYPECREATOR_PREFIX => (TypeCreatorClass, nme.apply, SelectFromTypeTree(Ident(tparamu), tpnme.Type), BaseUniverseClass)
- case tpnme.REIFY_TREECREATOR_PREFIX => (TreeCreatorClass, nme.apply, SelectFromTypeTree(Ident(tparamu), tpnme.Tree), BaseUniverseClass)
+ case tpnme.REIFY_TYPECREATOR_PREFIX => (TypeCreatorClass, nme.apply, SelectFromTypeTree(Ident(tparamu), tpnme.Type), ApiUniverseClass)
+ case tpnme.REIFY_TREECREATOR_PREFIX => (TreeCreatorClass, nme.apply, SelectFromTypeTree(Ident(tparamu), tpnme.Tree), ApiUniverseClass)
case _ => throw new Error(s"unexpected flavor $flavor")
}
val reifierBody = {
@@ -81,7 +81,7 @@ trait Extractors {
DefDef(NoMods,
reifierName,
List(TypeDef(Modifiers(PARAM), tparamu, List(), TypeBoundsTree(Ident(NothingClass), CompoundTypeTree(Template(List(Ident(reifierUniverse), Ident(SingletonClass)), emptyValDef, List()))))),
- List(List(ValDef(Modifiers(PARAM), nme.MIRROR_UNTYPED, AppliedTypeTree(Ident(MirrorOfClass), List(Ident(tparamu))), EmptyTree))),
+ List(List(ValDef(Modifiers(PARAM), nme.MIRROR_UNTYPED, AppliedTypeTree(Ident(MirrorClass), List(Ident(tparamu))), EmptyTree))),
reifierTpt, reifierBody))))
Block(tpec, ApplyConstructor(Ident(tpec.name), List()))
}
@@ -94,7 +94,7 @@ trait Extractors {
object ReifiedTree {
def apply(universe: Tree, mirror: Tree, symtab: SymbolTable, rtree: Tree, tpe: Type, rtpe: Tree, concrete: Boolean): Tree = {
- val tagFactory = if (concrete) nme.TypeTag else nme.AbsTypeTag
+ val tagFactory = if (concrete) nme.TypeTag else nme.WeakTypeTag
val tagCtor = TypeApply(Select(Select(Ident(nme.UNIVERSE_SHORT), tagFactory), nme.apply), List(TypeTree(tpe)))
val exprCtor = TypeApply(Select(Select(Ident(nme.UNIVERSE_SHORT), nme.Expr), nme.apply), List(TypeTree(tpe)))
val tagArgs = List(Ident(nme.MIRROR_SHORT), mkCreator(tpnme.REIFY_TYPECREATOR_PREFIX, symtab, rtpe))
@@ -114,7 +114,7 @@ trait Extractors {
case Select(Select(_, tagFlavor), _) => tagFlavor
case Select(_, tagFlavor) => tagFlavor
}
- Some(universe, mirror, SymbolTable(symbolTable1 ++ symbolTable2), rtree, ttpe.tpe, rtpe, tagFlavor == nme.TypeTag)
+ Some((universe, mirror, SymbolTable(symbolTable1 ++ symbolTable2), rtree, ttpe.tpe, rtpe, tagFlavor == nme.TypeTag))
case _ =>
None
}
@@ -122,7 +122,7 @@ trait Extractors {
object ReifiedType {
def apply(universe: Tree, mirror: Tree, symtab: SymbolTable, tpe: Type, rtpe: Tree, concrete: Boolean) = {
- val tagFactory = if (concrete) nme.TypeTag else nme.AbsTypeTag
+ val tagFactory = if (concrete) nme.TypeTag else nme.WeakTypeTag
val ctor = TypeApply(Select(Select(Ident(nme.UNIVERSE_SHORT), tagFactory), nme.apply), List(TypeTree(tpe)))
val args = List(Ident(nme.MIRROR_SHORT), mkCreator(tpnme.REIFY_TYPECREATOR_PREFIX, symtab, rtpe))
val unwrapped = Apply(ctor, args)
@@ -139,7 +139,7 @@ trait Extractors {
case Select(Select(_, tagFlavor), _) => tagFlavor
case Select(_, tagFlavor) => tagFlavor
}
- Some(universe, mirror, SymbolTable(symtab), ttpe.tpe, rtpe, tagFlavor == nme.TypeTag)
+ Some((universe, mirror, SymbolTable(symtab), ttpe.tpe, rtpe, tagFlavor == nme.TypeTag))
case _ =>
None
}
@@ -160,9 +160,9 @@ trait Extractors {
object FreeDef {
def unapply(tree: Tree): Option[(Tree, TermName, Tree, Long, String)] = tree match {
case FreeTermDef(uref, name, binding, flags, origin) =>
- Some(uref, name, binding, flags, origin)
+ Some((uref, name, binding, flags, origin))
case FreeTypeDef(uref, name, binding, flags, origin) =>
- Some(uref, name, binding, flags, origin)
+ Some((uref, name, binding, flags, origin))
case _ =>
None
}
@@ -176,12 +176,11 @@ trait Extractors {
List(
_,
_,
- binding,
Apply(Select(Select(uref2 @ Ident(_), build2), flagsFromBits), List(Literal(Constant(flags: Long)))),
Literal(Constant(origin: String)))))
if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && newFreeTerm == nme.newFreeTerm &&
uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
- Some(uref1, name, binding, flags, origin)
+ Some(uref1, name, reifyBinding(tree), flags, origin)
case _ =>
None
}
@@ -194,22 +193,11 @@ trait Extractors {
Select(Select(uref1 @ Ident(_), build1), newFreeType),
List(
_,
- _,
- value,
Apply(Select(Select(uref2 @ Ident(_), build2), flagsFromBits), List(Literal(Constant(flags: Long)))),
Literal(Constant(origin: String)))))
- if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && (newFreeType == nme.newFreeType || newFreeType == nme.newFreeExistential) &&
+ if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && newFreeType == nme.newFreeType &&
uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
- value match {
- case Apply(TypeApply(Select(Select(uref3 @ Ident(_), typeTag), apply), List(binding)), List(Literal(Constant(null)), _))
- if uref3.name == nme.UNIVERSE_SHORT && typeTag == nme.TypeTag && apply == nme.apply =>
- Some(uref1, name, binding, flags, origin)
- case Apply(TypeApply(Select(uref3 @ Ident(_), typeTag), List(binding)), List(Literal(Constant(null)), _))
- if uref3.name == nme.UNIVERSE_SHORT && typeTag == nme.TypeTag =>
- Some(uref1, name, binding, flags, origin)
- case _ =>
- throw new Error("unsupported free type def: %s%n%s".format(value, showRaw(value)))
- }
+ Some(uref1, name, reifyBinding(tree), flags, origin)
case _ =>
None
}
@@ -219,7 +207,7 @@ trait Extractors {
def unapply(tree: Tree): Option[(Tree, TermName)] = tree match {
case Apply(Select(Select(uref @ Ident(_), build), ident), List(Ident(name: TermName)))
if build == nme.build && ident == nme.Ident && name.startsWith(nme.REIFY_FREE_PREFIX) =>
- Some(uref, name)
+ Some((uref, name))
case _ =>
None
}
@@ -238,7 +226,7 @@ trait Extractors {
Literal(Constant(isClass: Boolean)))))
if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && newNestedSymbol == nme.newNestedSymbol &&
uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
- Some(uref1, name, flags, isClass)
+ Some((uref1, name, flags, isClass))
case _ =>
None
}
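
Note on the Extractors hunks above: they track two 2.10 API renames — scala.reflect.base becomes scala.reflect.api (with MirrorOf renamed to Mirror), and AbsTypeTag becomes WeakTypeTag — while the Some((…)) changes simply spell out the tuple that the compiler would otherwise have to build by auto-tupling. As a reminder of what the renamed tag types mean at the use site, a minimal sketch against the public scala.reflect API (not taken from this patch):

import scala.reflect.runtime.universe._

// TypeTag requires a fully known type; WeakTypeTag (formerly AbsTypeTag)
// also accepts abstract or only partially known types.
def concreteType[T: TypeTag]: Type             = typeOf[T]
def possiblyAbstractType[T: WeakTypeTag]: Type = weakTypeOf[T]

def inGenericContext[A]: Type = possiblyAbstractType[A]   // fine with WeakTypeTag
// def broken[A]: Type = concreteType[A]                  // would not find a TypeTag[A]
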
diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
index 420f55c0e0..000e500c69 100644
--- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
+++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
@@ -23,6 +23,7 @@ trait NodePrinters {
// depended upon. Of more fragile code I cannot conceive.
// @Eugene: This stuff is only needed to debug-print out reifications in human-readable format
// Rolling a full-fledged, robust TreePrinter would be several times more code.
+ // Also as of late we have tests that ensure that UX won't be broken by random changes to the reifier.
val lines = (tree.toString.split(EOL) drop 1 dropRight 1).toList splitAt 2
var (List(universe, mirror), reification) = lines
reification = (for (line <- reification) yield {
@@ -39,7 +40,7 @@ trait NodePrinters {
})
s = s.replace("Modifiers(0L, newTypeName(\"\"), List())", "Modifiers()")
s = """Modifiers\((\d+)[lL], newTypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => {
- val buf = new collection.mutable.ListBuffer[String]
+ val buf = new scala.collection.mutable.ListBuffer[String]
val annotations = m.group(3)
if (buf.nonEmpty || annotations != "")
@@ -72,10 +73,10 @@ trait NodePrinters {
s.trim
})
- val printout = collection.mutable.ListBuffer[String]();
+ val printout = scala.collection.mutable.ListBuffer[String]();
printout += universe.trim
- if (mirrorIsUsed) printout += mirror.replace("MirrorOf[", "scala.reflect.base.MirrorOf[").trim
- val imports = collection.mutable.ListBuffer[String]();
+ if (mirrorIsUsed) printout += mirror.replace("Mirror[", "scala.reflect.api.Mirror[").trim
+ val imports = scala.collection.mutable.ListBuffer[String]();
imports += nme.UNIVERSE_SHORT
// if (buildIsUsed) imports += nme.build
if (mirrorIsUsed) imports += nme.MIRROR_SHORT
@@ -93,7 +94,7 @@ trait NodePrinters {
if (isExpr) {
if (mirror contains ".getClassLoader") {
printout += "import scala.tools.reflect.ToolBox"
- printout += s"println(${nme.MIRROR_SHORT}.mkToolBox().runExpr(tree))"
+ printout += s"println(${nme.MIRROR_SHORT}.mkToolBox().eval(tree))"
} else {
printout += "println(tree)"
}
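
Besides fully qualifying scala.collection and following the MirrorOf → Mirror rename, the debug printer above now emits eval in place of the older runExpr toolbox method. A minimal sketch of that public toolbox API (requires scala-compiler on the classpath; not part of this patch):

import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox           // enriches the mirror with mkToolBox

val tb   = cm.mkToolBox()
val tree = tb.parse("List(1, 2).map(_ + 1)")
println(tb.eval(tree))                       // List(2, 3); formerly tb.runExpr(tree)
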
diff --git a/src/compiler/scala/reflect/reify/utils/StdAttachments.scala b/src/compiler/scala/reflect/reify/utils/StdAttachments.scala
index abbed814e0..0b9cf58c89 100644
--- a/src/compiler/scala/reflect/reify/utils/StdAttachments.scala
+++ b/src/compiler/scala/reflect/reify/utils/StdAttachments.scala
@@ -6,7 +6,13 @@ trait StdAttachments {
import global._
- case class ReifyBindingAttachment(binding: Symbol)
+ case class ReifyBindingAttachment(binding: Tree)
- case class ReifyAliasAttachment(binding: Symbol, alias: TermName)
+ def reifyBinding(tree: Tree): Tree =
+ tree.attachments.get[ReifyBindingAttachment] match {
+ case Some(ReifyBindingAttachment(binding)) => binding
+ case other => Ident(NoSymbol)
+ }
+
+ case class ReifyAliasAttachment(sym: Symbol, alias: TermName)
}
\ No newline at end of file
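
ReifyBindingAttachment now carries the binding as a Tree rather than a Symbol, and the new reifyBinding helper centralizes the lookup with Ident(NoSymbol) as the fallback. A sketch of the attachments API used above (internal compiler API; assumes a 2.10-era Global named global is in scope, and the attachment class here is hypothetical):

import scala.tools.nsc.Global

def attachmentDemo(global: Global): Unit = {
  import global._
  case class DemoBinding(binding: Tree)                        // hypothetical attachment payload
  val tagged  = Ident(newTermName("x")) updateAttachment DemoBinding(EmptyTree)
  val binding = tagged.attachments.get[DemoBinding] match {
    case Some(DemoBinding(b)) => b
    case _                    => Ident(NoSymbol)               // same fallback shape as reifyBinding
  }
  println(showRaw(binding))
}
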
diff --git a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
index a7ac299317..3ec43c863d 100644
--- a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
+++ b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
@@ -17,6 +17,7 @@ trait SymbolTables {
private[SymbolTable] val original: Option[List[Tree]] = None) {
def syms: List[Symbol] = symtab.keys.toList
+ def isConcrete: Boolean = symtab.values forall (sym => !FreeTypeDef.unapply(sym).isDefined)
// def aliases: Map[Symbol, List[TermName]] = aliases.distinct groupBy (_._1) mapValues (_ map (_._2))
@@ -45,28 +46,27 @@ trait SymbolTables {
def symRef(sym: Symbol): Tree =
symtab.get(sym) match {
- case Some(FreeDef(_, name, _, _, _)) => Ident(name) addAttachment ReifyBindingAttachment(sym)
- case Some(SymDef(_, name, _, _)) => Ident(name) addAttachment ReifyBindingAttachment(sym)
+ case Some(FreeDef(_, name, binding, _, _)) => Ident(name) updateAttachment binding
+ case Some(SymDef(_, name, _, _)) => Ident(name) updateAttachment ReifyBindingAttachment(Ident(sym))
case None => EmptyTree
}
def +(sym: Symbol, name: TermName, reification: Tree): SymbolTable = add(sym, name, reification)
- def +(sym: Symbol, name: TermName): SymbolTable = add(sym, name)
def +(symDef: Tree): SymbolTable = add(symDef)
def ++(symDefs: TraversableOnce[Tree]): SymbolTable = (this /: symDefs)((symtab, symDef) => symtab.add(symDef))
def ++(symtab: SymbolTable): SymbolTable = { val updated = this ++ symtab.symtab.values; new SymbolTable(updated.symtab, updated.aliases ++ symtab.aliases) }
def -(sym: Symbol): SymbolTable = remove(sym)
def -(name: TermName): SymbolTable = remove(name)
- def -(symDef: Tree): SymbolTable = remove(binding(symDef))
+ def -(symDef: Tree): SymbolTable = remove(reifyBinding(symDef).symbol)
def --(syms: GenTraversableOnce[Symbol]): SymbolTable = (this /: syms)((symtab, sym) => symtab.remove(sym))
def --(names: Iterable[TermName]): SymbolTable = (this /: names)((symtab, name) => symtab.remove(name))
- def --(symDefs: TraversableOnce[Tree]): SymbolTable = this -- (symDefs map (binding(_)))
+ def --(symDefs: TraversableOnce[Tree]): SymbolTable = this -- (symDefs map (reifyBinding(_)))
def --(symtab: SymbolTable): SymbolTable = { val updated = this -- symtab.symtab.values; new SymbolTable(updated.symtab, updated.aliases diff symtab.aliases) }
def filterSyms(p: Symbol => Boolean): SymbolTable = this -- (syms filterNot p)
def filterAliases(p: (Symbol, TermName) => Boolean): SymbolTable = this -- (aliases filterNot (tuple => p(tuple._1, tuple._2)) map (_._2))
private def add(symDef: Tree): SymbolTable = {
- val sym = binding(symDef)
+ val sym = reifyBinding(symDef).symbol
assert(sym != NoSymbol, showRaw(symDef))
val name = symDef match {
case FreeDef(_, name, _, _, _) => name
@@ -85,7 +85,8 @@ trait SymbolTables {
val fresh = typer.context.unit.fresh
newTermName(fresh.newName(name))
}
- add(ValDef(NoMods, freshName(name0), TypeTree(), reification) addAttachment ReifyBindingAttachment(sym))
+ val bindingAttachment = reification.attachments.get[ReifyBindingAttachment].get
+ add(ValDef(NoMods, freshName(name0), TypeTree(), reification) updateAttachment bindingAttachment)
}
private def add(sym: Symbol, name: TermName): SymbolTable = {
@@ -115,12 +116,6 @@ trait SymbolTables {
new SymbolTable(newSymtab, newAliases)
}
- private def binding(tree: Tree): Symbol =
- tree.attachments.get[ReifyBindingAttachment] match {
- case Some(ReifyBindingAttachment(binding)) => binding
- case other => NoSymbol
- }
-
private val cache = mutable.Map[SymbolTable, List[Tree]]()
def encode: List[Tree] = cache.getOrElseUpdate(this, SymbolTable.encode(this)) map (_.duplicate)
@@ -147,7 +142,7 @@ trait SymbolTables {
def apply(encoded: List[Tree]): SymbolTable = {
var result = new SymbolTable(original = Some(encoded))
encoded foreach (entry => (entry.attachments.get[ReifyBindingAttachment], entry.attachments.get[ReifyAliasAttachment]) match {
- case (Some(ReifyBindingAttachment(sym)), _) => result += entry
+ case (Some(ReifyBindingAttachment(_)), _) => result += entry
case (_, Some(ReifyAliasAttachment(sym, alias))) => result = new SymbolTable(result.symtab, result.aliases :+ (sym, alias))
case _ => // do nothing, this is boilerplate that can easily be recreated by subsequent `result.encode`
})
@@ -169,27 +164,26 @@ trait SymbolTables {
def fillInSymbol(sym: Symbol): Tree = {
if (reifyDebug) println("Filling in: %s (%s)".format(sym, sym.accurateKindString))
- val isFree = currtab.symName(sym) startsWith nme.REIFY_FREE_PREFIX
- if (isFree) {
- if (sym.annotations.isEmpty) EmptyTree
- else Apply(Select(currtab.symRef(sym), nme.setAnnotations), List(reifier.reify(sym.annotations)))
- } else {
- import scala.reflect.internal.Flags._
- if (sym hasFlag LOCKED) {
- // [Eugene] better to have a symbol without a type signature, than to crash with a CyclicReference
- EmptyTree
- } else {
- val rset = reifier.mirrorBuildCall(nme.setTypeSignature, currtab.symRef(sym), reifier.reify(sym.info))
- if (sym.annotations.isEmpty) rset
- else reifier.mirrorBuildCall(nme.setAnnotations, rset, reifier.mkList(sym.annotations map reifier.reifyAnnotationInfo))
- }
- }
+ val isFreeTerm = FreeTermDef.unapply(currtab.symDef(sym)).isDefined
+ // SI-6204 don't reify signatures for incomplete symbols, because this might lead to cyclic reference errors
+ val signature =
+ if (sym.isInitialized) {
+ if (sym.isCapturedVariable) capturedVariableType(sym)
+ else if (isFreeTerm) sym.tpe
+ else sym.info
+ } else NoType
+ val rset = reifier.mirrorBuildCall(nme.setTypeSignature, currtab.symRef(sym), reifier.reify(signature))
+ // `Symbol.annotations` doesn't initialize the symbol, so we don't need to do anything special here
+ // also since we call `sym.info` a few lines above, by now the symbol will be initialized (if possible)
+ // so the annotations will be filled in and will be waiting to be reified (unless symbol initialization is prohibited as described above)
+ if (sym.annotations.isEmpty) rset
+ else reifier.mirrorBuildCall(nme.setAnnotations, rset, reifier.mkList(sym.annotations map reifier.reifyAnnotationInfo))
}
// `fillInSymbol` might add symbols to `symtab`, that's why this is done iteratively
var progress = 0
while (progress < cumulativeSymtab.length) {
- val sym = currtab.binding(cumulativeSymtab(progress))
+ val sym = reifyBinding(cumulativeSymtab(progress)).symbol
if (sym != NoSymbol) {
val symtabProgress = currtab.symtab.size
val aliasesProgress = currtab.aliases.length
@@ -204,12 +198,12 @@ trait SymbolTables {
val withAliases = cumulativeSymtab flatMap (entry => {
val result = mutable.ListBuffer[Tree]()
result += entry
- val sym = currtab.binding(entry)
+ val sym = reifyBinding(entry).symbol
if (sym != NoSymbol)
result ++= cumulativeAliases.distinct filter (alias => alias._1 == sym && alias._2 != currtab.symName(sym)) map (alias => {
val canonicalName = currtab.symName(sym)
val aliasName = alias._2
- ValDef(NoMods, aliasName, TypeTree(), Ident(canonicalName)) addAttachment ReifyAliasAttachment(sym, aliasName)
+ ValDef(NoMods, aliasName, TypeTree(), Ident(canonicalName)) updateAttachment ReifyAliasAttachment(sym, aliasName)
})
result.toList
})
diff --git a/src/compiler/scala/tools/ant/ClassloadVerify.scala b/src/compiler/scala/tools/ant/ClassloadVerify.scala
index 33a20f6894..d1d557b9d3 100644
--- a/src/compiler/scala/tools/ant/ClassloadVerify.scala
+++ b/src/compiler/scala/tools/ant/ClassloadVerify.scala
@@ -10,7 +10,7 @@ package scala.tools.ant
import org.apache.tools.ant.Project
import org.apache.tools.ant.types.{Path, Reference}
-import collection.JavaConverters._
+import scala.collection.JavaConverters._
import scala.tools.util.VerifyClass
class ClassloadVerify extends ScalaMatchingTask {
diff --git a/src/compiler/scala/tools/ant/Scaladoc.scala b/src/compiler/scala/tools/ant/Scaladoc.scala
index b96ac6f29b..b2c6441222 100644
--- a/src/compiler/scala/tools/ant/Scaladoc.scala
+++ b/src/compiler/scala/tools/ant/Scaladoc.scala
@@ -44,7 +44,8 @@ import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
* - `docgenerator`,
* - `docrootcontent`,
* - `unchecked`,
- * - `nofail`.
+ * - `nofail`,
+ * - `skipPackages`.
*
* It also takes the following parameters as nested elements:
* - `src` (for srcdir),
@@ -159,6 +160,9 @@ class Scaladoc extends ScalaMatchingTask {
/** Instruct the scaladoc tool to group similar functions together */
private var docGroups: Boolean = false
+ /** Instruct the scaladoc tool to skip certain packages */
+ private var docSkipPackages: String = ""
+
/*============================================================================*\
** Properties setters **
\*============================================================================*/
@@ -442,6 +446,12 @@ class Scaladoc extends ScalaMatchingTask {
def setGroups(input: String) =
docGroups = Flag.getBooleanValue(input, "groups")
+ /** Instruct the scaladoc tool to skip certain packages.
+ * @param input A colon-delimited list of fully qualified package names that will be skipped from scaladoc.
+ */
+ def setSkipPackages(input: String) =
+ docSkipPackages = input
+
/*============================================================================*\
** Properties getters **
\*============================================================================*/
@@ -642,6 +652,7 @@ class Scaladoc extends ScalaMatchingTask {
docSettings.docRawOutput.value = docRawOutput
docSettings.docNoPrefixes.value = docNoPrefixes
docSettings.docGroups.value = docGroups
+ docSettings.docSkipPackages.value = docSkipPackages
if(!docDiagramsDotPath.isEmpty) docSettings.docDiagramsDotPath.value = docDiagramsDotPath.get
if (!docgenerator.isEmpty) docSettings.docgenerator.value = docgenerator.get
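
The new skipPackages attribute is documented above as a colon-delimited list of fully qualified package names. A hypothetical sketch of how such a value splits into individual names (the value shown is made up):

val skipPackages = "scala.tools.nsc:scala.reflect.internal"    // hypothetical attribute value
val skipped      = skipPackages.split(':').toList.filter(_.nonEmpty)
// skipped == List("scala.tools.nsc", "scala.reflect.internal")
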
diff --git a/src/compiler/scala/tools/ant/sabbus/Compilers.scala b/src/compiler/scala/tools/ant/sabbus/Compilers.scala
index 843ee043ea..7165474345 100644
--- a/src/compiler/scala/tools/ant/sabbus/Compilers.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Compilers.scala
@@ -11,11 +11,11 @@ package scala.tools.ant.sabbus
import java.net.URL
-object Compilers extends collection.DefaultMap[String, Compiler] {
+object Compilers extends scala.collection.DefaultMap[String, Compiler] {
val debug = false
- private val container = new collection.mutable.HashMap[String, Compiler]
+ private val container = new scala.collection.mutable.HashMap[String, Compiler]
def iterator = container.iterator
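
Like several hunks above, this one replaces bare collection references with scala.collection. Inside nested scala.tools.* packages a simple name such as collection resolves against the enclosing package chain and can be shadowed by an unrelated local definition; a hypothetical sketch (all names invented) of the pitfall the explicit prefix avoids:

package demo {
  object collection                                // unrelated definition that shadows the package
  object Use {
    // import collection.mutable.ListBuffer        // would resolve to demo.collection and fail
    import scala.collection.mutable.ListBuffer     // explicit prefix, as in the hunks above
    val buf = ListBuffer(1, 2, 3)
  }
}
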
diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
index 3c0d1d77ca..a347df6d6e 100644
--- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
@@ -1,91 +1,91 @@
-@@echo off
-
-rem ##########################################################################
-rem # Copyright 2002-2012 LAMP/EPFL
-rem #
-rem # This is free software; see the distribution for copying conditions.
-rem # There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A
-rem # PARTICULAR PURPOSE.
-rem ##########################################################################
-
-setlocal enableextensions enabledelayedexpansion
-
-set _LINE_TOOLCP=
-
-:another_param
-
-if "%1%"=="-toolcp" (
- set _LINE_TOOLCP=%2%
- shift
- shift
- goto another_param
-)
-
-set _LINE_PARAMS=%1
-:param_loop
-shift
-if [%1]==[] goto param_afterloop
-set _LINE_PARAMS=%_LINE_PARAMS% %1
-goto param_loop
-:param_afterloop
-if "%OS%" NEQ "Windows_NT" (
- echo "Warning, your version of Windows is not supported. Attempting to start scala anyway."
-)
-
-@@setlocal
-call :set_home
-
-rem We use the value of the JAVACMD environment variable if defined
-set _JAVACMD=%JAVACMD%
-
-if not defined _JAVACMD (
- if not "%JAVA_HOME%"=="" (
- if exist "%JAVA_HOME%\bin\java.exe" set "_JAVACMD=%JAVA_HOME%\bin\java.exe"
- )
-)
-
-if "%_JAVACMD%"=="" set _JAVACMD=java
-
-rem We use the value of the JAVA_OPTS environment variable if defined
-set _JAVA_OPTS=%JAVA_OPTS%
-if not defined _JAVA_OPTS set _JAVA_OPTS=@javaflags@
-
-set _TOOL_CLASSPATH=@classpath@
-if "%_TOOL_CLASSPATH%"=="" (
- for %%f in ("!_SCALA_HOME!\lib\*") do call :add_cpath "%%f"
- for /d %%f in ("!_SCALA_HOME!\lib\*") do call :add_cpath "%%f"
-)
-
-if not "%_LINE_TOOLCP%"=="" call :add_cpath "%_LINE_TOOLCP%"
-
-set _PROPS=-Dscala.home="!_SCALA_HOME!" -Denv.emacs="%EMACS%" -Dscala.usejavacp=true @properties@
-
-rem echo "%_JAVACMD%" %_JAVA_OPTS% %_PROPS% -cp "%_TOOL_CLASSPATH%" @class@ @toolflags@ %*
-"%_JAVACMD%" %_JAVA_OPTS% %_PROPS% -cp "%_TOOL_CLASSPATH%" @class@ @toolflags@ %*
-goto end
-
-rem ##########################################################################
-rem # subroutines
-
-:add_cpath
- if "%_TOOL_CLASSPATH%"=="" (
- set _TOOL_CLASSPATH=%~1
- ) else (
- set _TOOL_CLASSPATH=%_TOOL_CLASSPATH%;%~1
- )
-goto :eof
-
-rem Variable "%~dps0" works on WinXP SP2 or newer
-rem (see http://support.microsoft.com/?kbid=833431)
-rem set _SCALA_HOME=%~dps0..
-:set_home
- set _BIN_DIR=
- for %%i in (%~sf0) do set _BIN_DIR=%_BIN_DIR%%%~dpsi
- set _SCALA_HOME=%_BIN_DIR%..
-goto :eof
-
-:end
-@@endlocal
-
-REM exit code fix, see http://stackoverflow.com/questions/4632891/exiting-batch-with-exit-b-x-where-x-1-acts-as-if-command-completed-successfu
-@@%COMSPEC% /C exit %errorlevel% >nul
+@@echo off
+
+rem ##########################################################################
+rem # Copyright 2002-2012 LAMP/EPFL
+rem #
+rem # This is free software; see the distribution for copying conditions.
+rem # There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A
+rem # PARTICULAR PURPOSE.
+rem ##########################################################################
+
+setlocal enableextensions enabledelayedexpansion
+
+set _LINE_TOOLCP=
+
+:another_param
+
+if "%1%"=="-toolcp" (
+ set _LINE_TOOLCP=%2%
+ shift
+ shift
+ goto another_param
+)
+
+set _LINE_PARAMS=%1
+:param_loop
+shift
+if [%1]==[] goto param_afterloop
+set _LINE_PARAMS=%_LINE_PARAMS% %1
+goto param_loop
+:param_afterloop
+if "%OS%" NEQ "Windows_NT" (
+ echo "Warning, your version of Windows is not supported. Attempting to start scala anyway."
+)
+
+@@setlocal
+call :set_home
+
+rem We use the value of the JAVACMD environment variable if defined
+set _JAVACMD=%JAVACMD%
+
+if not defined _JAVACMD (
+ if not "%JAVA_HOME%"=="" (
+ if exist "%JAVA_HOME%\bin\java.exe" set "_JAVACMD=%JAVA_HOME%\bin\java.exe"
+ )
+)
+
+if "%_JAVACMD%"=="" set _JAVACMD=java
+
+rem We use the value of the JAVA_OPTS environment variable if defined
+set _JAVA_OPTS=%JAVA_OPTS%
+if not defined _JAVA_OPTS set _JAVA_OPTS=@javaflags@
+
+set _TOOL_CLASSPATH=@classpath@
+if "%_TOOL_CLASSPATH%"=="" (
+ for %%f in ("!_SCALA_HOME!\lib\*") do call :add_cpath "%%f"
+ for /d %%f in ("!_SCALA_HOME!\lib\*") do call :add_cpath "%%f"
+)
+
+if not "%_LINE_TOOLCP%"=="" call :add_cpath "%_LINE_TOOLCP%"
+
+set _PROPS=-Dscala.home="!_SCALA_HOME!" -Denv.emacs="%EMACS%" -Dscala.usejavacp=true @properties@
+
+rem echo "%_JAVACMD%" %_JAVA_OPTS% %_PROPS% -cp "%_TOOL_CLASSPATH%" @class@ @toolflags@ %*
+"%_JAVACMD%" %_JAVA_OPTS% %_PROPS% -cp "%_TOOL_CLASSPATH%" @class@ @toolflags@ %*
+goto end
+
+rem ##########################################################################
+rem # subroutines
+
+:add_cpath
+ if "%_TOOL_CLASSPATH%"=="" (
+ set _TOOL_CLASSPATH=%~1
+ ) else (
+ set _TOOL_CLASSPATH=%_TOOL_CLASSPATH%;%~1
+ )
+goto :eof
+
+rem Variable "%~dps0" works on WinXP SP2 or newer
+rem (see http://support.microsoft.com/?kbid=833431)
+rem set _SCALA_HOME=%~dps0..
+:set_home
+ set _BIN_DIR=
+ for %%i in (%~sf0) do set _BIN_DIR=%_BIN_DIR%%%~dpsi
+ set _SCALA_HOME=%_BIN_DIR%..
+goto :eof
+
+:end
+@@endlocal
+
+REM exit code fix, see http://stackoverflow.com/questions/4632891/exiting-batch-with-exit-b-x-where-x-1-acts-as-if-command-completed-successfu
+@@%COMSPEC% /C exit %errorlevel% >nul
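
Every removed line in this template hunk appears to reappear verbatim as an added line, so the change is presumably a line-ending normalization rather than a content change. A small sketch (hypothetical helper, not part of the patch) for confirming that at the byte level:

import scala.io.Source

// counts Windows (CRLF) vs Unix (bare LF) line terminators in a file
def lineEndings(path: String): (Int, Int) = {
  val src  = Source.fromFile(path, "ISO-8859-1")
  val text = try src.mkString finally src.close()
  val crlf = text.sliding(2).count(_ == "\r\n")
  val lf   = text.count(_ == '\n') - crlf
  (crlf, lf)                                       // (CRLF count, bare-LF count)
}
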
diff --git a/src/compiler/scala/tools/cmd/Property.scala b/src/compiler/scala/tools/cmd/Property.scala
index aae5bebcc8..bde7bb8cb8 100644
--- a/src/compiler/scala/tools/cmd/Property.scala
+++ b/src/compiler/scala/tools/cmd/Property.scala
@@ -64,7 +64,7 @@ trait Property extends Reference {
propertiesToOptions(loadProperties(file))
def propertiesToOptions(props: java.util.Properties): List[String] = {
- import collection.JavaConversions._
+ import scala.collection.JavaConversions._
propertiesToOptions(props.toList)
}
def propertiesToOptions(props: List[(String, String)]) = props flatMap propMapper
diff --git a/src/compiler/scala/tools/cmd/Reference.scala b/src/compiler/scala/tools/cmd/Reference.scala
index 77fe01051e..b6c564e9fb 100644
--- a/src/compiler/scala/tools/cmd/Reference.scala
+++ b/src/compiler/scala/tools/cmd/Reference.scala
@@ -6,7 +6,7 @@
package scala.tools
package cmd
-import collection.mutable.ListBuffer
+import scala.collection.mutable.ListBuffer
import nsc.Properties.envOrNone
/** Mixes in the specification trait and uses the vals therein to
diff --git a/src/compiler/scala/tools/cmd/gen/AnyVals.scala b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
index 7842603af7..6d652ffdfe 100644
--- a/src/compiler/scala/tools/cmd/gen/AnyVals.scala
+++ b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
@@ -14,7 +14,7 @@ trait AnyValReps {
sealed abstract class AnyValNum(name: String, repr: Option[String], javaEquiv: String) extends AnyValRep(name,repr,javaEquiv) {
case class Op(val op : String, val doc : String)
-
+
private def companionCoercions(tos: AnyValRep*) = {
tos.toList map (to =>
"""implicit def @javaequiv@2%s(x: @name@): %s = x.to%s""".format(to.javaEquiv, to.name, to.name)
@@ -24,7 +24,7 @@ trait AnyValReps {
def coercionComment = """
/** Language mandated coercions from @name@ to "wider" types.%s
*/""".format(coercionCommentExtra)
-
+
def implicitCoercions: List[String] = {
val coercions = this match {
case B => companionCoercions(S, I, L, F, D)
@@ -247,7 +247,7 @@ trait AnyValReps {
def classDoc = interpolate(classDocTemplate)
def objectDoc = ""
def mkImports = ""
-
+
def mkClass = assemble("final abstract class " + name + " private extends AnyVal", classLines)
def mkObject = assemble("object " + name + " extends AnyValCompanion", objectLines)
def make() = List[String](
@@ -281,7 +281,7 @@ trait AnyValTemplates {
%s
package scala
-import language.implicitConversions
+import scala.language.implicitConversions
""".trim.format(timestampString) + "\n\n")
@@ -341,9 +341,6 @@ final val NaN = @boxed@.NaN
final val PositiveInfinity = @boxed@.POSITIVE_INFINITY
final val NegativeInfinity = @boxed@.NEGATIVE_INFINITY
-@deprecated("use @name@.MinPositiveValue instead", "2.9.0")
-final val Epsilon = MinPositiveValue
-
/** The negative number with the greatest (finite) absolute value which is representable
* by a @name@. Note that it differs from [[java.lang.@name@.MIN_VALUE]], which
* is the smallest positive value representable by a @name@. In Scala that number
diff --git a/src/compiler/scala/tools/cmd/gen/Codegen.scala b/src/compiler/scala/tools/cmd/gen/Codegen.scala
index b94c640f1c..ff3d41c8b7 100644
--- a/src/compiler/scala/tools/cmd/gen/Codegen.scala
+++ b/src/compiler/scala/tools/cmd/gen/Codegen.scala
@@ -6,7 +6,7 @@
package scala.tools.cmd
package gen
-import language.postfixOps
+import scala.language.postfixOps
class Codegen(args: List[String]) extends {
val parsed = CodegenSpec(args: _*)
diff --git a/src/compiler/scala/tools/cmd/package.scala b/src/compiler/scala/tools/cmd/package.scala
index 5be98a460a..8c6716be78 100644
--- a/src/compiler/scala/tools/cmd/package.scala
+++ b/src/compiler/scala/tools/cmd/package.scala
@@ -9,8 +9,8 @@ package object cmd {
def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
// make some language features in this package compile without warning
- implicit def implicitConversions = language.implicitConversions
- implicit def postfixOps = language.postfixOps
+ implicit def implicitConversions = scala.language.implicitConversions
+ implicit def postfixOps = scala.language.postfixOps
private[cmd] def debug(msg: String) = println(msg)
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index 6d3fd2e09e..5a2d5ef165 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -23,7 +23,7 @@ trait CompilationUnits { self: Global =>
/** One unit of compilation that has been submitted to the compiler.
* It typically corresponds to a single file of source code. It includes
* error-reporting hooks. */
- class CompilationUnit(val source: SourceFile) {
+ class CompilationUnit(val source: SourceFile) extends CompilationUnitContextApi {
/** the fresh name creator */
var fresh: FreshNameCreator = new FreshNameCreator.Default
diff --git a/src/compiler/scala/tools/nsc/EvalLoop.scala b/src/compiler/scala/tools/nsc/EvalLoop.scala
index da03419d8a..bd1381faf5 100644
--- a/src/compiler/scala/tools/nsc/EvalLoop.scala
+++ b/src/compiler/scala/tools/nsc/EvalLoop.scala
@@ -5,7 +5,7 @@
package scala.tools.nsc
-import annotation.tailrec
+import scala.annotation.tailrec
import java.io.EOFException
trait EvalLoop {
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 2b0f082051..708824ede1 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -7,13 +7,13 @@ package scala.tools.nsc
import java.io.{ File, FileOutputStream, PrintWriter, IOException, FileNotFoundException }
import java.nio.charset.{ Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException }
-import compat.Platform.currentTime
+import scala.compat.Platform.currentTime
import scala.tools.util.PathResolver
import scala.collection.{ mutable, immutable }
import io.{ SourceReader, AbstractFile, Path }
import reporters.{ Reporter, ConsoleReporter }
import util.{ Exceptional, ClassPath, MergedClassPath, StatisticsInfo, ScalaClassLoader, returning }
-import scala.reflect.internal.util.{ NoPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
+import scala.reflect.internal.util.{ NoPosition, OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
import settings.{ AestheticSettings }
import symtab.{ Flags, SymbolTable, SymbolLoaders, SymbolTrackers }
@@ -29,8 +29,8 @@ import backend.{ ScalaPrimitives, Platform, MSILPlatform, JavaPlatform }
import backend.jvm.{GenJVM, GenASM}
import backend.opt.{ Inliners, InlineExceptionHandlers, ClosureElimination, DeadCodeElimination }
import backend.icode.analysis._
-import language.postfixOps
-import reflect.internal.StdAttachments
+import scala.language.postfixOps
+import scala.reflect.internal.StdAttachments
import scala.reflect.ClassTag
class Global(var currentSettings: Settings, var reporter: Reporter)
@@ -43,8 +43,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
with DocComments
with Positions { self =>
- // [Eugene++] would love to find better homes for the new things dumped into Global
-
// the mirror --------------------------------------------------
override def isCompilerUniverse = true
@@ -62,27 +60,20 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
def RootClass: ClassSymbol = rootMirror.RootClass
def EmptyPackageClass: ClassSymbol = rootMirror.EmptyPackageClass
- // [Eugene++] this little inconvenience gives us precise types for Expr.mirror and TypeTag.mirror
- // by the way, is it possible to define variant type members?
-
- override def settings = currentSettings
import definitions.findNamedMember
def findMemberFromRoot(fullName: Name): Symbol = rootMirror.findMemberFromRoot(fullName)
// alternate constructors ------------------------------------------
+ override def settings = currentSettings
+
def this(reporter: Reporter) =
this(new Settings(err => reporter.error(null, err)), reporter)
def this(settings: Settings) =
this(settings, new ConsoleReporter(settings))
- // fulfilling requirements
- // Renamed AbstractFile to AbstractFileType for backward compatibility:
- // it is difficult for sbt to work around the ambiguity errors which result.
- type AbstractFileType = scala.tools.nsc.io.AbstractFile
-
def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree = gen.mkAttributedQualifier(tpe, termSym)
def picklerPhase: Phase = if (currentRun.isDefined) currentRun.picklerPhase else NoPhase
@@ -105,6 +96,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** Generate ASTs */
type TreeGen = scala.tools.nsc.ast.TreeGen
+ /** Tree generation, usually based on existing symbols. */
override object gen extends {
val global: Global.this.type = Global.this
} with TreeGen {
@@ -112,6 +104,17 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
typer.typed(mkCast(tree, pt))
}
+ /** Trees fresh from the oven, mostly for use by the parser. */
+ object treeBuilder extends {
+ val global: Global.this.type = Global.this
+ } with TreeBuilder {
+ def freshName(prefix: String): Name = freshTermName(prefix)
+ def freshTermName(prefix: String): TermName = currentUnit.freshTermName(prefix)
+ def freshTypeName(prefix: String): TypeName = currentUnit.freshTypeName(prefix)
+ def o2p(offset: Int): Position = new OffsetPosition(currentUnit.source, offset)
+ def r2p(start: Int, mid: Int, end: Int): Position = rangePos(currentUnit.source, start, mid, end)
+ }
+
/** Fold constants */
object constfold extends {
val global: Global.this.type = Global.this
@@ -220,11 +223,11 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// not deprecated yet, but a method called "error" imported into
// nearly every trait really must go. For now using globalError.
- def error(msg: String) = globalError(msg)
- def globalError(msg: String) = reporter.error(NoPosition, msg)
- def inform(msg: String) = reporter.echo(msg)
- def warning(msg: String) =
- if (opt.fatalWarnings) globalError(msg)
+ def error(msg: String) = globalError(msg)
+ def inform(msg: String) = reporter.echo(msg)
+ override def globalError(msg: String) = reporter.error(NoPosition, msg)
+ override def warning(msg: String) =
+ if (settings.fatalWarnings.value) globalError(msg)
else reporter.warning(NoPosition, msg)
// Getting in front of Predef's asserts to supplement with more info.
@@ -275,9 +278,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
log("Running operation '%s' after every phase.\n".format(msg) + describeAfterEveryPhase(op))
}
- def shouldLogAtThisPhase = (
- (settings.log.isSetByUser)
- && ((settings.log containsPhase globalPhase) || (settings.log containsPhase phase))
+ override def shouldLogAtThisPhase = settings.log.isSetByUser && (
+ (settings.log containsPhase globalPhase) || (settings.log containsPhase phase)
)
// Over 200 closure objects are eliminated by inlining this.
@inline final def log(msg: => AnyRef) {
@@ -873,8 +875,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** Is given package class a system package class that cannot be invalidated?
*/
private def isSystemPackageClass(pkg: Symbol) =
- // [Eugene++ to Martin] please, verify
-// was: pkg == definitions.RootClass ||
pkg == RootClass ||
pkg == definitions.ScalaPackageClass || {
val pkgname = pkg.fullName
@@ -937,12 +937,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
else new MergedClassPath(elems, classPath.context)
val oldEntries = mkClassPath(subst.keys)
val newEntries = mkClassPath(subst.values)
- // [Eugene++ to Martin] please, verify
-// was: reSync(definitions.RootClass, Some(classPath), Some(oldEntries), Some(newEntries), invalidated, failed)
reSync(RootClass, Some(classPath), Some(oldEntries), Some(newEntries), invalidated, failed)
}
}
- def show(msg: String, syms: collection.Traversable[Symbol]) =
+ def show(msg: String, syms: scala.collection.Traversable[Symbol]) =
if (syms.nonEmpty)
informProgress(s"$msg: ${syms map (_.fullName) mkString ","}")
show("invalidated packages", invalidated)
@@ -998,8 +996,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
invalidateOrRemove(root)
} else {
if (classesFound) {
- // [Eugene++ to Martin] please, verify
-// was: if (root.isRoot) invalidateOrRemove(definitions.EmptyPackageClass)
if (root.isRoot) invalidateOrRemove(EmptyPackageClass)
else failed += root
}
@@ -1083,12 +1079,12 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
* of what file was being compiled when it broke. Since I really
* really want to know, this hack.
*/
- private var lastSeenSourceFile: SourceFile = NoSourceFile
+ protected var lastSeenSourceFile: SourceFile = NoSourceFile
/** Let's share a lot more about why we crash all over the place.
* People will be very grateful.
*/
- private var lastSeenContext: analyzer.Context = null
+ protected var lastSeenContext: analyzer.Context = null
/** The currently active run
*/
@@ -1098,6 +1094,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// TODO - trim these to the absolute minimum.
@inline final def afterErasure[T](op: => T): T = afterPhase(currentRun.erasurePhase)(op)
+ @inline final def afterPostErasure[T](op: => T): T = afterPhase(currentRun.posterasurePhase)(op)
@inline final def afterExplicitOuter[T](op: => T): T = afterPhase(currentRun.explicitouterPhase)(op)
@inline final def afterFlatten[T](op: => T): T = afterPhase(currentRun.flattenPhase)(op)
@inline final def afterIcode[T](op: => T): T = afterPhase(currentRun.icodePhase)(op)
@@ -1207,7 +1204,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** A Run is a single execution of the compiler on a sets of units
*/
- class Run {
+ class Run extends RunContextApi {
/** Have been running into too many init order issues with Run
* during erroneous conditions. Moved all these vals up to the
* top of the file so at least they're not trivially null.
@@ -1230,8 +1227,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
var reportedFeature = Set[Symbol]()
- /** A flag whether macro expansions failed */
- var macroExpansionFailed = false
+ /** Has any macro expansion used a fallback during this run? */
+ var seenMacroExpansionsFallingBack = false
/** To be initialized from firstPhase. */
private var terminalPhase: Phase = NoPhase
@@ -1407,6 +1404,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val specializePhase = phaseNamed("specialize")
val explicitouterPhase = phaseNamed("explicitouter")
val erasurePhase = phaseNamed("erasure")
+ val posterasurePhase = phaseNamed("posterasure")
// val lazyvalsPhase = phaseNamed("lazyvals")
val lambdaliftPhase = phaseNamed("lambdalift")
// val constructorsPhase = phaseNamed("constructors")
@@ -1521,7 +1519,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
else {
allConditionalWarnings foreach (_.summarize)
- if (macroExpansionFailed)
+ if (seenMacroExpansionsFallingBack)
warning("some macros could not be expanded and code fell back to overridden methods;"+
"\nrecompiling with generated classfiles on the classpath might help.")
// todo: migrationWarnings
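
Among the Global changes above are the relocated settings override, the new afterPostErasure helper, and the alternate constructors. A minimal sketch of driving Global through those constructors (scala-compiler on the classpath; nothing here is specific to this patch):

import scala.tools.nsc.{Global, Settings}

val settings = new Settings(err => Console.err.println(err))
settings.usejavacp.value = true          // compile against the running JVM's classpath
val global   = new Global(settings)      // delegates to this(settings, new ConsoleReporter(settings))
new global.Run()                         // assembles the phase pipeline; a Run now also extends RunContextApi
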
diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
index e69382be21..46cdc6a4a0 100644
--- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala
+++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
@@ -8,7 +8,7 @@ package scala.tools.nsc
import java.io.{ BufferedWriter, FileWriter }
import scala.collection.mutable
-import language.postfixOps
+import scala.language.postfixOps
/**
* PhaseAssembly
diff --git a/src/compiler/scala/tools/nsc/Phases.scala b/src/compiler/scala/tools/nsc/Phases.scala
index d2274b108b..c80be474a6 100644
--- a/src/compiler/scala/tools/nsc/Phases.scala
+++ b/src/compiler/scala/tools/nsc/Phases.scala
@@ -6,8 +6,8 @@
package scala.tools.nsc
import symtab.Flags
-import reflect.internal.util.TableDef
-import language.postfixOps
+import scala.reflect.internal.util.TableDef
+import scala.language.postfixOps
object Phases {
val MaxPhases = 64
diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
index 1fdf4c631e..d1faa4d219 100644
--- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
@@ -6,9 +6,9 @@
package scala.tools.nsc
package ast
-import compat.Platform.EOL
+import scala.compat.Platform.EOL
import symtab.Flags._
-import language.postfixOps
+import scala.language.postfixOps
/** The object `nodePrinter` converts the internal tree
* representation to a string.
@@ -145,8 +145,7 @@ abstract class NodePrinters {
str.toString
}
def printModifiers(tree: MemberDef) {
- // [Eugene++] there's most likely a bug here (?)
- // see `Printers.printAnnotations` for more information
+ // SI-5885: by default this won't print annotations of not yet initialized symbols
val annots0 = tree.symbol.annotations match {
case Nil => tree.mods.annotations
case xs => xs map annotationInfoToString
diff --git a/src/compiler/scala/tools/nsc/ast/Positions.scala b/src/compiler/scala/tools/nsc/ast/Positions.scala
index 74d1f8ab4b..d8fb632f73 100644
--- a/src/compiler/scala/tools/nsc/ast/Positions.scala
+++ b/src/compiler/scala/tools/nsc/ast/Positions.scala
@@ -11,13 +11,6 @@ trait Positions extends scala.reflect.internal.Positions {
def validatePositions(tree: Tree) {}
- // [Eugene] disabling this for now. imo it doesn't justify pollution of the public API
- // override def _checkSetAnnotation(tree: Tree, annot: TreeAnnotation): Unit = {
- // if (tree.pos != NoPosition && tree.pos != annot.pos) debugwarn("Overwriting annotation "+ tree.annotation +" of tree "+ tree +" with annotation "+ annot)
- // // if ((tree.annotation.isInstanceOf[scala.reflect.internal.util.Position] || !annot.isInstanceOf[scala.reflect.internal.util.Position]) && tree.isInstanceOf[Block])
- // // println("Updating block from "+ tree.annotation +" to "+ annot)
- // }
-
class ValidatingPosAssigner extends PosAssigner {
var pos: Position = _
override def traverse(t: Tree) {
diff --git a/src/compiler/scala/tools/nsc/ast/Printers.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala
index 8b92f0acd6..3392b78595 100644
--- a/src/compiler/scala/tools/nsc/ast/Printers.scala
+++ b/src/compiler/scala/tools/nsc/ast/Printers.scala
@@ -10,7 +10,7 @@ import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
import symtab.Flags._
import symtab.SymbolTable
-trait Printers extends reflect.internal.Printers { this: Global =>
+trait Printers extends scala.reflect.internal.Printers { this: Global =>
import treeInfo.{ IsTrue, IsFalse }
@@ -278,6 +278,7 @@ trait Printers extends reflect.internal.Printers { this: Global =>
def asString(t: Tree): String = render(t, newStandardTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value)
def asCompactString(t: Tree): String = render(t, newCompactTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value)
+ def asCompactDebugString(t: Tree): String = render(t, newCompactTreePrinter, true, true, true)
def newStandardTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer)
def newStandardTreePrinter(stream: OutputStream): TreePrinter = newStandardTreePrinter(new PrintWriter(stream))
diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
index 043834ae55..267a5dcefd 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
@@ -18,7 +18,7 @@ import scala.concurrent.Lock
import scala.text._
import symtab.Flags._
import symtab.SymbolTable
-import language.implicitConversions
+import scala.language.implicitConversions
/**
* Tree browsers can show the AST in a graphical and interactive
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index 2ee38d4b91..01bd0bbb06 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -9,7 +9,7 @@ package ast
import PartialFunction._
import symtab.Flags
-import language.implicitConversions
+import scala.language.implicitConversions
/** A DSL for generating scala code. The goal is that the
* code generating code should look a lot like the code it
@@ -95,7 +95,7 @@ trait TreeDSL {
def INT_>= (other: Tree) = fn(target, getMember(IntClass, nme.GE), other)
def INT_== (other: Tree) = fn(target, getMember(IntClass, nme.EQ), other)
def INT_!= (other: Tree) = fn(target, getMember(IntClass, nme.NE), other)
-
+
// generic operations on ByteClass, IntClass, LongClass
def GEN_| (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.OR), other)
def GEN_& (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.AND), other)
@@ -234,7 +234,7 @@ trait TreeDSL {
}
class DefTreeStart(val name: Name) extends TreeVODDStart with DefCreator {
def tparams: List[TypeDef] = Nil
- def vparamss: List[List[ValDef]] = List(Nil)
+ def vparamss: List[List[ValDef]] = ListOfNil
}
class IfStart(cond: Tree, thenp: Tree) {
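
ListOfNil, which replaces List(Nil) here and in several hunks below, denotes a single empty parameter list rather than the absence of parameter lists; presumably it is just a shared List(Nil) constant in the reflect internals. The distinction in miniature:

// vparamss encodes parameter lists as a list of lists
val noParamLists: List[List[Int]] = Nil          // e.g. `val x`   — no parameter lists at all
val oneEmptyList: List[List[Int]] = List(Nil)    // e.g. `def f()` — one list with zero parameters
// ListOfNil is the second shape, shared instead of re-allocated at every use site
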
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index ca8a377c6f..fc8228f644 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -9,12 +9,12 @@ package ast
import scala.collection.mutable.ListBuffer
import symtab.Flags._
import symtab.SymbolTable
-import language.postfixOps
+import scala.language.postfixOps
/** XXX to resolve: TreeGen only assumes global is a SymbolTable, but
* TreeDSL at the moment expects a Global. Can we get by with SymbolTable?
*/
-abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
+abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
val global: Global
import global._
@@ -44,7 +44,7 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
setInfo analyzer.ImportType(qual)
)
val importTree = (
- Import(qual, List(ImportSelector(nme.WILDCARD, -1, null, -1)))
+ Import(qual, ImportSelector.wildList)
setSymbol importSym
setType NoType
)
@@ -58,7 +58,7 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
def mkUnchecked(expr: Tree): Tree = atPos(expr.pos) {
// This can't be "Annotated(New(UncheckedClass), expr)" because annotations
// are very picky about things and it crashes the compiler with "unexpected new".
- Annotated(New(scalaDot(UncheckedClass.name), List(Nil)), expr)
+ Annotated(New(scalaDot(UncheckedClass.name), ListOfNil), expr)
}
// if it's a Match, mark the selector unchecked; otherwise nothing.
def mkUncheckedMatch(tree: Tree) = tree match {
@@ -357,8 +357,8 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
*/
def mkSynchronizedCheck(clazz: Symbol, cond: Tree, syncBody: List[Tree], stats: List[Tree]): Tree =
mkSynchronizedCheck(mkAttributedThis(clazz), cond, syncBody, stats)
-
- def mkSynchronizedCheck(attrThis: Tree, cond: Tree, syncBody: List[Tree], stats: List[Tree]): Tree =
+
+ def mkSynchronizedCheck(attrThis: Tree, cond: Tree, syncBody: List[Tree], stats: List[Tree]): Tree =
Block(mkSynchronized(
attrThis,
If(cond, Block(syncBody: _*), EmptyTree)) ::
diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
index e755553e25..9e46155d14 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
@@ -6,8 +6,8 @@
package scala.tools.nsc
package ast
-import reflect.internal.HasFlags
-import reflect.internal.Flags._
+import scala.reflect.internal.HasFlags
+import scala.reflect.internal.Flags._
import symtab._
/** This class ...
@@ -15,7 +15,7 @@ import symtab._
* @author Martin Odersky
* @version 1.0
*/
-abstract class TreeInfo extends reflect.internal.TreeInfo {
+abstract class TreeInfo extends scala.reflect.internal.TreeInfo {
val global: Global
import global._
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 381b834a0c..dec7b648ee 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -15,7 +15,7 @@ import scala.reflect.internal.Flags.PRESUPER
import scala.reflect.internal.Flags.TRAIT
import scala.compat.Platform.EOL
-trait Trees extends reflect.internal.Trees { self: Global =>
+trait Trees extends scala.reflect.internal.Trees { self: Global =>
def treeLine(t: Tree): String =
if (t.pos.isDefined && t.pos.isRange) t.pos.lineContent.drop(t.pos.column - 1).take(t.pos.end - t.pos.start + 1)
@@ -48,12 +48,12 @@ trait Trees extends reflect.internal.Trees { self: Global =>
override def isType = definition.isType
}
- /** Array selection <qualifier> . <name> only used during erasure */
+ /** Array selection `<qualifier> . <name>` only used during erasure */
case class SelectFromArray(qualifier: Tree, name: Name, erasure: Type)
- extends TermTree with RefTree
+ extends RefTree with TermTree
- /** Derived value class injection (equivalent to: new C(arg) after easure); only used during erasure
- * The class C is stored as the symbol of the tree node.
+ /** Derived value class injection (equivalent to: `new C(arg)` after erasure); only used during erasure.
+ * The class `C` is stored as a tree attachment.
*/
case class InjectDerivedValue(arg: Tree)
extends SymTree
@@ -111,16 +111,13 @@ trait Trees extends reflect.internal.Trees { self: Global =>
if (body forall treeInfo.isInterfaceMember) List()
else List(
atPos(wrappingPos(superPos, lvdefs)) (
- DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, List(), List(List()), TypeTree(), Block(lvdefs, Literal(Constant())))))
+ DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, List(), ListOfNil, TypeTree(), Block(lvdefs, Literal(Constant())))))
} else {
// convert (implicit ... ) to ()(implicit ... ) if its the only parameter section
if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit)
vparamss1 = List() :: vparamss1;
val superRef: Tree = atPos(superPos)(gen.mkSuperSelect)
- def mkApply(fun: Tree, args: List[Tree]) = Apply(fun, args)
- val superCall = (superRef /: argss) (mkApply)
- // [Eugene++] no longer compiles after I moved the `Apply` case class into scala.reflect.internal
- // val superCall = (superRef /: argss) (Apply)
+ val superCall = (superRef /: argss) (Apply.apply)
List(
atPos(wrappingPos(superPos, lvdefs ::: argss.flatten)) (
DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant())))))
@@ -181,7 +178,7 @@ trait Trees extends reflect.internal.Trees { self: Global =>
case _ => super.xtraverse(traverser, tree)
}
- trait TreeCopier extends super.TreeCopierOps {
+ trait TreeCopier extends super.InternalTreeCopierOps {
def DocDef(tree: Tree, comment: DocComment, definition: Tree): DocDef
def SelectFromArray(tree: Tree, qualifier: Tree, selector: Name, erasure: Type): SelectFromArray
def InjectDerivedValue(tree: Tree, arg: Tree): InjectDerivedValue
@@ -284,7 +281,7 @@ trait Trees extends reflect.internal.Trees { self: Global =>
val trace = scala.tools.nsc.util.trace when debug
val locals = util.HashSet[Symbol](8)
- val orderedLocals = collection.mutable.ListBuffer[Symbol]()
+ val orderedLocals = scala.collection.mutable.ListBuffer[Symbol]()
def registerLocal(sym: Symbol) {
if (sym != null && sym != NoSymbol) {
if (debug && !(locals contains sym)) orderedLocals append sym
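
The superCall rewrite above folds the argument lists onto the super reference with (superRef /: argss)(Apply.apply), passing the companion's apply method explicitly now that the Apply case class lives in scala.reflect.internal. The same left-fold shape on plain strings, as a sketch:

// `/:` is foldLeft: each argument list is applied to the call built so far
val argss = List(List("a"), List("b", "c"))
val call  = ("f" /: argss)((fun, args) => fun + args.mkString("(", ", ", ")"))
// call == "f(a)(b, c)"
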
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 391874c488..eaee39d7e6 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -192,7 +192,7 @@ self =>
override def blockExpr(): Tree = skipBraces(EmptyTree)
- override def templateBody(isPre: Boolean) = skipBraces((emptyValDef, List(EmptyTree)))
+ override def templateBody(isPre: Boolean) = skipBraces((emptyValDef, EmptyTree.asList))
}
class UnitParser(val unit: global.CompilationUnit, patches: List[BracePatch]) extends SourceFileParser(unit.source) {
@@ -280,14 +280,6 @@ self =>
/** whether a non-continuable syntax error has been seen */
private var lastErrorOffset : Int = -1
- object treeBuilder extends TreeBuilder {
- val global: self.global.type = self.global
- def freshName(prefix: String): Name = freshTermName(prefix)
- def freshTermName(prefix: String): TermName = Parser.this.freshTermName(prefix)
- def freshTypeName(prefix: String): TypeName = Parser.this.freshTypeName(prefix)
- def o2p(offset: Int) = Parser.this.o2p(offset)
- def r2p(start: Int, point: Int, end: Int) = Parser.this.r2p(start, point, end)
- }
import treeBuilder.{global => _, _}
/** The types of the context bounds of type parameters of the surrounding class
@@ -395,7 +387,7 @@ self =>
NoMods,
nme.CONSTRUCTOR,
Nil,
- List(Nil),
+ ListOfNil,
TypeTree(),
Block(List(Apply(gen.mkSuperSelect, Nil)), Literal(Constant(())))
)
@@ -404,8 +396,7 @@ self =>
def mainParamType = AppliedTypeTree(Ident(tpnme.Array), List(Ident(tpnme.String)))
def mainParameter = List(ValDef(Modifiers(Flags.PARAM), nme.argv, mainParamType, EmptyTree))
def mainSetArgv = List(ValDef(NoMods, nme.args, TypeTree(), Ident(nme.argv)))
- def mainNew = makeNew(Nil, emptyValDef, stmts, List(Nil), NoPosition, NoPosition)
- def mainDef = DefDef(NoMods, nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), Block(mainSetArgv, mainNew))
+ def mainDef = DefDef(NoMods, nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), Block(mainSetArgv, makeAnonymousNew(stmts)))
// object Main
def moduleName = newTermName(ScriptRunner scriptMain settings)
@@ -477,7 +468,7 @@ self =>
/* ------------- ERROR HANDLING ------------------------------------------- */
- var assumedClosingParens = collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+ var assumedClosingParens = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
private var inFunReturnType = false
@inline private def fromWithinReturnType[T](body: => T): T = {
@@ -1038,7 +1029,7 @@ self =>
val tok = in.token
val name = ident()
t = atPos(start) {
- if (tok == BACKQUOTED_IDENT) Ident(name) addAttachment BackquotedIdentifierAttachment
+ if (tok == BACKQUOTED_IDENT) Ident(name) updateAttachment BackquotedIdentifierAttachment
else Ident(name)
}
if (in.token == DOT) {
@@ -1303,22 +1294,24 @@ self =>
res
}
- def expr0(location: Int): Tree = in.token match {
+
+ def expr0(location: Int): Tree = (in.token: @scala.annotation.switch) match {
case IF =>
- atPos(in.skipToken()) {
+ def parseIf = atPos(in.skipToken()) {
val cond = condExpr()
newLinesOpt()
val thenp = expr()
val elsep = if (in.token == ELSE) { in.nextToken(); expr() }
- else Literal(Constant())
+ else Literal(Constant())
If(cond, thenp, elsep)
}
+ parseIf
case TRY =>
- atPos(in.skipToken()) {
+ def parseTry = atPos(in.skipToken()) {
val body = in.token match {
- case LBRACE => inBracesOrUnit(block())
- case LPAREN => inParensOrUnit(expr())
- case _ => expr()
+ case LBRACE => inBracesOrUnit(block())
+ case LPAREN => inParensOrUnit(expr())
+ case _ => expr()
}
def catchFromExpr() = List(makeCatchFromExpr(expr()))
val catches: List[CaseDef] =
@@ -1332,32 +1325,40 @@ self =>
}
}
val finalizer = in.token match {
- case FINALLY => in.nextToken() ; expr()
- case _ => EmptyTree
+ case FINALLY => in.nextToken(); expr()
+ case _ => EmptyTree
}
Try(body, catches, finalizer)
}
+ parseTry
case WHILE =>
- val start = in.offset
- atPos(in.skipToken()) {
- val lname: Name = freshTermName(nme.WHILE_PREFIX)
- val cond = condExpr()
- newLinesOpt()
- val body = expr()
- makeWhile(lname, cond, body)
+ def parseWhile = {
+ val start = in.offset
+ atPos(in.skipToken()) {
+ val lname: Name = freshTermName(nme.WHILE_PREFIX)
+ val cond = condExpr()
+ newLinesOpt()
+ val body = expr()
+ makeWhile(lname, cond, body)
+ }
}
+ parseWhile
case DO =>
- val start = in.offset
- atPos(in.skipToken()) {
- val lname: Name = freshTermName(nme.DO_WHILE_PREFIX)
- val body = expr()
- if (isStatSep) in.nextToken()
- accept(WHILE)
- val cond = condExpr()
- makeDoWhile(lname, body, cond)
+ def parseDo = {
+ val start = in.offset
+ atPos(in.skipToken()) {
+ val lname: Name = freshTermName(nme.DO_WHILE_PREFIX)
+ val body = expr()
+ if (isStatSep) in.nextToken()
+ accept(WHILE)
+ val cond = condExpr()
+ makeDoWhile(lname, body, cond)
+ }
}
+ parseDo
case FOR =>
- atPos(in.skipToken()) {
+ val start = in.skipToken()
+ def parseFor = atPos(start) {
val enums =
if (in.token == LBRACE) inBracesOrNil(enumerators())
else inParensOrNil(enumerators())
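
The expr0 rewrite annotates the token match with @scala.annotation.switch and moves each alternative into a local def (parseIf, parseTry, parseWhile, …), presumably so the match can compile to a table switch and the enclosing method stays small. The annotation in isolation, as a sketch with made-up token values:

import scala.annotation.switch

// asks the compiler to emit a tableswitch/lookupswitch (and to warn if it cannot)
def tokenKind(token: Int): String = (token: @switch) match {
  case 1 => "IF"
  case 2 => "TRY"
  case 3 => "WHILE"
  case _ => "OTHER"
}
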
@@ -1369,70 +1370,82 @@ self =>
makeFor(enums, expr())
}
}
+ def adjustStart(tree: Tree) =
+ if (tree.pos.isRange && start < tree.pos.start)
+ tree setPos tree.pos.withStart(start)
+ else tree
+ adjustStart(parseFor)
case RETURN =>
- atPos(in.skipToken()) {
- Return(if (isExprIntro) expr() else Literal(Constant()))
- }
+ def parseReturn =
+ atPos(in.skipToken()) {
+ Return(if (isExprIntro) expr() else Literal(Constant()))
+ }
+ parseReturn
case THROW =>
- atPos(in.skipToken()) {
- Throw(expr())
- }
+ def parseThrow =
+ atPos(in.skipToken()) {
+ Throw(expr())
+ }
+ parseThrow
case IMPLICIT =>
implicitClosure(in.skipToken(), location)
case _ =>
- var t = postfixExpr()
- if (in.token == EQUALS) {
- t match {
- case Ident(_) | Select(_, _) | Apply(_, _) =>
- t = atPos(t.pos.startOrPoint, in.skipToken()) { makeAssign(t, expr()) }
- case _ =>
- }
- } else if (in.token == COLON) {
- t = stripParens(t)
- val colonPos = in.skipToken()
- if (in.token == USCORE) {
- //todo: need to handle case where USCORE is a wildcard in a type
- val uscorePos = in.skipToken()
- if (isIdent && in.name == nme.STAR) {
- in.nextToken()
- t = atPos(t.pos.startOrPoint, colonPos) {
- Typed(t, atPos(uscorePos) { Ident(tpnme.WILDCARD_STAR) })
- }
- } else {
- syntaxErrorOrIncomplete("`*' expected", true)
+ def parseOther = {
+ var t = postfixExpr()
+ if (in.token == EQUALS) {
+ t match {
+ case Ident(_) | Select(_, _) | Apply(_, _) =>
+ t = atPos(t.pos.startOrPoint, in.skipToken()) { makeAssign(t, expr()) }
+ case _ =>
}
- } else if (in.token == AT) {
- t = (t /: annotations(skipNewLines = false)) (makeAnnotated)
- } else {
- t = atPos(t.pos.startOrPoint, colonPos) {
- val tpt = typeOrInfixType(location)
- if (isWildcard(t))
- (placeholderParams: @unchecked) match {
- case (vd @ ValDef(mods, name, _, _)) :: rest =>
- placeholderParams = treeCopy.ValDef(vd, mods, name, tpt.duplicate, EmptyTree) :: rest
+ } else if (in.token == COLON) {
+ t = stripParens(t)
+ val colonPos = in.skipToken()
+ if (in.token == USCORE) {
+ //todo: need to handle case where USCORE is a wildcard in a type
+ val uscorePos = in.skipToken()
+ if (isIdent && in.name == nme.STAR) {
+ in.nextToken()
+ t = atPos(t.pos.startOrPoint, colonPos) {
+ Typed(t, atPos(uscorePos) { Ident(tpnme.WILDCARD_STAR) })
}
- // this does not correspond to syntax, but is necessary to
- // accept closures. We might restrict closures to be between {...} only.
- Typed(t, tpt)
+ } else {
+ syntaxErrorOrIncomplete("`*' expected", true)
+ }
+ } else if (in.token == AT) {
+ t = (t /: annotations(skipNewLines = false))(makeAnnotated)
+ } else {
+ t = atPos(t.pos.startOrPoint, colonPos) {
+ val tpt = typeOrInfixType(location)
+ if (isWildcard(t))
+ (placeholderParams: @unchecked) match {
+ case (vd @ ValDef(mods, name, _, _)) :: rest =>
+ placeholderParams = treeCopy.ValDef(vd, mods, name, tpt.duplicate, EmptyTree) :: rest
+ }
+ // this does not correspond to syntax, but is necessary to
+ // accept closures. We might restrict closures to be between {...} only.
+ Typed(t, tpt)
+ }
}
+ } else if (in.token == MATCH) {
+ t = atPos(t.pos.startOrPoint, in.skipToken())(Match(stripParens(t), inBracesOrNil(caseClauses())))
}
- } else if (in.token == MATCH) {
- t = atPos(t.pos.startOrPoint, in.skipToken())(Match(stripParens(t), inBracesOrNil(caseClauses())))
- }
- // in order to allow anonymous functions as statements (as opposed to expressions) inside
- // templates, we have to disambiguate them from self type declarations - bug #1565
- // The case still missed is unparenthesized single argument, like "x: Int => x + 1", which
- // may be impossible to distinguish from a self-type and so remains an error. (See #1564)
- def lhsIsTypedParamList() = t match {
- case Parens(xs) if xs forall (_.isInstanceOf[Typed]) => true
- case _ => false
- }
- if (in.token == ARROW && (location != InTemplate || lhsIsTypedParamList)) {
- t = atPos(t.pos.startOrPoint, in.skipToken()) {
- Function(convertToParams(t), if (location != InBlock) expr() else block())
+ // in order to allow anonymous functions as statements (as opposed to expressions) inside
+ // templates, we have to disambiguate them from self type declarations - bug #1565
+ // The case still missed is unparenthesized single argument, like "x: Int => x + 1", which
+ // may be impossible to distinguish from a self-type and so remains an error. (See #1564)
+ def lhsIsTypedParamList() = t match {
+ case Parens(xs) if xs forall (_.isInstanceOf[Typed]) => true
+ case _ => false
+ }
+ if (in.token == ARROW && (location != InTemplate || lhsIsTypedParamList)) {
+ t = atPos(t.pos.startOrPoint, in.skipToken()) {
+ Function(convertToParams(t), if (location != InBlock) expr() else block())
+ }
}
+ stripParens(t)
}
- stripParens(t)
+ parseOther
}
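
The bug #1565 note above is about how a statement-position lambda is told apart from a self-type declaration: a parenthesized, fully typed parameter list followed by => is accepted as a function even inside a template, while the unparenthesized single-argument form stays ambiguous and is rejected. A user-level illustration, assuming nothing beyond what the comment itself states:

  class SelfTypeVsLambda {
    val inc = (x: Int) => x + 1   // ordinary: parenthesized, typed parameter list
    (y: Int) => y + 1             // statement position in a template: still parsed as a
                                  // function, per the lhsIsTypedParamList check above
    // z: Int => z + 1            // unparenthesized form remains an error, being
    //                               indistinguishable from a self-type (see #1564)
  }
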
/** {{{
@@ -2093,7 +2106,7 @@ self =>
def annotationExpr(): Tree = atPos(in.offset) {
val t = exprSimpleType()
if (in.token == LPAREN) New(t, multipleArgumentExprs())
- else New(t, List(Nil))
+ else New(t, ListOfNil)
}
/* -------- PARAMETERS ------------------------------------------- */
@@ -2732,10 +2745,10 @@ self =>
def templateParents(isTrait: Boolean): (List[Tree], List[List[Tree]]) = {
val parents = new ListBuffer[Tree] += startAnnotType()
val argss = (
- // TODO: the insertion of List(Nil) here is where "new Foo" becomes
+ // TODO: the insertion of ListOfNil here is where "new Foo" becomes
// indistinguishable from "new Foo()".
if (in.token == LPAREN && !isTrait) multipleArgumentExprs()
- else List(Nil)
+ else ListOfNil
)
while (in.token == WITH) {
@@ -2773,7 +2786,7 @@ self =>
val (self1, body1) = templateBodyOpt(traitParentSeen = isTrait)
(parents, argss, self1, earlyDefs ::: body1)
} else {
- (List(), List(List()), self, body)
+ (List(), ListOfNil, self, body)
}
} else {
val (parents, argss) = templateParents(isTrait = isTrait)
@@ -2800,7 +2813,7 @@ self =>
else {
newLineOptWhenFollowedBy(LBRACE)
val (self, body) = templateBodyOpt(traitParentSeen = false)
- (List(), List(List()), self, body)
+ (List(), ListOfNil, self, body)
}
)
def anyrefParents() = {
@@ -2813,7 +2826,7 @@ self =>
def anyvalConstructor() = (
// Not a well-formed constructor, has to be finished later - see note
// regarding AnyVal constructor in AddInterfaces.
- DefDef(NoMods, nme.CONSTRUCTOR, Nil, List(Nil), TypeTree(), Block(Nil, Literal(Constant())))
+ DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, Literal(Constant())))
)
val tstart0 = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart
@@ -2834,7 +2847,7 @@ self =>
* @param isPre specifies whether in early initializer (true) or not (false)
*/
def templateBody(isPre: Boolean) = inBraces(templateStatSeq(isPre = isPre)) match {
- case (self, Nil) => (self, List(EmptyTree))
+ case (self, Nil) => (self, EmptyTree.asList)
case result => result
}
def templateBodyOpt(traitParentSeen: Boolean): (ValDef, List[Tree]) = {
@@ -2938,7 +2951,7 @@ self =>
/** Informal - for the repl and other direct parser accessors.
*/
def templateStats(): List[Tree] = templateStatSeq(isPre = false)._2 match {
- case Nil => List(EmptyTree)
+ case Nil => EmptyTree.asList
case stats => stats
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index f99b9a66c9..69091e4880 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -182,13 +182,13 @@ trait Scanners extends ScannersCommon {
/** Are we directly in a string interpolation expression?
*/
- @inline private def inStringInterpolation =
+ private def inStringInterpolation =
sepRegions.nonEmpty && sepRegions.head == STRINGLIT
/** Are we directly in a multiline string interpolation expression?
* @pre inStringInterpolation
*/
- @inline private def inMultiLineInterpolation =
+ private def inMultiLineInterpolation =
inStringInterpolation && sepRegions.tail.nonEmpty && sepRegions.tail.head == STRINGPART
/** read next token and return last offset
@@ -360,16 +360,19 @@ trait Scanners extends ScannersCommon {
if (ch == '"' && token == IDENTIFIER)
token = INTERPOLATIONID
case '<' => // is XMLSTART?
- val last = if (charOffset >= 2) buf(charOffset - 2) else ' '
- nextChar()
- last match {
- case ' '|'\t'|'\n'|'{'|'('|'>' if isNameStart(ch) || ch == '!' || ch == '?' =>
- token = XMLSTART
- case _ =>
- // Console.println("found '<', but last is '"+in.last+"'"); // DEBUG
- putChar('<')
- getOperatorRest()
+ def fetchLT() = {
+ val last = if (charOffset >= 2) buf(charOffset - 2) else ' '
+ nextChar()
+ last match {
+ case ' ' | '\t' | '\n' | '{' | '(' | '>' if isNameStart(ch) || ch == '!' || ch == '?' =>
+ token = XMLSTART
+ case _ =>
+ // Console.println("found '<', but last is '"+in.last+"'"); // DEBUG
+ putChar('<')
+ getOperatorRest()
+ }
}
+ fetchLT
case '~' | '!' | '@' | '#' | '%' |
'^' | '*' | '+' | '-' | /*'<' | */
'>' | '?' | ':' | '=' | '&' |
@@ -386,78 +389,87 @@ trait Scanners extends ScannersCommon {
getOperatorRest()
}
case '0' =>
- putChar(ch)
- nextChar()
- if (ch == 'x' || ch == 'X') {
+ def fetchZero() = {
+ putChar(ch)
nextChar()
- base = 16
- }
- else {
- /** What should leading 0 be in the future? It is potentially dangerous
- * to let it be base-10 because of history. Should it be an error? Is
- * there a realistic situation where one would need it?
- */
- if (isDigit(ch)) {
- if (opt.future) syntaxError("Non-zero numbers may not have a leading zero.")
- else deprecationWarning("Treating numbers with a leading zero as octal is deprecated.")
+ if (ch == 'x' || ch == 'X') {
+ nextChar()
+ base = 16
+ } else {
+ /**
+ * What should leading 0 be in the future? It is potentially dangerous
+ * to let it be base-10 because of history. Should it be an error? Is
+ * there a realistic situation where one would need it?
+ */
+ if (isDigit(ch)) {
+ if (opt.future) syntaxError("Non-zero numbers may not have a leading zero.")
+ else deprecationWarning("Treating numbers with a leading zero as octal is deprecated.")
+ }
+ base = 8
}
- base = 8
+ getNumber()
}
- getNumber()
+ fetchZero
case '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
base = 10
getNumber()
case '`' =>
getBackquotedIdent()
case '\"' =>
- if (token == INTERPOLATIONID) {
- nextRawChar()
- if (ch == '\"') {
+ def fetchDoubleQuote() = {
+ if (token == INTERPOLATIONID) {
nextRawChar()
if (ch == '\"') {
nextRawChar()
- getStringPart(multiLine = true)
- sepRegions = STRINGPART :: sepRegions // indicate string part
- sepRegions = STRINGLIT :: sepRegions // once more to indicate multi line string part
+ if (ch == '\"') {
+ nextRawChar()
+ getStringPart(multiLine = true)
+ sepRegions = STRINGPART :: sepRegions // indicate string part
+ sepRegions = STRINGLIT :: sepRegions // once more to indicate multi line string part
+ } else {
+ token = STRINGLIT
+ strVal = ""
+ }
} else {
- token = STRINGLIT
- strVal = ""
+ getStringPart(multiLine = false)
+ sepRegions = STRINGLIT :: sepRegions // indicate single line string part
}
} else {
- getStringPart(multiLine = false)
- sepRegions = STRINGLIT :: sepRegions // indicate single line string part
- }
- } else {
- nextChar()
- if (ch == '\"') {
nextChar()
if (ch == '\"') {
- nextRawChar()
- getRawStringLit()
+ nextChar()
+ if (ch == '\"') {
+ nextRawChar()
+ getRawStringLit()
+ } else {
+ token = STRINGLIT
+ strVal = ""
+ }
} else {
- token = STRINGLIT
- strVal = ""
+ getStringLit()
}
- } else {
- getStringLit()
}
}
+ fetchDoubleQuote
case '\'' =>
- nextChar()
- if (isIdentifierStart(ch))
- charLitOr(getIdentRest)
- else if (isOperatorPart(ch) && (ch != '\\'))
- charLitOr(getOperatorRest)
- else {
- getLitChar()
- if (ch == '\'') {
- nextChar()
- token = CHARLIT
- setStrVal()
- } else {
- syntaxError("unclosed character literal")
+ def fetchSingleQuote() = {
+ nextChar()
+ if (isIdentifierStart(ch))
+ charLitOr(getIdentRest)
+ else if (isOperatorPart(ch) && (ch != '\\'))
+ charLitOr(getOperatorRest)
+ else {
+ getLitChar()
+ if (ch == '\'') {
+ nextChar()
+ token = CHARLIT
+ setStrVal()
+ } else {
+ syntaxError("unclosed character literal")
+ }
}
}
+ fetchSingleQuote
case '.' =>
nextChar()
if ('0' <= ch && ch <= '9') {
@@ -488,22 +500,25 @@ trait Scanners extends ScannersCommon {
nextChar()
}
case _ =>
- if (ch == '\u21D2') {
- nextChar(); token = ARROW
- } else if (ch == '\u2190') {
- nextChar(); token = LARROW
- } else if (Character.isUnicodeIdentifierStart(ch)) {
- putChar(ch)
- nextChar()
- getIdentRest()
- } else if (isSpecial(ch)) {
- putChar(ch)
- nextChar()
- getOperatorRest()
- } else {
- syntaxError("illegal character '" + ("" + '\\' + 'u' + "%04x".format(ch: Int)) + "'")
- nextChar()
+ def fetchOther() = {
+ if (ch == '\u21D2') {
+ nextChar(); token = ARROW
+ } else if (ch == '\u2190') {
+ nextChar(); token = LARROW
+ } else if (Character.isUnicodeIdentifierStart(ch)) {
+ putChar(ch)
+ nextChar()
+ getIdentRest()
+ } else if (isSpecial(ch)) {
+ putChar(ch)
+ nextChar()
+ getOperatorRest()
+ } else {
+ syntaxError("illegal character '" + ("" + '\\' + 'u' + "%04x".format(ch: Int)) + "'")
+ nextChar()
+ }
}
+ fetchOther
}
}
@@ -739,8 +754,12 @@ trait Scanners extends ScannersCommon {
} else {
val isUnclosedLiteral = !isUnicodeEscape && (ch == SU || (!multiLine && (ch == CR || ch == LF)))
if (isUnclosedLiteral) {
- syntaxError(if (!multiLine) "unclosed string literal" else "unclosed multi-line string literal")
- } else {
+ if (multiLine)
+ incompleteInputError("unclosed multi-line string literal")
+ else
+ syntaxError("unclosed string literal")
+ }
+ else {
putChar(ch)
nextRawChar()
getStringPart(multiLine)
@@ -1283,7 +1302,7 @@ trait Scanners extends ScannersCommon {
}
class ParensAnalyzer(unit: CompilationUnit, patches: List[BracePatch]) extends UnitScanner(unit, patches) {
- var balance = collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+ var balance = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
init()
@@ -1298,7 +1317,7 @@ trait Scanners extends ScannersCommon {
var lineCount = 1
var lastOffset = 0
var indent = 0
- val oldBalance = collection.mutable.Map[Int, Int]()
+ val oldBalance = scala.collection.mutable.Map[Int, Int]()
def markBalance() = for ((k, v) <- balance) oldBalance(k) = v
markBalance()
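
fetchZero above chooses the radix of a numeric literal from its prefix, warning on the old leading-zero octal form and turning it into an error under -Xfuture. The same decision in isolation; the method name and returned shape are illustrative only, not scanner API:

  object LeadingZero {
    // Sketch of the radix choice made after an initial '0'.
    def radixAfterLeadingZero(next: Char, futureMode: Boolean): (Int, Option[String]) =
      if (next == 'x' || next == 'X') (16, None)                                                   // hex, e.g. 0x1F
      else if (next.isDigit && futureMode) (8, Some("Non-zero numbers may not have a leading zero."))
      else if (next.isDigit) (8, Some("Treating numbers with a leading zero as octal is deprecated.")) // 012 == 10
      else (8, None)                                                                               // plain 0, 0L, 0d, ...
  }
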
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index 5afec611e9..9466b7222d 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -7,11 +7,11 @@ package scala.tools.nsc
package ast.parser
import scala.collection.{ mutable, immutable }
-import xml.{ EntityRef, Text }
-import xml.XML.{ xmlns }
+import scala.xml.{ EntityRef, Text }
+import scala.xml.XML.{ xmlns }
import symtab.Flags.MUTABLE
import scala.reflect.internal.util.StringOps.splitWhere
-import language.implicitConversions
+import scala.language.implicitConversions
/** This class builds instance of `Tree` that represent XML.
*
@@ -144,7 +144,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
(buf map convertToTextPat).toList
def parseAttribute(pos: Position, s: String): Tree = {
- val ts = xml.Utility.parseAttributeValue(s) map {
+ val ts = scala.xml.Utility.parseAttributeValue(s) map {
case Text(s) => text(pos, s)
case EntityRef(s) => entityRef(pos, s)
}
@@ -162,7 +162,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
/** could optimize if args.length == 0, args.length == 1 AND args(0) is <: Node. */
def makeXMLseq(pos: Position, args: Seq[Tree]) = {
- val buffer = ValDef(NoMods, _buf, TypeTree(), New(_scala_xml_NodeBuffer, List(Nil)))
+ val buffer = ValDef(NoMods, _buf, TypeTree(), New(_scala_xml_NodeBuffer, ListOfNil))
val applies = args filterNot isEmptyText map (t => Apply(Select(Ident(_buf), _plus), List(t)))
atPos(pos)( Block(buffer :: applies.toList, Ident(_buf)) )
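
The scala.xml and scala.language qualifications here (like the scala.collection and scala.annotation ones elsewhere in this commit) guard against relative package resolution: an unqualified package name is looked up in the enclosing scope first, so a sibling package can silently shadow the intended one. A self-contained illustration, not code from this commit:

  package foo {
    package xml { class Elem }              // an unrelated sibling package named `xml`
    package build {
      // `import xml.Elem` here would resolve to foo.xml.Elem;
      // the fully qualified import is immune to that shadowing.
      import scala.xml.Elem
      class UsesXml { def label(e: Elem): String = e.label }
    }
  }
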
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
index 7e7972f9dd..9ce74b2b17 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc
package ast.parser
-import annotation.switch
+import scala.annotation.switch
/** Common code between JavaTokens and Tokens. Not as much (and not as concrete)
* as one might like because JavaTokens for no clear reason chose new numbers for
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index ab856f09b8..afafff4a64 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -199,6 +199,15 @@ abstract class TreeBuilder {
}
}
+ /** Creates a tree representing new Object { stats }.
+ * To make sure an anonymous subclass of Object is created,
+ * if there are no stats, a () is added.
+ */
+ def makeAnonymousNew(stats: List[Tree]): Tree = {
+ val stats1 = if (stats.isEmpty) List(Literal(Constant(()))) else stats
+ makeNew(Nil, emptyValDef, stats1, ListOfNil, NoPosition, NoPosition)
+ }
+
/** Create positioned tree representing an object creation <new parents { stats }
* @param npos the position of the new
* @param cpos the position of the anonymous class starting with parents
@@ -217,12 +226,12 @@ abstract class TreeBuilder {
atPos(cpos) {
ClassDef(
Modifiers(FINAL), x, Nil,
- Template(parents, self, NoMods, List(Nil), argss, stats, cpos.focus))
+ Template(parents, self, NoMods, ListOfNil, argss, stats, cpos.focus))
}),
atPos(npos) {
New(
Ident(x) setPos npos.focus,
- List(Nil))
+ ListOfNil)
}
)
}
@@ -546,10 +555,7 @@ abstract class TreeBuilder {
rhs1,
List(
atPos(pat1.pos) {
- def mkIdent(name: Name) = Ident(name)
- CaseDef(pat1, EmptyTree, makeTupleTerm(vars map (_._1) map mkIdent, true))
- // [Eugene++] no longer compiles after I moved the `Ident` case class into scala.reflect.internal
- // CaseDef(pat1, EmptyTree, makeTupleTerm(vars map (_._1) map Ident, true))
+ CaseDef(pat1, EmptyTree, makeTupleTerm(vars map (_._1) map Ident.apply, true))
}
))
}
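
makeAnonymousNew above packages a statement list as an anonymous subclass of Object, padding an empty list with () so there is always a template body; the script runner's synthetic main now goes through it. Source-level intuition, illustrative rather than the tree-builder API:

  object AnonymousNewShapes {
    // makeAnonymousNew(List(stat1, stat2))  ~  new { stat1; stat2 }
    // makeAnonymousNew(Nil)                 ~  new { () }
    val withStats = new { val greeting = "hello"; println(greeting) }
    val padded    = new { () }   // the shape used when there are no statements
  }
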
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index 3f0cef6703..06492e4ac6 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -11,7 +11,7 @@ import scala.collection.{ mutable, immutable }
import mutable.{ ListBuffer, ArrayBuffer }
import scala.reflect.internal.util.{ Position, NoPosition }
import backend.icode.analysis.ProgramPoint
-import language.postfixOps
+import scala.language.postfixOps
trait BasicBlocks {
self: ICodes =>
@@ -20,6 +20,12 @@ trait BasicBlocks {
import global.{ ifDebug, settings, log, nme }
import nme.isExceptionResultName
+ /** Override Array creation for efficiency (to not go through reflection). */
+ private implicit val instructionTag: scala.reflect.ClassTag[Instruction] = new scala.reflect.ClassTag[Instruction] {
+ def runtimeClass: java.lang.Class[Instruction] = classOf[Instruction]
+ final override def newArray(len: Int): Array[Instruction] = new Array[Instruction](len)
+ }
+
object NoBasicBlock extends BasicBlock(-1, null)
/** This class represents a basic block. Each
@@ -36,10 +42,14 @@ trait BasicBlocks {
private final class SuccessorList() {
private var successors: List[BasicBlock] = Nil
+ /** This method is very hot! Handle with care. */
private def updateConserve() {
- var lb: ListBuffer[BasicBlock] = null
- var matches = 0
- var remaining = successors
+ var lb: ListBuffer[BasicBlock] = null
+ var matches = 0
+ var remaining = successors
+ val direct = directSuccessors
+ var scratchHandlers: List[ExceptionHandler] = method.exh
+ var scratchBlocks: List[BasicBlock] = direct
def addBlock(bb: BasicBlock) {
if (matches < 0)
@@ -54,25 +64,27 @@ trait BasicBlocks {
}
}
- // exceptionSuccessors
- method.exh foreach { handler =>
- if (handler covers outer)
- addBlock(handler.startBlock)
+ while (scratchBlocks ne Nil) {
+ addBlock(scratchBlocks.head)
+ scratchBlocks = scratchBlocks.tail
}
- // directSuccessors
- val direct = directSuccessors
- direct foreach addBlock
-
/** Return a list of successors for 'b' that come from exception handlers
* covering b's (non-exceptional) successors. These exception handlers
* might not cover 'b' itself. This situation corresponds to an
* exception being thrown as the first thing of one of b's successors.
*/
- method.exh foreach { handler =>
- direct foreach { block =>
- if (handler covers block)
+ while (scratchHandlers ne Nil) {
+ val handler = scratchHandlers.head
+ if (handler covers outer)
+ addBlock(handler.startBlock)
+
+ scratchBlocks = direct
+ while (scratchBlocks ne Nil) {
+ if (handler covers scratchBlocks.head)
addBlock(handler.startBlock)
+ scratchBlocks = scratchBlocks.tail
}
+ scratchHandlers = scratchHandlers.tail
}
// Blocks did not align: create a new list.
if (matches < 0)
@@ -95,7 +107,7 @@ trait BasicBlocks {
}
/** Flags of this basic block. */
- private var flags: Int = 0
+ private[this] var flags: Int = 0
/** Does this block have the given flag? */
def hasFlag(flag: Int): Boolean = (flags & flag) != 0
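
The hand-rolled ClassTag[Instruction] above overrides newArray so that Array[Instruction] allocations in this trait go straight to `new Array[...]` instead of the generic reflective path. The same trick on a stand-in type; everything below is illustrative:

  import scala.reflect.ClassTag

  object DirectArrayAlloc {
    final class Instr                                   // stand-in for Instruction

    // ClassTag whose newArray bypasses java.lang.reflect.Array.newInstance.
    implicit val instrTag: ClassTag[Instr] = new ClassTag[Instr] {
      def runtimeClass: Class[_] = classOf[Instr]
      override def newArray(len: Int): Array[Instr] = new Array[Instr](len)
    }

    // Any ClassTag-driven allocation in scope now uses the override.
    val scratch: Array[Instr] = Array.ofDim[Instr](64)
  }
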
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index a480429026..2fa9c076dd 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -13,7 +13,7 @@ import scala.collection.mutable.{ ListBuffer, Buffer }
import scala.tools.nsc.symtab._
import scala.annotation.switch
import PartialFunction._
-import language.postfixOps
+import scala.language.postfixOps
/** This class ...
*
@@ -121,42 +121,26 @@ abstract class GenICode extends SubComponent {
m.native = m.symbol.hasAnnotation(definitions.NativeAttr)
if (!m.isAbstractMethod && !m.native) {
- if (m.symbol.isAccessor && m.symbol.accessed.hasStaticAnnotation) {
- // in companion object accessors to @static fields, we access the static field directly
- val hostClass = m.symbol.owner.companionClass
- val staticfield = hostClass.info.findMember(m.symbol.accessed.name, NoFlags, NoFlags, false)
-
- if (m.symbol.isGetter) {
- ctx1.bb.emit(LOAD_FIELD(staticfield, true) setHostClass hostClass, tree.pos)
+ ctx1 = genLoad(rhs, ctx1, m.returnType);
+
+ // reverse the order of the local variables, to match the source-order
+ m.locals = m.locals.reverse
+
+ rhs match {
+ case Block(_, Return(_)) => ()
+ case Return(_) => ()
+ case EmptyTree =>
+ globalError("Concrete method has no definition: " + tree + (
+ if (settings.debug.value) "(found: " + m.symbol.owner.info.decls.toList.mkString(", ") + ")"
+ else "")
+ )
+ case _ => if (ctx1.bb.isEmpty)
+ ctx1.bb.closeWith(RETURN(m.returnType), rhs.pos)
+ else
ctx1.bb.closeWith(RETURN(m.returnType))
- } else if (m.symbol.isSetter) {
- ctx1.bb.emit(LOAD_LOCAL(m.locals.head), tree.pos)
- ctx1.bb.emit(STORE_FIELD(staticfield, true), tree.pos)
- ctx1.bb.closeWith(RETURN(m.returnType))
- } else assert(false, "unreachable")
- } else {
- ctx1 = genLoad(rhs, ctx1, m.returnType);
-
- // reverse the order of the local variables, to match the source-order
- m.locals = m.locals.reverse
-
- rhs match {
- case Block(_, Return(_)) => ()
- case Return(_) => ()
- case EmptyTree =>
- globalError("Concrete method has no definition: " + tree + (
- if (settings.debug.value) "(found: " + m.symbol.owner.info.decls.toList.mkString(", ") + ")"
- else "")
- )
- case _ =>
- if (ctx1.bb.isEmpty)
- ctx1.bb.closeWith(RETURN(m.returnType), rhs.pos)
- else
- ctx1.bb.closeWith(RETURN(m.returnType))
- }
- if (!ctx1.bb.closed) ctx1.bb.close
- prune(ctx1.method)
}
+ if (!ctx1.bb.closed) ctx1.bb.close
+ prune(ctx1.method)
} else
ctx1.method.setCode(NoCode)
ctx1
@@ -628,48 +612,54 @@ abstract class GenICode extends SubComponent {
val resCtx: Context = tree match {
case LabelDef(name, params, rhs) =>
- val ctx1 = ctx.newBlock
- if (nme.isLoopHeaderLabel(name))
- ctx1.bb.loopHeader = true
-
- ctx1.labels.get(tree.symbol) match {
- case Some(label) =>
- debuglog("Found existing label for " + tree.symbol.fullLocationString)
- label.anchor(ctx1.bb)
- label.patch(ctx.method.code)
-
- case None =>
- val pair = (tree.symbol -> (new Label(tree.symbol) anchor ctx1.bb setParams (params map (_.symbol))))
- debuglog("Adding label " + tree.symbol.fullLocationString + " in genLoad.")
- ctx1.labels += pair
- ctx.method.addLocals(params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), false)));
- }
-
- ctx.bb.closeWith(JUMP(ctx1.bb), tree.pos)
- genLoad(rhs, ctx1, expectedType /*toTypeKind(tree.symbol.info.resultType)*/)
-
- case ValDef(_, nme.THIS, _, _) =>
- debuglog("skipping trivial assign to _$this: " + tree)
- ctx
-
- case ValDef(_, _, _, rhs) =>
- val sym = tree.symbol
- val local = ctx.method.addLocal(new Local(sym, toTypeKind(sym.info), false))
+ def genLoadLabelDef = {
+ val ctx1 = ctx.newBlock
+ if (nme.isLoopHeaderLabel(name))
+ ctx1.bb.loopHeader = true
+
+ ctx1.labels.get(tree.symbol) match {
+ case Some(label) =>
+ debuglog("Found existing label for " + tree.symbol.fullLocationString)
+ label.anchor(ctx1.bb)
+ label.patch(ctx.method.code)
+
+ case None =>
+ val pair = (tree.symbol -> (new Label(tree.symbol) anchor ctx1.bb setParams (params map (_.symbol))))
+ debuglog("Adding label " + tree.symbol.fullLocationString + " in genLoad.")
+ ctx1.labels += pair
+ ctx.method.addLocals(params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), false)));
+ }
- if (rhs == EmptyTree) {
- debuglog("Uninitialized variable " + tree + " at: " + (tree.pos));
- ctx.bb.emit(getZeroOf(local.kind))
+ ctx.bb.closeWith(JUMP(ctx1.bb), tree.pos)
+ genLoad(rhs, ctx1, expectedType /*toTypeKind(tree.symbol.info.resultType)*/)
}
+ genLoadLabelDef
+
+ case ValDef(_, name, _, rhs) =>
+ def genLoadValDef =
+ if (name == nme.THIS) {
+ debuglog("skipping trivial assign to _$this: " + tree)
+ ctx
+ } else {
+ val sym = tree.symbol
+ val local = ctx.method.addLocal(new Local(sym, toTypeKind(sym.info), false))
+
+ if (rhs == EmptyTree) {
+ debuglog("Uninitialized variable " + tree + " at: " + (tree.pos));
+ ctx.bb.emit(getZeroOf(local.kind))
+ }
- var ctx1 = ctx
- if (rhs != EmptyTree)
- ctx1 = genLoad(rhs, ctx, local.kind);
+ var ctx1 = ctx
+ if (rhs != EmptyTree)
+ ctx1 = genLoad(rhs, ctx, local.kind);
- ctx1.bb.emit(STORE_LOCAL(local), tree.pos)
- ctx1.scope.add(local)
- ctx1.bb.emit(SCOPE_ENTER(local))
- generatedType = UNIT
- ctx1
+ ctx1.bb.emit(STORE_LOCAL(local), tree.pos)
+ ctx1.scope.add(local)
+ ctx1.bb.emit(SCOPE_ENTER(local))
+ generatedType = UNIT
+ ctx1
+ }
+ genLoadValDef
case t @ If(cond, thenp, elsep) =>
val (newCtx, resKind) = genLoadIf(t, ctx, expectedType)
@@ -677,51 +667,55 @@ abstract class GenICode extends SubComponent {
newCtx
case Return(expr) =>
- val returnedKind = toTypeKind(expr.tpe)
- debuglog("Return(" + expr + ") with returnedKind = " + returnedKind)
-
- var ctx1 = genLoad(expr, ctx, returnedKind)
- lazy val tmp = ctx1.makeLocal(tree.pos, expr.tpe, "tmp")
- val saved = savingCleanups(ctx1) {
- var savedFinalizer = false
- ctx1.cleanups foreach {
- case MonitorRelease(m) =>
- debuglog("removing " + m + " from cleanups: " + ctx1.cleanups)
- ctx1.bb.emit(Seq(LOAD_LOCAL(m), MONITOR_EXIT()))
- ctx1.exitSynchronized(m)
-
- case Finalizer(f, finalizerCtx) =>
- debuglog("removing " + f + " from cleanups: " + ctx1.cleanups)
- if (returnedKind != UNIT && mayCleanStack(f)) {
- log("Emitting STORE_LOCAL for " + tmp + " to save finalizer.")
- ctx1.bb.emit(STORE_LOCAL(tmp))
- savedFinalizer = true
- }
+ def genLoadReturn = {
+ val returnedKind = toTypeKind(expr.tpe)
+ debuglog("Return(" + expr + ") with returnedKind = " + returnedKind)
+
+ var ctx1 = genLoad(expr, ctx, returnedKind)
+ lazy val tmp = ctx1.makeLocal(tree.pos, expr.tpe, "tmp")
+ val saved = savingCleanups(ctx1) {
+ var savedFinalizer = false
+ ctx1.cleanups foreach {
+ case MonitorRelease(m) =>
+ debuglog("removing " + m + " from cleanups: " + ctx1.cleanups)
+ ctx1.bb.emit(Seq(LOAD_LOCAL(m), MONITOR_EXIT()))
+ ctx1.exitSynchronized(m)
+
+ case Finalizer(f, finalizerCtx) =>
+ debuglog("removing " + f + " from cleanups: " + ctx1.cleanups)
+ if (returnedKind != UNIT && mayCleanStack(f)) {
+ log("Emitting STORE_LOCAL for " + tmp + " to save finalizer.")
+ ctx1.bb.emit(STORE_LOCAL(tmp))
+ savedFinalizer = true
+ }
- // duplicate finalizer (takes care of anchored labels)
- val f1 = duplicateFinalizer(Set.empty ++ ctx1.labels.keySet, ctx1, f)
+ // duplicate finalizer (takes care of anchored labels)
+ val f1 = duplicateFinalizer(Set.empty ++ ctx1.labels.keySet, ctx1, f)
- // we have to run this without the same finalizer in
- // the list, otherwise infinite recursion happens for
- // finalizers that contain 'return'
- val fctx = finalizerCtx.newBlock
- ctx1.bb.closeWith(JUMP(fctx.bb))
- ctx1 = genLoad(f1, fctx, UNIT)
+ // we have to run this without the same finalizer in
+ // the list, otherwise infinite recursion happens for
+ // finalizers that contain 'return'
+ val fctx = finalizerCtx.newBlock
+ ctx1.bb.closeWith(JUMP(fctx.bb))
+ ctx1 = genLoad(f1, fctx, UNIT)
+ }
+ savedFinalizer
}
- savedFinalizer
- }
- if (saved) {
- log("Emitting LOAD_LOCAL for " + tmp + " after saving finalizer.")
- ctx1.bb.emit(LOAD_LOCAL(tmp))
+ if (saved) {
+ log("Emitting LOAD_LOCAL for " + tmp + " after saving finalizer.")
+ ctx1.bb.emit(LOAD_LOCAL(tmp))
+ }
+ adapt(returnedKind, ctx1.method.returnType, ctx1, tree.pos)
+ ctx1.bb.emit(RETURN(ctx.method.returnType), tree.pos)
+ ctx1.bb.enterIgnoreMode
+ generatedType = expectedType
+ ctx1
}
- adapt(returnedKind, ctx1.method.returnType, ctx1, tree.pos)
- ctx1.bb.emit(RETURN(ctx.method.returnType), tree.pos)
- ctx1.bb.enterIgnoreMode
- generatedType = expectedType
- ctx1
+ genLoadReturn
- case t @ Try(_, _, _) => genLoadTry(t, ctx, generatedType = _)
+ case t @ Try(_, _, _) =>
+ genLoadTry(t, ctx, generatedType = _)
case Throw(expr) =>
val (ctx1, expectedType) = genThrow(expr, ctx)
@@ -733,41 +727,42 @@ abstract class GenICode extends SubComponent {
" Call was genLoad" + ((tree, ctx, expectedType)))
case Apply(TypeApply(fun, targs), _) =>
- val sym = fun.symbol
- val cast = sym match {
- case Object_isInstanceOf => false
- case Object_asInstanceOf => true
- case _ => abort("Unexpected type application " + fun + "[sym: " + sym.fullName + "]" + " in: " + tree)
- }
+ def genLoadApply1 = {
+ val sym = fun.symbol
+ val cast = sym match {
+ case Object_isInstanceOf => false
+ case Object_asInstanceOf => true
+ case _ => abort("Unexpected type application " + fun + "[sym: " + sym.fullName + "]" + " in: " + tree)
+ }
- val Select(obj, _) = fun
- val l = toTypeKind(obj.tpe)
- val r = toTypeKind(targs.head.tpe)
- val ctx1 = genLoadQualifier(fun, ctx)
-
- if (l.isValueType && r.isValueType)
- genConversion(l, r, ctx1, cast)
- else if (l.isValueType) {
- ctx1.bb.emit(DROP(l), fun.pos)
- if (cast) {
- ctx1.bb.emit(Seq(
- NEW(REFERENCE(definitions.ClassCastExceptionClass)),
- DUP(ObjectReference),
- THROW(definitions.ClassCastExceptionClass)
- ))
- } else
- ctx1.bb.emit(CONSTANT(Constant(false)))
- }
- else if (r.isValueType && cast) {
- assert(false, tree) /* Erasure should have added an unboxing operation to prevent that. */
+ val Select(obj, _) = fun
+ val l = toTypeKind(obj.tpe)
+ val r = toTypeKind(targs.head.tpe)
+ val ctx1 = genLoadQualifier(fun, ctx)
+
+ if (l.isValueType && r.isValueType)
+ genConversion(l, r, ctx1, cast)
+ else if (l.isValueType) {
+ ctx1.bb.emit(DROP(l), fun.pos)
+ if (cast) {
+ ctx1.bb.emit(Seq(
+ NEW(REFERENCE(definitions.ClassCastExceptionClass)),
+ DUP(ObjectReference),
+ THROW(definitions.ClassCastExceptionClass)
+ ))
+ } else
+ ctx1.bb.emit(CONSTANT(Constant(false)))
+ } else if (r.isValueType && cast) {
+ assert(false, tree) /* Erasure should have added an unboxing operation to prevent that. */
+ } else if (r.isValueType) {
+ ctx.bb.emit(IS_INSTANCE(REFERENCE(definitions.boxedClass(r.toType.typeSymbol))))
+ } else {
+ genCast(l, r, ctx1, cast)
+ }
+ generatedType = if (cast) r else BOOL;
+ ctx1
}
- else if (r.isValueType)
- ctx.bb.emit(IS_INSTANCE(REFERENCE(definitions.boxedClass(r.toType.typeSymbol))))
- else
- genCast(l, r, ctx1, cast);
-
- generatedType = if (cast) r else BOOL;
- ctx1
+ genLoadApply1
// 'super' call: Note: since constructors are supposed to
// return an instance of what they construct, we have to take
@@ -776,93 +771,102 @@ abstract class GenICode extends SubComponent {
// therefore, we can ignore this fact, and generate code that leaves nothing
// on the stack (contrary to what the type in the AST says).
case Apply(fun @ Select(Super(_, mix), _), args) =>
- debuglog("Call to super: " + tree);
- val invokeStyle = SuperCall(mix)
-// if (fun.symbol.isConstructor) Static(true) else SuperCall(mix);
+ def genLoadApply2 = {
+ debuglog("Call to super: " + tree);
+ val invokeStyle = SuperCall(mix)
+ // if (fun.symbol.isConstructor) Static(true) else SuperCall(mix);
- ctx.bb.emit(THIS(ctx.clazz.symbol), tree.pos)
- val ctx1 = genLoadArguments(args, fun.symbol.info.paramTypes, ctx)
+ ctx.bb.emit(THIS(ctx.clazz.symbol), tree.pos)
+ val ctx1 = genLoadArguments(args, fun.symbol.info.paramTypes, ctx)
- ctx1.bb.emit(CALL_METHOD(fun.symbol, invokeStyle), tree.pos)
- generatedType =
- if (fun.symbol.isConstructor) UNIT
- else toTypeKind(fun.symbol.info.resultType)
- ctx1
+ ctx1.bb.emit(CALL_METHOD(fun.symbol, invokeStyle), tree.pos)
+ generatedType =
+ if (fun.symbol.isConstructor) UNIT
+ else toTypeKind(fun.symbol.info.resultType)
+ ctx1
+ }
+ genLoadApply2
// 'new' constructor call: Note: since constructors are
// thought to return an instance of what they construct,
// we have to 'simulate' it by DUPlicating the freshly created
// instance (on JVM, <init> methods return VOID).
case Apply(fun @ Select(New(tpt), nme.CONSTRUCTOR), args) =>
- val ctor = fun.symbol
- debugassert(ctor.isClassConstructor,
- "'new' call to non-constructor: " + ctor.name)
-
- generatedType = toTypeKind(tpt.tpe)
- debugassert(generatedType.isReferenceType || generatedType.isArrayType,
- "Non reference type cannot be instantiated: " + generatedType)
-
- generatedType match {
- case arr @ ARRAY(elem) =>
- val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
- val dims = arr.dimensions
- var elemKind = arr.elementKind
- if (args.length > dims)
- unit.error(tree.pos, "too many arguments for array constructor: found " + args.length +
- " but array has only " + dims + " dimension(s)")
- if (args.length != dims)
- for (i <- args.length until dims) elemKind = ARRAY(elemKind)
- ctx1.bb.emit(CREATE_ARRAY(elemKind, args.length), tree.pos)
- ctx1
+ def genLoadApply3 = {
+ val ctor = fun.symbol
+ debugassert(ctor.isClassConstructor,
+ "'new' call to non-constructor: " + ctor.name)
+
+ generatedType = toTypeKind(tpt.tpe)
+ debugassert(generatedType.isReferenceType || generatedType.isArrayType,
+ "Non reference type cannot be instantiated: " + generatedType)
- case rt @ REFERENCE(cls) =>
- debugassert(ctor.owner == cls,
- "Symbol " + ctor.owner.fullName + " is different than " + tpt)
-
- val ctx2 = if (forMSIL && loaders.clrTypes.isNonEnumValuetype(cls)) {
- /* parameterful constructors are the only possible custom constructors,
- a default constructor can't be defined for valuetypes, CLR dixit */
- val isDefaultConstructor = args.isEmpty
- if (isDefaultConstructor) {
- msil_genLoadZeroOfNonEnumValuetype(ctx, rt, tree.pos, leaveAddressOnStackInstead = false)
- ctx
+ generatedType match {
+ case arr @ ARRAY(elem) =>
+ val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
+ val dims = arr.dimensions
+ var elemKind = arr.elementKind
+ if (args.length > dims)
+ unit.error(tree.pos, "too many arguments for array constructor: found " + args.length +
+ " but array has only " + dims + " dimension(s)")
+ if (args.length != dims)
+ for (i <- args.length until dims) elemKind = ARRAY(elemKind)
+ ctx1.bb.emit(CREATE_ARRAY(elemKind, args.length), tree.pos)
+ ctx1
+
+ case rt @ REFERENCE(cls) =>
+ debugassert(ctor.owner == cls,
+ "Symbol " + ctor.owner.fullName + " is different than " + tpt)
+
+ val ctx2 = if (forMSIL && loaders.clrTypes.isNonEnumValuetype(cls)) {
+ /* parameterful constructors are the only possible custom constructors,
+ a default constructor can't be defined for valuetypes, CLR dixit */
+ val isDefaultConstructor = args.isEmpty
+ if (isDefaultConstructor) {
+ msil_genLoadZeroOfNonEnumValuetype(ctx, rt, tree.pos, leaveAddressOnStackInstead = false)
+ ctx
+ } else {
+ val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
+ ctx1.bb.emit(CIL_NEWOBJ(ctor), tree.pos)
+ ctx1
+ }
} else {
+ val nw = NEW(rt)
+ ctx.bb.emit(nw, tree.pos)
+ ctx.bb.emit(DUP(generatedType))
val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
- ctx1.bb.emit(CIL_NEWOBJ(ctor), tree.pos)
+
+ val init = CALL_METHOD(ctor, Static(true))
+ nw.init = init
+ ctx1.bb.emit(init, tree.pos)
ctx1
}
- } else {
- val nw = NEW(rt)
- ctx.bb.emit(nw, tree.pos)
- ctx.bb.emit(DUP(generatedType))
- val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
-
- val init = CALL_METHOD(ctor, Static(true))
- nw.init = init
- ctx1.bb.emit(init, tree.pos)
- ctx1
- }
- ctx2
+ ctx2
- case _ =>
- abort("Cannot instantiate " + tpt + " of kind: " + generatedType)
+ case _ =>
+ abort("Cannot instantiate " + tpt + " of kind: " + generatedType)
+ }
}
+ genLoadApply3
case Apply(fun @ _, List(expr)) if (definitions.isBox(fun.symbol)) =>
- debuglog("BOX : " + fun.symbol.fullName);
- val ctx1 = genLoad(expr, ctx, toTypeKind(expr.tpe))
- val nativeKind = toTypeKind(expr.tpe)
- if (settings.Xdce.value) {
- // we store this boxed value to a local, even if not really needed.
- // boxing optimization might use it, and dead code elimination will
- // take care of unnecessary stores
- var loc1 = ctx.makeLocal(tree.pos, expr.tpe, "boxed")
- ctx1.bb.emit(STORE_LOCAL(loc1))
- ctx1.bb.emit(LOAD_LOCAL(loc1))
+ def genLoadApply4 = {
+ debuglog("BOX : " + fun.symbol.fullName);
+ val ctx1 = genLoad(expr, ctx, toTypeKind(expr.tpe))
+ val nativeKind = toTypeKind(expr.tpe)
+ if (settings.Xdce.value) {
+ // we store this boxed value to a local, even if not really needed.
+ // boxing optimization might use it, and dead code elimination will
+ // take care of unnecessary stores
+ var loc1 = ctx.makeLocal(tree.pos, expr.tpe, "boxed")
+ ctx1.bb.emit(STORE_LOCAL(loc1))
+ ctx1.bb.emit(LOAD_LOCAL(loc1))
+ }
+ ctx1.bb.emit(BOX(nativeKind), expr.pos)
+ generatedType = toTypeKind(fun.symbol.tpe.resultType)
+ ctx1
}
- ctx1.bb.emit(BOX(nativeKind), expr.pos)
- generatedType = toTypeKind(fun.symbol.tpe.resultType)
- ctx1
+ genLoadApply4
case Apply(fun @ _, List(expr)) if (definitions.isUnbox(fun.symbol)) =>
debuglog("UNBOX : " + fun.symbol.fullName)
@@ -878,106 +882,88 @@ abstract class GenICode extends SubComponent {
generatedType = toTypeKind(fun.symbol.tpe.resultType)
ctx1
- case app @ Apply(fun @ Select(qual, _), args)
- if !ctx.method.symbol.isStaticConstructor
- && fun.symbol.isAccessor && fun.symbol.accessed.hasStaticAnnotation =>
- // bypass the accessor to the companion object and load the static field directly
- // the only place were this bypass is not done, is the static intializer for the static field itself
- val sym = fun.symbol
- generatedType = toTypeKind(sym.accessed.info)
- val hostClass = qual.tpe.typeSymbol.orElse(sym.owner).companionClass
- val staticfield = hostClass.info.findMember(sym.accessed.name, NoFlags, NoFlags, false)
-
- if (sym.isGetter) {
- ctx.bb.emit(LOAD_FIELD(staticfield, true) setHostClass hostClass, tree.pos)
- ctx
- } else if (sym.isSetter) {
- val ctx1 = genLoadArguments(args, sym.info.paramTypes, ctx)
- ctx1.bb.emit(STORE_FIELD(staticfield, true), tree.pos)
- ctx1.bb.emit(CONSTANT(Constant(false)), tree.pos)
- ctx1
- } else {
- assert(false, "supposedly unreachable")
- ctx
- }
-
case app @ Apply(fun, args) =>
- val sym = fun.symbol
-
- if (sym.isLabel) { // jump to a label
- val label = ctx.labels.getOrElse(sym, {
- // it is a forward jump, scan for labels
- resolveForwardLabel(ctx.defdef, ctx, sym)
- ctx.labels.get(sym) match {
- case Some(l) =>
- log("Forward jump for " + sym.fullLocationString + ": scan found label " + l)
- l
- case _ =>
- abort("Unknown label target: " + sym + " at: " + (fun.pos) + ": ctx: " + ctx)
- }
- })
- // note: when one of the args to genLoadLabelArguments is a jump to a label,
- // it will call back into genLoad and arrive at this case, which will then set ctx1.bb.ignore to true,
- // this is okay, since we're jumping unconditionally, so the loads and jumps emitted by the outer
- // call to genLoad (by calling genLoadLabelArguments and emitOnly) can safely be ignored,
- // however, as emitOnly will close the block, which reverses its instructions (when it's still open),
- // we better not reverse when the block has already been closed but is in ignore mode
- // (if it's not in ignore mode, double-closing is an error)
- val ctx1 = genLoadLabelArguments(args, label, ctx)
- ctx1.bb.emitOnly(if (label.anchored) JUMP(label.block) else PJUMP(label))
- ctx1.bb.enterIgnoreMode
- ctx1
- } else if (isPrimitive(sym)) { // primitive method call
- val (newCtx, resKind) = genPrimitiveOp(app, ctx, expectedType)
- generatedType = resKind
- newCtx
- } else { // normal method call
- debuglog("Gen CALL_METHOD with sym: " + sym + " isStaticSymbol: " + sym.isStaticMember);
- val invokeStyle =
- if (sym.isStaticMember)
- Static(false)
- else if (sym.isPrivate || sym.isClassConstructor)
- Static(true)
- else
- Dynamic
-
- var ctx1 =
- if (invokeStyle.hasInstance) {
- if (forMSIL && !(invokeStyle.isInstanceOf[SuperCall]) && msil_IsValuetypeInstMethod(sym))
- msil_genLoadQualifierAddress(fun, ctx)
+ def genLoadApply6 = {
+ val sym = fun.symbol
+
+ if (sym.isLabel) { // jump to a label
+ val label = ctx.labels.getOrElse(sym, {
+ // it is a forward jump, scan for labels
+ resolveForwardLabel(ctx.defdef, ctx, sym)
+ ctx.labels.get(sym) match {
+ case Some(l) =>
+ log("Forward jump for " + sym.fullLocationString + ": scan found label " + l)
+ l
+ case _ =>
+ abort("Unknown label target: " + sym + " at: " + (fun.pos) + ": ctx: " + ctx)
+ }
+ })
+ // note: when one of the args to genLoadLabelArguments is a jump to a label,
+ // it will call back into genLoad and arrive at this case, which will then set ctx1.bb.ignore to true,
+ // this is okay, since we're jumping unconditionally, so the loads and jumps emitted by the outer
+ // call to genLoad (by calling genLoadLabelArguments and emitOnly) can safely be ignored,
+ // however, as emitOnly will close the block, which reverses its instructions (when it's still open),
+ // we better not reverse when the block has already been closed but is in ignore mode
+ // (if it's not in ignore mode, double-closing is an error)
+ val ctx1 = genLoadLabelArguments(args, label, ctx)
+ ctx1.bb.emitOnly(if (label.anchored) JUMP(label.block) else PJUMP(label))
+ ctx1.bb.enterIgnoreMode
+ ctx1
+ } else if (isPrimitive(sym)) { // primitive method call
+ val (newCtx, resKind) = genPrimitiveOp(app, ctx, expectedType)
+ generatedType = resKind
+ newCtx
+ } else { // normal method call
+ debuglog("Gen CALL_METHOD with sym: " + sym + " isStaticSymbol: " + sym.isStaticMember);
+ val invokeStyle =
+ if (sym.isStaticMember)
+ Static(false)
+ else if (sym.isPrivate || sym.isClassConstructor)
+ Static(true)
else
- genLoadQualifier(fun, ctx)
- } else ctx
-
- ctx1 = genLoadArguments(args, sym.info.paramTypes, ctx1)
- val cm = CALL_METHOD(sym, invokeStyle)
-
- /** In a couple cases, squirrel away a little extra information in the
- * CALL_METHOD for use by GenJVM.
- */
- fun match {
- case Select(qual, _) =>
- val qualSym = findHostClass(qual.tpe, sym)
-
- if (qualSym == ArrayClass) cm setTargetTypeKind toTypeKind(qual.tpe)
- else cm setHostClass qualSym
-
- log(
- if (qualSym == ArrayClass) "Stored target type kind " + toTypeKind(qual.tpe) + " for " + sym.fullName
- else s"Set more precise host class for ${sym.fullName} hostClass: $qualSym"
- )
- case _ =>
- }
- ctx1.bb.emit(cm, tree.pos)
+ Dynamic
- if (sym == ctx1.method.symbol) {
- ctx1.method.recursive = true
+ var ctx1 =
+ if (invokeStyle.hasInstance) {
+ if (forMSIL && !(invokeStyle.isInstanceOf[SuperCall]) && msil_IsValuetypeInstMethod(sym))
+ msil_genLoadQualifierAddress(fun, ctx)
+ else
+ genLoadQualifier(fun, ctx)
+ } else ctx
+
+ ctx1 = genLoadArguments(args, sym.info.paramTypes, ctx1)
+ val cm = CALL_METHOD(sym, invokeStyle)
+
+ /** In a couple cases, squirrel away a little extra information in the
+ * CALL_METHOD for use by GenJVM.
+ */
+ fun match {
+ case Select(qual, _) =>
+ val qualSym = findHostClass(qual.tpe, sym)
+ if (qualSym == ArrayClass) {
+ val kind = toTypeKind(qual.tpe)
+ cm setTargetTypeKind kind
+ log(s"Stored target type kind for {$sym.fullName} as $kind")
+ }
+ else {
+ cm setHostClass qualSym
+ if (qual.tpe.typeSymbol != qualSym)
+ log(s"Precisified host class for $sym from ${qual.tpe.typeSymbol.fullName} to ${qualSym.fullName}")
+ }
+ case _ =>
+ }
+ ctx1.bb.emit(cm, tree.pos)
+
+ if (sym == ctx1.method.symbol) {
+ ctx1.method.recursive = true
+ }
+ generatedType =
+ if (sym.isClassConstructor) UNIT
+ else toTypeKind(sym.info.resultType);
+ ctx1
}
- generatedType =
- if (sym.isClassConstructor) UNIT
- else toTypeKind(sym.info.resultType);
- ctx1
}
+ genLoadApply6
case ApplyDynamic(qual, args) =>
assert(!forMSIL, tree)
@@ -989,20 +975,22 @@ abstract class GenICode extends SubComponent {
// ctx1
case This(qual) =>
- assert(tree.symbol == ctx.clazz.symbol || tree.symbol.isModuleClass,
- "Trying to access the this of another class: " +
- "tree.symbol = " + tree.symbol + ", ctx.clazz.symbol = " + ctx.clazz.symbol + " compilation unit:"+unit)
- if (tree.symbol.isModuleClass && tree.symbol != ctx.clazz.symbol) {
- genLoadModule(ctx, tree)
- generatedType = REFERENCE(tree.symbol)
- }
- else {
- ctx.bb.emit(THIS(ctx.clazz.symbol), tree.pos)
- generatedType = REFERENCE(
- if (tree.symbol == ArrayClass) ObjectClass else ctx.clazz.symbol
- )
+ def genLoadThis = {
+ assert(tree.symbol == ctx.clazz.symbol || tree.symbol.isModuleClass,
+ "Trying to access the this of another class: " +
+ "tree.symbol = " + tree.symbol + ", ctx.clazz.symbol = " + ctx.clazz.symbol + " compilation unit:"+unit)
+ if (tree.symbol.isModuleClass && tree.symbol != ctx.clazz.symbol) {
+ genLoadModule(ctx, tree)
+ generatedType = REFERENCE(tree.symbol)
+ } else {
+ ctx.bb.emit(THIS(ctx.clazz.symbol), tree.pos)
+ generatedType = REFERENCE(
+ if (tree.symbol == ArrayClass) ObjectClass else ctx.clazz.symbol
+ )
+ }
+ ctx
}
- ctx
+ genLoadThis
case Select(Ident(nme.EMPTY_PACKAGE_NAME), module) =>
debugassert(tree.symbol.isModule,
@@ -1012,60 +1000,67 @@ abstract class GenICode extends SubComponent {
genLoadModule(ctx, tree)
case Select(qualifier, selector) =>
- val sym = tree.symbol
- generatedType = toTypeKind(sym.info)
- val hostClass = findHostClass(qualifier.tpe, sym)
- log(s"Host class of $sym with qual $qualifier (${qualifier.tpe}) is $hostClass")
-
- if (sym.isModule) {
- genLoadModule(ctx, tree)
- }
- else if (sym.isStaticMember) {
- ctx.bb.emit(LOAD_FIELD(sym, true) setHostClass hostClass, tree.pos)
- ctx
- }
- else {
- val ctx1 = genLoadQualifier(tree, ctx)
- ctx1.bb.emit(LOAD_FIELD(sym, false) setHostClass hostClass, tree.pos)
- ctx1
- }
+ def genLoadSelect = {
+ val sym = tree.symbol
+ generatedType = toTypeKind(sym.info)
+ val hostClass = findHostClass(qualifier.tpe, sym)
+ log(s"Host class of $sym with qual $qualifier (${qualifier.tpe}) is $hostClass")
- case Ident(name) =>
- val sym = tree.symbol
- if (!sym.isPackage) {
if (sym.isModule) {
genLoadModule(ctx, tree)
- generatedType = toTypeKind(sym.info)
}
- else {
- try {
- val Some(l) = ctx.method.lookupLocal(sym)
- ctx.bb.emit(LOAD_LOCAL(l), tree.pos)
- generatedType = l.kind
- } catch {
- case ex: MatchError =>
- abort("symbol " + sym + " does not exist in " + ctx.method)
+ else if (sym.isStaticMember) {
+ ctx.bb.emit(LOAD_FIELD(sym, true) setHostClass hostClass, tree.pos)
+ ctx
+ } else {
+ val ctx1 = genLoadQualifier(tree, ctx)
+ ctx1.bb.emit(LOAD_FIELD(sym, false) setHostClass hostClass, tree.pos)
+ ctx1
+ }
+ }
+ genLoadSelect
+
+ case Ident(name) =>
+ def genLoadIdent = {
+ val sym = tree.symbol
+ if (!sym.isPackage) {
+ if (sym.isModule) {
+ genLoadModule(ctx, tree)
+ generatedType = toTypeKind(sym.info)
+ } else {
+ try {
+ val Some(l) = ctx.method.lookupLocal(sym)
+ ctx.bb.emit(LOAD_LOCAL(l), tree.pos)
+ generatedType = l.kind
+ } catch {
+ case ex: MatchError =>
+ abort("symbol " + sym + " does not exist in " + ctx.method)
+ }
}
}
+ ctx
}
- ctx
+ genLoadIdent
case Literal(value) =>
- if (value.tag != UnitTag) (value.tag, expectedType) match {
- case (IntTag, LONG) =>
- ctx.bb.emit(CONSTANT(Constant(value.longValue)), tree.pos);
- generatedType = LONG
- case (FloatTag, DOUBLE) =>
- ctx.bb.emit(CONSTANT(Constant(value.doubleValue)), tree.pos);
- generatedType = DOUBLE
- case (NullTag, _) =>
- ctx.bb.emit(CONSTANT(value), tree.pos);
- generatedType = NullReference
- case _ =>
- ctx.bb.emit(CONSTANT(value), tree.pos);
- generatedType = toTypeKind(tree.tpe)
+ def genLoadLiteral = {
+ if (value.tag != UnitTag) (value.tag, expectedType) match {
+ case (IntTag, LONG) =>
+ ctx.bb.emit(CONSTANT(Constant(value.longValue)), tree.pos);
+ generatedType = LONG
+ case (FloatTag, DOUBLE) =>
+ ctx.bb.emit(CONSTANT(Constant(value.doubleValue)), tree.pos);
+ generatedType = DOUBLE
+ case (NullTag, _) =>
+ ctx.bb.emit(CONSTANT(value), tree.pos);
+ generatedType = NullReference
+ case _ =>
+ ctx.bb.emit(CONSTANT(value), tree.pos);
+ generatedType = toTypeKind(tree.tpe)
+ }
+ ctx
}
- ctx
+ genLoadLiteral
case Block(stats, expr) =>
ctx.enterScope
@@ -1085,66 +1080,72 @@ abstract class GenICode extends SubComponent {
genStat(tree, ctx)
case ArrayValue(tpt @ TypeTree(), _elems) =>
- var ctx1 = ctx
- val elmKind = toTypeKind(tpt.tpe)
- generatedType = ARRAY(elmKind)
- val elems = _elems.toIndexedSeq
-
- ctx1.bb.emit(CONSTANT(new Constant(elems.length)), tree.pos)
- ctx1.bb.emit(CREATE_ARRAY(elmKind, 1))
- // inline array literals
- var i = 0
- while (i < elems.length) {
- ctx1.bb.emit(DUP(generatedType), tree.pos)
- ctx1.bb.emit(CONSTANT(new Constant(i)))
- ctx1 = genLoad(elems(i), ctx1, elmKind)
- ctx1.bb.emit(STORE_ARRAY_ITEM(elmKind))
- i = i + 1
+ def genLoadArrayValue = {
+ var ctx1 = ctx
+ val elmKind = toTypeKind(tpt.tpe)
+ generatedType = ARRAY(elmKind)
+ val elems = _elems.toIndexedSeq
+
+ ctx1.bb.emit(CONSTANT(new Constant(elems.length)), tree.pos)
+ ctx1.bb.emit(CREATE_ARRAY(elmKind, 1))
+ // inline array literals
+ var i = 0
+ while (i < elems.length) {
+ ctx1.bb.emit(DUP(generatedType), tree.pos)
+ ctx1.bb.emit(CONSTANT(new Constant(i)))
+ ctx1 = genLoad(elems(i), ctx1, elmKind)
+ ctx1.bb.emit(STORE_ARRAY_ITEM(elmKind))
+ i = i + 1
+ }
+ ctx1
}
- ctx1
+ genLoadArrayValue
case Match(selector, cases) =>
- debuglog("Generating SWITCH statement.");
- var ctx1 = genLoad(selector, ctx, INT) // TODO: Java 7 allows strings in switches (so, don't assume INT and don't convert the literals using intValue)
- val afterCtx = ctx1.newBlock
- var caseCtx: Context = null
- generatedType = toTypeKind(tree.tpe)
-
- var targets: List[BasicBlock] = Nil
- var tags: List[Int] = Nil
- var default: BasicBlock = afterCtx.bb
-
- for (caze @ CaseDef(pat, guard, body) <- cases) {
- assert(guard == EmptyTree, guard)
- val tmpCtx = ctx1.newBlock
- pat match {
- case Literal(value) =>
- tags = value.intValue :: tags
- targets = tmpCtx.bb :: targets
- case Ident(nme.WILDCARD) =>
- default = tmpCtx.bb
- case Alternative(alts) =>
- alts foreach {
- case Literal(value) =>
- tags = value.intValue :: tags
- targets = tmpCtx.bb :: targets
- case _ =>
- abort("Invalid case in alternative in switch-like pattern match: " +
- tree + " at: " + tree.pos)
- }
- case _ =>
- abort("Invalid case statement in switch-like pattern match: " +
- tree + " at: " + (tree.pos))
- }
+ def genLoadMatch = {
+ debuglog("Generating SWITCH statement.");
+ var ctx1 = genLoad(selector, ctx, INT) // TODO: Java 7 allows strings in switches (so, don't assume INT and don't convert the literals using intValue)
+ val afterCtx = ctx1.newBlock
+ var caseCtx: Context = null
+ generatedType = toTypeKind(tree.tpe)
+
+ var targets: List[BasicBlock] = Nil
+ var tags: List[Int] = Nil
+ var default: BasicBlock = afterCtx.bb
+
+ for (caze @ CaseDef(pat, guard, body) <- cases) {
+ assert(guard == EmptyTree, guard)
+ val tmpCtx = ctx1.newBlock
+ pat match {
+ case Literal(value) =>
+ tags = value.intValue :: tags
+ targets = tmpCtx.bb :: targets
+ case Ident(nme.WILDCARD) =>
+ default = tmpCtx.bb
+ case Alternative(alts) =>
+ alts foreach {
+ case Literal(value) =>
+ tags = value.intValue :: tags
+ targets = tmpCtx.bb :: targets
+ case _ =>
+ abort("Invalid case in alternative in switch-like pattern match: " +
+ tree + " at: " + tree.pos)
+ }
+ case _ =>
+ abort("Invalid case statement in switch-like pattern match: " +
+ tree + " at: " + (tree.pos))
+ }
- caseCtx = genLoad(body, tmpCtx, generatedType)
- // close the block unless it's already been closed by the body, which closes the block if it ends in a jump (which is emitted to have alternatives share their body)
- caseCtx.bb.closeWith(JUMP(afterCtx.bb) setPos caze.pos)
+ caseCtx = genLoad(body, tmpCtx, generatedType)
+ // close the block unless it's already been closed by the body, which closes the block if it ends in a jump (which is emitted to have alternatives share their body)
+ caseCtx.bb.closeWith(JUMP(afterCtx.bb) setPos caze.pos)
+ }
+ ctx1.bb.emitOnly(
+ SWITCH(tags.reverse map (x => List(x)), (default :: targets).reverse) setPos tree.pos
+ )
+ afterCtx
}
- ctx1.bb.emitOnly(
- SWITCH(tags.reverse map (x => List(x)), (default :: targets).reverse) setPos tree.pos
- )
- afterCtx
+ genLoadMatch
case EmptyTree =>
if (expectedType != UNIT)
@@ -1672,12 +1673,8 @@ abstract class GenICode extends SubComponent {
* backend emits them as static).
* No code is needed for this module symbol.
*/
- for (
- f <- cls.info.decls;
- if !f.isMethod && f.isTerm && !f.isModule && !(f.owner.isModuleClass && f.hasStaticAnnotation)
- ) {
+ for (f <- cls.info.decls ; if !f.isMethod && f.isTerm && !f.isModule)
ctx.clazz addField new IField(f)
- }
}
/**
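
The dominant edit in GenICode above is mechanical: each large case body of the genLoad match becomes a named local def (genLoadLabelDef, genLoadReturn, ...) that is invoked on the spot, leaving behaviour untouched while keeping the enclosing match small; the motivation (bytecode size, JIT friendliness) is an inference, not stated in the commit. The shape of the transformation on a toy match:

  object ExtractCaseBodies {
    def describe(x: Any): String = x match {
      case n: Int =>
        def describeInt = {                 // extracted case body, local to this case
          val parity = if (n % 2 == 0) "even" else "odd"
          s"$parity int $n"
        }
        describeInt
      case s: String =>
        def describeString = s"string of length ${s.length}"
        describeString
      case other =>
        s"something else: $other"
    }
  }
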
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
index 81fd285cdb..d43013c644 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
@@ -2,11 +2,6 @@
* Copyright 2005-2012 LAMP/EPFL
* @author Martin Odersky
*/
-/* NSC -- new scala compiler
- * Copyright 2005-2012 LAMP/EPFL
- * @author Martin Odersky
- */
-
package scala.tools.nsc
package backend
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
index 1c5c9224f2..4739750daa 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
@@ -231,7 +231,7 @@ trait Linearizers {
val handlersByCovered = m.exh.groupBy(_.covered)
// number of basic blocks covered by the entire try-catch expression
- def size(covered: collection.immutable.Set[BasicBlock]) = {
+ def size(covered: scala.collection.immutable.Set[BasicBlock]) = {
val hs = handlersByCovered(covered)
covered.size + (hs :\ 0)((h, s) => h.blocks.length + s)
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
index 4311fe9df5..63f0ab683b 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
@@ -82,10 +82,17 @@ trait Opcodes { self: ICodes =>
final val jumpsCat = 10
final val retCat = 11
+ private lazy val ObjectReferenceList = ObjectReference :: Nil
+
/** This class represents an instruction of the intermediate code.
* Each case subclass will represent a specific operation.
*/
abstract class Instruction extends Cloneable {
+ // Vlad: I used these for checking the quality of the implementation, and we should regularly run a build with them
+ // enabled. But for production these should definitely be disabled, unless we enjoy getting angry emails from Greg :)
+ //if (!this.isInstanceOf[opcodes.LOAD_EXCEPTION])
+ // assert(consumed == consumedTypes.length)
+ //assert(produced == producedTypes.length)
def category: Int = 0 // undefined
@@ -101,6 +108,7 @@ trait Opcodes { self: ICodes =>
def consumedTypes: List[TypeKind] = Nil
/** This instruction produces these types on top of the stack. */
+ // Vlad: I wonder why we keep producedTypes around -- it looks like a useless thing to have
def producedTypes: List[TypeKind] = Nil
/** This method returns the difference of size of the stack when the instruction is used */
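
consumedTypes and producedTypes feed the stack-size difference mentioned in the comment above: for example LOAD_ARRAY_ITEM consumes ARRAY(kind) and INT and produces one kind, so its delta is 1 - 2 = -1. A toy mirror of that bookkeeping, with illustrative names:

  object StackDeltas {
    sealed trait Kind
    case object INT extends Kind
    final case class ARRAY(elem: Kind) extends Kind

    final case class Instr(consumedTypes: List[Kind], producedTypes: List[Kind]) {
      def consumed   = consumedTypes.length
      def produced   = producedTypes.length
      def stackDelta = produced - consumed   // the "difference of size of the stack"
    }

    // LOAD_ARRAY_ITEM(INT): pops an array reference and an index, pushes one element.
    val loadArrayItem = Instr(ARRAY(INT) :: INT :: Nil, INT :: Nil)
    assert(loadArrayItem.stackDelta == -1)
  }
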
@@ -120,7 +128,7 @@ trait Opcodes { self: ICodes =>
}
/** Clone this instruction. */
- override def clone: Instruction =
+ override def clone(): Instruction =
super.clone.asInstanceOf[Instruction]
}
@@ -143,7 +151,12 @@ trait Opcodes { self: ICodes =>
override def consumed = 0
override def produced = 1
- override def producedTypes = List(REFERENCE(clasz))
+ override def producedTypes =
+ // REFERENCE(Array) is not allowed, so when compiling the Array class itself we use ObjectReference instead.
+ if (clasz != global.definitions.ArrayClass)
+ REFERENCE(clasz) :: Nil
+ else
+ ObjectReference :: Nil
override def category = localsCat
}
@@ -157,7 +170,7 @@ trait Opcodes { self: ICodes =>
override def consumed = 0
override def produced = 1
- override def producedTypes = List(toTypeKind(constant.tpe))
+ override def producedTypes = toTypeKind(constant.tpe) :: Nil
override def category = constCat
}
@@ -171,8 +184,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 2
override def produced = 1
- override def consumedTypes = List(ARRAY(kind), INT)
- override def producedTypes = List(kind)
+ override def consumedTypes = ARRAY(kind) :: INT :: Nil
+ override def producedTypes = kind :: Nil
override def category = arraysCat
}
@@ -185,7 +198,7 @@ trait Opcodes { self: ICodes =>
override def consumed = 0
override def produced = 1
- override def producedTypes = List(local.kind)
+ override def producedTypes = local.kind :: Nil
override def category = localsCat
}
@@ -203,8 +216,8 @@ trait Opcodes { self: ICodes =>
override def consumed = if (isStatic) 0 else 1
override def produced = 1
- override def consumedTypes = if (isStatic) Nil else List(REFERENCE(field.owner));
- override def producedTypes = List(toTypeKind(field.tpe));
+ override def consumedTypes = if (isStatic) Nil else REFERENCE(field.owner) :: Nil
+ override def producedTypes = toTypeKind(field.tpe) :: Nil
// more precise information about how to load this field
// see #4283
@@ -222,7 +235,7 @@ trait Opcodes { self: ICodes =>
override def consumed = 0
override def produced = 1
- override def producedTypes = List(REFERENCE(module))
+ override def producedTypes = REFERENCE(module) :: Nil
override def category = stackCat
}
@@ -235,7 +248,7 @@ trait Opcodes { self: ICodes =>
override def consumed = 3
override def produced = 0
- override def consumedTypes = List(ARRAY(kind), INT, kind)
+ override def consumedTypes = ARRAY(kind) :: INT :: kind :: Nil
override def category = arraysCat
}
@@ -248,7 +261,7 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
- override def consumedTypes = List(local.kind)
+ override def consumedTypes = local.kind :: Nil
override def category = localsCat
}
@@ -267,9 +280,9 @@ trait Opcodes { self: ICodes =>
override def consumedTypes =
if (isStatic)
- List(toTypeKind(field.tpe))
+ toTypeKind(field.tpe) :: Nil
else
- List(REFERENCE(field.owner), toTypeKind(field.tpe));
+ REFERENCE(field.owner) :: toTypeKind(field.tpe) :: Nil;
override def category = fldsCat
}
@@ -281,7 +294,7 @@ trait Opcodes { self: ICodes =>
case class STORE_THIS(kind: TypeKind) extends Instruction {
override def consumed = 1
override def produced = 0
- override def consumedTypes = List(kind)
+ override def consumedTypes = kind :: Nil
override def category = localsCat
}
@@ -308,34 +321,34 @@ trait Opcodes { self: ICodes =>
override def produced = 1
override def consumedTypes = primitive match {
- case Negation(kind) => List(kind)
- case Test(_, kind, true) => List(kind)
- case Test(_, kind, false) => List(kind, kind)
- case Comparison(_, kind) => List(kind, kind)
- case Arithmetic(NOT, kind) => List(kind)
- case Arithmetic(_, kind) => List(kind, kind)
- case Logical(_, kind) => List(kind, kind)
- case Shift(_, kind) => List(kind, INT)
- case Conversion(from, _) => List(from)
- case ArrayLength(kind) => List(ARRAY(kind))
- case StringConcat(kind) => List(ConcatClass, kind)
+ case Negation(kind) => kind :: Nil
+ case Test(_, kind, true) => kind :: Nil
+ case Test(_, kind, false) => kind :: kind :: Nil
+ case Comparison(_, kind) => kind :: kind :: Nil
+ case Arithmetic(NOT, kind) => kind :: Nil
+ case Arithmetic(_, kind) => kind :: kind :: Nil
+ case Logical(_, kind) => kind :: kind :: Nil
+ case Shift(_, kind) => kind :: INT :: Nil
+ case Conversion(from, _) => from :: Nil
+ case ArrayLength(kind) => ARRAY(kind) :: Nil
+ case StringConcat(kind) => ConcatClass :: kind :: Nil
case StartConcat => Nil
- case EndConcat => List(ConcatClass)
+ case EndConcat => ConcatClass :: Nil
}
override def producedTypes = primitive match {
- case Negation(kind) => List(kind)
- case Test(_, _, true) => List(BOOL)
- case Test(_, _, false) => List(BOOL)
- case Comparison(_, _) => List(INT)
- case Arithmetic(_, kind) => List(kind)
- case Logical(_, kind) => List(kind)
- case Shift(_, kind) => List(kind)
- case Conversion(_, to) => List(to)
- case ArrayLength(_) => List(INT)
- case StringConcat(_) => List(ConcatClass)
- case StartConcat => List(ConcatClass)
- case EndConcat => List(REFERENCE(global.definitions.StringClass))
+ case Negation(kind) => kind :: Nil
+ case Test(_, _, true) => BOOL :: Nil
+ case Test(_, _, false) => BOOL :: Nil
+ case Comparison(_, _) => INT :: Nil
+ case Arithmetic(_, kind) => kind :: Nil
+ case Logical(_, kind) => kind :: Nil
+ case Shift(_, kind) => kind :: Nil
+ case Conversion(_, to) => to :: Nil
+ case ArrayLength(_) => INT :: Nil
+ case StringConcat(_) => ConcatClass :: Nil
+ case StartConcat => ConcatClass :: Nil
+ case EndConcat => REFERENCE(global.definitions.StringClass) :: Nil
}
override def category = arilogCat
@@ -381,14 +394,13 @@ trait Opcodes { self: ICodes =>
else args
}
- override def produced =
- if (producedType == UNIT || method.isConstructor) 0
- else 1
-
- private def producedType: TypeKind = toTypeKind(method.info.resultType)
- override def producedTypes =
- if (produced == 0) Nil
- else List(producedType)
+ private val producedList = toTypeKind(method.info.resultType) match {
+ case UNIT => Nil
+ case _ if method.isConstructor => Nil
+ case kind => kind :: Nil
+ }
+ override def produced = producedList.size
+ override def producedTypes = producedList
/** object identity is equality for CALL_METHODs. Needed for
* being able to store such instructions into maps, when more
@@ -404,15 +416,17 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def consumedTypes = boxType :: Nil
override def produced = 1
+ override def producedTypes = BOXED(boxType) :: Nil
override def category = objsCat
}
case class UNBOX(boxType: TypeKind) extends Instruction {
- assert(boxType.isValueType && (boxType ne UNIT)) // documentation
+ assert(boxType.isValueType && !boxType.isInstanceOf[BOXED] && (boxType ne UNIT)) // documentation
override def toString(): String = "UNBOX " + boxType
override def consumed = 1
- override def consumedTypes = ObjectReference :: Nil
+ override def consumedTypes = ObjectReferenceList
override def produced = 1
+ override def producedTypes = boxType :: Nil
override def category = objsCat
}
@@ -426,6 +440,7 @@ trait Opcodes { self: ICodes =>
override def consumed = 0;
override def produced = 1;
+ override def producedTypes = kind :: Nil
/** The corresponding constructor call. */
var init: CALL_METHOD = _
@@ -445,6 +460,7 @@ trait Opcodes { self: ICodes =>
override def consumed = dims;
override def consumedTypes = List.fill(dims)(INT)
override def produced = 1;
+ override def producedTypes = ARRAY(elem) :: Nil
override def category = arraysCat
}
@@ -458,8 +474,9 @@ trait Opcodes { self: ICodes =>
override def toString(): String ="IS_INSTANCE "+typ
override def consumed = 1
- override def consumedTypes = ObjectReference :: Nil
override def produced = 1
+ override def consumedTypes = ObjectReferenceList
+ override def producedTypes = BOOL :: Nil
override def category = castsCat
}
@@ -474,8 +491,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 1
- override val consumedTypes = List(ObjectReference)
- override def producedTypes = List(typ)
+ override def consumedTypes = ObjectReferenceList
+ override def producedTypes = typ :: Nil
override def category = castsCat
}
@@ -495,7 +512,7 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
- override val consumedTypes = List(INT)
+ override def consumedTypes = INT :: Nil
def flatTagsCount: Int = { var acc = 0; var rest = tags; while(rest.nonEmpty) { acc += rest.head.length; rest = rest.tail }; acc } // a one-liner
@@ -536,7 +553,7 @@ trait Opcodes { self: ICodes =>
override def consumed = 2
override def produced = 0
- override val consumedTypes = List(kind, kind)
+ override def consumedTypes = kind :: kind :: Nil
override def category = jumpsCat
}
@@ -559,8 +576,7 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
- override val consumedTypes = List(kind)
-
+ override def consumedTypes = kind :: Nil
override def category = jumpsCat
}
@@ -573,7 +589,7 @@ trait Opcodes { self: ICodes =>
override def consumed = if (kind == UNIT) 0 else 1
override def produced = 0
- // TODO override val consumedTypes = List(kind)
+ override def consumedTypes = if (kind == UNIT) Nil else kind :: Nil
override def category = retCat
}
@@ -592,6 +608,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
+ override def consumedTypes = toTypeKind(clasz.tpe) :: Nil
+
override def category = retCat
}
@@ -606,6 +624,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
+ override def consumedTypes = typ :: Nil
+
override def category = stackCat
}
@@ -616,6 +636,8 @@ trait Opcodes { self: ICodes =>
case class DUP (typ: TypeKind) extends Instruction {
override def consumed = 1
override def produced = 2
+ override def consumedTypes = typ :: Nil
+ override def producedTypes = typ :: typ :: Nil
override def category = stackCat
}
@@ -630,6 +652,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
+ override def consumedTypes = ObjectReference :: Nil
+
override def category = objsCat
}
@@ -644,6 +668,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 1;
override def produced = 0;
+ override def consumedTypes = ObjectReference :: Nil
+
override def category = objsCat
}
@@ -738,10 +764,10 @@ trait Opcodes { self: ICodes =>
override def consumed = 0
override def produced = 1
- override def producedTypes = List(msil_mgdptr(local.kind))
+ override def producedTypes = msil_mgdptr(local.kind) :: Nil
override def category = localsCat
- }
+ }
case class CIL_LOAD_FIELD_ADDRESS(field: Symbol, isStatic: Boolean) extends Instruction {
/** Returns a string representation of this instruction */
@@ -751,11 +777,11 @@ trait Opcodes { self: ICodes =>
override def consumed = if (isStatic) 0 else 1
override def produced = 1
- override def consumedTypes = if (isStatic) Nil else List(REFERENCE(field.owner));
- override def producedTypes = List(msil_mgdptr(REFERENCE(field.owner)));
+ override def consumedTypes = if (isStatic) Nil else REFERENCE(field.owner) :: Nil;
+ override def producedTypes = msil_mgdptr(REFERENCE(field.owner)) :: Nil;
override def category = fldsCat
-}
+ }
case class CIL_LOAD_ARRAY_ITEM_ADDRESS(kind: TypeKind) extends Instruction {
/** Returns a string representation of this instruction */
@@ -764,8 +790,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 2
override def produced = 1
- override def consumedTypes = List(ARRAY(kind), INT)
- override def producedTypes = List(msil_mgdptr(kind))
+ override def consumedTypes = ARRAY(kind) :: INT :: Nil
+ override def producedTypes = msil_mgdptr(kind) :: Nil
override def category = arraysCat
}
@@ -773,16 +799,16 @@ trait Opcodes { self: ICodes =>
case class CIL_UNBOX(valueType: TypeKind) extends Instruction {
override def toString(): String = "CIL_UNBOX " + valueType
override def consumed = 1
- override def consumedTypes = ObjectReference :: Nil // actually consumes a 'boxed valueType'
+ override def consumedTypes = ObjectReferenceList // actually consumes a 'boxed valueType'
override def produced = 1
- override def producedTypes = List(msil_mgdptr(valueType))
+ override def producedTypes = msil_mgdptr(valueType) :: Nil
override def category = objsCat
}
case class CIL_INITOBJ(valueType: TypeKind) extends Instruction {
override def toString(): String = "CIL_INITOBJ " + valueType
override def consumed = 1
- override def consumedTypes = ObjectReference :: Nil // actually consumes a managed pointer
+ override def consumedTypes = ObjectReferenceList // actually consumes a managed pointer
override def produced = 0
override def category = objsCat
}
@@ -793,9 +819,8 @@ trait Opcodes { self: ICodes =>
override def consumed = method.tpe.paramTypes.length
override def consumedTypes = method.tpe.paramTypes map toTypeKind
override def produced = 1
- override def producedTypes = List(toTypeKind(method.tpe.resultType))
+ override def producedTypes = toTypeKind(method.tpe.resultType) :: Nil
override def category = objsCat
}
-
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
index d1d8e4a385..df158a29ea 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
@@ -200,7 +200,7 @@ abstract class CopyPropagation {
in(b) = lattice.bottom
out(b) = lattice.bottom
assert(out.contains(b), out)
- log("Added point: " + b)
+ debuglog("CopyAnalysis added point: " + b)
}
m.exh foreach { e =>
in(e.startBlock) = new copyLattice.State(copyLattice.emptyBinding, copyLattice.exceptionHandlerStack);
@@ -531,11 +531,11 @@ abstract class CopyPropagation {
case 0 => ()
case 1 if ctor.tpe.paramTypes.head == ctor.owner.rawowner.tpe =>
// it's an unused outer
- log("considering unused outer at position 0 in " + ctor.tpe.paramTypes)
+ debuglog("considering unused outer at position 0 in " + ctor.tpe.paramTypes)
paramTypes = paramTypes.tail
values = values.tail
case _ =>
- log("giving up on " + ctor + "(diff: " + diff + ")")
+ debuglog("giving up on " + ctor + "(diff: " + diff + ")")
return bindings
}
@@ -566,7 +566,7 @@ abstract class CopyPropagation {
method.blocks map { b =>
"\nIN(%s):\t Bindings: %s".format(b.label, in(b).bindings) +
"\nIN(%s):\t Stack: %s".format(b.label, in(b).stack)
- }
+ }
).mkString
} /* class CopyAnalysis */
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index 6bd3ac5791..5d81109ac9 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -480,7 +480,7 @@ abstract class TypeFlowAnalysis {
val knownUnsafe = mutable.Set.empty[Symbol]
val knownSafe = mutable.Set.empty[Symbol]
val knownNever = mutable.Set.empty[Symbol] // `knownNever` needs be cleared only at the very end of the inlining phase (unlike `knownUnsafe` and `knownSafe`)
- @inline final def blackballed(msym: Symbol): Boolean = { knownUnsafe(msym) || knownNever(msym) }
+ final def blackballed(msym: Symbol): Boolean = { knownUnsafe(msym) || knownNever(msym) }
val relevantBBs = mutable.Set.empty[BasicBlock]
@@ -640,7 +640,7 @@ abstract class TypeFlowAnalysis {
For each of them, its `lastInstruction` (after which no more typeflows are needed) is found.
*/
- def reinit(m: icodes.IMethod, staleOut: List[BasicBlock], inlined: collection.Set[BasicBlock], staleIn: collection.Set[BasicBlock]) {
+ def reinit(m: icodes.IMethod, staleOut: List[BasicBlock], inlined: scala.collection.Set[BasicBlock], staleIn: scala.collection.Set[BasicBlock]) {
if (this.method == null || this.method.symbol != m.symbol) {
init(m)
return
@@ -691,7 +691,7 @@ abstract class TypeFlowAnalysis {
bs foreach enqueue
}
- private def blankOut(blocks: collection.Set[BasicBlock]) {
+ private def blankOut(blocks: scala.collection.Set[BasicBlock]) {
blocks foreach { b =>
in(b) = typeFlowLattice.bottom
out(b) = typeFlowLattice.bottom
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
index 485864d8e3..ef3e82a75a 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
@@ -12,7 +12,7 @@ import scala.tools.nsc.util.ScalaClassLoader
import scala.tools.util.JavapClass
import java.util.jar.{ JarEntry, JarOutputStream, Attributes }
import Attributes.Name
-import language.postfixOps
+import scala.language.postfixOps
/** For the last mile: turning generated bytecode in memory into
* something you can use. Has implementations for writing to class
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 194ceca504..4738ad8a38 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -154,8 +154,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
inform("[running phase " + name + " on icode]")
if (settings.Xdce.value)
- for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym))
+ for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) {
+ log(s"Optimizer eliminated ${sym.fullNameString}")
icodes.classes -= sym
+ }
// For predictably ordered error messages.
var sortedClasses = classes.values.toList sortBy ("" + _.symbol.fullName)
@@ -227,11 +229,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
binarynme.RuntimeNull.toString() -> RuntimeNullClass
)
- private def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)
-
- @inline final private def hasPublicBitSet(flags: Int) = ((flags & asm.Opcodes.ACC_PUBLIC) != 0)
-
- @inline final private def isRemote(s: Symbol) = (s hasAnnotation RemoteAttr)
+ private def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)
+ private def hasPublicBitSet(flags: Int) = (flags & asm.Opcodes.ACC_PUBLIC) != 0
+ private def isRemote(s: Symbol) = s hasAnnotation RemoteAttr
/**
* Return the Java modifiers for the given symbol.
@@ -277,7 +277,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
// Nested objects won't receive ACC_FINAL in order to allow for their overriding.
val finalFlag = (
- (sym.hasFlag(Flags.FINAL) || isTopLevelModule(sym))
+ (((sym.rawflags & Flags.FINAL) != 0) || isTopLevelModule(sym))
&& !sym.enclClass.isInterface
&& !sym.isClassConstructor
&& !sym.isMutable // lazy vals and vars both
@@ -295,7 +295,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
if (finalFlag && !sym.hasAbstractFlag) ACC_FINAL else 0,
if (sym.isStaticMember) ACC_STATIC else 0,
if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
- if (sym.isHidden) ACC_SYNTHETIC else 0,
+ if (sym.isArtifact) ACC_SYNTHETIC else 0,
if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
if (sym.isVarargsMethod) ACC_VARARGS else 0,
if (sym.hasFlag(Flags.SYNCHRONIZED)) ACC_SYNCHRONIZED else 0
@@ -384,8 +384,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
fcs
}
- @inline final private def jvmWiseLUB(a: Symbol, b: Symbol): Symbol = {
-
+ private def jvmWiseLUB(a: Symbol, b: Symbol): Symbol = {
assert(a.isClass)
assert(b.isClass)
@@ -448,6 +447,17 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
val JAVA_LANG_OBJECT = asm.Type.getObjectType("java/lang/Object")
val JAVA_LANG_STRING = asm.Type.getObjectType("java/lang/String")
+ /**
+ * We call many Java varargs methods of the ASM library that expect an Array[asm.Type] argument, so
+ * we override the default (compiler-generated) ClassTag in order to provide a specialized newArray implementation.
+ *
+ * Examples of methods that should pick our definition are: JBuilder.javaType and JPlainBuilder.genMethod.
+ */
+ private implicit val asmTypeTag: scala.reflect.ClassTag[asm.Type] = new scala.reflect.ClassTag[asm.Type] {
+ def runtimeClass: java.lang.Class[asm.Type] = classOf[asm.Type]
+ final override def newArray(len: Int): Array[asm.Type] = new Array[asm.Type](len)
+ }
+
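The mechanism this relies on, in isolation: converting a Scala collection to an array (including the adaptation behind `: _*` at Java varargs call sites such as asm.Type.getMethodType) goes through the implicit ClassTag in scope, so a hand-written tag with an overridden newArray decides how that temporary array is allocated. A self-contained sketch with a made-up element type (Insn stands in for asm.Type; only the shape of the tag mirrors the one added above):

  import scala.reflect.ClassTag

  final class Insn(val name: String)                  // hypothetical stand-in for asm.Type

  // same shape as asmTypeTag: a concrete runtimeClass plus a specialized newArray
  implicit val insnTag: ClassTag[Insn] = new ClassTag[Insn] {
    def runtimeClass: Class[_] = classOf[Insn]
    override def newArray(len: Int): Array[Insn] = new Array[Insn](len)
  }

  def toVarargsArray(insns: Seq[Insn]): Array[Insn] = insns.toArray   // resolves insnTag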
/** basic functionality for class file building */
abstract class JBuilder(bytecodeWriter: BytecodeWriter) {
@@ -641,7 +651,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
def javaType(s: Symbol): asm.Type = {
if (s.isMethod) {
val resT: asm.Type = if (s.isClassConstructor) asm.Type.VOID_TYPE else javaType(s.tpe.resultType);
- asm.Type.getMethodType( resT, (s.tpe.paramTypes map javaType): _* )
+ asm.Type.getMethodType( resT, (s.tpe.paramTypes map javaType): _*)
} else { javaType(s.tpe) }
}
@@ -851,7 +861,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
// generic information could disappear as a consequence of a seemingly
// unrelated change.
settings.Ynogenericsig.value
- || sym.isHidden
+ || sym.isArtifact
|| sym.isLiftedMethod
|| sym.isBridge
|| (sym.ownerChain exists (_.isImplClass))
@@ -1177,9 +1187,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
log(s"No forwarder for non-public member $m")
else {
log("Adding static forwarder for '%s' from %s to '%s'".format(m, jclassName, moduleClass))
- if (m.isAccessor && m.accessed.hasStaticAnnotation) {
- log("@static: accessor " + m + ", accessed: " + m.accessed)
- } else addForwarder(isRemoteClass, jclass, moduleClass, m)
+ addForwarder(isRemoteClass, jclass, moduleClass, m)
}
}
}
@@ -1533,7 +1541,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
var jmethod: asm.MethodVisitor = _
var jMethodName: String = _
- @inline final def emit(opc: Int) { jmethod.visitInsn(opc) }
+ final def emit(opc: Int) { jmethod.visitInsn(opc) }
def genMethod(m: IMethod, isJInterface: Boolean) {
@@ -1684,7 +1692,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
jmethod = clinitMethod
jMethodName = CLASS_CONSTRUCTOR_NAME
jmethod.visitCode()
- computeLocalVarsIndex(m)
genCode(m, false, true)
jmethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
jmethod.visitEnd()
@@ -1771,7 +1778,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
else { jmethod.visitLdcInsn(cst) }
}
- @inline final def boolconst(b: Boolean) { iconst(if(b) 1 else 0) }
+ final def boolconst(b: Boolean) { iconst(if(b) 1 else 0) }
def iconst(cst: Int) {
if (cst >= -1 && cst <= 5) {
@@ -1837,44 +1844,44 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
}
- @inline def load( idx: Int, tk: TypeKind) { emitVarInsn(Opcodes.ILOAD, idx, tk) }
- @inline def store(idx: Int, tk: TypeKind) { emitVarInsn(Opcodes.ISTORE, idx, tk) }
+ def load( idx: Int, tk: TypeKind) { emitVarInsn(Opcodes.ILOAD, idx, tk) }
+ def store(idx: Int, tk: TypeKind) { emitVarInsn(Opcodes.ISTORE, idx, tk) }
- @inline def aload( tk: TypeKind) { emitTypeBased(aloadOpcodes, tk) }
- @inline def astore(tk: TypeKind) { emitTypeBased(astoreOpcodes, tk) }
+ def aload( tk: TypeKind) { emitTypeBased(aloadOpcodes, tk) }
+ def astore(tk: TypeKind) { emitTypeBased(astoreOpcodes, tk) }
- @inline def neg(tk: TypeKind) { emitPrimitive(negOpcodes, tk) }
- @inline def add(tk: TypeKind) { emitPrimitive(addOpcodes, tk) }
- @inline def sub(tk: TypeKind) { emitPrimitive(subOpcodes, tk) }
- @inline def mul(tk: TypeKind) { emitPrimitive(mulOpcodes, tk) }
- @inline def div(tk: TypeKind) { emitPrimitive(divOpcodes, tk) }
- @inline def rem(tk: TypeKind) { emitPrimitive(remOpcodes, tk) }
+ def neg(tk: TypeKind) { emitPrimitive(negOpcodes, tk) }
+ def add(tk: TypeKind) { emitPrimitive(addOpcodes, tk) }
+ def sub(tk: TypeKind) { emitPrimitive(subOpcodes, tk) }
+ def mul(tk: TypeKind) { emitPrimitive(mulOpcodes, tk) }
+ def div(tk: TypeKind) { emitPrimitive(divOpcodes, tk) }
+ def rem(tk: TypeKind) { emitPrimitive(remOpcodes, tk) }
- @inline def invokespecial(owner: String, name: String, desc: String) {
+ def invokespecial(owner: String, name: String, desc: String) {
jmethod.visitMethodInsn(Opcodes.INVOKESPECIAL, owner, name, desc)
}
- @inline def invokestatic(owner: String, name: String, desc: String) {
+ def invokestatic(owner: String, name: String, desc: String) {
jmethod.visitMethodInsn(Opcodes.INVOKESTATIC, owner, name, desc)
}
- @inline def invokeinterface(owner: String, name: String, desc: String) {
+ def invokeinterface(owner: String, name: String, desc: String) {
jmethod.visitMethodInsn(Opcodes.INVOKEINTERFACE, owner, name, desc)
}
- @inline def invokevirtual(owner: String, name: String, desc: String) {
+ def invokevirtual(owner: String, name: String, desc: String) {
jmethod.visitMethodInsn(Opcodes.INVOKEVIRTUAL, owner, name, desc)
}
- @inline def goTo(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.GOTO, label) }
- @inline def emitIF(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIF, label) }
- @inline def emitIF_ICMP(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIFICMP, label) }
- @inline def emitIF_ACMP(cond: TestOp, label: asm.Label) {
+ def goTo(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.GOTO, label) }
+ def emitIF(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIF, label) }
+ def emitIF_ICMP(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIFICMP, label) }
+ def emitIF_ACMP(cond: TestOp, label: asm.Label) {
assert((cond == EQ) || (cond == NE), cond)
val opc = (if(cond == EQ) Opcodes.IF_ACMPEQ else Opcodes.IF_ACMPNE)
jmethod.visitJumpInsn(opc, label)
}
- @inline def emitIFNONNULL(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.IFNONNULL, label) }
- @inline def emitIFNULL (label: asm.Label) { jmethod.visitJumpInsn(Opcodes.IFNULL, label) }
+ def emitIFNONNULL(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.IFNONNULL, label) }
+ def emitIFNULL (label: asm.Label) { jmethod.visitJumpInsn(Opcodes.IFNULL, label) }
- @inline def emitRETURN(tk: TypeKind) {
+ def emitRETURN(tk: TypeKind) {
if(tk == UNIT) { jmethod.visitInsn(Opcodes.RETURN) }
else { emitTypeBased(returnOpcodes, tk) }
}
@@ -2031,12 +2038,12 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
var isModuleInitialized = false
- val labels: collection.Map[BasicBlock, asm.Label] = mutable.HashMap(linearization map (_ -> new asm.Label()) : _*)
+ val labels: scala.collection.Map[BasicBlock, asm.Label] = mutable.HashMap(linearization map (_ -> new asm.Label()) : _*)
val onePastLast = new asm.Label // token for the mythical instruction past the last instruction in the method being emitted
// maps a BasicBlock b to the Label that corresponds to b's successor in the linearization. The last BasicBlock is mapped to the onePastLast label.
- val linNext: collection.Map[BasicBlock, asm.Label] = {
+ val linNext: scala.collection.Map[BasicBlock, asm.Label] = {
val result = mutable.HashMap.empty[BasicBlock, asm.Label]
var rest = linearization
var prev = rest.head
@@ -2151,8 +2158,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
case class LocVarEntry(local: Local, start: asm.Label, end: asm.Label) // start is inclusive while end exclusive.
case class Interval(lstart: asm.Label, lend: asm.Label) {
- @inline final def start = lstart.getOffset
- @inline final def end = lend.getOffset
+ final def start = lstart.getOffset
+ final def end = lend.getOffset
def precedes(that: Interval): Boolean = { this.end < that.start }
@@ -2214,18 +2221,14 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
}
}
- def getMerged(): collection.Map[Local, List[Interval]] = {
+ def getMerged(): scala.collection.Map[Local, List[Interval]] = {
// TODO should but isn't: unbalanced start(s) of scope(s)
val shouldBeEmpty = pending filter { p => val Pair(k, st) = p; st.nonEmpty };
-
- val merged = mutable.Map.empty[Local, List[Interval]]
-
- def addToMerged(lv: Local, start: Label, end: Label) {
- val ranges = merged.getOrElseUpdate(lv, Nil)
- val coalesced = fuse(ranges, Interval(start, end))
- merged.update(lv, coalesced)
- }
-
+ val merged = mutable.Map[Local, List[Interval]]()
+ def addToMerged(lv: Local, start: Label, end: Label) {
+ val intv = Interval(start, end)
+ merged(lv) = if (merged contains lv) fuse(merged(lv), intv) else intv :: Nil
+ }
for(LocVarEntry(lv, start, end) <- seen) { addToMerged(lv, start, end) }
/* for each var with unbalanced start(s) of scope(s):
@@ -2375,8 +2378,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
def genBlock(b: BasicBlock) {
jmethod.visitLabel(labels(b))
- import asm.Opcodes;
-
debuglog("Generating code for block: " + b)
// val lastInstr = b.lastInstruction
@@ -2395,287 +2396,308 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
}
}
- (instr.category: @scala.annotation.switch) match {
-
- case icodes.localsCat => (instr: @unchecked) match {
- case THIS(_) => jmethod.visitVarInsn(Opcodes.ALOAD, 0)
- case LOAD_LOCAL(local) => jcode.load(indexOf(local), local.kind)
- case STORE_LOCAL(local) => jcode.store(indexOf(local), local.kind)
- case STORE_THIS(_) =>
- // this only works for impl classes because the self parameter comes first
- // in the method signature. If that changes, this code has to be revisited.
- jmethod.visitVarInsn(Opcodes.ASTORE, 0)
-
- case SCOPE_ENTER(lv) =>
- // locals removed by closelim (via CopyPropagation) may have left behind SCOPE_ENTER, SCOPE_EXIT that are to be ignored
- val relevant = (!lv.sym.isSynthetic && m.locals.contains(lv))
- if(relevant) { // TODO check: does GenICode emit SCOPE_ENTER, SCOPE_EXIT for synthetic vars?
- // this label will have DEBUG bit set in its flags (ie ASM ignores it for dataflow purposes)
- // similarly, these labels aren't tracked in the `labels` map.
- val start = new asm.Label
- jmethod.visitLabel(start)
- scoping.pushScope(lv, start)
- }
+ genInstr(instr, b)
- case SCOPE_EXIT(lv) =>
- val relevant = (!lv.sym.isSynthetic && m.locals.contains(lv))
- if(relevant) {
- // this label will have DEBUG bit set in its flags (ie ASM ignores it for dataflow purposes)
- // similarly, these labels aren't tracked in the `labels` map.
- val end = new asm.Label
- jmethod.visitLabel(end)
- scoping.popScope(lv, end, instr.pos)
- }
- }
+ }
- case icodes.stackCat => (instr: @unchecked) match {
+ }
- case LOAD_MODULE(module) =>
- // assert(module.isModule, "Expected module: " + module)
- debuglog("generating LOAD_MODULE for: " + module + " flags: " + Flags.flagsToString(module.flags));
- if (clasz.symbol == module.moduleClass && jMethodName != nme.readResolve.toString) {
- jmethod.visitVarInsn(Opcodes.ALOAD, 0)
- } else {
- jmethod.visitFieldInsn(
- Opcodes.GETSTATIC,
- javaName(module) /* + "$" */ ,
- strMODULE_INSTANCE_FIELD,
- descriptor(module)
- )
- }
+ def genInstr(instr: Instruction, b: BasicBlock) {
+ import asm.Opcodes
+ (instr.category: @scala.annotation.switch) match {
+
+ case icodes.localsCat =>
+ def genLocalInstr() = (instr: @unchecked) match {
+ case THIS(_) => jmethod.visitVarInsn(Opcodes.ALOAD, 0)
+ case LOAD_LOCAL(local) => jcode.load(indexOf(local), local.kind)
+ case STORE_LOCAL(local) => jcode.store(indexOf(local), local.kind)
+ case STORE_THIS(_) =>
+ // this only works for impl classes because the self parameter comes first
+ // in the method signature. If that changes, this code has to be revisited.
+ jmethod.visitVarInsn(Opcodes.ASTORE, 0)
+
+ case SCOPE_ENTER(lv) =>
+ // locals removed by closelim (via CopyPropagation) may have left behind SCOPE_ENTER, SCOPE_EXIT that are to be ignored
+ val relevant = (!lv.sym.isSynthetic && m.locals.contains(lv))
+ if (relevant) { // TODO check: does GenICode emit SCOPE_ENTER, SCOPE_EXIT for synthetic vars?
+ // this label will have DEBUG bit set in its flags (ie ASM ignores it for dataflow purposes)
+ // similarly, these labels aren't tracked in the `labels` map.
+ val start = new asm.Label
+ jmethod.visitLabel(start)
+ scoping.pushScope(lv, start)
+ }
- case DROP(kind) => emit(if(kind.isWideType) Opcodes.POP2 else Opcodes.POP)
+ case SCOPE_EXIT(lv) =>
+ val relevant = (!lv.sym.isSynthetic && m.locals.contains(lv))
+ if (relevant) {
+ // this label will have DEBUG bit set in its flags (ie ASM ignores it for dataflow purposes)
+ // similarly, these labels aren't tracked in the `labels` map.
+ val end = new asm.Label
+ jmethod.visitLabel(end)
+ scoping.popScope(lv, end, instr.pos)
+ }
+ }
+ genLocalInstr
+
+ case icodes.stackCat =>
+ def genStackInstr() = (instr: @unchecked) match {
+
+ case LOAD_MODULE(module) =>
+ // assert(module.isModule, "Expected module: " + module)
+ debuglog("generating LOAD_MODULE for: " + module + " flags: " + Flags.flagsToString(module.flags));
+ if (clasz.symbol == module.moduleClass && jMethodName != nme.readResolve.toString) {
+ jmethod.visitVarInsn(Opcodes.ALOAD, 0)
+ } else {
+ jmethod.visitFieldInsn(
+ Opcodes.GETSTATIC,
+ javaName(module) /* + "$" */ ,
+ strMODULE_INSTANCE_FIELD,
+ descriptor(module))
+ }
- case DUP(kind) => emit(if(kind.isWideType) Opcodes.DUP2 else Opcodes.DUP)
+ case DROP(kind) => emit(if (kind.isWideType) Opcodes.POP2 else Opcodes.POP)
- case LOAD_EXCEPTION(_) => ()
- }
+ case DUP(kind) => emit(if (kind.isWideType) Opcodes.DUP2 else Opcodes.DUP)
- case icodes.constCat => genConstant(jmethod, instr.asInstanceOf[CONSTANT].constant)
+ case LOAD_EXCEPTION(_) => ()
+ }
+ genStackInstr
- case icodes.arilogCat => genPrimitive(instr.asInstanceOf[CALL_PRIMITIVE].primitive, instr.pos)
+ case icodes.constCat => genConstant(jmethod, instr.asInstanceOf[CONSTANT].constant)
- case icodes.castsCat => (instr: @unchecked) match {
+ case icodes.arilogCat => genPrimitive(instr.asInstanceOf[CALL_PRIMITIVE].primitive, instr.pos)
- case IS_INSTANCE(tpe) =>
- val jtyp: asm.Type =
- tpe match {
- case REFERENCE(cls) => asm.Type.getObjectType(javaName(cls))
- case ARRAY(elem) => javaArrayType(javaType(elem))
- case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
- }
- jmethod.visitTypeInsn(Opcodes.INSTANCEOF, jtyp.getInternalName)
+ case icodes.castsCat =>
+ def genCastInstr() = (instr: @unchecked) match {
- case CHECK_CAST(tpe) =>
+ case IS_INSTANCE(tpe) =>
+ val jtyp: asm.Type =
tpe match {
-
- case REFERENCE(cls) =>
- if (cls != ObjectClass) { // No need to checkcast for Objects
- jmethod.visitTypeInsn(Opcodes.CHECKCAST, javaName(cls))
- }
-
- case ARRAY(elem) =>
- val iname = javaArrayType(javaType(elem)).getInternalName
- jmethod.visitTypeInsn(Opcodes.CHECKCAST, iname)
-
- case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
+ case REFERENCE(cls) => asm.Type.getObjectType(javaName(cls))
+ case ARRAY(elem) => javaArrayType(javaType(elem))
+ case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
}
+ jmethod.visitTypeInsn(Opcodes.INSTANCEOF, jtyp.getInternalName)
- }
-
- case icodes.objsCat => (instr: @unchecked) match {
+ case CHECK_CAST(tpe) =>
+ tpe match {
- case BOX(kind) =>
- val MethodNameAndType(mname, mdesc) = jBoxTo(kind)
- jcode.invokestatic(BoxesRunTime, mname, mdesc)
+ case REFERENCE(cls) =>
+ if (cls != ObjectClass) { // No need to checkcast for Objects
+ jmethod.visitTypeInsn(Opcodes.CHECKCAST, javaName(cls))
+ }
- case UNBOX(kind) =>
- val MethodNameAndType(mname, mdesc) = jUnboxTo(kind)
- jcode.invokestatic(BoxesRunTime, mname, mdesc)
+ case ARRAY(elem) =>
+ val iname = javaArrayType(javaType(elem)).getInternalName
+ jmethod.visitTypeInsn(Opcodes.CHECKCAST, iname)
- case NEW(REFERENCE(cls)) =>
- val className = javaName(cls)
- jmethod.visitTypeInsn(Opcodes.NEW, className)
+ case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
+ }
- case MONITOR_ENTER() => emit(Opcodes.MONITORENTER)
- case MONITOR_EXIT() => emit(Opcodes.MONITOREXIT)
- }
+ }
+ genCastInstr
- case icodes.fldsCat => (instr: @unchecked) match {
+ case icodes.objsCat =>
+ def genObjsInstr() = (instr: @unchecked) match {
- case lf @ LOAD_FIELD(field, isStatic) =>
- var owner = javaName(lf.hostClass)
- debuglog("LOAD_FIELD with owner: " + owner + " flags: " + Flags.flagsToString(field.owner.flags))
- val fieldJName = javaName(field)
- val fieldDescr = descriptor(field)
- val opc = if (isStatic) Opcodes.GETSTATIC else Opcodes.GETFIELD
- jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
+ case BOX(kind) =>
+ val MethodNameAndType(mname, mdesc) = jBoxTo(kind)
+ jcode.invokestatic(BoxesRunTime, mname, mdesc)
- case STORE_FIELD(field, isStatic) =>
- val owner = javaName(field.owner)
- val fieldJName = javaName(field)
- val fieldDescr = descriptor(field)
- val opc = if (isStatic) Opcodes.PUTSTATIC else Opcodes.PUTFIELD
- jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
+ case UNBOX(kind) =>
+ val MethodNameAndType(mname, mdesc) = jUnboxTo(kind)
+ jcode.invokestatic(BoxesRunTime, mname, mdesc)
- }
+ case NEW(REFERENCE(cls)) =>
+ val className = javaName(cls)
+ jmethod.visitTypeInsn(Opcodes.NEW, className)
- case icodes.mthdsCat => (instr: @unchecked) match {
+ case MONITOR_ENTER() => emit(Opcodes.MONITORENTER)
+ case MONITOR_EXIT() => emit(Opcodes.MONITOREXIT)
+ }
+ genObjsInstr
+
+ case icodes.fldsCat =>
+ def genFldsInstr() = (instr: @unchecked) match {
+
+ case lf @ LOAD_FIELD(field, isStatic) =>
+ var owner = javaName(lf.hostClass)
+ debuglog("LOAD_FIELD with owner: " + owner + " flags: " + Flags.flagsToString(field.owner.flags))
+ val fieldJName = javaName(field)
+ val fieldDescr = descriptor(field)
+ val opc = if (isStatic) Opcodes.GETSTATIC else Opcodes.GETFIELD
+ jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
+
+ case STORE_FIELD(field, isStatic) =>
+ val owner = javaName(field.owner)
+ val fieldJName = javaName(field)
+ val fieldDescr = descriptor(field)
+ val opc = if (isStatic) Opcodes.PUTSTATIC else Opcodes.PUTFIELD
+ jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
- /** Special handling to access native Array.clone() */
- case call @ CALL_METHOD(definitions.Array_clone, Dynamic) =>
- val target: String = javaType(call.targetTypeKind).getInternalName
- jcode.invokevirtual(target, "clone", mdesc_arrayClone)
+ }
+ genFldsInstr
- case call @ CALL_METHOD(method, style) => genCallMethod(call)
+ case icodes.mthdsCat =>
+ def genMethodsInstr() = (instr: @unchecked) match {
- }
+ /** Special handling to access native Array.clone() */
+ case call @ CALL_METHOD(definitions.Array_clone, Dynamic) =>
+ val target: String = javaType(call.targetTypeKind).getInternalName
+ jcode.invokevirtual(target, "clone", mdesc_arrayClone)
- case icodes.arraysCat => (instr: @unchecked) match {
- case LOAD_ARRAY_ITEM(kind) => jcode.aload(kind)
- case STORE_ARRAY_ITEM(kind) => jcode.astore(kind)
- case CREATE_ARRAY(elem, 1) => jcode newarray elem
- case CREATE_ARRAY(elem, dims) => jmethod.visitMultiANewArrayInsn(descriptor(ArrayN(elem, dims)), dims)
- }
+ case call @ CALL_METHOD(method, style) => genCallMethod(call)
- case icodes.jumpsCat => (instr: @unchecked) match {
-
- case sw @ SWITCH(tagss, branches) =>
- assert(branches.length == tagss.length + 1, sw)
- val flatSize = sw.flatTagsCount
- val flatKeys = new Array[Int](flatSize)
- val flatBranches = new Array[asm.Label](flatSize)
-
- var restTagss = tagss
- var restBranches = branches
- var k = 0 // ranges over flatKeys and flatBranches
- while(restTagss.nonEmpty) {
- val currLabel = labels(restBranches.head)
- for(cTag <- restTagss.head) {
- flatKeys(k) = cTag;
- flatBranches(k) = currLabel
- k += 1
- }
- restTagss = restTagss.tail
- restBranches = restBranches.tail
+ }
+ genMethodsInstr
+
+ case icodes.arraysCat =>
+ def genArraysInstr() = (instr: @unchecked) match {
+ case LOAD_ARRAY_ITEM(kind) => jcode.aload(kind)
+ case STORE_ARRAY_ITEM(kind) => jcode.astore(kind)
+ case CREATE_ARRAY(elem, 1) => jcode newarray elem
+ case CREATE_ARRAY(elem, dims) => jmethod.visitMultiANewArrayInsn(descriptor(ArrayN(elem, dims)), dims)
+ }
+ genArraysInstr
+
+ case icodes.jumpsCat =>
+ def genJumpInstr() = (instr: @unchecked) match {
+
+ case sw @ SWITCH(tagss, branches) =>
+ assert(branches.length == tagss.length + 1, sw)
+ val flatSize = sw.flatTagsCount
+ val flatKeys = new Array[Int](flatSize)
+ val flatBranches = new Array[asm.Label](flatSize)
+
+ var restTagss = tagss
+ var restBranches = branches
+ var k = 0 // ranges over flatKeys and flatBranches
+ while (restTagss.nonEmpty) {
+ val currLabel = labels(restBranches.head)
+ for (cTag <- restTagss.head) {
+ flatKeys(k) = cTag;
+ flatBranches(k) = currLabel
+ k += 1
}
- val defaultLabel = labels(restBranches.head)
- assert(restBranches.tail.isEmpty)
- debuglog("Emitting SWITCH:\ntags: " + tagss + "\nbranches: " + branches)
- jcode.emitSWITCH(flatKeys, flatBranches, defaultLabel, MIN_SWITCH_DENSITY)
-
- case JUMP(whereto) =>
- if (nextBlock != whereto) {
- jcode goTo labels(whereto)
- } else if(m.exh.exists(eh => eh.covers(b))) {
- // SI-6102: Determine whether eliding this JUMP results in an empty range being covered by some EH.
- // If so, emit a NOP in place of the elided JUMP, to avoid "java.lang.ClassFormatError: Illegal exception table range"
- val isSthgLeft = b.toList.exists {
- case _: LOAD_EXCEPTION => false
- case _: SCOPE_ENTER => false
- case _: SCOPE_EXIT => false
- case _: JUMP => false
- case _ => true
- }
- if(!isSthgLeft) {
- emit(asm.Opcodes.NOP)
- }
+ restTagss = restTagss.tail
+ restBranches = restBranches.tail
+ }
+ val defaultLabel = labels(restBranches.head)
+ assert(restBranches.tail.isEmpty)
+ debuglog("Emitting SWITCH:\ntags: " + tagss + "\nbranches: " + branches)
+ jcode.emitSWITCH(flatKeys, flatBranches, defaultLabel, MIN_SWITCH_DENSITY)
+
+ case JUMP(whereto) =>
+ if (nextBlock != whereto) {
+ jcode goTo labels(whereto)
+ } else if (m.exh.exists(eh => eh.covers(b))) {
+ // SI-6102: Determine whether eliding this JUMP results in an empty range being covered by some EH.
+ // If so, emit a NOP in place of the elided JUMP, to avoid "java.lang.ClassFormatError: Illegal exception table range"
+ val isSthgLeft = b.toList.exists {
+ case _: LOAD_EXCEPTION => false
+ case _: SCOPE_ENTER => false
+ case _: SCOPE_EXIT => false
+ case _: JUMP => false
+ case _ => true
}
+ if (!isSthgLeft) {
+ emit(asm.Opcodes.NOP)
+ }
+ }
- case CJUMP(success, failure, cond, kind) =>
- if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
- if (nextBlock == success) {
- jcode.emitIF_ICMP(cond.negate, labels(failure))
- // .. and fall through to success label
- } else {
- jcode.emitIF_ICMP(cond, labels(success))
- if (nextBlock != failure) { jcode goTo labels(failure) }
- }
- } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
- if (nextBlock == success) {
- jcode.emitIF_ACMP(cond.negate, labels(failure))
- // .. and fall through to success label
- } else {
- jcode.emitIF_ACMP(cond, labels(success))
- if (nextBlock != failure) { jcode goTo labels(failure) }
- }
+ case CJUMP(success, failure, cond, kind) =>
+ if (kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
+ if (nextBlock == success) {
+ jcode.emitIF_ICMP(cond.negate, labels(failure))
+ // .. and fall through to success label
} else {
- (kind: @unchecked) match {
- case LONG => emit(Opcodes.LCMP)
- case FLOAT =>
- if (cond == LT || cond == LE) emit(Opcodes.FCMPG)
- else emit(Opcodes.FCMPL)
- case DOUBLE =>
- if (cond == LT || cond == LE) emit(Opcodes.DCMPG)
- else emit(Opcodes.DCMPL)
- }
- if (nextBlock == success) {
- jcode.emitIF(cond.negate, labels(failure))
- // .. and fall through to success label
- } else {
- jcode.emitIF(cond, labels(success))
- if (nextBlock != failure) { jcode goTo labels(failure) }
- }
+ jcode.emitIF_ICMP(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
}
-
- case CZJUMP(success, failure, cond, kind) =>
- if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
- if (nextBlock == success) {
- jcode.emitIF(cond.negate, labels(failure))
- } else {
- jcode.emitIF(cond, labels(success))
- if (nextBlock != failure) { jcode goTo labels(failure) }
- }
- } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
- val Success = success
- val Failure = failure
- // @unchecked because references aren't compared with GT, GE, LT, LE.
- ((cond, nextBlock) : @unchecked) match {
- case (EQ, Success) => jcode emitIFNONNULL labels(failure)
- case (NE, Failure) => jcode emitIFNONNULL labels(success)
- case (EQ, Failure) => jcode emitIFNULL labels(success)
- case (NE, Success) => jcode emitIFNULL labels(failure)
- case (EQ, _) =>
- jcode emitIFNULL labels(success)
- jcode goTo labels(failure)
- case (NE, _) =>
- jcode emitIFNONNULL labels(success)
- jcode goTo labels(failure)
- }
+ } else if (kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
+ if (nextBlock == success) {
+ jcode.emitIF_ACMP(cond.negate, labels(failure))
+ // .. and fall through to success label
} else {
- (kind: @unchecked) match {
- case LONG =>
- emit(Opcodes.LCONST_0)
- emit(Opcodes.LCMP)
- case FLOAT =>
- emit(Opcodes.FCONST_0)
- if (cond == LT || cond == LE) emit(Opcodes.FCMPG)
- else emit(Opcodes.FCMPL)
- case DOUBLE =>
- emit(Opcodes.DCONST_0)
- if (cond == LT || cond == LE) emit(Opcodes.DCMPG)
- else emit(Opcodes.DCMPL)
- }
- if (nextBlock == success) {
- jcode.emitIF(cond.negate, labels(failure))
- } else {
- jcode.emitIF(cond, labels(success))
- if (nextBlock != failure) { jcode goTo labels(failure) }
- }
+ jcode.emitIF_ACMP(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
}
+ } else {
+ (kind: @unchecked) match {
+ case LONG => emit(Opcodes.LCMP)
+ case FLOAT =>
+ if (cond == LT || cond == LE) emit(Opcodes.FCMPG)
+ else emit(Opcodes.FCMPL)
+ case DOUBLE =>
+ if (cond == LT || cond == LE) emit(Opcodes.DCMPG)
+ else emit(Opcodes.DCMPL)
+ }
+ if (nextBlock == success) {
+ jcode.emitIF(cond.negate, labels(failure))
+ // .. and fall through to success label
+ } else {
+ jcode.emitIF(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ }
- }
-
- case icodes.retCat => (instr: @unchecked) match {
- case RETURN(kind) => jcode emitRETURN kind
- case THROW(_) => emit(Opcodes.ATHROW)
- }
+ case CZJUMP(success, failure, cond, kind) =>
+ if (kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
+ if (nextBlock == success) {
+ jcode.emitIF(cond.negate, labels(failure))
+ } else {
+ jcode.emitIF(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ } else if (kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
+ val Success = success
+ val Failure = failure
+ // @unchecked because references aren't compared with GT, GE, LT, LE.
+ ((cond, nextBlock): @unchecked) match {
+ case (EQ, Success) => jcode emitIFNONNULL labels(failure)
+ case (NE, Failure) => jcode emitIFNONNULL labels(success)
+ case (EQ, Failure) => jcode emitIFNULL labels(success)
+ case (NE, Success) => jcode emitIFNULL labels(failure)
+ case (EQ, _) =>
+ jcode emitIFNULL labels(success)
+ jcode goTo labels(failure)
+ case (NE, _) =>
+ jcode emitIFNONNULL labels(success)
+ jcode goTo labels(failure)
+ }
+ } else {
+ (kind: @unchecked) match {
+ case LONG =>
+ emit(Opcodes.LCONST_0)
+ emit(Opcodes.LCMP)
+ case FLOAT =>
+ emit(Opcodes.FCONST_0)
+ if (cond == LT || cond == LE) emit(Opcodes.FCMPG)
+ else emit(Opcodes.FCMPL)
+ case DOUBLE =>
+ emit(Opcodes.DCONST_0)
+ if (cond == LT || cond == LE) emit(Opcodes.DCMPG)
+ else emit(Opcodes.DCMPL)
+ }
+ if (nextBlock == success) {
+ jcode.emitIF(cond.negate, labels(failure))
+ } else {
+ jcode.emitIF(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ }
}
+ genJumpInstr
+ case icodes.retCat =>
+ def genRetInstr() = (instr: @unchecked) match {
+ case RETURN(kind) => jcode emitRETURN kind
+ case THROW(_) => emit(Opcodes.ATHROW)
+ }
+ genRetInstr
}
-
- } // end of genCode()'s genBlock()
+ }
/**
* Emits one or more conversion instructions based on the types given as arguments.
@@ -2759,6 +2781,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
case Negation(kind) => jcode.neg(kind)
case Arithmetic(op, kind) =>
+ def genArith() = {
op match {
case ADD => jcode.add(kind)
@@ -2781,57 +2804,89 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
case _ =>
abort("Unknown arithmetic primitive " + primitive)
}
+ }
+ genArith
// TODO Logical's 2nd elem should be declared ValueTypeKind, to better approximate its allowed values (isIntSized, as its comments appear to convey)
// TODO GenICode uses `toTypeKind` to define that elem, `toValueTypeKind` would be needed instead.
// TODO How about adding some asserts to Logical and similar ones to capture the remaining constraint (UNIT not allowed).
- case Logical(op, kind) => ((op, kind): @unchecked) match {
- case (AND, LONG) => emit(Opcodes.LAND)
- case (AND, INT) => emit(Opcodes.IAND)
- case (AND, _) =>
- emit(Opcodes.IAND)
- if (kind != BOOL) { emitT2T(INT, kind) }
-
- case (OR, LONG) => emit(Opcodes.LOR)
- case (OR, INT) => emit(Opcodes.IOR)
- case (OR, _) =>
- emit(Opcodes.IOR)
- if (kind != BOOL) { emitT2T(INT, kind) }
-
- case (XOR, LONG) => emit(Opcodes.LXOR)
- case (XOR, INT) => emit(Opcodes.IXOR)
- case (XOR, _) =>
- emit(Opcodes.IXOR)
- if (kind != BOOL) { emitT2T(INT, kind) }
- }
+ case Logical(op, kind) =>
+ def genLogical() = op match {
+ case AND =>
+ kind match {
+ case LONG => emit(Opcodes.LAND)
+ case INT => emit(Opcodes.IAND)
+ case _ =>
+ emit(Opcodes.IAND)
+ if (kind != BOOL) { emitT2T(INT, kind) }
+ }
+ case OR =>
+ kind match {
+ case LONG => emit(Opcodes.LOR)
+ case INT => emit(Opcodes.IOR)
+ case _ =>
+ emit(Opcodes.IOR)
+ if (kind != BOOL) { emitT2T(INT, kind) }
+ }
+ case XOR =>
+ kind match {
+ case LONG => emit(Opcodes.LXOR)
+ case INT => emit(Opcodes.IXOR)
+ case _ =>
+ emit(Opcodes.IXOR)
+ if (kind != BOOL) { emitT2T(INT, kind) }
+ }
+ }
+ genLogical
+
+ case Shift(op, kind) =>
+ def genShift() = op match {
+ case LSL =>
+ kind match {
+ case LONG => emit(Opcodes.LSHL)
+ case INT => emit(Opcodes.ISHL)
+ case _ =>
+ emit(Opcodes.ISHL)
+ emitT2T(INT, kind)
+ }
+ case ASR =>
+ kind match {
+ case LONG => emit(Opcodes.LSHR)
+ case INT => emit(Opcodes.ISHR)
+ case _ =>
+ emit(Opcodes.ISHR)
+ emitT2T(INT, kind)
+ }
+ case LSR =>
+ kind match {
+ case LONG => emit(Opcodes.LUSHR)
+ case INT => emit(Opcodes.IUSHR)
+ case _ =>
+ emit(Opcodes.IUSHR)
+ emitT2T(INT, kind)
+ }
+ }
+ genShift
- case Shift(op, kind) => ((op, kind): @unchecked) match {
- case (LSL, LONG) => emit(Opcodes.LSHL)
- case (LSL, INT) => emit(Opcodes.ISHL)
- case (LSL, _) =>
- emit(Opcodes.ISHL)
- emitT2T(INT, kind)
-
- case (ASR, LONG) => emit(Opcodes.LSHR)
- case (ASR, INT) => emit(Opcodes.ISHR)
- case (ASR, _) =>
- emit(Opcodes.ISHR)
- emitT2T(INT, kind)
-
- case (LSR, LONG) => emit(Opcodes.LUSHR)
- case (LSR, INT) => emit(Opcodes.IUSHR)
- case (LSR, _) =>
- emit(Opcodes.IUSHR)
- emitT2T(INT, kind)
- }
+ case Comparison(op, kind) =>
+ def genCompare() = op match {
+ case CMP =>
+ (kind: @unchecked) match {
+ case LONG => emit(Opcodes.LCMP)
+ }
+ case CMPL =>
+ (kind: @unchecked) match {
+ case FLOAT => emit(Opcodes.FCMPL)
+ case DOUBLE => emit(Opcodes.DCMPL)
+ }
+ case CMPG =>
+ (kind: @unchecked) match {
+ case FLOAT => emit(Opcodes.FCMPG)
+ case DOUBLE => emit(Opcodes.DCMPL) // TODO bug? why not DCMPG? http://docs.oracle.com/javase/specs/jvms/se5.0/html/Instructions2.doc3.html
- case Comparison(op, kind) => ((op, kind): @unchecked) match {
- case (CMP, LONG) => emit(Opcodes.LCMP)
- case (CMPL, FLOAT) => emit(Opcodes.FCMPL)
- case (CMPG, FLOAT) => emit(Opcodes.FCMPG)
- case (CMPL, DOUBLE) => emit(Opcodes.DCMPL)
- case (CMPG, DOUBLE) => emit(Opcodes.DCMPL) // TODO bug? why not DCMPG? http://docs.oracle.com/javase/specs/jvms/se5.0/html/Instructions2.doc3.html
- }
+ }
+ }
+ genCompare
case Conversion(src, dst) =>
debuglog("Converting from: " + src + " to: " + dst)
@@ -2894,7 +2949,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters {
// indexOf(local)
// }
- @inline final def indexOf(local: Local): Int = {
+ final def indexOf(local: Local): Int = {
assert(local.index >= 0, "Invalid index for: " + local + "{" + local.## + "}: ")
local.index
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index bad4ecc647..62c281b82f 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -18,7 +18,7 @@ import JAccessFlags._
import JObjectType.{ JAVA_LANG_STRING, JAVA_LANG_OBJECT }
import java.util.jar.{ JarEntry, JarOutputStream }
import scala.tools.nsc.io.AbstractFile
-import language.postfixOps
+import scala.language.postfixOps
/** This class ...
*
@@ -122,8 +122,10 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
inform("[running phase " + name + " on icode]")
if (settings.Xdce.value)
- for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym))
+ for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) {
+ log(s"Optimizer eliminated ${sym.fullNameString}")
icodes.classes -= sym
+ }
// For predictably ordered error messages.
val sortedClasses = classes.values.toList sortBy ("" + _.symbol.fullName)
@@ -728,7 +730,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
// generic information could disappear as a consequence of a seemingly
// unrelated change.
settings.Ynogenericsig.value
- || sym.isHidden
+ || sym.isArtifact
|| sym.isLiftedMethod
|| sym.isBridge
|| (sym.ownerChain exists (_.isImplClass))
@@ -866,7 +868,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
def genField(f: IField) {
debuglog("Adding field: " + f.symbol.fullName)
-
+
val jfield = jclass.addNewField(
javaFieldFlags(f.symbol),
javaName(f.symbol),
@@ -1021,8 +1023,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
method = m
jmethod = clinitMethod
-
- computeLocalVarsIndex(m)
genCode(m)
case None =>
legacyStaticInitializer(cls, clinit)
@@ -1116,7 +1116,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name } toSet
}
debuglog("Potentially conflicting names for forwarders: " + conflictingNames)
-
+
for (m <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, Flags.METHOD)) {
if (m.isType || m.isDeferred || (m.owner eq ObjectClass) || m.isConstructor)
debuglog("No forwarder for '%s' from %s to '%s'".format(m, className, moduleClass))
@@ -1124,9 +1124,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
log("No forwarder for " + m + " due to conflict with " + linkedClass.info.member(m.name))
else {
log("Adding static forwarder for '%s' from %s to '%s'".format(m, className, moduleClass))
- if (m.isAccessor && m.accessed.hasStaticAnnotation) {
- log("@static: accessor " + m + ", accessed: " + m.accessed)
- } else addForwarder(jclass, moduleClass, m)
+ addForwarder(jclass, moduleClass, m)
}
}
}
@@ -1308,7 +1306,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
jclass.getType())
}
}
-
+
style match {
case Static(true) => dbg("invokespecial"); jcode.emitINVOKESPECIAL(jowner, jname, jtype)
case Static(false) => dbg("invokestatic"); jcode.emitINVOKESTATIC(jowner, jname, jtype)
@@ -1889,7 +1887,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
*/
def computeLocalVarsIndex(m: IMethod) {
var idx = if (m.symbol.isStaticMember) 0 else 1;
-
+
for (l <- m.params) {
debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
l.index = idx
@@ -1960,7 +1958,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
// Nested objects won't receive ACC_FINAL in order to allow for their overriding.
val finalFlag = (
- (sym.hasFlag(Flags.FINAL) || isTopLevelModule(sym))
+ (((sym.rawflags & Flags.FINAL) != 0) || isTopLevelModule(sym))
&& !sym.enclClass.isInterface
&& !sym.isClassConstructor
&& !sym.isMutable // lazy vals and vars both
@@ -1977,7 +1975,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
if (finalFlag && !sym.hasAbstractFlag) ACC_FINAL else 0,
if (sym.isStaticMember) ACC_STATIC else 0,
if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
- if (sym.isHidden) ACC_SYNTHETIC else 0,
+ if (sym.isArtifact) ACC_SYNTHETIC else 0,
if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
if (sym.isVarargsMethod) ACC_VARARGS else 0,
if (sym.hasFlag(Flags.SYNCHRONIZED)) JAVA_ACC_SYNCHRONIZED else 0
diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
index 465a0c61e8..f56aa74d53 100644
--- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
+++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
@@ -15,7 +15,7 @@ import scala.tools.nsc.symtab._
import ch.epfl.lamp.compiler.msil.{Type => MsilType, _}
import ch.epfl.lamp.compiler.msil.emit._
import ch.epfl.lamp.compiler.msil.util.PECustomMod
-import language.postfixOps
+import scala.language.postfixOps
abstract class GenMSIL extends SubComponent {
import global._
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
index 7772ccbdd5..eb2da72401 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
@@ -35,7 +35,7 @@ abstract class ClosureElimination extends SubComponent {
case (STORE_LOCAL(x), LOAD_LOCAL(y)) if (x == y) =>
var liveOut = liveness.out(bb)
if (!liveOut(x)) {
- log("store/load to a dead local? " + x)
+ debuglog("store/load to a dead local? " + x)
val instrs = bb.getArray
var idx = instrs.length - 1
while (idx > 0 && (instrs(idx) ne i2)) {
@@ -43,7 +43,7 @@ abstract class ClosureElimination extends SubComponent {
idx -= 1
}
if (!liveOut(x)) {
- log("removing dead store/load " + x)
+ log("Removing dead store/load of " + x.sym.initialize.defString)
Some(Nil)
} else None
} else
@@ -84,6 +84,7 @@ abstract class ClosureElimination extends SubComponent {
*/
class ClosureElim {
def analyzeClass(cls: IClass): Unit = if (settings.Xcloselim.value) {
+ log(s"Analyzing ${cls.methods.size} methods in $cls.")
cls.methods foreach { m =>
analyzeMethod(m)
peephole(m)
@@ -95,7 +96,6 @@ abstract class ClosureElimination extends SubComponent {
/* Some embryonic copy propagation. */
def analyzeMethod(m: IMethod): Unit = try {if (m.hasCode) {
- log("Analyzing " + m)
cpp.init(m)
cpp.run
@@ -110,23 +110,20 @@ abstract class ClosureElimination extends SubComponent {
t match {
case Deref(This) | Const(_) =>
bb.replaceInstruction(i, valueToInstruction(t));
- log("replaced " + i + " with " + t)
+ debuglog(s"replaced $i with $t")
case _ =>
- bb.replaceInstruction(i, LOAD_LOCAL(info.getAlias(l)))
- log("replaced " + i + " with " + info.getAlias(l))
-
+ val t = info.getAlias(l)
+ bb.replaceInstruction(i, LOAD_LOCAL(t))
+ debuglog(s"replaced $i with $t")
}
case LOAD_FIELD(f, false) /* if accessible(f, m.symbol) */ =>
def replaceFieldAccess(r: Record) {
val Record(cls, bindings) = r
- info.getFieldNonRecordValue(r, f) match {
- case Some(v) =>
- bb.replaceInstruction(i,
- DROP(REFERENCE(cls)) :: valueToInstruction(v) :: Nil);
- log("Replaced " + i + " with " + info.getFieldNonRecordValue(r, f));
- case None =>
+ info.getFieldNonRecordValue(r, f) foreach { v =>
+ bb.replaceInstruction(i, DROP(REFERENCE(cls)) :: valueToInstruction(v) :: Nil)
+ debuglog(s"replaced $i with $v")
}
}
@@ -157,14 +154,14 @@ abstract class ClosureElimination extends SubComponent {
value match {
case Boxed(LocalVar(loc2)) =>
bb.replaceInstruction(i, DROP(icodes.ObjectReference) :: valueToInstruction(info.getBinding(loc2)) :: Nil)
- log("replaced " + i + " with " + info.getBinding(loc2))
+ debuglog("replaced " + i + " with " + info.getBinding(loc2))
case _ =>
()
}
case Boxed(LocalVar(loc1)) :: _ =>
val loc2 = info.getAlias(loc1)
bb.replaceInstruction(i, DROP(icodes.ObjectReference) :: valueToInstruction(Deref(LocalVar(loc2))) :: Nil)
- log("replaced " + i + " with " + LocalVar(loc2))
+ debuglog("replaced " + i + " with " + LocalVar(loc2))
case _ =>
}
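The store/load peephole above removes an adjacent STORE_LOCAL(x) / LOAD_LOCAL(x) pair when x is not live at the end of the block: the value simply stays on the stack, so the pair is a no-op. A toy version of that rule on a simplified instruction type (the real pass also re-checks liveness against later instructions in the same block, which this sketch skips):

    object StoreLoadPeephole {
      sealed trait Instr
      case class StoreLocal(x: String) extends Instr
      case class LoadLocal(x: String)  extends Instr
      case class Other(op: String)     extends Instr

      /** Drop store/load pairs of a local that is dead on block exit. */
      def peephole(block: List[Instr], liveOut: Set[String]): List[Instr] = block match {
        case StoreLocal(x) :: LoadLocal(y) :: rest if x == y && !liveOut(x) =>
          peephole(rest, liveOut)              // stored value is still on the stack, nothing to do
        case i :: rest => i :: peephole(rest, liveOut)
        case Nil       => Nil
      }
    }

For instance, peephole(List(StoreLocal("t"), LoadLocal("t"), Other("RETURN")), Set.empty) reduces to List(Other("RETURN")).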
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index fd949576e1..36a5d61cfb 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -44,6 +44,7 @@ abstract class DeadCodeElimination extends SubComponent {
class DeadCode {
def analyzeClass(cls: IClass) {
+ log(s"Analyzing ${cls.methods.size} methods in $cls.")
cls.methods.foreach { m =>
this.method = m
dieCodeDie(m)
@@ -73,7 +74,7 @@ abstract class DeadCodeElimination extends SubComponent {
def dieCodeDie(m: IMethod) {
if (m.hasCode) {
- log("dead code elimination on " + m);
+ debuglog("dead code elimination on " + m);
dropOf.clear()
m.code.blocks.clear()
accessedLocals = m.params.reverse
@@ -82,8 +83,10 @@ abstract class DeadCodeElimination extends SubComponent {
mark
sweep(m)
accessedLocals = accessedLocals.distinct
- if ((m.locals diff accessedLocals).nonEmpty) {
- log("Removed dead locals: " + (m.locals diff accessedLocals))
+ val diff = m.locals diff accessedLocals
+ if (diff.nonEmpty) {
+ val msg = diff.map(_.sym.name).mkString(", ")
+ log(s"Removed ${diff.size} dead locals: $msg")
m.locals = accessedLocals.reverse
}
}
@@ -126,7 +129,7 @@ abstract class DeadCodeElimination extends SubComponent {
case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) |
THROW(_) | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) |
LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() => worklist += ((bb, idx))
- case CALL_METHOD(m1, _) if isSideEffecting(m1) => worklist += ((bb, idx)); log("marking " + m1)
+ case CALL_METHOD(m1, _) if isSideEffecting(m1) => worklist += ((bb, idx)); debuglog("marking " + m1)
case CALL_METHOD(m1, SuperCall(_)) =>
worklist += ((bb, idx)) // super calls to constructor
case DROP(_) =>
@@ -173,7 +176,7 @@ abstract class DeadCodeElimination extends SubComponent {
instr match {
case LOAD_LOCAL(l1) =>
for ((l2, bb1, idx1) <- defs((bb, idx)) if l1 == l2; if !useful(bb1)(idx1)) {
- log("\tAdding " + bb1(idx1))
+ debuglog("\tAdding " + bb1(idx1))
worklist += ((bb1, idx1))
}
@@ -197,7 +200,7 @@ abstract class DeadCodeElimination extends SubComponent {
case _ =>
for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) {
- log("\tAdding " + bb1(idx1))
+ debuglog("\tAdding " + bb1(idx1))
worklist += ((bb1, idx1))
}
}
@@ -232,7 +235,7 @@ abstract class DeadCodeElimination extends SubComponent {
} else {
i match {
case NEW(REFERENCE(sym)) =>
- log("skipped object creation: " + sym + "inside " + m)
+ log(s"Eliminated instantiation of $sym inside $m")
case _ => ()
}
debuglog("Skipped: bb_" + bb + ": " + idx + "( " + i + ")")
@@ -240,7 +243,7 @@ abstract class DeadCodeElimination extends SubComponent {
}
if (bb.nonEmpty) bb.close
- else log("empty block encountered")
+ else log(s"empty block encountered in $m")
}
}
@@ -252,7 +255,7 @@ abstract class DeadCodeElimination extends SubComponent {
foreachWithIndex(bb.toList) { (i, idx) =>
if (!useful(bb)(idx)) {
foreachWithIndex(i.consumedTypes.reverse) { (consumedType, depth) =>
- log("Finding definitions of: " + i + "\n\t" + consumedType + " at depth: " + depth)
+ debuglog("Finding definitions of: " + i + "\n\t" + consumedType + " at depth: " + depth)
val defs = rdef.findDefs(bb, idx, 1, depth)
for (d <- defs) {
val (bb, idx) = d
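The marking seeded above is a worklist fixpoint: side-effecting instructions (returns, jumps, stores, throwing calls, ...) are useful by definition, every definition reaching an operand of a useful instruction becomes useful in turn, and the sweep removes the rest. A self-contained sketch of that closure, with the reaching-definitions query replaced by an explicit dependency map (instruction indices instead of (bb, idx) pairs):

    object MarkPhase {
      import scala.collection.mutable

      /** `deps(i)` = instructions whose results instruction `i` consumes. */
      def markUseful(seeds: Set[Int], deps: Map[Int, Set[Int]]): Set[Int] = {
        val useful   = mutable.Set(seeds.toSeq: _*)
        val worklist = mutable.Queue(seeds.toSeq: _*)
        while (worklist.nonEmpty) {
          val i = worklist.dequeue()
          for (d <- deps.getOrElse(i, Set.empty) if useful.add(d))
            worklist += d                       // newly discovered useful definition
        }
        useful.toSet
      }
    }

markUseful(Set(3), Map(3 -> Set(1), 1 -> Set(0))) is Set(0, 1, 3); an instruction with no path into that set would be eliminated.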
diff --git a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
index f1f597322e..98120f0614 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
@@ -93,10 +93,9 @@ abstract class InlineExceptionHandlers extends SubComponent {
val startTime = System.currentTimeMillis
currentClass = c
- log("Starting " + c)
+ debuglog("Starting InlineExceptionHandlers on " + c)
c.methods foreach applyMethod
-
- log("Finished " + c + "... " + (System.currentTimeMillis - startTime) + "ms")
+ debuglog("Finished InlineExceptionHandlers on " + c + "... " + (System.currentTimeMillis - startTime) + "ms")
currentClass = null
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index d87a242f1b..e9fb060dda 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -44,7 +44,7 @@ abstract class Inliners extends SubComponent {
import definitions.{
NullClass, NothingClass, ObjectClass,
PredefModule, RuntimePackage, ScalaInlineClass, ScalaNoInlineClass,
- isFunctionType
+ isFunctionType, isByNameParamType
}
val phaseName = "inliner"
@@ -143,7 +143,6 @@ abstract class Inliners extends SubComponent {
}
def isBottomType(sym: Symbol) = sym == NullClass || sym == NothingClass
- def posToStr(pos: scala.reflect.internal.util.Position) = if (pos.isDefined) pos.point.toString else "<nopos>"
/** Is the given class a closure? */
def isClosureClass(cls: Symbol): Boolean =
@@ -194,6 +193,27 @@ abstract class Inliners extends SubComponent {
private var currentIClazz: IClass = _
private def warn(pos: Position, msg: String) = currentIClazz.cunit.inlinerWarning(pos, msg)
+ private def ownedName(sym: Symbol): String = afterUncurry {
+ val count = (
+ if (!sym.isMethod) 1
+ else if (sym.owner.isAnonymousFunction) 3
+ else 2
+ )
+ (sym.ownerChain take count filterNot (_.isPackageClass)).reverseMap(_.nameString).mkString(".")
+ }
+ private def inlineLog(what: String, main: => String, comment: => String) {
+ def cstr = comment match {
+ case "" => ""
+ case str => " // " + str
+ }
+ val width = if (currentIClazz eq null) 40 else currentIClazz.symbol.enclosingPackage.fullName.length + 25
+ val fmt = "%8s %-" + width + "s" + cstr
+ log(fmt.format(what, main))
+ }
+ private def inlineLog(what: String, main: Symbol, comment: => String) {
+ inlineLog(what, ownedName(main), comment)
+ }
+
val recentTFAs = mutable.Map.empty[Symbol, Tuple2[Boolean, analysis.MethodTFA]]
private def getRecentTFA(incm: IMethod, forceable: Boolean): (Boolean, analysis.MethodTFA) = {
@@ -244,14 +264,15 @@ abstract class Inliners extends SubComponent {
def analyzeClass(cls: IClass): Unit =
if (settings.inline.value) {
- debuglog("Analyzing " + cls)
+ inlineLog("class", s"${cls.symbol.decodedName}", s"analyzing ${cls.methods.size} methods in $cls")
this.currentIClazz = cls
val ms = cls.methods sorted imethodOrdering
ms foreach { im =>
- if(hasInline(im.symbol)) {
- log("Not inlining into " + im.symbol.originalName.decode + " because it is marked @inline.")
- } else if(im.hasCode && !im.symbol.isBridge) {
+ if (hasInline(im.symbol)) {
+ inlineLog("skip", im.symbol, "no inlining into @inline methods")
+ }
+ else if(im.hasCode && !im.symbol.isBridge) {
analyzeMethod(im)
}
}
@@ -296,6 +317,8 @@ abstract class Inliners extends SubComponent {
* */
def analyzeMethod(m: IMethod): Unit = {
// m.normalize
+ if (settings.debug.value)
+ inlineLog("caller", ownedName(m.symbol), "in " + m.symbol.owner.fullName)
var sizeBeforeInlining = m.code.blockCount
var instrBeforeInlining = m.code.instructionCount
@@ -306,8 +329,8 @@ abstract class Inliners extends SubComponent {
val fresh = mutable.HashMap.empty[String, Int] withDefaultValue 0
// how many times have we already inlined this method here?
val inlinedMethodCount = mutable.HashMap.empty[Symbol, Int] withDefaultValue 0
-
val caller = new IMethodInfo(m)
+ def analyzeMessage = s"Analyzing ${caller.length} blocks of $m for inlining sites."
def preInline(isFirstRound: Boolean): Int = {
val inputBlocks = caller.m.linearizedBlocks()
@@ -354,15 +377,17 @@ abstract class Inliners extends SubComponent {
*/
def analyzeInc(i: CALL_METHOD, bb: BasicBlock, receiver: Symbol, stackLength: Int, concreteMethod: Symbol): Boolean = {
assert(bb.toList contains i, "Candidate callsite does not belong to BasicBlock.")
-
- var inlined = false
val shouldWarn = hasInline(i.method)
- def warnNoInline(reason: String) = {
- if (shouldWarn) {
- warn(i.pos, "Could not inline required method %s because %s.".format(i.method.originalName.decode, reason))
- }
- }
+ def warnNoInline(reason: String): Boolean = {
+ def msg = "Could not inline required method %s because %s.".format(i.method.originalName.decode, reason)
+ if (settings.debug.value)
+ inlineLog("fail", i.method.fullName, reason)
+ if (shouldWarn)
+ warn(i.pos, msg)
+
+ false
+ }
var isAvailable = icodes available concreteMethod.enclClass
@@ -378,92 +403,69 @@ abstract class Inliners extends SubComponent {
isAvailable = icodes.load(concreteMethod.enclClass)
}
- def isCandidate = (
- isClosureClass(receiver)
- || concreteMethod.isEffectivelyFinal
- || receiver.isEffectivelyFinal
- )
+ def isCandidate = (
+ isClosureClass(receiver)
+ || concreteMethod.isEffectivelyFinal
+ || receiver.isEffectivelyFinal
+ )
- def isApply = concreteMethod.name == nme.apply
+ def isApply = concreteMethod.name == nme.apply
- def isCountable = !(
- isClosureClass(receiver)
- || isApply
- || isMonadicMethod(concreteMethod)
- || receiver.enclosingPackage == definitions.RuntimePackage
- ) // only count non-closures
+ def isCountable = !(
+ isClosureClass(receiver)
+ || isApply
+ || isMonadicMethod(concreteMethod)
+ || receiver.enclosingPackage == definitions.RuntimePackage
+ ) // only count non-closures
debuglog("Treating " + i
+ "\n\treceiver: " + receiver
+ "\n\ticodes.available: " + isAvailable
+ "\n\tconcreteMethod.isEffectivelyFinal: " + concreteMethod.isEffectivelyFinal)
- if (isAvailable && isCandidate) {
- lookupIMethod(concreteMethod, receiver) match {
-
- case Some(callee) if callee.hasCode =>
- val inc = new IMethodInfo(callee)
- val pair = new CallerCalleeInfo(caller, inc, fresh, inlinedMethodCount)
-
- if(inc.hasHandlers && (stackLength == -1)) {
- // no inlining is done, yet don't warn about it, stackLength == -1 indicates we're trying to inlineWithoutTFA.
- // Shortly, a TFA will be computed and an error message reported if indeed inlining not possible.
- return false
- }
-
- (pair isStampedForInlining stackLength) match {
-
- case inlInfo if inlInfo.isSafe =>
-
- (inlInfo: @unchecked) match {
-
- case FeasibleInline(accessNeeded, toBecomePublic) =>
- for(f <- toBecomePublic) {
- debuglog("Making public (synthetic) field-symbol: " + f)
- f setFlag Flags.notPRIVATE
- f setFlag Flags.notPROTECTED
- }
- // only add to `knownSafe` after all `toBecomePublic` fields actually made public.
- if(accessNeeded == NonPublicRefs.Public) { tfa.knownSafe += inc.sym }
-
- case InlineableAtThisCaller => ()
-
- }
-
- retry = true
- inlined = true
- if (isCountable) { count += 1 };
+ if (!isCandidate) warnNoInline("it can be overridden")
+ else if (!isAvailable) warnNoInline("bytecode unavailable")
+ else lookupIMethod(concreteMethod, receiver) filter (callee => callee.hasCode || warnNoInline("callee has no code")) exists { callee =>
+ val inc = new IMethodInfo(callee)
+ val pair = new CallerCalleeInfo(caller, inc, fresh, inlinedMethodCount)
- pair.doInline(bb, i)
- if (!pair.isInlineForced || inc.isMonadic) { caller.inlinedCalls += 1 };
- inlinedMethodCount(inc.sym) += 1
-
- // Remove the caller from the cache (this inlining might have changed its calls-private relation).
- usesNonPublics -= m
- recentTFAs -= m.symbol
-
-
- case DontInlineHere(msg) =>
- debuglog("inline failed, reason: " + msg)
- warnNoInline(msg)
-
- case NeverSafeToInline => ()
- }
-
- case Some(callee) =>
- assert(!callee.hasCode, "The case clause right before this one should have handled this case.")
- warnNoInline("callee (" + callee + ") has no code")
- ()
+ if (inc.hasHandlers && (stackLength == -1)) {
+ // No inlining is done here, but don't warn about it: stackLength == -1 indicates we're trying to inlineWithoutTFA.
+ // Shortly, a TFA will be computed and an error message reported if inlining is indeed not possible.

+ false
+ }
+ else {
+ val isSafe = pair isStampedForInlining stackLength match {
+ case DontInlineHere(msg) => warnNoInline(msg)
+ case NeverSafeToInline => false
+ case InlineableAtThisCaller => true
+ case inl @ FeasibleInline(_, _) if !inl.isSafe => false
+ case FeasibleInline(required, toPublicize) =>
+ for (f <- toPublicize) {
+ inlineLog("access", f, "making public")
+ f setFlag Flags.notPRIVATE
+ f setFlag Flags.notPROTECTED
+ }
+ // only add to `knownSafe` after all `toPublicize` fields actually made public.
+ if (required == NonPublicRefs.Public)
+ tfa.knownSafe += inc.sym
- case None =>
- warnNoInline("bytecode was not available")
- debuglog("could not find icode\n\treceiver: " + receiver + "\n\tmethod: " + concreteMethod)
+ true
+ }
+ isSafe && {
+ retry = true
+ if (isCountable) count += 1
+ pair.doInline(bb, i)
+ if (!pair.isInlineForced || inc.isMonadic) caller.inlinedCalls += 1
+ inlinedMethodCount(inc.sym) += 1
+
+ // Remove the caller from the cache (this inlining might have changed its calls-private relation).
+ usesNonPublics -= m
+ recentTFAs -= m.symbol
+ true
+ }
}
- } else {
- warnNoInline(if(!isAvailable) "bytecode was not available" else "it can be overridden")
}
-
- inlined
}
/* Pre-inlining consists in invoking the usual inlining subroutine with (receiver class, concrete method) pairs as input
@@ -485,7 +487,7 @@ abstract class Inliners extends SubComponent {
do {
retry = false
- log("Analyzing " + m + " count " + count + " with " + caller.length + " blocks")
+ debuglog(analyzeMessage)
/* it's important not to inline in unreachable basic blocks. linearizedBlocks() returns only reachable ones. */
tfa.callerLin = caller.m.linearizedBlocks()
@@ -567,9 +569,16 @@ abstract class Inliners extends SubComponent {
m.normalize
if (sizeBeforeInlining > 0) {
val instrAfterInlining = m.code.instructionCount
- val prefix = if ((instrAfterInlining > 2 * instrBeforeInlining) && (instrAfterInlining > 200)) " !! " else ""
- log(prefix + " %s blocks before inlining: %d (%d) after: %d (%d)".format(
- m.symbol.fullName, sizeBeforeInlining, instrBeforeInlining, m.code.blockCount, instrAfterInlining))
+ val prefix = if ((instrAfterInlining > 2 * instrBeforeInlining) && (instrAfterInlining > 200)) "!!" else ""
+ val inlinings = caller.inlinedCalls
+ if (inlinings > 0) {
+ val s1 = s"instructions $instrBeforeInlining -> $instrAfterInlining"
+ val s2 = if (sizeBeforeInlining == m.code.blockCount) "" else s", blocks $sizeBeforeInlining -> ${m.code.blockCount}"
+ val callees = inlinedMethodCount.toList map { case (k, v) => k.fullNameString + ( if (v == 1) "" else "/" + v ) }
+
+ inlineLog("inlined", m.symbol.fullName, callees.sorted.mkString(inlinings + " inlined: ", ", ", ""))
+ inlineLog("<<tldr>>", m.symbol.fullName, s"${m.symbol.nameString}: $s1$s2")
+ }
}
}
@@ -589,6 +598,8 @@ abstract class Inliners extends SubComponent {
}
class IMethodInfo(val m: IMethod) {
+ override def toString = m.toString
+
val sym = m.symbol
val name = sym.name
def owner = sym.owner
@@ -608,10 +619,11 @@ abstract class Inliners extends SubComponent {
def instructions = m.code.instructions
// def linearized = linearizer linearize m
- def isSmall = (length <= SMALL_METHOD_SIZE) && blocks(0).length < 10
- def isLarge = length > MAX_INLINE_SIZE
- def isRecursive = m.recursive
- def hasHandlers = handlers.nonEmpty || m.bytecodeHasEHs
+ def isSmall = (length <= SMALL_METHOD_SIZE) && blocks(0).length < 10
+ def isLarge = length > MAX_INLINE_SIZE
+ def isRecursive = m.recursive
+ def hasHandlers = handlers.nonEmpty || m.bytecodeHasEHs
+ def hasClosureParam = paramTypes exists (tp => isByNameParamType(tp) || isFunctionType(tp))
def isSynchronized = sym.hasFlag(Flags.SYNCHRONIZED)
def hasNonFinalizerHandler = handlers exists {
@@ -661,13 +673,15 @@ abstract class Inliners extends SubComponent {
*
* TODO handle more robustly the case of a trait var changed at the source-level from public to private[this]
* (eg by having ICodeReader use unpickler, see SI-5442).
- * */
+
+ DISABLED
+
def potentiallyPublicized(f: Symbol): Boolean = {
(m.sourceFile eq NoSourceFile) && f.name.containsChar('$')
}
+ */
- def checkField(f: Symbol) = check(f, potentiallyPublicized(f) ||
- (f.isPrivate && !canMakePublic(f)))
+ def checkField(f: Symbol) = check(f, f.isPrivate && !canMakePublic(f))
def checkSuper(n: Symbol) = check(n, n.isPrivate || !n.isClassConstructor)
def checkMethod(n: Symbol) = check(n, n.isPrivate)
@@ -685,7 +699,7 @@ abstract class Inliners extends SubComponent {
val i = iter.next()
getAccess(i) match {
case Private =>
- log("instruction " + i + " requires private access.")
+ inlineLog("access", s"instruction $i requires private access", "pos=" + i.pos)
toBecomePublic = Nil
seen = Private
case Protected => seen = Protected
@@ -732,7 +746,7 @@ abstract class Inliners extends SubComponent {
toBecomePublic: List[Symbol]
)
- final class CallerCalleeInfo(val caller: IMethodInfo, val inc: IMethodInfo, fresh: mutable.Map[String, Int], inlinedMethodCount: collection.Map[Symbol, Int]) {
+ final class CallerCalleeInfo(val caller: IMethodInfo, val inc: IMethodInfo, fresh: mutable.Map[String, Int], inlinedMethodCount: scala.collection.Map[Symbol, Int]) {
assert(!caller.isBridge && inc.m.hasCode,
"A guard in Inliner.analyzeClass() should have prevented from getting here.")
@@ -762,11 +776,10 @@ abstract class Inliners extends SubComponent {
tfa.warnIfInlineFails.remove(instr)
val targetPos = instr.pos
- log("Inlining " + inc.m + " in " + caller.m + " at pos: " + posToStr(targetPos))
def blockEmit(i: Instruction) = block.emit(i, targetPos)
def newLocal(baseName: String, kind: TypeKind) =
- new Local(caller.sym.newVariable(freshName(baseName), targetPos), kind, false)
+ new Local(caller.sym.newVariable(freshName(baseName), targetPos) setInfo kind.toType, kind, false)
val (hasRETURN, a) = getRecentTFA(inc.m, isInlineForced)
@@ -953,6 +966,7 @@ abstract class Inliners extends SubComponent {
if(reasonWhyNever != null) {
tfa.knownNever += inc.sym
+ inlineLog("never", inc.sym, reasonWhyNever)
// next time around NeverSafeToInline is returned, thus skipping (duplicate) msg, this is intended.
return DontInlineHere(inc.m + " " + reasonWhyNever)
}
@@ -975,10 +989,15 @@ abstract class Inliners extends SubComponent {
* As a result of (b), some synthetic private members can be chosen to become public.
*/
- if(!isInlineForced && !isScoreOK) {
+ val score = inlinerScore
+ val scoreStr = if (score > 0) "+" + score else "" + score
+ val what = if (score > 0) "ok to" else "don't"
+ inlineLog(scoreStr, inc.m.symbol, s"$what inline into ${ownedName(caller.m.symbol)}")
+
+ if (!isInlineForced && score <= 0) {
// During inlining retry, a previous caller-callee pair that scored low may pass.
// Thus, adding the callee to tfa.knownUnsafe isn't warranted.
- return DontInlineHere("too low score (heuristics)")
+ return DontInlineHere(s"inliner heuristic")
}
if(inc.hasHandlers && (stackLength > inc.minimumStack)) {
@@ -997,7 +1016,9 @@ abstract class Inliners extends SubComponent {
val accReq = inc.accessRequirements
if(!canAccess(accReq.accessNeeded)) {
tfa.knownUnsafe += inc.sym
- return DontInlineHere("access level required by callee not matched by caller")
+ val msg = "access level required by callee not matched by caller"
+ inlineLog("fail", inc.sym, msg)
+ return DontInlineHere(msg)
}
FeasibleInline(accReq.accessNeeded, accReq.toBecomePublic)
@@ -1019,9 +1040,7 @@ abstract class Inliners extends SubComponent {
* - it's good to inline closures functions.
* - it's bad (useless) to inline inside bridge methods
*/
- def isScoreOK: Boolean = {
- debuglog("shouldInline: " + caller.m + " , callee:" + inc.m)
-
+ def inlinerScore: Int = {
var score = 0
// better not inline inside closures, but hope that the closure itself is repeatedly inlined
@@ -1029,21 +1048,19 @@ abstract class Inliners extends SubComponent {
else if (caller.inlinedCalls < 1) score -= 1 // only monadic methods can trigger the first inline
if (inc.isSmall) score += 1;
+ // if (inc.hasClosureParam) score += 2
if (inc.isLarge) score -= 1;
if (caller.isSmall && isLargeSum) {
score -= 1
- debuglog("shouldInline: score decreased to " + score + " because small " + caller + " would become large")
+ debuglog(s"inliner score decreased to $score because small caller $caller would become large")
}
if (inc.isMonadic) score += 3
else if (inc.isHigherOrder) score += 1
- if (inc.isInClosure) score += 2;
- if (inlinedMethodCount(inc.sym) > 2) score -= 2;
-
- log("shouldInline(" + inc.m + ") score: " + score)
-
- score > 0
+ if (inc.isInClosure) score += 2
+ if (inlinedMethodCount(inc.sym) > 2) score -= 2
+ score
}
}
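isScoreOK becomes inlinerScore above so the raw score can be logged ("+2 ... ok to inline" / "don't inline") before the caller applies the score <= 0 cut-off. The heuristic itself, reduced to the booleans it actually consults (the case-class fields are illustrative stand-ins for IMethodInfo, not the compiler's types):

    object InlineHeuristic {
      case class CallerInfo(isSmall: Boolean, inlinedCalls: Int)
      case class CalleeInfo(isSmall: Boolean, isLarge: Boolean, isMonadic: Boolean,
                            isHigherOrder: Boolean, isInClosure: Boolean)

      /** Positive = worth inlining here; non-positive = skip unless @inline forces it. */
      def inlinerScore(caller: CallerInfo, callee: CalleeInfo,
                       timesAlreadyInlinedHere: Int, callerWouldBecomeLarge: Boolean): Int = {
        var score = 0
        // (the real pass first penalizes callers that are themselves closure bodies)
        if (caller.inlinedCalls < 1) score -= 1   // only monadic callees should trigger the first inline

        if (callee.isSmall) score += 1
        if (callee.isLarge) score -= 1
        if (caller.isSmall && callerWouldBecomeLarge) score -= 1

        if (callee.isMonadic)          score += 3
        else if (callee.isHigherOrder) score += 1

        if (callee.isInClosure)          score += 2
        if (timesAlreadyInlinedHere > 2) score -= 2
        score
      }
    }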
diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
index 176c00c025..7f5f412a20 100644
--- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
@@ -3,7 +3,7 @@ package dependencies
import symtab.Flags
-import collection._
+import scala.collection._
/** A component that describes the possible changes between successive
* compilations of a class.
diff --git a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
index 317cc28298..cdde768274 100644
--- a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
@@ -2,7 +2,7 @@ package scala.tools.nsc
package dependencies
import io.Path
-import collection._
+import scala.collection._
import symtab.Flags
import scala.tools.nsc.io.AbstractFile
import scala.reflect.internal.util.SourceFile
diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/compiler/scala/tools/nsc/doc/Settings.scala
index 720b1347ef..64a376b96e 100644
--- a/src/compiler/scala/tools/nsc/doc/Settings.scala
+++ b/src/compiler/scala/tools/nsc/doc/Settings.scala
@@ -8,7 +8,7 @@ package doc
import java.io.File
import java.lang.System
-import language.postfixOps
+import scala.language.postfixOps
/** An extended version of compiler settings, with additional Scaladoc-specific options.
* @param error A function that prints a string to the appropriate error stream
@@ -111,6 +111,12 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
"only use it if you haven't defined usecase for implicitly inherited members."
)
+ val docImplicitsHide = MultiStringSetting (
+ "-implicits-hide",
+ "implicit(s)",
+ "Hide the members inherited by the given comma separated, fully qualified implicit conversions. Add dot (.) to include default conversions."
+ )
+
val docDiagrams = BooleanSetting (
"-diagrams",
"Create inheritance diagrams for classes, traits and packages."
@@ -188,6 +194,12 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
"Expand all type aliases and abstract types into full template pages. (locally this can be done with the @template annotation)"
)
+ val docExternalUrls = MultiStringSetting (
+ "-external-urls",
+ "externalUrl(s)",
+ "comma-separated list of package_names=doc_URL for external dependencies, where package names are ':'-separated"
+ )
+
val docGroups = BooleanSetting (
"-groups",
"Group similar functions together (based on the @group annotation)"
@@ -203,7 +215,7 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator, docRootContent, useStupidTypes,
docDiagrams, docDiagramsDebug, docDiagramsDotPath,
docDiagramsDotTimeout, docDiagramsDotRestart,
- docImplicits, docImplicitsDebug, docImplicitsShowAll,
+ docImplicits, docImplicitsDebug, docImplicitsShowAll, docImplicitsHide,
docDiagramsMaxNormalClasses, docDiagramsMaxImplicitClasses,
docNoPrefixes, docNoLinkWarnings, docRawOutput, docSkipPackages,
docExpandAllTypes, docGroups
@@ -224,6 +236,30 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
def skipPackage(qname: String) =
skipPackageNames(qname.toLowerCase)
+ lazy val hiddenImplicits: Set[String] = {
+ if (docImplicitsHide.value.isEmpty) hardcoded.commonConversionTargets
+ else docImplicitsHide.value.toSet flatMap { name: String =>
+ if(name == ".") hardcoded.commonConversionTargets
+ else Set(name)
+ }
+ }
+
+ // TODO: Enable scaladoc to scoop up the package list from another scaladoc site, just as javadoc does
+ // -external-urls 'http://www.scala-lang.org/archives/downloads/distrib/files/nightly/docs/library'
+ // should trigger scaladoc to fetch the package-list file. The steps necessary:
+ // 1 - list all packages generated in scaladoc in the package-list file, exactly as javadoc:
+ // see http://docs.oracle.com/javase/6/docs/api/package-list for http://docs.oracle.com/javase/6/docs/api
+ // 2 - download the file and add the packages to the list
+ lazy val extUrlMapping: Map[String, String] = (Map.empty[String, String] /: docExternalUrls.value) {
+ case (map, binding) =>
+ val idx = binding indexOf "="
+ val pkgs = binding substring (0, idx) split ":"
+ var url = binding substring (idx + 1)
+ val index = "/index.html"
+ url = if (url.endsWith(index)) url else url + index
+ map ++ (pkgs map (_ -> url))
+ }
+
/**
* This is the hardcoded area of Scaladoc. This is where "undesirable" stuff gets eliminated. I know it's not pretty,
* but ultimately scaladoc has to be useful. :)
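The -external-urls bindings introduced above have the shape pkg1:pkg2=url, and extUrlMapping folds each binding into a package-to-index.html map. The same parse, extracted into a stand-alone helper (splitting rules copied from the fold in the hunk; the example packages and URL below are only illustrative):

    object ExternalUrlBindings {
      /** "p1:p2=http://host/api" maps both p1 and p2 to http://host/api/index.html */
      def parse(bindings: List[String]): Map[String, String] =
        bindings.foldLeft(Map.empty[String, String]) { (map, binding) =>
          val idx  = binding indexOf "="
          val pkgs = binding.substring(0, idx) split ":"
          val raw  = binding.substring(idx + 1)
          val url  = if (raw endsWith "/index.html") raw else raw + "/index.html"
          map ++ pkgs.map(_ -> url)
        }
    }

parse(List("scala:scala.xml=http://example.org/api")) maps both packages to http://example.org/api/index.html.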
@@ -236,15 +272,15 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
* the function result should be a humanly-understandable description of the type class
*/
val knownTypeClasses: Map[String, String => String] = Map() +
- ("scala.math.Numeric" -> ((tparam: String) => tparam + " is a numeric class, such as Int, Long, Float or Double")) +
- ("scala.math.Integral" -> ((tparam: String) => tparam + " is an integral numeric class, such as Int or Long")) +
- ("scala.math.Fractional" -> ((tparam: String) => tparam + " is a fractional numeric class, such as Float or Double")) +
- ("scala.reflect.Manifest" -> ((tparam: String) => tparam + " is accompanied by a Manifest, which is a runtime representation of its type that survives erasure")) +
- ("scala.reflect.ClassManifest" -> ((tparam: String) => tparam + " is accompanied by a ClassManifest, which is a runtime representation of its type that survives erasure")) +
- ("scala.reflect.OptManifest" -> ((tparam: String) => tparam + " is accompanied by an OptManifest, which can be either a runtime representation of its type or the NoManifest, which means the runtime type is not available")) +
- ("scala.reflect.ClassTag" -> ((tparam: String) => tparam + " is accompanied by a ClassTag, which is a runtime representation of its type that survives erasure")) +
- ("scala.reflect.AbsTypeTag" -> ((tparam: String) => tparam + " is accompanied by an AbsTypeTag, which is a runtime representation of its type that survives erasure")) +
- ("scala.reflect.base.TypeTags.TypeTag" -> ((tparam: String) => tparam + " is accompanied by a TypeTag, which is a runtime representation of its type that survives erasure"))
+ ("scala.math.Numeric" -> ((tparam: String) => tparam + " is a numeric class, such as Int, Long, Float or Double")) +
+ ("scala.math.Integral" -> ((tparam: String) => tparam + " is an integral numeric class, such as Int or Long")) +
+ ("scala.math.Fractional" -> ((tparam: String) => tparam + " is a fractional numeric class, such as Float or Double")) +
+ ("scala.reflect.Manifest" -> ((tparam: String) => tparam + " is accompanied by a Manifest, which is a runtime representation of its type that survives erasure")) +
+ ("scala.reflect.ClassManifest" -> ((tparam: String) => tparam + " is accompanied by a ClassManifest, which is a runtime representation of its type that survives erasure")) +
+ ("scala.reflect.OptManifest" -> ((tparam: String) => tparam + " is accompanied by an OptManifest, which can be either a runtime representation of its type or the NoManifest, which means the runtime type is not available")) +
+ ("scala.reflect.ClassTag" -> ((tparam: String) => tparam + " is accompanied by a ClassTag, which is a runtime representation of its type that survives erasure")) +
+ ("scala.reflect.api.TypeTags.WeakTypeTag" -> ((tparam: String) => tparam + " is accompanied by a WeakTypeTag, which is a runtime representation of its type that survives erasure")) +
+ ("scala.reflect.api.TypeTags.TypeTag" -> ((tparam: String) => tparam + " is accompanied by a TypeTag, which is a runtime representation of its type that survives erasure"))
/**
* Set of classes to exclude from index and diagrams
@@ -264,7 +300,7 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
}
/** Common conversion targets that affect any class in Scala */
- val commonConversionTargets = List(
+ val commonConversionTargets = Set(
"scala.Predef.any2stringfmt",
"scala.Predef.any2stringadd",
"scala.Predef.any2ArrowAssoc",
diff --git a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala b/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
index 7e57f9fd9f..812b62a1c6 100644
--- a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
+++ b/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
@@ -5,8 +5,8 @@
package scala.tools.nsc
package doc
-import language.implicitConversions
-import language.postfixOps
+import scala.language.implicitConversions
+import scala.language.postfixOps
/** Some glue between DocParser (which reads source files which can't be compiled)
* and the scaladoc model.
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
index 7a74c569f3..2c719e5d70 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -10,8 +10,8 @@ package html
import model._
import comment._
-import xml.{XML, NodeSeq}
-import xml.dtd.{DocType, PublicID}
+import scala.xml.{XML, NodeSeq}
+import scala.xml.dtd.{DocType, PublicID}
import scala.collection._
import java.io.Writer
@@ -87,7 +87,7 @@ abstract class HtmlPage extends Page { thisPage =>
case Title(in, _) => <h6>{ inlineToHtml(in) }</h6>
case Paragraph(in) => <p>{ inlineToHtml(in) }</p>
case Code(data) =>
- <pre>{ SyntaxHigh(data) }</pre> //<pre>{ xml.Text(data) }</pre>
+ <pre>{ SyntaxHigh(data) }</pre> //<pre>{ scala.xml.Text(data) }</pre>
case UnorderedList(items) =>
<ul>{ listItemsToHtml(items) }</ul>
case OrderedList(items, listStyle) =>
@@ -119,9 +119,9 @@ abstract class HtmlPage extends Page { thisPage =>
case Subscript(in) => <sub>{ inlineToHtml(in) }</sub>
case Link(raw, title) => <a href={ raw }>{ inlineToHtml(title) }</a>
case Monospace(in) => <code>{ inlineToHtml(in) }</code>
- case Text(text) => xml.Text(text)
+ case Text(text) => scala.xml.Text(text)
case Summary(in) => inlineToHtml(in)
- case HtmlTag(tag) => xml.Unparsed(tag)
+ case HtmlTag(tag) => scala.xml.Unparsed(tag)
case EntityLink(target, link) => linkToHtml(target, link, true)
}
@@ -138,7 +138,8 @@ abstract class HtmlPage extends Page { thisPage =>
<span class="extmbr" name={ mbr.qualifiedName }>{ inlineToHtml(text) }</span>
case Tooltip(tooltip) =>
<span class="extype" name={ tooltip }>{ inlineToHtml(text) }</span>
- // TODO: add case LinkToExternal here
+ case LinkToExternal(name, url) =>
+ <a href={ url } class="extype" target="_top">{ inlineToHtml(text) }</a>
case NoLink =>
inlineToHtml(text)
}
@@ -158,11 +159,11 @@ abstract class HtmlPage extends Page { thisPage =>
if (starts.isEmpty && (inPos == string.length))
NodeSeq.Empty
else if (starts.isEmpty)
- xml.Text(string.slice(inPos, string.length))
+ scala.xml.Text(string.slice(inPos, string.length))
else if (inPos == starts.head)
toLinksIn(inPos, starts)
else {
- xml.Text(string.slice(inPos, starts.head)) ++ toLinksIn(starts.head, starts)
+ scala.xml.Text(string.slice(inPos, starts.head)) ++ toLinksIn(starts.head, starts)
}
}
def toLinksIn(inPos: Int, starts: List[Int]): NodeSeq = {
@@ -173,7 +174,7 @@ abstract class HtmlPage extends Page { thisPage =>
if (hasLinks)
toLinksOut(0, tpe.refEntity.keySet.toList)
else
- xml.Text(string)
+ scala.xml.Text(string)
}
def typesToHtml(tpess: List[model.TypeEntity], hasLinks: Boolean, sep: NodeSeq): NodeSeq = tpess match {
@@ -192,10 +193,10 @@ abstract class HtmlPage extends Page { thisPage =>
if (hasPage(dTpl)) {
<a href={ relativeLinkTo(dTpl) } class="extype" name={ dTpl.qualifiedName }>{ if (name eq null) dTpl.name else name }</a>
} else {
- xml.Text(if (name eq null) dTpl.name else name)
+ scala.xml.Text(if (name eq null) dTpl.name else name)
}
case ndTpl: NoDocTemplate =>
- xml.Text(if (name eq null) ndTpl.name else name)
+ scala.xml.Text(if (name eq null) ndTpl.name else name)
}
/** Returns the HTML code that represents the templates in `tpls` as a list of hyperlinked names. */
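The new LinkToExternal case above only emits the anchor element; this diff does not show how a qualified name gets matched against the -external-urls table. Purely as an illustration of one way such a lookup could work (longest matching package prefix wins), using a hypothetical helper that is not part of the scaladoc model:

    object ExternalLinkLookup {
      /** Choose the doc URL whose package key is the longest prefix of the qualified name. */
      def urlFor(qualifiedName: String, extUrlMapping: Map[String, String]): Option[String] =
        extUrlMapping.keys.toList
          .filter(pkg => qualifiedName == pkg || qualifiedName.startsWith(pkg + "."))
          .sortBy(-_.length)
          .headOption
          .map(extUrlMapping)
    }

urlFor("scala.xml.NodeSeq", Map("scala.xml" -> "http://example.org/api/index.html")) yields that URL.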
diff --git a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
index f1eab841f9..e21ee07963 100644
--- a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
@@ -5,7 +5,7 @@
package scala.tools.nsc.doc.html
-import xml.NodeSeq
+import scala.xml.NodeSeq
/** Highlight the syntax of Scala code appearing in a `{{{` wiki block
* (see method `HtmlPage.blockToHtml`).
@@ -40,7 +40,7 @@ private[html] object SyntaxHigh {
/** Standard library classes/objects, sorted alphabetically */
val standards = Array (
- "AbsTypeTag", "Any", "AnyRef", "AnyVal", "App", "Application", "Array",
+ "WeakTypeTag", "Any", "AnyRef", "AnyVal", "App", "Application", "Array",
"Boolean", "Byte", "Char", "Class", "ClassTag", "ClassManifest",
"Console", "Double", "Enumeration", "Float", "Function", "Int",
"List", "Long", "Manifest", "Map",
@@ -281,6 +281,6 @@ private[html] object SyntaxHigh {
}
parse("", 0)
- xml.Unparsed(out.toString)
+ scala.xml.Unparsed(out.toString)
}
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala b/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
index 6d83b4e6a5..cd76f84a37 100755
--- a/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
@@ -44,7 +44,7 @@ class ReferenceIndex(letter: Char, index: doc.Index, universe: Universe) extends
<strike>{ name }</strike>
}</div>
<div class="occurrences">{
- for (owner <- occurrences) yield owner ++ xml.Text(" ")
+ for (owner <- occurrences) yield owner ++ scala.xml.Text(" ")
}</div>
</div>
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala b/src/compiler/scala/tools/nsc/doc/html/page/Source.scala
index a51d66c6cc..edc0736400 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Source.scala
@@ -10,7 +10,7 @@ package page
import model._
import comment._
-import xml.{NodeSeq, Unparsed}
+import scala.xml.{NodeSeq, Unparsed}
import java.io.File
class Source(sourceFile: File) extends HtmlPage {
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
index 5977acc0c2..919a45aefc 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
@@ -13,7 +13,7 @@ import model.diagram._
import diagram._
import scala.xml.{ NodeSeq, Text, UnprefixedAttribute }
-import language.postfixOps
+import scala.language.postfixOps
import model._
import model.diagram._
@@ -41,7 +41,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<script type="text/javascript" src={ relativeLinkTo{List("jquery-ui.js", "lib")} }></script>
<script type="text/javascript" src={ relativeLinkTo{List("template.js", "lib")} }></script>
<script type="text/javascript" src={ relativeLinkTo{List("tools.tooltip.js", "lib")} }></script>
- { if (universe.settings.docDiagrams.isSetByUser) {
+ { if (universe.settings.docDiagrams.value) {
<script type="text/javascript" src={ relativeLinkTo{List("modernizr.custom.js", "lib")} }></script>
<script type="text/javascript" src={ relativeLinkTo{List("diagrams.js", "lib")} } id="diagrams-js"></script>
} else NodeSeq.Empty }
@@ -49,7 +49,11 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
if(top === self) {{
var url = '{ val p = templateToPath(tpl); "../" * (p.size - 1) + "index.html" }';
var hash = '{ val p = templateToPath(tpl); (p.tail.reverse ::: List(p.head.replace(".html", ""))).mkString(".") }';
- window.location.href = url + '#' + hash;
+ var anchor = window.location.hash;
+ var anchor_opt = '';
+ if (anchor.length { scala.xml.Unparsed(">=") /* unless we use Unparsed, it gets escaped and crashes the script */ } 1)
+ anchor_opt = '@' + anchor.substring(1);
+ window.location.href = url + '#' + hash + anchor_opt;
}}
</script>
</xml:group>
@@ -89,7 +93,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
if (tpl.isRootPackage || tpl.inTemplate.isRootPackage)
NodeSeq.Empty
else
- <p id="owner">{ templatesToHtml(tpl.inTemplate.toRoot.reverse.tail, xml.Text(".")) }</p>
+ <p id="owner">{ templatesToHtml(tpl.inTemplate.toRoot.reverse.tail, scala.xml.Text(".")) }</p>
}
<body class={ if (tpl.isType) "type" else "value" }>
@@ -148,8 +152,13 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<div id="ancestors">
<span class="filtertype">Implicitly<br/>
</span>
- <ol id="implicits">
- { tpl.conversions.map(conv => <li class="in" name={ conv.conversionQualifiedName }><span>{ "by " + conv.conversionShortName }</span></li>) }
+ <ol id="implicits"> {
+ tpl.conversions.map { conv =>
+ val name = conv.conversionQualifiedName
+ val hide = universe.settings.hiddenImplicits(name)
+ <li class="in" name={ name } data-hidden={ hide.toString }><span>{ "by " + conv.conversionShortName }</span></li>
+ }
+ }
</ol>
</div>
else NodeSeq.Empty
@@ -280,6 +289,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
fullComment={ if(memberComment.filter(_.label=="div").isEmpty) "no" else "yes" }
group={ mbr.group }>
<a id={ mbr.signature }/>
+ <a id={ mbr.signatureCompat }/>
{ signature(mbr, false) }
{ memberComment }
</li>
@@ -406,14 +416,14 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
case Nil =>
NodeSeq.Empty
case List(constraint) =>
- xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint) ++ xml.Text(".")
+ scala.xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint) ++ scala.xml.Text(".")
case List(constraint1, constraint2) =>
- xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint1) ++
- xml.Text(" and at the same time ") ++ constraintToHtml(constraint2) ++ xml.Text(".")
+ scala.xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint1) ++
+ scala.xml.Text(" and at the same time ") ++ constraintToHtml(constraint2) ++ scala.xml.Text(".")
case constraints =>
<br/> ++ "This conversion will take place only if all of the following constraints are met:" ++ <br/> ++ {
var index = 0
- constraints map { constraint => xml.Text({ index += 1; index } + ". ") ++ constraintToHtml(constraint) ++ <br/> }
+ constraints map { constraint => scala.xml.Text({ index += 1; index } + ". ") ++ constraintToHtml(constraint) ++ <br/> }
}
}
@@ -431,18 +441,18 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
case d: Def => d.valueParams map (_ map (_ name) mkString("(", ", ", ")")) mkString
case _ => "" // no parameters
}
- <br/> ++ xml.Text("To access this member you can use a ") ++
+ <br/> ++ scala.xml.Text("To access this member you can use a ") ++
<a href="http://stackoverflow.com/questions/2087250/what-is-the-purpose-of-type-ascription-in-scala"
- target="_blank">type ascription</a> ++ xml.Text(":") ++
+ target="_blank">type ascription</a> ++ scala.xml.Text(":") ++
<br/> ++ <div class="cmt"><pre>{"(" + Template.lowerFirstLetter(tpl.name) + ": " + conv.targetType.name + ")." + mbr.name + params }</pre></div>
}
val shadowingWarning: NodeSeq =
if (mbr.isShadowedImplicit)
- xml.Text("This implicitly inherited member is shadowed by one or more members in this " +
+ scala.xml.Text("This implicitly inherited member is shadowed by one or more members in this " +
"class.") ++ shadowingSuggestion
else if (mbr.isAmbiguousImplicit)
- xml.Text("This implicitly inherited member is ambiguous. One or more implicitly " +
+ scala.xml.Text("This implicitly inherited member is ambiguous. One or more implicitly " +
"inherited members have similar signatures, so calling this member may produce an ambiguous " +
"implicit conversion compiler error.") ++ shadowingSuggestion
else NodeSeq.Empty
@@ -462,7 +472,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
if (fvs.isEmpty || isReduced) NodeSeq.Empty
else {
<dt>Attributes</dt>
- <dd>{ fvs map { fv => { inlineToHtml(fv.text) ++ xml.Text(" ") } } }</dd>
+ <dd>{ fvs map { fv => { inlineToHtml(fv.text) ++ scala.xml.Text(" ") } } }</dd>
}
}
@@ -471,7 +481,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
if ((inDefTpls.tail.isEmpty && (inDefTpls.head == inTpl)) || isReduced) NodeSeq.Empty
else {
<dt>Definition Classes</dt>
- <dd>{ templatesToHtml(inDefTpls, xml.Text(" → ")) }</dd>
+ <dd>{ templatesToHtml(inDefTpls, scala.xml.Text(" → ")) }</dd>
}
}
@@ -619,7 +629,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<div class="toggleContainer block">
<span class="toggle">Linear Supertypes</span>
<div class="superTypes hiddenContent">{
- typesToHtml(dtpl.linearizationTypes, hasLinks = true, sep = xml.Text(", "))
+ typesToHtml(dtpl.linearizationTypes, hasLinks = true, sep = scala.xml.Text(", "))
}</div>
</div>
case _ => NodeSeq.Empty
@@ -630,23 +640,34 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<div class="toggleContainer block">
<span class="toggle">Known Subclasses</span>
<div class="subClasses hiddenContent">{
- templatesToHtml(dtpl.allSubClasses.sortBy(_.name), xml.Text(", "))
+ templatesToHtml(dtpl.allSubClasses.sortBy(_.name), scala.xml.Text(", "))
}</div>
</div>
case _ => NodeSeq.Empty
}
- val typeHierarchy = if (s.docDiagrams.isSetByUser) mbr match {
- case dtpl: DocTemplateEntity if isSelf && !isReduced =>
- makeDiagramHtml(dtpl, dtpl.inheritanceDiagram, "Type Hierarchy", "inheritance-diagram")
- case _ => NodeSeq.Empty
- } else NodeSeq.Empty // diagrams not generated
+ def createDiagram(f: DocTemplateEntity => Option[Diagram], description: String, id: String): NodeSeq =
+ if (s.docDiagrams.value) mbr match {
+ case dtpl: DocTemplateEntity if isSelf && !isReduced =>
+ val diagram = f(dtpl)
+ if (diagram.isDefined) {
+ val s = universe.settings
+ val diagramSvg = generator.generate(diagram.get, tpl, this)
+ if (diagramSvg != NodeSeq.Empty) {
+ <div class="toggleContainer block diagram-container" id={ id + "-container"}>
+ <span class="toggle diagram-link">{ description }</span>
+ <a href="http://docs.scala-lang.org/overviews/scaladoc/usage.html#diagrams" target="_blank" class="diagram-help">Learn more about scaladoc diagrams</a>
+ <div class="diagram" id={ id }>{
+ diagramSvg
+ }</div>
+ </div>
+ } else NodeSeq.Empty
+ } else NodeSeq.Empty
+ case _ => NodeSeq.Empty
+ } else NodeSeq.Empty // diagrams not generated
- val contentHierarchy = if (s.docDiagrams.isSetByUser) mbr match {
- case dtpl: DocTemplateEntity if isSelf && !isReduced =>
- makeDiagramHtml(dtpl, dtpl.contentDiagram, "Content Hierarchy", "content-diagram")
- case _ => NodeSeq.Empty
- } else NodeSeq.Empty // diagrams not generated
+ val typeHierarchy = createDiagram(_.inheritanceDiagram, "Type Hierarchy", "inheritance-diagram")
+ val contentHierarchy = createDiagram(_.contentDiagram, "Content Hierarchy", "content-diagram")
memberComment ++ paramComments ++ attributesBlock ++ linearization ++ subclasses ++ typeHierarchy ++ contentHierarchy
}
@@ -654,7 +675,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
def boundsToHtml(hi: Option[TypeEntity], lo: Option[TypeEntity], hasLinks: Boolean): NodeSeq = {
def bound0(bnd: Option[TypeEntity], pre: String): NodeSeq = bnd match {
case None => NodeSeq.Empty
- case Some(tpe) => xml.Text(pre) ++ typeToHtml(tpe, hasLinks)
+ case Some(tpe) => scala.xml.Text(pre) ++ typeToHtml(tpe, hasLinks)
}
bound0(lo, " >: ") ++ bound0(hi, " <: ")
}
@@ -685,7 +706,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
def inside(hasLinks: Boolean, nameLink: String = ""): NodeSeq =
<xml:group>
<span class="modifier_kind">
- <span class="modifier">{ mbr.flags.map(flag => inlineToHtml(flag.text) ++ xml.Text(" ")) }</span>
+ <span class="modifier">{ mbr.flags.map(flag => inlineToHtml(flag.text) ++ scala.xml.Text(" ")) }</span>
<span class="kind">{ kindToString(mbr) }</span>
</span>
<span class="symbol">
@@ -863,7 +884,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
def argumentsToHtml0(argss: List[ValueArgument]): NodeSeq = argss match {
case Nil => NodeSeq.Empty
case arg :: Nil => argumentToHtml(arg)
- case arg :: args => argumentToHtml(arg) ++ xml.Text(", ") ++ argumentsToHtml0(args)
+ case arg :: args => argumentToHtml(arg) ++ scala.xml.Text(", ") ++ argumentsToHtml0(args)
}
<span class="args">({ argumentsToHtml0(argss) })</span>
}
@@ -913,45 +934,29 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
private def constraintToHtml(constraint: Constraint): NodeSeq = constraint match {
case ktcc: KnownTypeClassConstraint =>
- xml.Text(ktcc.typeExplanation(ktcc.typeParamName) + " (" + ktcc.typeParamName + ": ") ++
- templateToHtml(ktcc.typeClassEntity) ++ xml.Text(")")
+ scala.xml.Text(ktcc.typeExplanation(ktcc.typeParamName) + " (" + ktcc.typeParamName + ": ") ++
+ templateToHtml(ktcc.typeClassEntity) ++ scala.xml.Text(")")
case tcc: TypeClassConstraint =>
- xml.Text(tcc.typeParamName + " is ") ++
+ scala.xml.Text(tcc.typeParamName + " is ") ++
<a href="http://stackoverflow.com/questions/2982276/what-is-a-context-bound-in-scala" target="_blank">
- context-bounded</a> ++ xml.Text(" by " + tcc.typeClassEntity.qualifiedName + " (" + tcc.typeParamName + ": ") ++
- templateToHtml(tcc.typeClassEntity) ++ xml.Text(")")
+ context-bounded</a> ++ scala.xml.Text(" by " + tcc.typeClassEntity.qualifiedName + " (" + tcc.typeParamName + ": ") ++
+ templateToHtml(tcc.typeClassEntity) ++ scala.xml.Text(")")
case impl: ImplicitInScopeConstraint =>
- xml.Text("an implicit value of type ") ++ typeToHtml(impl.implicitType, true) ++ xml.Text(" is in scope")
+ scala.xml.Text("an implicit value of type ") ++ typeToHtml(impl.implicitType, true) ++ scala.xml.Text(" is in scope")
case eq: EqualTypeParamConstraint =>
- xml.Text(eq.typeParamName + " is " + eq.rhs.name + " (" + eq.typeParamName + " =:= ") ++
- typeToHtml(eq.rhs, true) ++ xml.Text(")")
+ scala.xml.Text(eq.typeParamName + " is " + eq.rhs.name + " (" + eq.typeParamName + " =:= ") ++
+ typeToHtml(eq.rhs, true) ++ scala.xml.Text(")")
case bt: BoundedTypeParamConstraint =>
- xml.Text(bt.typeParamName + " is a superclass of " + bt.lowerBound.name + " and a subclass of " +
+ scala.xml.Text(bt.typeParamName + " is a superclass of " + bt.lowerBound.name + " and a subclass of " +
bt.upperBound.name + " (" + bt.typeParamName + " >: ") ++
- typeToHtml(bt.lowerBound, true) ++ xml.Text(" <: ") ++
- typeToHtml(bt.upperBound, true) ++ xml.Text(")")
+ typeToHtml(bt.lowerBound, true) ++ scala.xml.Text(" <: ") ++
+ typeToHtml(bt.upperBound, true) ++ scala.xml.Text(")")
case lb: LowerBoundedTypeParamConstraint =>
- xml.Text(lb.typeParamName + " is a superclass of " + lb.lowerBound.name + " (" + lb.typeParamName + " >: ") ++
- typeToHtml(lb.lowerBound, true) ++ xml.Text(")")
+ scala.xml.Text(lb.typeParamName + " is a superclass of " + lb.lowerBound.name + " (" + lb.typeParamName + " >: ") ++
+ typeToHtml(lb.lowerBound, true) ++ scala.xml.Text(")")
case ub: UpperBoundedTypeParamConstraint =>
- xml.Text(ub.typeParamName + " is a subclass of " + ub.upperBound.name + " (" + ub.typeParamName + " <: ") ++
- typeToHtml(ub.upperBound, true) ++ xml.Text(")")
- }
-
- def makeDiagramHtml(tpl: DocTemplateEntity, diagram: Option[Diagram], description: String, id: String) = {
- if (diagram.isDefined) {
- val s = universe.settings
- val diagramSvg = generator.generate(diagram.get, tpl, this)
- if (diagramSvg != NodeSeq.Empty) {
- <div class="toggleContainer block diagram-container" id={ id + "-container"}>
- <span class="toggle diagram-link">{ description }</span>
- <a href="http://docs.scala-lang.org/overviews/scaladoc/usage.html#diagrams" target="_blank" class="diagram-help">Learn more about scaladoc diagrams</a>
- <div class="diagram" id={ id }>{
- diagramSvg
- }</div>
- </div>
- } else NodeSeq.Empty
- } else NodeSeq.Empty
+ scala.xml.Text(ub.typeParamName + " is a subclass of " + ub.upperBound.name + " (" + ub.typeParamName + " <: ") ++
+ typeToHtml(ub.upperBound, true) ++ scala.xml.Text(")")
}
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
index c46c33c1ee..304c534bdc 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
@@ -426,7 +426,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
else if (klass.contains("object")) "object"
else ""
- def getPosition(g: xml.Node, axis: String, offset: Double): Option[Double] = {
+ def getPosition(g: scala.xml.Node, axis: String, offset: Double): Option[Double] = {
val node = g \ "a" \ "text" \ ("@" + axis)
if (node.isEmpty)
None
@@ -508,4 +508,4 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
private val graphAttributesStr = graphAttributes.map{ case (key, value) => key + "=\"" + value + "\";\n" }.mkString
private val nodeAttributesStr = flatten(nodeAttributes)
private val edgeAttributesStr = flatten(edgeAttributes)
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js
index 16ad06c5ac..7c24308023 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js
@@ -1,4 +1,154 @@
-/*! jQuery v1.7.2 jquery.com | jquery.org/license */
-[minified jQuery 1.7.2 source omitted]
e.type(a)==="function"},isArray:Array.isArray||function(a){return e.type(a)==="array"},isWindow:function(a){return a!=null&&a==a.window},isNumeric:function(a){return!isNaN(parseFloat(a))&&isFinite(a)},type:function(a){return a==null?String(a):I[C.call(a)]||"object"},isPlainObject:function(a){if(!a||e.type(a)!=="object"||a.nodeType||e.isWindow(a))return!1;try{if(a.constructor&&!D.call(a,"constructor")&&!D.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(c){return!1}var d;for(d in a);return d===b||D.call(a,d)},isEmptyObject:function(a){for(var b in a)return!1;return!0},error:function(a){throw new Error(a)},parseJSON:function(b){if(typeof b!="string"||!b)return null;b=e.trim(b);if(a.JSON&&a.JSON.parse)return a.JSON.parse(b);if(n.test(b.replace(o,"@").replace(p,"]").replace(q,"")))return(new Function("return "+b))();e.error("Invalid JSON: "+b)},parseXML:function(c){if(typeof c!="string"||!c)return null;var d,f;try{a.DOMParser?(f=new DOMParser,d=f.parseFromString(c,"text/xml")):(d=new ActiveXObject("Microsoft.XMLDOM"),d.async="false",d.loadXML(c))}catch(g){d=b}(!d||!d.documentElement||d.getElementsByTagName("parsererror").length)&&e.error("Invalid XML: "+c);return d},noop:function(){},globalEval:function(b){b&&j.test(b)&&(a.execScript||function(b){a.eval.call(a,b)})(b)},camelCase:function(a){return a.replace(w,"ms-").replace(v,x)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toUpperCase()===b.toUpperCase()},each:function(a,c,d){var f,g=0,h=a.length,i=h===b||e.isFunction(a);if(d){if(i){for(f in a)if(c.apply(a[f],d)===!1)break}else for(;g<h;)if(c.apply(a[g++],d)===!1)break}else if(i){for(f in a)if(c.call(a[f],f,a[f])===!1)break}else for(;g<h;)if(c.call(a[g],g,a[g++])===!1)break;return a},trim:G?function(a){return a==null?"":G.call(a)}:function(a){return a==null?"":(a+"").replace(k,"").replace(l,"")},makeArray:function(a,b){var c=b||[];if(a!=null){var d=e.type(a);a.length==null||d==="string"||d==="function"||d==="regexp"||e.isWindow(a)?E.call(c,a):e.merge(c,a)}return c},inArray:function(a,b,c){var d;if(b){if(H)return H.call(b,a,c);d=b.length,c=c?c<0?Math.max(0,d+c):c:0;for(;c<d;c++)if(c in b&&b[c]===a)return c}return-1},merge:function(a,c){var d=a.length,e=0;if(typeof c.length=="number")for(var f=c.length;e<f;e++)a[d++]=c[e];else while(c[e]!==b)a[d++]=c[e++];a.length=d;return a},grep:function(a,b,c){var d=[],e;c=!!c;for(var f=0,g=a.length;f<g;f++)e=!!b(a[f],f),c!==e&&d.push(a[f]);return d},map:function(a,c,d){var f,g,h=[],i=0,j=a.length,k=a instanceof e||j!==b&&typeof j=="number"&&(j>0&&a[0]&&a[j-1]||j===0||e.isArray(a));if(k)for(;i<j;i++)f=c(a[i],i,d),f!=null&&(h[h.length]=f);else for(g in a)f=c(a[g],g,d),f!=null&&(h[h.length]=f);return h.concat.apply([],h)},guid:1,proxy:function(a,c){if(typeof c=="string"){var d=a[c];c=a,a=d}if(!e.isFunction(a))return b;var f=F.call(arguments,2),g=function(){return a.apply(c,f.concat(F.call(arguments)))};g.guid=a.guid=a.guid||g.guid||e.guid++;return g},access:function(a,c,d,f,g,h,i){var j,k=d==null,l=0,m=a.length;if(d&&typeof d=="object"){for(l in d)e.access(a,c,l,d[l],1,h,f);g=1}else if(f!==b){j=i===b&&e.isFunction(f),k&&(j?(j=c,c=function(a,b,c){return j.call(e(a),c)}):(c.call(a,f),c=null));if(c)for(;l<m;l++)c(a[l],d,j?f.call(a[l],l,c(a[l],d)):f,i);g=1}return g?a:k?c.call(a):m?c(a[0],d):h},now:function(){return(new Date).getTime()},uaMatch:function(a){a=a.toLowerCase();var b=r.exec(a)||s.exec(a)||t.exec(a)||a.indexOf("compatible")<0&&u.exec(a)||[];return{browser:b[1]||"",version:b[2]||"0"}},sub:function(){function a(b,c){return new 
a.fn.init(b,c)}e.extend(!0,a,this),a.superclass=this,a.fn=a.prototype=this(),a.fn.constructor=a,a.sub=this.sub,a.fn.init=function(d,f){f&&f instanceof e&&!(f instanceof a)&&(f=a(f));return e.fn.init.call(this,d,f,b)},a.fn.init.prototype=a.fn;var b=a(c);return a},browser:{}}),e.each("Boolean Number String Function Array Date RegExp Object".split(" "),function(a,b){I["[object "+b+"]"]=b.toLowerCase()}),z=e.uaMatch(y),z.browser&&(e.browser[z.browser]=!0,e.browser.version=z.version),e.browser.webkit&&(e.browser.safari=!0),j.test(" ")&&(k=/^[\s\xA0]+/,l=/[\s\xA0]+$/),h=e(c),c.addEventListener?B=function(){c.removeEventListener("DOMContentLoaded",B,!1),e.ready()}:c.attachEvent&&(B=function(){c.readyState==="complete"&&(c.detachEvent("onreadystatechange",B),e.ready())});return e}(),g={};f.Callbacks=function(a){a=a?g[a]||h(a):{};var c=[],d=[],e,i,j,k,l,m,n=function(b){var d,e,g,h,i;for(d=0,e=b.length;d<e;d++)g=b[d],h=f.type(g),h==="array"?n(g):h==="function"&&(!a.unique||!p.has(g))&&c.push(g)},o=function(b,f){f=f||[],e=!a.memory||[b,f],i=!0,j=!0,m=k||0,k=0,l=c.length;for(;c&&m<l;m++)if(c[m].apply(b,f)===!1&&a.stopOnFalse){e=!0;break}j=!1,c&&(a.once?e===!0?p.disable():c=[]:d&&d.length&&(e=d.shift(),p.fireWith(e[0],e[1])))},p={add:function(){if(c){var a=c.length;n(arguments),j?l=c.length:e&&e!==!0&&(k=a,o(e[0],e[1]))}return this},remove:function(){if(c){var b=arguments,d=0,e=b.length;for(;d<e;d++)for(var f=0;f<c.length;f++)if(b[d]===c[f]){j&&f<=l&&(l--,f<=m&&m--),c.splice(f--,1);if(a.unique)break}}return this},has:function(a){if(c){var b=0,d=c.length;for(;b<d;b++)if(a===c[b])return!0}return!1},empty:function(){c=[];return this},disable:function(){c=d=e=b;return this},disabled:function(){return!c},lock:function(){d=b,(!e||e===!0)&&p.disable();return this},locked:function(){return!d},fireWith:function(b,c){d&&(j?a.once||d.push([b,c]):(!a.once||!e)&&o(b,c));return this},fire:function(){p.fireWith(this,arguments);return this},fired:function(){return!!i}};return p};var i=[].slice;f.extend({Deferred:function(a){var b=f.Callbacks("once memory"),c=f.Callbacks("once memory"),d=f.Callbacks("memory"),e="pending",g={resolve:b,reject:c,notify:d},h={done:b.add,fail:c.add,progress:d.add,state:function(){return e},isResolved:b.fired,isRejected:c.fired,then:function(a,b,c){i.done(a).fail(b).progress(c);return this},always:function(){i.done.apply(i,arguments).fail.apply(i,arguments);return this},pipe:function(a,b,c){return f.Deferred(function(d){f.each({done:[a,"resolve"],fail:[b,"reject"],progress:[c,"notify"]},function(a,b){var c=b[0],e=b[1],g;f.isFunction(c)?i[a](function(){g=c.apply(this,arguments),g&&f.isFunction(g.promise)?g.promise().then(d.resolve,d.reject,d.notify):d[e+"With"](this===i?d:this,[g])}):i[a](d[e])})}).promise()},promise:function(a){if(a==null)a=h;else for(var b in h)a[b]=h[b];return a}},i=h.promise({}),j;for(j in g)i[j]=g[j].fire,i[j+"With"]=g[j].fireWith;i.done(function(){e="resolved"},c.disable,d.lock).fail(function(){e="rejected"},b.disable,d.lock),a&&a.call(i,i);return i},when:function(a){function m(a){return function(b){e[a]=arguments.length>1?i.call(arguments,0):b,j.notifyWith(k,e)}}function l(a){return function(c){b[a]=arguments.length>1?i.call(arguments,0):c,--g||j.resolveWith(j,b)}}var b=i.call(arguments,0),c=0,d=b.length,e=Array(d),g=d,h=d,j=d<=1&&a&&f.isFunction(a.promise)?a:f.Deferred(),k=j.promise();if(d>1){for(;c<d;c++)b[c]&&b[c].promise&&f.isFunction(b[c].promise)?b[c].promise().then(l(c),j.reject,m(c)):--g;g||j.resolveWith(j,b)}else j!==a&&j.resolveWith(j,d?[a]:[]);return 
k}}),f.support=function(){var b,d,e,g,h,i,j,k,l,m,n,o,p=c.createElement("div"),q=c.documentElement;p.setAttribute("className","t"),p.innerHTML=" <link/><table></table><a href='/a' style='top:1px;float:left;opacity:.55;'>a</a><input type='checkbox'/>",d=p.getElementsByTagName("*"),e=p.getElementsByTagName("a")[0];if(!d||!d.length||!e)return{};g=c.createElement("select"),h=g.appendChild(c.createElement("option")),i=p.getElementsByTagName("input")[0],b={leadingWhitespace:p.firstChild.nodeType===3,tbody:!p.getElementsByTagName("tbody").length,htmlSerialize:!!p.getElementsByTagName("link").length,style:/top/.test(e.getAttribute("style")),hrefNormalized:e.getAttribute("href")==="/a",opacity:/^0.55/.test(e.style.opacity),cssFloat:!!e.style.cssFloat,checkOn:i.value==="on",optSelected:h.selected,getSetAttribute:p.className!=="t",enctype:!!c.createElement("form").enctype,html5Clone:c.createElement("nav").cloneNode(!0).outerHTML!=="<:nav></:nav>",submitBubbles:!0,changeBubbles:!0,focusinBubbles:!1,deleteExpando:!0,noCloneEvent:!0,inlineBlockNeedsLayout:!1,shrinkWrapBlocks:!1,reliableMarginRight:!0,pixelMargin:!0},f.boxModel=b.boxModel=c.compatMode==="CSS1Compat",i.checked=!0,b.noCloneChecked=i.cloneNode(!0).checked,g.disabled=!0,b.optDisabled=!h.disabled;try{delete p.test}catch(r){b.deleteExpando=!1}!p.addEventListener&&p.attachEvent&&p.fireEvent&&(p.attachEvent("onclick",function(){b.noCloneEvent=!1}),p.cloneNode(!0).fireEvent("onclick")),i=c.createElement("input"),i.value="t",i.setAttribute("type","radio"),b.radioValue=i.value==="t",i.setAttribute("checked","checked"),i.setAttribute("name","t"),p.appendChild(i),j=c.createDocumentFragment(),j.appendChild(p.lastChild),b.checkClone=j.cloneNode(!0).cloneNode(!0).lastChild.checked,b.appendChecked=i.checked,j.removeChild(i),j.appendChild(p);if(p.attachEvent)for(n in{submit:1,change:1,focusin:1})m="on"+n,o=m in p,o||(p.setAttribute(m,"return;"),o=typeof p[m]=="function"),b[n+"Bubbles"]=o;j.removeChild(p),j=g=h=p=i=null,f(function(){var d,e,g,h,i,j,l,m,n,q,r,s,t,u=c.getElementsByTagName("body")[0];!u||(m=1,t="padding:0;margin:0;border:",r="position:absolute;top:0;left:0;width:1px;height:1px;",s=t+"0;visibility:hidden;",n="style='"+r+t+"5px solid #000;",q="<div "+n+"display:block;'><div style='"+t+"0;display:block;overflow:hidden;'></div></div>"+"<table "+n+"' cellpadding='0' cellspacing='0'>"+"<tr><td></td></tr></table>",d=c.createElement("div"),d.style.cssText=s+"width:0;height:0;position:static;top:0;margin-top:"+m+"px",u.insertBefore(d,u.firstChild),p=c.createElement("div"),d.appendChild(p),p.innerHTML="<table><tr><td style='"+t+"0;display:none'></td><td>t</td></tr></table>",k=p.getElementsByTagName("td"),o=k[0].offsetHeight===0,k[0].style.display="",k[1].style.display="none",b.reliableHiddenOffsets=o&&k[0].offsetHeight===0,a.getComputedStyle&&(p.innerHTML="",l=c.createElement("div"),l.style.width="0",l.style.marginRight="0",p.style.width="2px",p.appendChild(l),b.reliableMarginRight=(parseInt((a.getComputedStyle(l,null)||{marginRight:0}).marginRight,10)||0)===0),typeof p.style.zoom!="undefined"&&(p.innerHTML="",p.style.width=p.style.padding="1px",p.style.border=0,p.style.overflow="hidden",p.style.display="inline",p.style.zoom=1,b.inlineBlockNeedsLayout=p.offsetWidth===3,p.style.display="block",p.style.overflow="visible",p.innerHTML="<div 
style='width:5px;'></div>",b.shrinkWrapBlocks=p.offsetWidth!==3),p.style.cssText=r+s,p.innerHTML=q,e=p.firstChild,g=e.firstChild,i=e.nextSibling.firstChild.firstChild,j={doesNotAddBorder:g.offsetTop!==5,doesAddBorderForTableAndCells:i.offsetTop===5},g.style.position="fixed",g.style.top="20px",j.fixedPosition=g.offsetTop===20||g.offsetTop===15,g.style.position=g.style.top="",e.style.overflow="hidden",e.style.position="relative",j.subtractsBorderForOverflowNotVisible=g.offsetTop===-5,j.doesNotIncludeMarginInBodyOffset=u.offsetTop!==m,a.getComputedStyle&&(p.style.marginTop="1%",b.pixelMargin=(a.getComputedStyle(p,null)||{marginTop:0}).marginTop!=="1%"),typeof d.style.zoom!="undefined"&&(d.style.zoom=1),u.removeChild(d),l=p=d=null,f.extend(b,j))});return b}();var j=/^(?:\{.*\}|\[.*\])$/,k=/([A-Z])/g;f.extend({cache:{},uuid:0,expando:"jQuery"+(f.fn.jquery+Math.random()).replace(/\D/g,""),noData:{embed:!0,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:!0},hasData:function(a){a=a.nodeType?f.cache[a[f.expando]]:a[f.expando];return!!a&&!m(a)},data:function(a,c,d,e){if(!!f.acceptData(a)){var g,h,i,j=f.expando,k=typeof c=="string",l=a.nodeType,m=l?f.cache:a,n=l?a[j]:a[j]&&j,o=c==="events";if((!n||!m[n]||!o&&!e&&!m[n].data)&&k&&d===b)return;n||(l?a[j]=n=++f.uuid:n=j),m[n]||(m[n]={},l||(m[n].toJSON=f.noop));if(typeof c=="object"||typeof c=="function")e?m[n]=f.extend(m[n],c):m[n].data=f.extend(m[n].data,c);g=h=m[n],e||(h.data||(h.data={}),h=h.data),d!==b&&(h[f.camelCase(c)]=d);if(o&&!h[c])return g.events;k?(i=h[c],i==null&&(i=h[f.camelCase(c)])):i=h;return i}},removeData:function(a,b,c){if(!!f.acceptData(a)){var d,e,g,h=f.expando,i=a.nodeType,j=i?f.cache:a,k=i?a[h]:h;if(!j[k])return;if(b){d=c?j[k]:j[k].data;if(d){f.isArray(b)||(b in d?b=[b]:(b=f.camelCase(b),b in d?b=[b]:b=b.split(" ")));for(e=0,g=b.length;e<g;e++)delete d[b[e]];if(!(c?m:f.isEmptyObject)(d))return}}if(!c){delete j[k].data;if(!m(j[k]))return}f.support.deleteExpando||!j.setInterval?delete j[k]:j[k]=null,i&&(f.support.deleteExpando?delete a[h]:a.removeAttribute?a.removeAttribute(h):a[h]=null)}},_data:function(a,b,c){return f.data(a,b,c,!0)},acceptData:function(a){if(a.nodeName){var b=f.noData[a.nodeName.toLowerCase()];if(b)return b!==!0&&a.getAttribute("classid")===b}return!0}}),f.fn.extend({data:function(a,c){var d,e,g,h,i,j=this[0],k=0,m=null;if(a===b){if(this.length){m=f.data(j);if(j.nodeType===1&&!f._data(j,"parsedAttrs")){g=j.attributes;for(i=g.length;k<i;k++)h=g[k].name,h.indexOf("data-")===0&&(h=f.camelCase(h.substring(5)),l(j,h,m[h]));f._data(j,"parsedAttrs",!0)}}return m}if(typeof a=="object")return this.each(function(){f.data(this,a)});d=a.split(".",2),d[1]=d[1]?"."+d[1]:"",e=d[1]+"!";return f.access(this,function(c){if(c===b){m=this.triggerHandler("getData"+e,[d[0]]),m===b&&j&&(m=f.data(j,a),m=l(j,a,m));return m===b&&d[1]?this.data(d[0]):m}d[1]=c,this.each(function(){var b=f(this);b.triggerHandler("setData"+e,d),f.data(this,a,c),b.triggerHandler("changeData"+e,d)})},null,c,arguments.length>1,null,!1)},removeData:function(a){return this.each(function(){f.removeData(this,a)})}}),f.extend({_mark:function(a,b){a&&(b=(b||"fx")+"mark",f._data(a,b,(f._data(a,b)||0)+1))},_unmark:function(a,b,c){a!==!0&&(c=b,b=a,a=!1);if(b){c=c||"fx";var d=c+"mark",e=a?0:(f._data(b,d)||1)-1;e?f._data(b,d,e):(f.removeData(b,d,!0),n(b,c,"mark"))}},queue:function(a,b,c){var d;if(a){b=(b||"fx")+"queue",d=f._data(a,b),c&&(!d||f.isArray(c)?d=f._data(a,b,f.makeArray(c)):d.push(c));return d||[]}},dequeue:function(a,b){b=b||"fx";var 
c=f.queue(a,b),d=c.shift(),e={};d==="inprogress"&&(d=c.shift()),d&&(b==="fx"&&c.unshift("inprogress"),f._data(a,b+".run",e),d.call(a,function(){f.dequeue(a,b)},e)),c.length||(f.removeData(a,b+"queue "+b+".run",!0),n(a,b,"queue"))}}),f.fn.extend({queue:function(a,c){var d=2;typeof a!="string"&&(c=a,a="fx",d--);if(arguments.length<d)return f.queue(this[0],a);return c===b?this:this.each(function(){var b=f.queue(this,a,c);a==="fx"&&b[0]!=="inprogress"&&f.dequeue(this,a)})},dequeue:function(a){return this.each(function(){f.dequeue(this,a)})},delay:function(a,b){a=f.fx?f.fx.speeds[a]||a:a,b=b||"fx";return this.queue(b,function(b,c){var d=setTimeout(b,a);c.stop=function(){clearTimeout(d)}})},clearQueue:function(a){return this.queue(a||"fx",[])},promise:function(a,c){function m(){--h||d.resolveWith(e,[e])}typeof a!="string"&&(c=a,a=b),a=a||"fx";var d=f.Deferred(),e=this,g=e.length,h=1,i=a+"defer",j=a+"queue",k=a+"mark",l;while(g--)if(l=f.data(e[g],i,b,!0)||(f.data(e[g],j,b,!0)||f.data(e[g],k,b,!0))&&f.data(e[g],i,f.Callbacks("once memory"),!0))h++,l.add(m);m();return d.promise(c)}});var o=/[\n\t\r]/g,p=/\s+/,q=/\r/g,r=/^(?:button|input)$/i,s=/^(?:button|input|object|select|textarea)$/i,t=/^a(?:rea)?$/i,u=/^(?:autofocus|autoplay|async|checked|controls|defer|disabled|hidden|loop|multiple|open|readonly|required|scoped|selected)$/i,v=f.support.getSetAttribute,w,x,y;f.fn.extend({attr:function(a,b){return f.access(this,f.attr,a,b,arguments.length>1)},removeAttr:function(a){return this.each(function(){f.removeAttr(this,a)})},prop:function(a,b){return f.access(this,f.prop,a,b,arguments.length>1)},removeProp:function(a){a=f.propFix[a]||a;return this.each(function(){try{this[a]=b,delete this[a]}catch(c){}})},addClass:function(a){var b,c,d,e,g,h,i;if(f.isFunction(a))return this.each(function(b){f(this).addClass(a.call(this,b,this.className))});if(a&&typeof a=="string"){b=a.split(p);for(c=0,d=this.length;c<d;c++){e=this[c];if(e.nodeType===1)if(!e.className&&b.length===1)e.className=a;else{g=" "+e.className+" ";for(h=0,i=b.length;h<i;h++)~g.indexOf(" "+b[h]+" ")||(g+=b[h]+" ");e.className=f.trim(g)}}}return this},removeClass:function(a){var c,d,e,g,h,i,j;if(f.isFunction(a))return this.each(function(b){f(this).removeClass(a.call(this,b,this.className))});if(a&&typeof a=="string"||a===b){c=(a||"").split(p);for(d=0,e=this.length;d<e;d++){g=this[d];if(g.nodeType===1&&g.className)if(a){h=(" "+g.className+" ").replace(o," ");for(i=0,j=c.length;i<j;i++)h=h.replace(" "+c[i]+" "," ");g.className=f.trim(h)}else g.className=""}}return this},toggleClass:function(a,b){var c=typeof a,d=typeof b=="boolean";if(f.isFunction(a))return this.each(function(c){f(this).toggleClass(a.call(this,c,this.className,b),b)});return this.each(function(){if(c==="string"){var e,g=0,h=f(this),i=b,j=a.split(p);while(e=j[g++])i=d?i:!h.hasClass(e),h[i?"addClass":"removeClass"](e)}else if(c==="undefined"||c==="boolean")this.className&&f._data(this,"__className__",this.className),this.className=this.className||a===!1?"":f._data(this,"__className__")||""})},hasClass:function(a){var b=" "+a+" ",c=0,d=this.length;for(;c<d;c++)if(this[c].nodeType===1&&(" "+this[c].className+" ").replace(o," ").indexOf(b)>-1)return!0;return!1},val:function(a){var c,d,e,g=this[0];{if(!!arguments.length){e=f.isFunction(a);return this.each(function(d){var g=f(this),h;if(this.nodeType===1){e?h=a.call(this,d,g.val()):h=a,h==null?h="":typeof h=="number"?h+="":f.isArray(h)&&(h=f.map(h,function(a){return 
a==null?"":a+""})),c=f.valHooks[this.type]||f.valHooks[this.nodeName.toLowerCase()];if(!c||!("set"in c)||c.set(this,h,"value")===b)this.value=h}})}if(g){c=f.valHooks[g.type]||f.valHooks[g.nodeName.toLowerCase()];if(c&&"get"in c&&(d=c.get(g,"value"))!==b)return d;d=g.value;return typeof d=="string"?d.replace(q,""):d==null?"":d}}}}),f.extend({valHooks:{option:{get:function(a){var b=a.attributes.value;return!b||b.specified?a.value:a.text}},select:{get:function(a){var b,c,d,e,g=a.selectedIndex,h=[],i=a.options,j=a.type==="select-one";if(g<0)return null;c=j?g:0,d=j?g+1:i.length;for(;c<d;c++){e=i[c];if(e.selected&&(f.support.optDisabled?!e.disabled:e.getAttribute("disabled")===null)&&(!e.parentNode.disabled||!f.nodeName(e.parentNode,"optgroup"))){b=f(e).val();if(j)return b;h.push(b)}}if(j&&!h.length&&i.length)return f(i[g]).val();return h},set:function(a,b){var c=f.makeArray(b);f(a).find("option").each(function(){this.selected=f.inArray(f(this).val(),c)>=0}),c.length||(a.selectedIndex=-1);return c}}},attrFn:{val:!0,css:!0,html:!0,text:!0,data:!0,width:!0,height:!0,offset:!0},attr:function(a,c,d,e){var g,h,i,j=a.nodeType;if(!!a&&j!==3&&j!==8&&j!==2){if(e&&c in f.attrFn)return f(a)[c](d);if(typeof a.getAttribute=="undefined")return f.prop(a,c,d);i=j!==1||!f.isXMLDoc(a),i&&(c=c.toLowerCase(),h=f.attrHooks[c]||(u.test(c)?x:w));if(d!==b){if(d===null){f.removeAttr(a,c);return}if(h&&"set"in h&&i&&(g=h.set(a,d,c))!==b)return g;a.setAttribute(c,""+d);return d}if(h&&"get"in h&&i&&(g=h.get(a,c))!==null)return g;g=a.getAttribute(c);return g===null?b:g}},removeAttr:function(a,b){var c,d,e,g,h,i=0;if(b&&a.nodeType===1){d=b.toLowerCase().split(p),g=d.length;for(;i<g;i++)e=d[i],e&&(c=f.propFix[e]||e,h=u.test(e),h||f.attr(a,e,""),a.removeAttribute(v?e:c),h&&c in a&&(a[c]=!1))}},attrHooks:{type:{set:function(a,b){if(r.test(a.nodeName)&&a.parentNode)f.error("type property can't be changed");else if(!f.support.radioValue&&b==="radio"&&f.nodeName(a,"input")){var c=a.value;a.setAttribute("type",b),c&&(a.value=c);return b}}},value:{get:function(a,b){if(w&&f.nodeName(a,"button"))return w.get(a,b);return b in a?a.value:null},set:function(a,b,c){if(w&&f.nodeName(a,"button"))return w.set(a,b,c);a.value=b}}},propFix:{tabindex:"tabIndex",readonly:"readOnly","for":"htmlFor","class":"className",maxlength:"maxLength",cellspacing:"cellSpacing",cellpadding:"cellPadding",rowspan:"rowSpan",colspan:"colSpan",usemap:"useMap",frameborder:"frameBorder",contenteditable:"contentEditable"},prop:function(a,c,d){var e,g,h,i=a.nodeType;if(!!a&&i!==3&&i!==8&&i!==2){h=i!==1||!f.isXMLDoc(a),h&&(c=f.propFix[c]||c,g=f.propHooks[c]);return d!==b?g&&"set"in g&&(e=g.set(a,d,c))!==b?e:a[c]=d:g&&"get"in g&&(e=g.get(a,c))!==null?e:a[c]}},propHooks:{tabIndex:{get:function(a){var c=a.getAttributeNode("tabindex");return c&&c.specified?parseInt(c.value,10):s.test(a.nodeName)||t.test(a.nodeName)&&a.href?0:b}}}}),f.attrHooks.tabindex=f.propHooks.tabIndex,x={get:function(a,c){var d,e=f.prop(a,c);return e===!0||typeof e!="boolean"&&(d=a.getAttributeNode(c))&&d.nodeValue!==!1?c.toLowerCase():b},set:function(a,b,c){var d;b===!1?f.removeAttr(a,c):(d=f.propFix[c]||c,d in a&&(a[d]=!0),a.setAttribute(c,c.toLowerCase()));return c}},v||(y={name:!0,id:!0,coords:!0},w=f.valHooks.button={get:function(a,c){var d;d=a.getAttributeNode(c);return d&&(y[c]?d.nodeValue!=="":d.specified)?d.nodeValue:b},set:function(a,b,d){var e=a.getAttributeNode(d);e||(e=c.createAttribute(d),a.setAttributeNode(e));return 
e.nodeValue=b+""}},f.attrHooks.tabindex.set=w.set,f.each(["width","height"],function(a,b){f.attrHooks[b]=f.extend(f.attrHooks[b],{set:function(a,c){if(c===""){a.setAttribute(b,"auto");return c}}})}),f.attrHooks.contenteditable={get:w.get,set:function(a,b,c){b===""&&(b="false"),w.set(a,b,c)}}),f.support.hrefNormalized||f.each(["href","src","width","height"],function(a,c){f.attrHooks[c]=f.extend(f.attrHooks[c],{get:function(a){var d=a.getAttribute(c,2);return d===null?b:d}})}),f.support.style||(f.attrHooks.style={get:function(a){return a.style.cssText.toLowerCase()||b},set:function(a,b){return a.style.cssText=""+b}}),f.support.optSelected||(f.propHooks.selected=f.extend(f.propHooks.selected,{get:function(a){var b=a.parentNode;b&&(b.selectedIndex,b.parentNode&&b.parentNode.selectedIndex);return null}})),f.support.enctype||(f.propFix.enctype="encoding"),f.support.checkOn||f.each(["radio","checkbox"],function(){f.valHooks[this]={get:function(a){return a.getAttribute("value")===null?"on":a.value}}}),f.each(["radio","checkbox"],function(){f.valHooks[this]=f.extend(f.valHooks[this],{set:function(a,b){if(f.isArray(b))return a.checked=f.inArray(f(a).val(),b)>=0}})});var z=/^(?:textarea|input|select)$/i,A=/^([^\.]*)?(?:\.(.+))?$/,B=/(?:^|\s)hover(\.\S+)?\b/,C=/^key/,D=/^(?:mouse|contextmenu)|click/,E=/^(?:focusinfocus|focusoutblur)$/,F=/^(\w*)(?:#([\w\-]+))?(?:\.([\w\-]+))?$/,G=function(
-a){var b=F.exec(a);b&&(b[1]=(b[1]||"").toLowerCase(),b[3]=b[3]&&new RegExp("(?:^|\\s)"+b[3]+"(?:\\s|$)"));return b},H=function(a,b){var c=a.attributes||{};return(!b[1]||a.nodeName.toLowerCase()===b[1])&&(!b[2]||(c.id||{}).value===b[2])&&(!b[3]||b[3].test((c["class"]||{}).value))},I=function(a){return f.event.special.hover?a:a.replace(B,"mouseenter$1 mouseleave$1")};f.event={add:function(a,c,d,e,g){var h,i,j,k,l,m,n,o,p,q,r,s;if(!(a.nodeType===3||a.nodeType===8||!c||!d||!(h=f._data(a)))){d.handler&&(p=d,d=p.handler,g=p.selector),d.guid||(d.guid=f.guid++),j=h.events,j||(h.events=j={}),i=h.handle,i||(h.handle=i=function(a){return typeof f!="undefined"&&(!a||f.event.triggered!==a.type)?f.event.dispatch.apply(i.elem,arguments):b},i.elem=a),c=f.trim(I(c)).split(" ");for(k=0;k<c.length;k++){l=A.exec(c[k])||[],m=l[1],n=(l[2]||"").split(".").sort(),s=f.event.special[m]||{},m=(g?s.delegateType:s.bindType)||m,s=f.event.special[m]||{},o=f.extend({type:m,origType:l[1],data:e,handler:d,guid:d.guid,selector:g,quick:g&&G(g),namespace:n.join(".")},p),r=j[m];if(!r){r=j[m]=[],r.delegateCount=0;if(!s.setup||s.setup.call(a,e,n,i)===!1)a.addEventListener?a.addEventListener(m,i,!1):a.attachEvent&&a.attachEvent("on"+m,i)}s.add&&(s.add.call(a,o),o.handler.guid||(o.handler.guid=d.guid)),g?r.splice(r.delegateCount++,0,o):r.push(o),f.event.global[m]=!0}a=null}},global:{},remove:function(a,b,c,d,e){var g=f.hasData(a)&&f._data(a),h,i,j,k,l,m,n,o,p,q,r,s;if(!!g&&!!(o=g.events)){b=f.trim(I(b||"")).split(" ");for(h=0;h<b.length;h++){i=A.exec(b[h])||[],j=k=i[1],l=i[2];if(!j){for(j in o)f.event.remove(a,j+b[h],c,d,!0);continue}p=f.event.special[j]||{},j=(d?p.delegateType:p.bindType)||j,r=o[j]||[],m=r.length,l=l?new RegExp("(^|\\.)"+l.split(".").sort().join("\\.(?:.*\\.)?")+"(\\.|$)"):null;for(n=0;n<r.length;n++)s=r[n],(e||k===s.origType)&&(!c||c.guid===s.guid)&&(!l||l.test(s.namespace))&&(!d||d===s.selector||d==="**"&&s.selector)&&(r.splice(n--,1),s.selector&&r.delegateCount--,p.remove&&p.remove.call(a,s));r.length===0&&m!==r.length&&((!p.teardown||p.teardown.call(a,l)===!1)&&f.removeEvent(a,j,g.handle),delete o[j])}f.isEmptyObject(o)&&(q=g.handle,q&&(q.elem=null),f.removeData(a,["events","handle"],!0))}},customEvent:{getData:!0,setData:!0,changeData:!0},trigger:function(c,d,e,g){if(!e||e.nodeType!==3&&e.nodeType!==8){var h=c.type||c,i=[],j,k,l,m,n,o,p,q,r,s;if(E.test(h+f.event.triggered))return;h.indexOf("!")>=0&&(h=h.slice(0,-1),k=!0),h.indexOf(".")>=0&&(i=h.split("."),h=i.shift(),i.sort());if((!e||f.event.customEvent[h])&&!f.event.global[h])return;c=typeof c=="object"?c[f.expando]?c:new f.Event(h,c):new f.Event(h),c.type=h,c.isTrigger=!0,c.exclusive=k,c.namespace=i.join("."),c.namespace_re=c.namespace?new RegExp("(^|\\.)"+i.join("\\.(?:.*\\.)?")+"(\\.|$)"):null,o=h.indexOf(":")<0?"on"+h:"";if(!e){j=f.cache;for(l in 
j)j[l].events&&j[l].events[h]&&f.event.trigger(c,d,j[l].handle.elem,!0);return}c.result=b,c.target||(c.target=e),d=d!=null?f.makeArray(d):[],d.unshift(c),p=f.event.special[h]||{};if(p.trigger&&p.trigger.apply(e,d)===!1)return;r=[[e,p.bindType||h]];if(!g&&!p.noBubble&&!f.isWindow(e)){s=p.delegateType||h,m=E.test(s+h)?e:e.parentNode,n=null;for(;m;m=m.parentNode)r.push([m,s]),n=m;n&&n===e.ownerDocument&&r.push([n.defaultView||n.parentWindow||a,s])}for(l=0;l<r.length&&!c.isPropagationStopped();l++)m=r[l][0],c.type=r[l][1],q=(f._data(m,"events")||{})[c.type]&&f._data(m,"handle"),q&&q.apply(m,d),q=o&&m[o],q&&f.acceptData(m)&&q.apply(m,d)===!1&&c.preventDefault();c.type=h,!g&&!c.isDefaultPrevented()&&(!p._default||p._default.apply(e.ownerDocument,d)===!1)&&(h!=="click"||!f.nodeName(e,"a"))&&f.acceptData(e)&&o&&e[h]&&(h!=="focus"&&h!=="blur"||c.target.offsetWidth!==0)&&!f.isWindow(e)&&(n=e[o],n&&(e[o]=null),f.event.triggered=h,e[h](),f.event.triggered=b,n&&(e[o]=n));return c.result}},dispatch:function(c){c=f.event.fix(c||a.event);var d=(f._data(this,"events")||{})[c.type]||[],e=d.delegateCount,g=[].slice.call(arguments,0),h=!c.exclusive&&!c.namespace,i=f.event.special[c.type]||{},j=[],k,l,m,n,o,p,q,r,s,t,u;g[0]=c,c.delegateTarget=this;if(!i.preDispatch||i.preDispatch.call(this,c)!==!1){if(e&&(!c.button||c.type!=="click")){n=f(this),n.context=this.ownerDocument||this;for(m=c.target;m!=this;m=m.parentNode||this)if(m.disabled!==!0){p={},r=[],n[0]=m;for(k=0;k<e;k++)s=d[k],t=s.selector,p[t]===b&&(p[t]=s.quick?H(m,s.quick):n.is(t)),p[t]&&r.push(s);r.length&&j.push({elem:m,matches:r})}}d.length>e&&j.push({elem:this,matches:d.slice(e)});for(k=0;k<j.length&&!c.isPropagationStopped();k++){q=j[k],c.currentTarget=q.elem;for(l=0;l<q.matches.length&&!c.isImmediatePropagationStopped();l++){s=q.matches[l];if(h||!c.namespace&&!s.namespace||c.namespace_re&&c.namespace_re.test(s.namespace))c.data=s.data,c.handleObj=s,o=((f.event.special[s.origType]||{}).handle||s.handler).apply(q.elem,g),o!==b&&(c.result=o,o===!1&&(c.preventDefault(),c.stopPropagation()))}}i.postDispatch&&i.postDispatch.call(this,c);return c.result}},props:"attrChange attrName relatedNode srcElement altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),fixHooks:{},keyHooks:{props:"char charCode key keyCode".split(" "),filter:function(a,b){a.which==null&&(a.which=b.charCode!=null?b.charCode:b.keyCode);return a}},mouseHooks:{props:"button buttons clientX clientY fromElement offsetX offsetY pageX pageY screenX screenY toElement".split(" "),filter:function(a,d){var e,f,g,h=d.button,i=d.fromElement;a.pageX==null&&d.clientX!=null&&(e=a.target.ownerDocument||c,f=e.documentElement,g=e.body,a.pageX=d.clientX+(f&&f.scrollLeft||g&&g.scrollLeft||0)-(f&&f.clientLeft||g&&g.clientLeft||0),a.pageY=d.clientY+(f&&f.scrollTop||g&&g.scrollTop||0)-(f&&f.clientTop||g&&g.clientTop||0)),!a.relatedTarget&&i&&(a.relatedTarget=i===a.target?d.toElement:i),!a.which&&h!==b&&(a.which=h&1?1:h&2?3:h&4?2:0);return a}},fix:function(a){if(a[f.expando])return a;var d,e,g=a,h=f.event.fixHooks[a.type]||{},i=h.props?this.props.concat(h.props):this.props;a=f.Event(g);for(d=i.length;d;)e=i[--d],a[e]=g[e];a.target||(a.target=g.srcElement||c),a.target.nodeType===3&&(a.target=a.target.parentNode),a.metaKey===b&&(a.metaKey=a.ctrlKey);return 
h.filter?h.filter(a,g):a},special:{ready:{setup:f.bindReady},load:{noBubble:!0},focus:{delegateType:"focusin"},blur:{delegateType:"focusout"},beforeunload:{setup:function(a,b,c){f.isWindow(this)&&(this.onbeforeunload=c)},teardown:function(a,b){this.onbeforeunload===b&&(this.onbeforeunload=null)}}},simulate:function(a,b,c,d){var e=f.extend(new f.Event,c,{type:a,isSimulated:!0,originalEvent:{}});d?f.event.trigger(e,null,b):f.event.dispatch.call(b,e),e.isDefaultPrevented()&&c.preventDefault()}},f.event.handle=f.event.dispatch,f.removeEvent=c.removeEventListener?function(a,b,c){a.removeEventListener&&a.removeEventListener(b,c,!1)}:function(a,b,c){a.detachEvent&&a.detachEvent("on"+b,c)},f.Event=function(a,b){if(!(this instanceof f.Event))return new f.Event(a,b);a&&a.type?(this.originalEvent=a,this.type=a.type,this.isDefaultPrevented=a.defaultPrevented||a.returnValue===!1||a.getPreventDefault&&a.getPreventDefault()?K:J):this.type=a,b&&f.extend(this,b),this.timeStamp=a&&a.timeStamp||f.now(),this[f.expando]=!0},f.Event.prototype={preventDefault:function(){this.isDefaultPrevented=K;var a=this.originalEvent;!a||(a.preventDefault?a.preventDefault():a.returnValue=!1)},stopPropagation:function(){this.isPropagationStopped=K;var a=this.originalEvent;!a||(a.stopPropagation&&a.stopPropagation(),a.cancelBubble=!0)},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=K,this.stopPropagation()},isDefaultPrevented:J,isPropagationStopped:J,isImmediatePropagationStopped:J},f.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(a,b){f.event.special[a]={delegateType:b,bindType:b,handle:function(a){var c=this,d=a.relatedTarget,e=a.handleObj,g=e.selector,h;if(!d||d!==c&&!f.contains(c,d))a.type=e.origType,h=e.handler.apply(this,arguments),a.type=b;return h}}}),f.support.submitBubbles||(f.event.special.submit={setup:function(){if(f.nodeName(this,"form"))return!1;f.event.add(this,"click._submit keypress._submit",function(a){var c=a.target,d=f.nodeName(c,"input")||f.nodeName(c,"button")?c.form:b;d&&!d._submit_attached&&(f.event.add(d,"submit._submit",function(a){a._submit_bubble=!0}),d._submit_attached=!0)})},postDispatch:function(a){a._submit_bubble&&(delete a._submit_bubble,this.parentNode&&!a.isTrigger&&f.event.simulate("submit",this.parentNode,a,!0))},teardown:function(){if(f.nodeName(this,"form"))return!1;f.event.remove(this,"._submit")}}),f.support.changeBubbles||(f.event.special.change={setup:function(){if(z.test(this.nodeName)){if(this.type==="checkbox"||this.type==="radio")f.event.add(this,"propertychange._change",function(a){a.originalEvent.propertyName==="checked"&&(this._just_changed=!0)}),f.event.add(this,"click._change",function(a){this._just_changed&&!a.isTrigger&&(this._just_changed=!1,f.event.simulate("change",this,a,!0))});return!1}f.event.add(this,"beforeactivate._change",function(a){var b=a.target;z.test(b.nodeName)&&!b._change_attached&&(f.event.add(b,"change._change",function(a){this.parentNode&&!a.isSimulated&&!a.isTrigger&&f.event.simulate("change",this.parentNode,a,!0)}),b._change_attached=!0)})},handle:function(a){var b=a.target;if(this!==b||a.isSimulated||a.isTrigger||b.type!=="radio"&&b.type!=="checkbox")return a.handleObj.handler.apply(this,arguments)},teardown:function(){f.event.remove(this,"._change");return z.test(this.nodeName)}}),f.support.focusinBubbles||f.each({focus:"focusin",blur:"focusout"},function(a,b){var 
d=0,e=function(a){f.event.simulate(b,a.target,f.event.fix(a),!0)};f.event.special[b]={setup:function(){d++===0&&c.addEventListener(a,e,!0)},teardown:function(){--d===0&&c.removeEventListener(a,e,!0)}}}),f.fn.extend({on:function(a,c,d,e,g){var h,i;if(typeof a=="object"){typeof c!="string"&&(d=d||c,c=b);for(i in a)this.on(i,c,d,a[i],g);return this}d==null&&e==null?(e=c,d=c=b):e==null&&(typeof c=="string"?(e=d,d=b):(e=d,d=c,c=b));if(e===!1)e=J;else if(!e)return this;g===1&&(h=e,e=function(a){f().off(a);return h.apply(this,arguments)},e.guid=h.guid||(h.guid=f.guid++));return this.each(function(){f.event.add(this,a,e,d,c)})},one:function(a,b,c,d){return this.on(a,b,c,d,1)},off:function(a,c,d){if(a&&a.preventDefault&&a.handleObj){var e=a.handleObj;f(a.delegateTarget).off(e.namespace?e.origType+"."+e.namespace:e.origType,e.selector,e.handler);return this}if(typeof a=="object"){for(var g in a)this.off(g,c,a[g]);return this}if(c===!1||typeof c=="function")d=c,c=b;d===!1&&(d=J);return this.each(function(){f.event.remove(this,a,d,c)})},bind:function(a,b,c){return this.on(a,null,b,c)},unbind:function(a,b){return this.off(a,null,b)},live:function(a,b,c){f(this.context).on(a,this.selector,b,c);return this},die:function(a,b){f(this.context).off(a,this.selector||"**",b);return this},delegate:function(a,b,c,d){return this.on(b,a,c,d)},undelegate:function(a,b,c){return arguments.length==1?this.off(a,"**"):this.off(b,a,c)},trigger:function(a,b){return this.each(function(){f.event.trigger(a,b,this)})},triggerHandler:function(a,b){if(this[0])return f.event.trigger(a,b,this[0],!0)},toggle:function(a){var b=arguments,c=a.guid||f.guid++,d=0,e=function(c){var e=(f._data(this,"lastToggle"+a.guid)||0)%d;f._data(this,"lastToggle"+a.guid,e+1),c.preventDefault();return b[e].apply(this,arguments)||!1};e.guid=c;while(d<b.length)b[d++].guid=c;return this.click(e)},hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)}}),f.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error contextmenu".split(" "),function(a,b){f.fn[b]=function(a,c){c==null&&(c=a,a=null);return arguments.length>0?this.on(b,null,a,c):this.trigger(b)},f.attrFn&&(f.attrFn[b]=!0),C.test(b)&&(f.event.fixHooks[b]=f.event.keyHooks),D.test(b)&&(f.event.fixHooks[b]=f.event.mouseHooks)}),function(){function x(a,b,c,e,f,g){for(var h=0,i=e.length;h<i;h++){var j=e[h];if(j){var k=!1;j=j[a];while(j){if(j[d]===c){k=e[j.sizset];break}if(j.nodeType===1){g||(j[d]=c,j.sizset=h);if(typeof b!="string"){if(j===b){k=!0;break}}else if(m.filter(b,[j]).length>0){k=j;break}}j=j[a]}e[h]=k}}}function w(a,b,c,e,f,g){for(var h=0,i=e.length;h<i;h++){var j=e[h];if(j){var k=!1;j=j[a];while(j){if(j[d]===c){k=e[j.sizset];break}j.nodeType===1&&!g&&(j[d]=c,j.sizset=h);if(j.nodeName.toLowerCase()===b){k=j;break}j=j[a]}e[h]=k}}}var a=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^\[\]]*\]|['"][^'"]*['"]|[^\[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,d="sizcache"+(Math.random()+"").replace(".",""),e=0,g=Object.prototype.toString,h=!1,i=!0,j=/\\/g,k=/\r\n/g,l=/\W/;[0,0].sort(function(){i=!1;return 0});var m=function(b,d,e,f){e=e||[],d=d||c;var h=d;if(d.nodeType!==1&&d.nodeType!==9)return[];if(!b||typeof b!="string")return e;var 
i,j,k,l,n,q,r,t,u=!0,v=m.isXML(d),w=[],x=b;do{a.exec(""),i=a.exec(x);if(i){x=i[3],w.push(i[1]);if(i[2]){l=i[3];break}}}while(i);if(w.length>1&&p.exec(b))if(w.length===2&&o.relative[w[0]])j=y(w[0]+w[1],d,f);else{j=o.relative[w[0]]?[d]:m(w.shift(),d);while(w.length)b=w.shift(),o.relative[b]&&(b+=w.shift()),j=y(b,j,f)}else{!f&&w.length>1&&d.nodeType===9&&!v&&o.match.ID.test(w[0])&&!o.match.ID.test(w[w.length-1])&&(n=m.find(w.shift(),d,v),d=n.expr?m.filter(n.expr,n.set)[0]:n.set[0]);if(d){n=f?{expr:w.pop(),set:s(f)}:m.find(w.pop(),w.length===1&&(w[0]==="~"||w[0]==="+")&&d.parentNode?d.parentNode:d,v),j=n.expr?m.filter(n.expr,n.set):n.set,w.length>0?k=s(j):u=!1;while(w.length)q=w.pop(),r=q,o.relative[q]?r=w.pop():q="",r==null&&(r=d),o.relative[q](k,r,v)}else k=w=[]}k||(k=j),k||m.error(q||b);if(g.call(k)==="[object Array]")if(!u)e.push.apply(e,k);else if(d&&d.nodeType===1)for(t=0;k[t]!=null;t++)k[t]&&(k[t]===!0||k[t].nodeType===1&&m.contains(d,k[t]))&&e.push(j[t]);else for(t=0;k[t]!=null;t++)k[t]&&k[t].nodeType===1&&e.push(j[t]);else s(k,e);l&&(m(l,h,e,f),m.uniqueSort(e));return e};m.uniqueSort=function(a){if(u){h=i,a.sort(u);if(h)for(var b=1;b<a.length;b++)a[b]===a[b-1]&&a.splice(b--,1)}return a},m.matches=function(a,b){return m(a,null,null,b)},m.matchesSelector=function(a,b){return m(b,null,null,[a]).length>0},m.find=function(a,b,c){var d,e,f,g,h,i;if(!a)return[];for(e=0,f=o.order.length;e<f;e++){h=o.order[e];if(g=o.leftMatch[h].exec(a)){i=g[1],g.splice(1,1);if(i.substr(i.length-1)!=="\\"){g[1]=(g[1]||"").replace(j,""),d=o.find[h](g,b,c);if(d!=null){a=a.replace(o.match[h],"");break}}}}d||(d=typeof b.getElementsByTagName!="undefined"?b.getElementsByTagName("*"):[]);return{set:d,expr:a}},m.filter=function(a,c,d,e){var f,g,h,i,j,k,l,n,p,q=a,r=[],s=c,t=c&&c[0]&&m.isXML(c[0]);while(a&&c.length){for(h in o.filter)if((f=o.leftMatch[h].exec(a))!=null&&f[2]){k=o.filter[h],l=f[1],g=!1,f.splice(1,1);if(l.substr(l.length-1)==="\\")continue;s===r&&(r=[]);if(o.preFilter[h]){f=o.preFilter[h](f,s,d,r,e,t);if(!f)g=i=!0;else if(f===!0)continue}if(f)for(n=0;(j=s[n])!=null;n++)j&&(i=k(j,f,n,s),p=e^i,d&&i!=null?p?g=!0:s[n]=!1:p&&(r.push(j),g=!0));if(i!==b){d||(s=r),a=a.replace(o.match[h],"");if(!g)return[];break}}if(a===q)if(g==null)m.error(a);else break;q=a}return s},m.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)};var n=m.getText=function(a){var b,c,d=a.nodeType,e="";if(d){if(d===1||d===9||d===11){if(typeof a.textContent=="string")return a.textContent;if(typeof a.innerText=="string")return a.innerText.replace(k,"");for(a=a.firstChild;a;a=a.nextSibling)e+=n(a)}else if(d===3||d===4)return a.nodeValue}else for(b=0;c=a[b];b++)c.nodeType!==8&&(e+=n(c));return e},o=m.selectors={order:["ID","NAME","TAG"],match:{ID:/#((?:[\w\u00c0-\uFFFF\-]|\\.)+)/,CLASS:/\.((?:[\w\u00c0-\uFFFF\-]|\\.)+)/,NAME:/\[name=['"]*((?:[\w\u00c0-\uFFFF\-]|\\.)+)['"]*\]/,ATTR:/\[\s*((?:[\w\u00c0-\uFFFF\-]|\\.)+)\s*(?:(\S?=)\s*(?:(['"])(.*?)\3|(#?(?:[\w\u00c0-\uFFFF\-]|\\.)*)|)|)\s*\]/,TAG:/^((?:[\w\u00c0-\uFFFF\*\-]|\\.)+)/,CHILD:/:(only|nth|last|first)-child(?:\(\s*(even|odd|(?:[+\-]?\d+|(?:[+\-]?\d*)?n\s*(?:[+\-]\s*\d+)?))\s*\))?/,POS:/:(nth|eq|gt|lt|first|last|even|odd)(?:\((\d*)\))?(?=[^\-]|$)/,PSEUDO:/:((?:[\w\u00c0-\uFFFF\-]|\\.)+)(?:\((['"]?)((?:\([^\)]+\)|[^\(\)]*)+)\2\))?/},leftMatch:{},attrMap:{"class":"className","for":"htmlFor"},attrHandle:{href:function(a){return a.getAttribute("href")},type:function(a){return a.getAttribute("type")}},relative:{"+":function(a,b){var c=typeof 
b=="string",d=c&&!l.test(b),e=c&&!d;d&&(b=b.toLowerCase());for(var f=0,g=a.length,h;f<g;f++)if(h=a[f]){while((h=h.previousSibling)&&h.nodeType!==1);a[f]=e||h&&h.nodeName.toLowerCase()===b?h||!1:h===b}e&&m.filter(b,a,!0)},">":function(a,b){var c,d=typeof b=="string",e=0,f=a.length;if(d&&!l.test(b)){b=b.toLowerCase();for(;e<f;e++){c=a[e];if(c){var g=c.parentNode;a[e]=g.nodeName.toLowerCase()===b?g:!1}}}else{for(;e<f;e++)c=a[e],c&&(a[e]=d?c.parentNode:c.parentNode===b);d&&m.filter(b,a,!0)}},"":function(a,b,c){var d,f=e++,g=x;typeof b=="string"&&!l.test(b)&&(b=b.toLowerCase(),d=b,g=w),g("parentNode",b,f,a,d,c)},"~":function(a,b,c){var d,f=e++,g=x;typeof b=="string"&&!l.test(b)&&(b=b.toLowerCase(),d=b,g=w),g("previousSibling",b,f,a,d,c)}},find:{ID:function(a,b,c){if(typeof b.getElementById!="undefined"&&!c){var d=b.getElementById(a[1]);return d&&d.parentNode?[d]:[]}},NAME:function(a,b){if(typeof b.getElementsByName!="undefined"){var c=[],d=b.getElementsByName(a[1]);for(var e=0,f=d.length;e<f;e++)d[e].getAttribute("name")===a[1]&&c.push(d[e]);return c.length===0?null:c}},TAG:function(a,b){if(typeof b.getElementsByTagName!="undefined")return b.getElementsByTagName(a[1])}},preFilter:{CLASS:function(a,b,c,d,e,f){a=" "+a[1].replace(j,"")+" ";if(f)return a;for(var g=0,h;(h=b[g])!=null;g++)h&&(e^(h.className&&(" "+h.className+" ").replace(/[\t\n\r]/g," ").indexOf(a)>=0)?c||d.push(h):c&&(b[g]=!1));return!1},ID:function(a){return a[1].replace(j,"")},TAG:function(a,b){return a[1].replace(j,"").toLowerCase()},CHILD:function(a){if(a[1]==="nth"){a[2]||m.error(a[0]),a[2]=a[2].replace(/^\+|\s*/g,"");var b=/(-?)(\d*)(?:n([+\-]?\d*))?/.exec(a[2]==="even"&&"2n"||a[2]==="odd"&&"2n+1"||!/\D/.test(a[2])&&"0n+"+a[2]||a[2]);a[2]=b[1]+(b[2]||1)-0,a[3]=b[3]-0}else a[2]&&m.error(a[0]);a[0]=e++;return a},ATTR:function(a,b,c,d,e,f){var g=a[1]=a[1].replace(j,"");!f&&o.attrMap[g]&&(a[1]=o.attrMap[g]),a[4]=(a[4]||a[5]||"").replace(j,""),a[2]==="~="&&(a[4]=" "+a[4]+" ");return a},PSEUDO:function(b,c,d,e,f){if(b[1]==="not")if((a.exec(b[3])||"").length>1||/^\w/.test(b[3]))b[3]=m(b[3],null,null,c);else{var g=m.filter(b[3],c,d,!0^f);d||e.push.apply(e,g);return!1}else if(o.match.POS.test(b[0])||o.match.CHILD.test(b[0]))return!0;return b},POS:function(a){a.unshift(!0);return a}},filters:{enabled:function(a){return a.disabled===!1&&a.type!=="hidden"},disabled:function(a){return a.disabled===!0},checked:function(a){return a.checked===!0},selected:function(a){a.parentNode&&a.parentNode.selectedIndex;return a.selected===!0},parent:function(a){return!!a.firstChild},empty:function(a){return!a.firstChild},has:function(a,b,c){return!!m(c[3],a).length},header:function(a){return/h\d/i.test(a.nodeName)},text:function(a){var b=a.getAttribute("type"),c=a.type;return a.nodeName.toLowerCase()==="input"&&"text"===c&&(b===c||b===null)},radio:function(a){return a.nodeName.toLowerCase()==="input"&&"radio"===a.type},checkbox:function(a){return a.nodeName.toLowerCase()==="input"&&"checkbox"===a.type},file:function(a){return a.nodeName.toLowerCase()==="input"&&"file"===a.type},password:function(a){return a.nodeName.toLowerCase()==="input"&&"password"===a.type},submit:function(a){var b=a.nodeName.toLowerCase();return(b==="input"||b==="button")&&"submit"===a.type},image:function(a){return a.nodeName.toLowerCase()==="input"&&"image"===a.type},reset:function(a){var b=a.nodeName.toLowerCase();return(b==="input"||b==="button")&&"reset"===a.type},button:function(a){var b=a.nodeName.toLowerCase();return 
b==="input"&&"button"===a.type||b==="button"},input:function(a){return/input|select|textarea|button/i.test(a.nodeName)},focus:function(a){return a===a.ownerDocument.activeElement}},setFilters:{first:function(a,b){return b===0},last:function(a,b,c,d){return b===d.length-1},even:function(a,b){return b%2===0},odd:function(a,b){return b%2===1},lt:function(a,b,c){return b<c[3]-0},gt:function(a,b,c){return b>c[3]-0},nth:function(a,b,c){return c[3]-0===b},eq:function(a,b,c){return c[3]-0===b}},filter:{PSEUDO:function(a,b,c,d){var e=b[1],f=o.filters[e];if(f)return f(a,c,b,d);if(e==="contains")return(a.textContent||a.innerText||n([a])||"").indexOf(b[3])>=0;if(e==="not"){var g=b[3];for(var h=0,i=g.length;h<i;h++)if(g[h]===a)return!1;return!0}m.error(e)},CHILD:function(a,b){var c,e,f,g,h,i,j,k=b[1],l=a;switch(k){case"only":case"first":while(l=l.previousSibling)if(l.nodeType===1)return!1;if(k==="first")return!0;l=a;case"last":while(l=l.nextSibling)if(l.nodeType===1)return!1;return!0;case"nth":c=b[2],e=b[3];if(c===1&&e===0)return!0;f=b[0],g=a.parentNode;if(g&&(g[d]!==f||!a.nodeIndex)){i=0;for(l=g.firstChild;l;l=l.nextSibling)l.nodeType===1&&(l.nodeIndex=++i);g[d]=f}j=a.nodeIndex-e;return c===0?j===0:j%c===0&&j/c>=0}},ID:function(a,b){return a.nodeType===1&&a.getAttribute("id")===b},TAG:function(a,b){return b==="*"&&a.nodeType===1||!!a.nodeName&&a.nodeName.toLowerCase()===b},CLASS:function(a,b){return(" "+(a.className||a.getAttribute("class"))+" ").indexOf(b)>-1},ATTR:function(a,b){var c=b[1],d=m.attr?m.attr(a,c):o.attrHandle[c]?o.attrHandle[c](a):a[c]!=null?a[c]:a.getAttribute(c),e=d+"",f=b[2],g=b[4];return d==null?f==="!=":!f&&m.attr?d!=null:f==="="?e===g:f==="*="?e.indexOf(g)>=0:f==="~="?(" "+e+" ").indexOf(g)>=0:g?f==="!="?e!==g:f==="^="?e.indexOf(g)===0:f==="$="?e.substr(e.length-g.length)===g:f==="|="?e===g||e.substr(0,g.length+1)===g+"-":!1:e&&d!==!1},POS:function(a,b,c,d){var e=b[2],f=o.setFilters[e];if(f)return f(a,c,b,d)}}},p=o.match.POS,q=function(a,b){return"\\"+(b-0+1)};for(var r in o.match)o.match[r]=new RegExp(o.match[r].source+/(?![^\[]*\])(?![^\(]*\))/.source),o.leftMatch[r]=new RegExp(/(^(?:.|\r|\n)*?)/.source+o.match[r].source.replace(/\\(\d+)/g,q));o.match.globalPOS=p;var s=function(a,b){a=Array.prototype.slice.call(a,0);if(b){b.push.apply(b,a);return b}return a};try{Array.prototype.slice.call(c.documentElement.childNodes,0)[0].nodeType}catch(t){s=function(a,b){var c=0,d=b||[];if(g.call(a)==="[object Array]")Array.prototype.push.apply(d,a);else if(typeof a.length=="number")for(var e=a.length;c<e;c++)d.push(a[c]);else for(;a[c];c++)d.push(a[c]);return d}}var u,v;c.documentElement.compareDocumentPosition?u=function(a,b){if(a===b){h=!0;return 0}if(!a.compareDocumentPosition||!b.compareDocumentPosition)return a.compareDocumentPosition?-1:1;return a.compareDocumentPosition(b)&4?-1:1}:(u=function(a,b){if(a===b){h=!0;return 0}if(a.sourceIndex&&b.sourceIndex)return a.sourceIndex-b.sourceIndex;var c,d,e=[],f=[],g=a.parentNode,i=b.parentNode,j=g;if(g===i)return v(a,b);if(!g)return-1;if(!i)return 1;while(j)e.unshift(j),j=j.parentNode;j=i;while(j)f.unshift(j),j=j.parentNode;c=e.length,d=f.length;for(var k=0;k<c&&k<d;k++)if(e[k]!==f[k])return v(e[k],f[k]);return k===c?v(a,f[k],-1):v(e[k],b,1)},v=function(a,b,c){if(a===b)return c;var d=a.nextSibling;while(d){if(d===b)return-1;d=d.nextSibling}return 1}),function(){var a=c.createElement("div"),d="script"+(new Date).getTime(),e=c.documentElement;a.innerHTML="<a 
name='"+d+"'/>",e.insertBefore(a,e.firstChild),c.getElementById(d)&&(o.find.ID=function(a,c,d){if(typeof c.getElementById!="undefined"&&!d){var e=c.getElementById(a[1]);return e?e.id===a[1]||typeof e.getAttributeNode!="undefined"&&e.getAttributeNode("id").nodeValue===a[1]?[e]:b:[]}},o.filter.ID=function(a,b){var c=typeof a.getAttributeNode!="undefined"&&a.getAttributeNode("id");return a.nodeType===1&&c&&c.nodeValue===b}),e.removeChild(a),e=a=null}(),function(){var a=c.createElement("div");a.appendChild(c.createComment("")),a.getElementsByTagName("*").length>0&&(o.find.TAG=function(a,b){var c=b.getElementsByTagName(a[1]);if(a[1]==="*"){var d=[];for(var e=0;c[e];e++)c[e].nodeType===1&&d.push(c[e]);c=d}return c}),a.innerHTML="<a href='#'></a>",a.firstChild&&typeof a.firstChild.getAttribute!="undefined"&&a.firstChild.getAttribute("href")!=="#"&&(o.attrHandle.href=function(a){return a.getAttribute("href",2)}),a=null}(),c.querySelectorAll&&function(){var a=m,b=c.createElement("div"),d="__sizzle__";b.innerHTML="<p class='TEST'></p>";if(!b.querySelectorAll||b.querySelectorAll(".TEST").length!==0){m=function(b,e,f,g){e=e||c;if(!g&&!m.isXML(e)){var h=/^(\w+$)|^\.([\w\-]+$)|^#([\w\-]+$)/.exec(b);if(h&&(e.nodeType===1||e.nodeType===9)){if(h[1])return s(e.getElementsByTagName(b),f);if(h[2]&&o.find.CLASS&&e.getElementsByClassName)return s(e.getElementsByClassName(h[2]),f)}if(e.nodeType===9){if(b==="body"&&e.body)return s([e.body],f);if(h&&h[3]){var i=e.getElementById(h[3]);if(!i||!i.parentNode)return s([],f);if(i.id===h[3])return s([i],f)}try{return s(e.querySelectorAll(b),f)}catch(j){}}else if(e.nodeType===1&&e.nodeName.toLowerCase()!=="object"){var k=e,l=e.getAttribute("id"),n=l||d,p=e.parentNode,q=/^\s*[+~]/.test(b);l?n=n.replace(/'/g,"\\$&"):e.setAttribute("id",n),q&&p&&(e=e.parentNode);try{if(!q||p)return s(e.querySelectorAll("[id='"+n+"'] "+b),f)}catch(r){}finally{l||k.removeAttribute("id")}}}return a(b,e,f,g)};for(var e in a)m[e]=a[e];b=null}}(),function(){var a=c.documentElement,b=a.matchesSelector||a.mozMatchesSelector||a.webkitMatchesSelector||a.msMatchesSelector;if(b){var d=!b.call(c.createElement("div"),"div"),e=!1;try{b.call(c.documentElement,"[test!='']:sizzle")}catch(f){e=!0}m.matchesSelector=function(a,c){c=c.replace(/\=\s*([^'"\]]*)\s*\]/g,"='$1']");if(!m.isXML(a))try{if(e||!o.match.PSEUDO.test(c)&&!/!=/.test(c)){var f=b.call(a,c);if(f||!d||a.document&&a.document.nodeType!==11)return f}}catch(g){}return m(c,null,null,[a]).length>0}}}(),function(){var a=c.createElement("div");a.innerHTML="<div class='test e'></div><div class='test'></div>";if(!!a.getElementsByClassName&&a.getElementsByClassName("e").length!==0){a.lastChild.className="e";if(a.getElementsByClassName("e").length===1)return;o.order.splice(1,0,"CLASS"),o.find.CLASS=function(a,b,c){if(typeof b.getElementsByClassName!="undefined"&&!c)return b.getElementsByClassName(a[1])},a=null}}(),c.documentElement.contains?m.contains=function(a,b){return a!==b&&(a.contains?a.contains(b):!0)}:c.documentElement.compareDocumentPosition?m.contains=function(a,b){return!!(a.compareDocumentPosition(b)&16)}:m.contains=function(){return!1},m.isXML=function(a){var b=(a?a.ownerDocument||a:0).documentElement;return b?b.nodeName!=="HTML":!1};var y=function(a,b,c){var d,e=[],f="",g=b.nodeType?[b]:b;while(d=o.match.PSEUDO.exec(a))f+=d[0],a=a.replace(o.match.PSEUDO,"");a=o.relative[a]?a+"*":a;for(var h=0,i=g.length;h<i;h++)m(a,g[h],e,c);return 
m.filter(f,e)};m.attr=f.attr,m.selectors.attrMap={},f.find=m,f.expr=m.selectors,f.expr[":"]=f.expr.filters,f.unique=m.uniqueSort,f.text=m.getText,f.isXMLDoc=m.isXML,f.contains=m.contains}();var L=/Until$/,M=/^(?:parents|prevUntil|prevAll)/,N=/,/,O=/^.[^:#\[\.,]*$/,P=Array.prototype.slice,Q=f.expr.match.globalPOS,R={children:!0,contents:!0,next:!0,prev:!0};f.fn.extend({find:function(a){var b=this,c,d;if(typeof a!="string")return f(a).filter(function(){for(c=0,d=b.length;c<d;c++)if(f.contains(b[c],this))return!0});var e=this.pushStack("","find",a),g,h,i;for(c=0,d=this.length;c<d;c++){g=e.length,f.find(a,this[c],e);if(c>0)for(h=g;h<e.length;h++)for(i=0;i<g;i++)if(e[i]===e[h]){e.splice(h--,1);break}}return e},has:function(a){var b=f(a);return this.filter(function(){for(var a=0,c=b.length;a<c;a++)if(f.contains(this,b[a]))return!0})},not:function(a){return this.pushStack(T(this,a,!1),"not",a)},filter:function(a){return this.pushStack(T(this,a,!0),"filter",a)},is:function(a){return!!a&&(typeof a=="string"?Q.test(a)?f(a,this.context).index(this[0])>=0:f.filter(a,this).length>0:this.filter(a).length>0)},closest:function(a,b){var c=[],d,e,g=this[0];if(f.isArray(a)){var h=1;while(g&&g.ownerDocument&&g!==b){for(d=0;d<a.length;d++)f(g).is(a[d])&&c.push({selector:a[d],elem:g,level:h});g=g.parentNode,h++}return c}var i=Q.test(a)||typeof a!="string"?f(a,b||this.context):0;for(d=0,e=this.length;d<e;d++){g=this[d];while(g){if(i?i.index(g)>-1:f.find.matchesSelector(g,a)){c.push(g);break}g=g.parentNode;if(!g||!g.ownerDocument||g===b||g.nodeType===11)break}}c=c.length>1?f.unique(c):c;return this.pushStack(c,"closest",a)},index:function(a){if(!a)return this[0]&&this[0].parentNode?this.prevAll().length:-1;if(typeof a=="string")return f.inArray(this[0],f(a));return f.inArray(a.jquery?a[0]:a,this)},add:function(a,b){var c=typeof a=="string"?f(a,b):f.makeArray(a&&a.nodeType?[a]:a),d=f.merge(this.get(),c);return this.pushStack(S(c[0])||S(d[0])?d:f.unique(d))},andSelf:function(){return this.add(this.prevObject)}}),f.each({parent:function(a){var b=a.parentNode;return b&&b.nodeType!==11?b:null},parents:function(a){return f.dir(a,"parentNode")},parentsUntil:function(a,b,c){return f.dir(a,"parentNode",c)},next:function(a){return f.nth(a,2,"nextSibling")},prev:function(a){return f.nth(a,2,"previousSibling")},nextAll:function(a){return f.dir(a,"nextSibling")},prevAll:function(a){return f.dir(a,"previousSibling")},nextUntil:function(a,b,c){return f.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return f.dir(a,"previousSibling",c)},siblings:function(a){return f.sibling((a.parentNode||{}).firstChild,a)},children:function(a){return f.sibling(a.firstChild)},contents:function(a){return f.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:f.makeArray(a.childNodes)}},function(a,b){f.fn[a]=function(c,d){var e=f.map(this,b,c);L.test(a)||(d=c),d&&typeof d=="string"&&(e=f.filter(d,e)),e=this.length>1&&!R[a]?f.unique(e):e,(this.length>1||N.test(d))&&M.test(a)&&(e=e.reverse());return this.pushStack(e,a,P.call(arguments).join(","))}}),f.extend({filter:function(a,b,c){c&&(a=":not("+a+")");return b.length===1?f.find.matchesSelector(b[0],a)?[b[0]]:[]:f.find.matches(a,b)},dir:function(a,c,d){var e=[],g=a[c];while(g&&g.nodeType!==9&&(d===b||g.nodeType!==1||!f(g).is(d)))g.nodeType===1&&e.push(g),g=g[c];return e},nth:function(a,b,c,d){b=b||1;var e=0;for(;a;a=a[c])if(a.nodeType===1&&++e===b)break;return a},sibling:function(a,b){var c=[];for(;a;a=a.nextSibling)a.nodeType===1&&a!==b&&c.push(a);return c}});var 
V="abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",W=/ jQuery\d+="(?:\d+|null)"/g,X=/^\s+/,Y=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/ig,Z=/<([\w:]+)/,$=/<tbody/i,_=/<|&#?\w+;/,ba=/<(?:script|style)/i,bb=/<(?:script|object|embed|option|style)/i,bc=new RegExp("<(?:"+V+")[\\s/>]","i"),bd=/checked\s*(?:[^=]|=\s*.checked.)/i,be=/\/(java|ecma)script/i,bf=/^\s*<!(?:\[CDATA\[|\-\-)/,bg={option:[1,"<select multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],area:[1,"<map>","</map>"],_default:[0,"",""]},bh=U(c);bg.optgroup=bg.option,bg.tbody=bg.tfoot=bg.colgroup=bg.caption=bg.thead,bg.th=bg.td,f.support.htmlSerialize||(bg._default=[1,"div<div>","</div>"]),f.fn.extend({text:function(a){return f.access(this,function(a){return a===b?f.text(this):this.empty().append((this[0]&&this[0].ownerDocument||c).createTextNode(a))},null,a,arguments.length)},wrapAll:function(a){if(f.isFunction(a))return this.each(function(b){f(this).wrapAll(a.call(this,b))});if(this[0]){var b=f(a,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&b.insertBefore(this[0]),b.map(function(){var a=this;while(a.firstChild&&a.firstChild.nodeType===1)a=a.firstChild;return a}).append(this)}return this},wrapInner:function(a){if(f.isFunction(a))return this.each(function(b){f(this).wrapInner(a.call(this,b))});return this.each(function(){var b=f(this),c=b.contents();c.length?c.wrapAll(a):b.append(a)})},wrap:function(a){var b=f.isFunction(a);return this.each(function(c){f(this).wrapAll(b?a.call(this,c):a)})},unwrap:function(){return this.parent().each(function(){f.nodeName(this,"body")||f(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,!0,function(a){this.nodeType===1&&this.appendChild(a)})},prepend:function(){return this.domManip(arguments,!0,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this)});if(arguments.length){var a=f
-.clean(arguments);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this.nextSibling)});if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,f.clean(arguments));return a}},remove:function(a,b){for(var c=0,d;(d=this[c])!=null;c++)if(!a||f.filter(a,[d]).length)!b&&d.nodeType===1&&(f.cleanData(d.getElementsByTagName("*")),f.cleanData([d])),d.parentNode&&d.parentNode.removeChild(d);return this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++){b.nodeType===1&&f.cleanData(b.getElementsByTagName("*"));while(b.firstChild)b.removeChild(b.firstChild)}return this},clone:function(a,b){a=a==null?!1:a,b=b==null?a:b;return this.map(function(){return f.clone(this,a,b)})},html:function(a){return f.access(this,function(a){var c=this[0]||{},d=0,e=this.length;if(a===b)return c.nodeType===1?c.innerHTML.replace(W,""):null;if(typeof a=="string"&&!ba.test(a)&&(f.support.leadingWhitespace||!X.test(a))&&!bg[(Z.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Y,"<$1></$2>");try{for(;d<e;d++)c=this[d]||{},c.nodeType===1&&(f.cleanData(c.getElementsByTagName("*")),c.innerHTML=a);c=0}catch(g){}}c&&this.empty().append(a)},null,a,arguments.length)},replaceWith:function(a){if(this[0]&&this[0].parentNode){if(f.isFunction(a))return this.each(function(b){var c=f(this),d=c.html();c.replaceWith(a.call(this,b,d))});typeof a!="string"&&(a=f(a).detach());return this.each(function(){var b=this.nextSibling,c=this.parentNode;f(this).remove(),b?f(b).before(a):f(c).append(a)})}return this.length?this.pushStack(f(f.isFunction(a)?a():a),"replaceWith",a):this},detach:function(a){return this.remove(a,!0)},domManip:function(a,c,d){var e,g,h,i,j=a[0],k=[];if(!f.support.checkClone&&arguments.length===3&&typeof j=="string"&&bd.test(j))return this.each(function(){f(this).domManip(a,c,d,!0)});if(f.isFunction(j))return this.each(function(e){var g=f(this);a[0]=j.call(this,e,c?g.html():b),g.domManip(a,c,d)});if(this[0]){i=j&&j.parentNode,f.support.parentNode&&i&&i.nodeType===11&&i.childNodes.length===this.length?e={fragment:i}:e=f.buildFragment(a,this,k),h=e.fragment,h.childNodes.length===1?g=h=h.firstChild:g=h.firstChild;if(g){c=c&&f.nodeName(g,"tr");for(var l=0,m=this.length,n=m-1;l<m;l++)d.call(c?bi(this[l],g):this[l],e.cacheable||m>1&&l<n?f.clone(h,!0,!0):h)}k.length&&f.each(k,function(a,b){b.src?f.ajax({type:"GET",global:!1,url:b.src,async:!1,dataType:"script"}):f.globalEval((b.text||b.textContent||b.innerHTML||"").replace(bf,"/*$0*/")),b.parentNode&&b.parentNode.removeChild(b)})}return this}}),f.buildFragment=function(a,b,d){var e,g,h,i,j=a[0];b&&b[0]&&(i=b[0].ownerDocument||b[0]),i.createDocumentFragment||(i=c),a.length===1&&typeof j=="string"&&j.length<512&&i===c&&j.charAt(0)==="<"&&!bb.test(j)&&(f.support.checkClone||!bd.test(j))&&(f.support.html5Clone||!bc.test(j))&&(g=!0,h=f.fragments[j],h&&h!==1&&(e=h)),e||(e=i.createDocumentFragment(),f.clean(a,i,e,d)),g&&(f.fragments[j]=h?e:1);return{fragment:e,cacheable:g}},f.fragments={},f.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){f.fn[a]=function(c){var d=[],e=f(c),g=this.length===1&&this[0].parentNode;if(g&&g.nodeType===11&&g.childNodes.length===1&&e.length===1){e[b](this[0]);return this}for(var h=0,i=e.length;h<i;h++){var j=(h>0?this.clone(!0):this).get();f(e[h])[b](j),d=d.concat(j)}return 
this.pushStack(d,a,e.selector)}}),f.extend({clone:function(a,b,c){var d,e,g,h=f.support.html5Clone||f.isXMLDoc(a)||!bc.test("<"+a.nodeName+">")?a.cloneNode(!0):bo(a);if((!f.support.noCloneEvent||!f.support.noCloneChecked)&&(a.nodeType===1||a.nodeType===11)&&!f.isXMLDoc(a)){bk(a,h),d=bl(a),e=bl(h);for(g=0;d[g];++g)e[g]&&bk(d[g],e[g])}if(b){bj(a,h);if(c){d=bl(a),e=bl(h);for(g=0;d[g];++g)bj(d[g],e[g])}}d=e=null;return h},clean:function(a,b,d,e){var g,h,i,j=[];b=b||c,typeof b.createElement=="undefined"&&(b=b.ownerDocument||b[0]&&b[0].ownerDocument||c);for(var k=0,l;(l=a[k])!=null;k++){typeof l=="number"&&(l+="");if(!l)continue;if(typeof l=="string")if(!_.test(l))l=b.createTextNode(l);else{l=l.replace(Y,"<$1></$2>");var m=(Z.exec(l)||["",""])[1].toLowerCase(),n=bg[m]||bg._default,o=n[0],p=b.createElement("div"),q=bh.childNodes,r;b===c?bh.appendChild(p):U(b).appendChild(p),p.innerHTML=n[1]+l+n[2];while(o--)p=p.lastChild;if(!f.support.tbody){var s=$.test(l),t=m==="table"&&!s?p.firstChild&&p.firstChild.childNodes:n[1]==="<table>"&&!s?p.childNodes:[];for(i=t.length-1;i>=0;--i)f.nodeName(t[i],"tbody")&&!t[i].childNodes.length&&t[i].parentNode.removeChild(t[i])}!f.support.leadingWhitespace&&X.test(l)&&p.insertBefore(b.createTextNode(X.exec(l)[0]),p.firstChild),l=p.childNodes,p&&(p.parentNode.removeChild(p),q.length>0&&(r=q[q.length-1],r&&r.parentNode&&r.parentNode.removeChild(r)))}var u;if(!f.support.appendChecked)if(l[0]&&typeof (u=l.length)=="number")for(i=0;i<u;i++)bn(l[i]);else bn(l);l.nodeType?j.push(l):j=f.merge(j,l)}if(d){g=function(a){return!a.type||be.test(a.type)};for(k=0;j[k];k++){h=j[k];if(e&&f.nodeName(h,"script")&&(!h.type||be.test(h.type)))e.push(h.parentNode?h.parentNode.removeChild(h):h);else{if(h.nodeType===1){var v=f.grep(h.getElementsByTagName("script"),g);j.splice.apply(j,[k+1,0].concat(v))}d.appendChild(h)}}}return j},cleanData:function(a){var b,c,d=f.cache,e=f.event.special,g=f.support.deleteExpando;for(var h=0,i;(i=a[h])!=null;h++){if(i.nodeName&&f.noData[i.nodeName.toLowerCase()])continue;c=i[f.expando];if(c){b=d[c];if(b&&b.events){for(var j in b.events)e[j]?f.event.remove(i,j):f.removeEvent(i,j,b.handle);b.handle&&(b.handle.elem=null)}g?delete i[f.expando]:i.removeAttribute&&i.removeAttribute(f.expando),delete d[c]}}}});var bp=/alpha\([^)]*\)/i,bq=/opacity=([^)]*)/,br=/([A-Z]|^ms)/g,bs=/^[\-+]?(?:\d*\.)?\d+$/i,bt=/^-?(?:\d*\.)?\d+(?!px)[^\d\s]+$/i,bu=/^([\-+])=([\-+.\de]+)/,bv=/^margin/,bw={position:"absolute",visibility:"hidden",display:"block"},bx=["Top","Right","Bottom","Left"],by,bz,bA;f.fn.css=function(a,c){return f.access(this,function(a,c,d){return d!==b?f.style(a,c,d):f.css(a,c)},a,c,arguments.length>1)},f.extend({cssHooks:{opacity:{get:function(a,b){if(b){var c=by(a,"opacity");return c===""?"1":c}return a.style.opacity}}},cssNumber:{fillOpacity:!0,fontWeight:!0,lineHeight:!0,opacity:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":f.support.cssFloat?"cssFloat":"styleFloat"},style:function(a,c,d,e){if(!!a&&a.nodeType!==3&&a.nodeType!==8&&!!a.style){var g,h,i=f.camelCase(c),j=a.style,k=f.cssHooks[i];c=f.cssProps[i]||i;if(d===b){if(k&&"get"in k&&(g=k.get(a,!1,e))!==b)return g;return j[c]}h=typeof d,h==="string"&&(g=bu.exec(d))&&(d=+(g[1]+1)*+g[2]+parseFloat(f.css(a,c)),h="number");if(d==null||h==="number"&&isNaN(d))return;h==="number"&&!f.cssNumber[i]&&(d+="px");if(!k||!("set"in k)||(d=k.set(a,d))!==b)try{j[c]=d}catch(l){}}},css:function(a,c,d){var e,g;c=f.camelCase(c),g=f.cssHooks[c],c=f.cssProps[c]||c,c==="cssFloat"&&(c="float");if(g&&"get"in 
g&&(e=g.get(a,!0,d))!==b)return e;if(by)return by(a,c)},swap:function(a,b,c){var d={},e,f;for(f in b)d[f]=a.style[f],a.style[f]=b[f];e=c.call(a);for(f in b)a.style[f]=d[f];return e}}),f.curCSS=f.css,c.defaultView&&c.defaultView.getComputedStyle&&(bz=function(a,b){var c,d,e,g,h=a.style;b=b.replace(br,"-$1").toLowerCase(),(d=a.ownerDocument.defaultView)&&(e=d.getComputedStyle(a,null))&&(c=e.getPropertyValue(b),c===""&&!f.contains(a.ownerDocument.documentElement,a)&&(c=f.style(a,b))),!f.support.pixelMargin&&e&&bv.test(b)&&bt.test(c)&&(g=h.width,h.width=c,c=e.width,h.width=g);return c}),c.documentElement.currentStyle&&(bA=function(a,b){var c,d,e,f=a.currentStyle&&a.currentStyle[b],g=a.style;f==null&&g&&(e=g[b])&&(f=e),bt.test(f)&&(c=g.left,d=a.runtimeStyle&&a.runtimeStyle.left,d&&(a.runtimeStyle.left=a.currentStyle.left),g.left=b==="fontSize"?"1em":f,f=g.pixelLeft+"px",g.left=c,d&&(a.runtimeStyle.left=d));return f===""?"auto":f}),by=bz||bA,f.each(["height","width"],function(a,b){f.cssHooks[b]={get:function(a,c,d){if(c)return a.offsetWidth!==0?bB(a,b,d):f.swap(a,bw,function(){return bB(a,b,d)})},set:function(a,b){return bs.test(b)?b+"px":b}}}),f.support.opacity||(f.cssHooks.opacity={get:function(a,b){return bq.test((b&&a.currentStyle?a.currentStyle.filter:a.style.filter)||"")?parseFloat(RegExp.$1)/100+"":b?"1":""},set:function(a,b){var c=a.style,d=a.currentStyle,e=f.isNumeric(b)?"alpha(opacity="+b*100+")":"",g=d&&d.filter||c.filter||"";c.zoom=1;if(b>=1&&f.trim(g.replace(bp,""))===""){c.removeAttribute("filter");if(d&&!d.filter)return}c.filter=bp.test(g)?g.replace(bp,e):g+" "+e}}),f(function(){f.support.reliableMarginRight||(f.cssHooks.marginRight={get:function(a,b){return f.swap(a,{display:"inline-block"},function(){return b?by(a,"margin-right"):a.style.marginRight})}})}),f.expr&&f.expr.filters&&(f.expr.filters.hidden=function(a){var b=a.offsetWidth,c=a.offsetHeight;return b===0&&c===0||!f.support.reliableHiddenOffsets&&(a.style&&a.style.display||f.css(a,"display"))==="none"},f.expr.filters.visible=function(a){return!f.expr.filters.hidden(a)}),f.each({margin:"",padding:"",border:"Width"},function(a,b){f.cssHooks[a+b]={expand:function(c){var d,e=typeof c=="string"?c.split(" "):[c],f={};for(d=0;d<4;d++)f[a+bx[d]+b]=e[d]||e[d-2]||e[0];return f}}});var bC=/%20/g,bD=/\[\]$/,bE=/\r?\n/g,bF=/#.*$/,bG=/^(.*?):[ \t]*([^\r\n]*)\r?$/mg,bH=/^(?:color|date|datetime|datetime-local|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,bI=/^(?:about|app|app\-storage|.+\-extension|file|res|widget):$/,bJ=/^(?:GET|HEAD)$/,bK=/^\/\//,bL=/\?/,bM=/<script\b[^<]*(?:(?!<\/script>)<[^<]*)*<\/script>/gi,bN=/^(?:select|textarea)/i,bO=/\s+/,bP=/([?&])_=[^&]*/,bQ=/^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+))?)?/,bR=f.fn.load,bS={},bT={},bU,bV,bW=["*/"]+["*"];try{bU=e.href}catch(bX){bU=c.createElement("a"),bU.href="",bU=bU.href}bV=bQ.exec(bU.toLowerCase())||[],f.fn.extend({load:function(a,c,d){if(typeof a!="string"&&bR)return bR.apply(this,arguments);if(!this.length)return this;var e=a.indexOf(" ");if(e>=0){var g=a.slice(e,a.length);a=a.slice(0,e)}var h="GET";c&&(f.isFunction(c)?(d=c,c=b):typeof c=="object"&&(c=f.param(c,f.ajaxSettings.traditional),h="POST"));var i=this;f.ajax({url:a,type:h,dataType:"html",data:c,complete:function(a,b,c){c=a.responseText,a.isResolved()&&(a.done(function(a){c=a}),i.html(g?f("<div>").append(c.replace(bM,"")).find(g):c)),d&&i.each(d,[c,b,a])}});return this},serialize:function(){return f.param(this.serializeArray())},serializeArray:function(){return 
this.map(function(){return this.elements?f.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||bN.test(this.nodeName)||bH.test(this.type))}).map(function(a,b){var c=f(this).val();return c==null?null:f.isArray(c)?f.map(c,function(a,c){return{name:b.name,value:a.replace(bE,"\r\n")}}):{name:b.name,value:c.replace(bE,"\r\n")}}).get()}}),f.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(a,b){f.fn[b]=function(a){return this.on(b,a)}}),f.each(["get","post"],function(a,c){f[c]=function(a,d,e,g){f.isFunction(d)&&(g=g||e,e=d,d=b);return f.ajax({type:c,url:a,data:d,success:e,dataType:g})}}),f.extend({getScript:function(a,c){return f.get(a,b,c,"script")},getJSON:function(a,b,c){return f.get(a,b,c,"json")},ajaxSetup:function(a,b){b?b$(a,f.ajaxSettings):(b=a,a=f.ajaxSettings),b$(a,b);return a},ajaxSettings:{url:bU,isLocal:bI.test(bV[1]),global:!0,type:"GET",contentType:"application/x-www-form-urlencoded; charset=UTF-8",processData:!0,async:!0,accepts:{xml:"application/xml, text/xml",html:"text/html",text:"text/plain",json:"application/json, text/javascript","*":bW},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":a.String,"text html":!0,"text json":f.parseJSON,"text xml":f.parseXML},flatOptions:{context:!0,url:!0}},ajaxPrefilter:bY(bS),ajaxTransport:bY(bT),ajax:function(a,c){function w(a,c,l,m){if(s!==2){s=2,q&&clearTimeout(q),p=b,n=m||"",v.readyState=a>0?4:0;var o,r,u,w=c,x=l?ca(d,v,l):b,y,z;if(a>=200&&a<300||a===304){if(d.ifModified){if(y=v.getResponseHeader("Last-Modified"))f.lastModified[k]=y;if(z=v.getResponseHeader("Etag"))f.etag[k]=z}if(a===304)w="notmodified",o=!0;else try{r=cb(d,x),w="success",o=!0}catch(A){w="parsererror",u=A}}else{u=w;if(!w||a)w="error",a<0&&(a=0)}v.status=a,v.statusText=""+(c||w),o?h.resolveWith(e,[r,w,v]):h.rejectWith(e,[v,w,u]),v.statusCode(j),j=b,t&&g.trigger("ajax"+(o?"Success":"Error"),[v,d,o?r:u]),i.fireWith(e,[v,w]),t&&(g.trigger("ajaxComplete",[v,d]),--f.active||f.event.trigger("ajaxStop"))}}typeof a=="object"&&(c=a,a=b),c=c||{};var d=f.ajaxSetup({},c),e=d.context||d,g=e!==d&&(e.nodeType||e instanceof f)?f(e):f.event,h=f.Deferred(),i=f.Callbacks("once memory"),j=d.statusCode||{},k,l={},m={},n,o,p,q,r,s=0,t,u,v={readyState:0,setRequestHeader:function(a,b){if(!s){var c=a.toLowerCase();a=m[c]=m[c]||a,l[a]=b}return this},getAllResponseHeaders:function(){return s===2?n:null},getResponseHeader:function(a){var c;if(s===2){if(!o){o={};while(c=bG.exec(n))o[c[1].toLowerCase()]=c[2]}c=o[a.toLowerCase()]}return c===b?null:c},overrideMimeType:function(a){s||(d.mimeType=a);return this},abort:function(a){a=a||"abort",p&&p.abort(a),w(0,a);return this}};h.promise(v),v.success=v.done,v.error=v.fail,v.complete=i.add,v.statusCode=function(a){if(a){var b;if(s<2)for(b in a)j[b]=[j[b],a[b]];else b=a[v.status],v.then(b,b)}return this},d.url=((a||d.url)+"").replace(bF,"").replace(bK,bV[1]+"//"),d.dataTypes=f.trim(d.dataType||"*").toLowerCase().split(bO),d.crossDomain==null&&(r=bQ.exec(d.url.toLowerCase()),d.crossDomain=!(!r||r[1]==bV[1]&&r[2]==bV[2]&&(r[3]||(r[1]==="http:"?80:443))==(bV[3]||(bV[1]==="http:"?80:443)))),d.data&&d.processData&&typeof d.data!="string"&&(d.data=f.param(d.data,d.traditional)),bZ(bS,d,c,v);if(s===2)return!1;t=d.global,d.type=d.type.toUpperCase(),d.hasContent=!bJ.test(d.type),t&&f.active++===0&&f.event.trigger("ajaxStart");if(!d.hasContent){d.data&&(d.url+=(bL.test(d.url)?"&":"?")+d.data,delete 
d.data),k=d.url;if(d.cache===!1){var x=f.now(),y=d.url.replace(bP,"$1_="+x);d.url=y+(y===d.url?(bL.test(d.url)?"&":"?")+"_="+x:"")}}(d.data&&d.hasContent&&d.contentType!==!1||c.contentType)&&v.setRequestHeader("Content-Type",d.contentType),d.ifModified&&(k=k||d.url,f.lastModified[k]&&v.setRequestHeader("If-Modified-Since",f.lastModified[k]),f.etag[k]&&v.setRequestHeader("If-None-Match",f.etag[k])),v.setRequestHeader("Accept",d.dataTypes[0]&&d.accepts[d.dataTypes[0]]?d.accepts[d.dataTypes[0]]+(d.dataTypes[0]!=="*"?", "+bW+"; q=0.01":""):d.accepts["*"]);for(u in d.headers)v.setRequestHeader(u,d.headers[u]);if(d.beforeSend&&(d.beforeSend.call(e,v,d)===!1||s===2)){v.abort();return!1}for(u in{success:1,error:1,complete:1})v[u](d[u]);p=bZ(bT,d,c,v);if(!p)w(-1,"No Transport");else{v.readyState=1,t&&g.trigger("ajaxSend",[v,d]),d.async&&d.timeout>0&&(q=setTimeout(function(){v.abort("timeout")},d.timeout));try{s=1,p.send(l,w)}catch(z){if(s<2)w(-1,z);else throw z}}return v},param:function(a,c){var d=[],e=function(a,b){b=f.isFunction(b)?b():b,d[d.length]=encodeURIComponent(a)+"="+encodeURIComponent(b)};c===b&&(c=f.ajaxSettings.traditional);if(f.isArray(a)||a.jquery&&!f.isPlainObject(a))f.each(a,function(){e(this.name,this.value)});else for(var g in a)b_(g,a[g],c,e);return d.join("&").replace(bC,"+")}}),f.extend({active:0,lastModified:{},etag:{}});var cc=f.now(),cd=/(\=)\?(&|$)|\?\?/i;f.ajaxSetup({jsonp:"callback",jsonpCallback:function(){return f.expando+"_"+cc++}}),f.ajaxPrefilter("json jsonp",function(b,c,d){var e=typeof b.data=="string"&&/^application\/x\-www\-form\-urlencoded/.test(b.contentType);if(b.dataTypes[0]==="jsonp"||b.jsonp!==!1&&(cd.test(b.url)||e&&cd.test(b.data))){var g,h=b.jsonpCallback=f.isFunction(b.jsonpCallback)?b.jsonpCallback():b.jsonpCallback,i=a[h],j=b.url,k=b.data,l="$1"+h+"$2";b.jsonp!==!1&&(j=j.replace(cd,l),b.url===j&&(e&&(k=k.replace(cd,l)),b.data===k&&(j+=(/\?/.test(j)?"&":"?")+b.jsonp+"="+h))),b.url=j,b.data=k,a[h]=function(a){g=[a]},d.always(function(){a[h]=i,g&&f.isFunction(i)&&a[h](g[0])}),b.converters["script json"]=function(){g||f.error(h+" was not called");return g[0]},b.dataTypes[0]="json";return"script"}}),f.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/javascript|ecmascript/},converters:{"text script":function(a){f.globalEval(a);return a}}}),f.ajaxPrefilter("script",function(a){a.cache===b&&(a.cache=!1),a.crossDomain&&(a.type="GET",a.global=!1)}),f.ajaxTransport("script",function(a){if(a.crossDomain){var d,e=c.head||c.getElementsByTagName("head")[0]||c.documentElement;return{send:function(f,g){d=c.createElement("script"),d.async="async",a.scriptCharset&&(d.charset=a.scriptCharset),d.src=a.url,d.onload=d.onreadystatechange=function(a,c){if(c||!d.readyState||/loaded|complete/.test(d.readyState))d.onload=d.onreadystatechange=null,e&&d.parentNode&&e.removeChild(d),d=b,c||g(200,"success")},e.insertBefore(d,e.firstChild)},abort:function(){d&&d.onload(0,1)}}}});var ce=a.ActiveXObject?function(){for(var a in cg)cg[a](0,1)}:!1,cf=0,cg;f.ajaxSettings.xhr=a.ActiveXObject?function(){return!this.isLocal&&ch()||ci()}:ch,function(a){f.extend(f.support,{ajax:!!a,cors:!!a&&"withCredentials"in a})}(f.ajaxSettings.xhr()),f.support.ajax&&f.ajaxTransport(function(c){if(!c.crossDomain||f.support.cors){var d;return{send:function(e,g){var h=c.xhr(),i,j;c.username?h.open(c.type,c.url,c.async,c.username,c.password):h.open(c.type,c.url,c.async);if(c.xhrFields)for(j in 
c.xhrFields)h[j]=c.xhrFields[j];c.mimeType&&h.overrideMimeType&&h.overrideMimeType(c.mimeType),!c.crossDomain&&!e["X-Requested-With"]&&(e["X-Requested-With"]="XMLHttpRequest");try{for(j in e)h.setRequestHeader(j,e[j])}catch(k){}h.send(c.hasContent&&c.data||null),d=function(a,e){var j,k,l,m,n;try{if(d&&(e||h.readyState===4)){d=b,i&&(h.onreadystatechange=f.noop,ce&&delete cg[i]);if(e)h.readyState!==4&&h.abort();else{j=h.status,l=h.getAllResponseHeaders(),m={},n=h.responseXML,n&&n.documentElement&&(m.xml=n);try{m.text=h.responseText}catch(a){}try{k=h.statusText}catch(o){k=""}!j&&c.isLocal&&!c.crossDomain?j=m.text?200:404:j===1223&&(j=204)}}}catch(p){e||g(-1,p)}m&&g(j,k,m,l)},!c.async||h.readyState===4?d():(i=++cf,ce&&(cg||(cg={},f(a).unload(ce)),cg[i]=d),h.onreadystatechange=d)},abort:function(){d&&d(0,1)}}}});var cj={},ck,cl,cm=/^(?:toggle|show|hide)$/,cn=/^([+\-]=)?([\d+.\-]+)([a-z%]*)$/i,co,cp=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]],cq;f.fn.extend({show:function(a,b,c){var d,e;if(a||a===0)return this.animate(ct("show",3),a,b,c);for(var g=0,h=this.length;g<h;g++)d=this[g],d.style&&(e=d.style.display,!f._data(d,"olddisplay")&&e==="none"&&(e=d.style.display=""),(e===""&&f.css(d,"display")==="none"||!f.contains(d.ownerDocument.documentElement,d))&&f._data(d,"olddisplay",cu(d.nodeName)));for(g=0;g<h;g++){d=this[g];if(d.style){e=d.style.display;if(e===""||e==="none")d.style.display=f._data(d,"olddisplay")||""}}return this},hide:function(a,b,c){if(a||a===0)return this.animate(ct("hide",3),a,b,c);var d,e,g=0,h=this.length;for(;g<h;g++)d=this[g],d.style&&(e=f.css(d,"display"),e!=="none"&&!f._data(d,"olddisplay")&&f._data(d,"olddisplay",e));for(g=0;g<h;g++)this[g].style&&(this[g].style.display="none");return this},_toggle:f.fn.toggle,toggle:function(a,b,c){var d=typeof a=="boolean";f.isFunction(a)&&f.isFunction(b)?this._toggle.apply(this,arguments):a==null||d?this.each(function(){var b=d?a:f(this).is(":hidden");f(this)[b?"show":"hide"]()}):this.animate(ct("toggle",3),a,b,c);return this},fadeTo:function(a,b,c,d){return this.filter(":hidden").css("opacity",0).show().end().animate({opacity:b},a,c,d)},animate:function(a,b,c,d){function g(){e.queue===!1&&f._mark(this);var b=f.extend({},e),c=this.nodeType===1,d=c&&f(this).is(":hidden"),g,h,i,j,k,l,m,n,o,p,q;b.animatedProperties={};for(i in a){g=f.camelCase(i),i!==g&&(a[g]=a[i],delete a[i]);if((k=f.cssHooks[g])&&"expand"in k){l=k.expand(a[g]),delete a[g];for(i in l)i in a||(a[i]=l[i])}}for(g in a){h=a[g],f.isArray(h)?(b.animatedProperties[g]=h[1],h=a[g]=h[0]):b.animatedProperties[g]=b.specialEasing&&b.specialEasing[g]||b.easing||"swing";if(h==="hide"&&d||h==="show"&&!d)return b.complete.call(this);c&&(g==="height"||g==="width")&&(b.overflow=[this.style.overflow,this.style.overflowX,this.style.overflowY],f.css(this,"display")==="inline"&&f.css(this,"float")==="none"&&(!f.support.inlineBlockNeedsLayout||cu(this.nodeName)==="inline"?this.style.display="inline-block":this.style.zoom=1))}b.overflow!=null&&(this.style.overflow="hidden");for(i in a)j=new f.fx(this,b,i),h=a[i],cm.test(h)?(q=f._data(this,"toggle"+i)||(h==="toggle"?d?"show":"hide":0),q?(f._data(this,"toggle"+i,q==="show"?"hide":"show"),j[q]()):j[h]()):(m=cn.exec(h),n=j.cur(),m?(o=parseFloat(m[2]),p=m[3]||(f.cssNumber[i]?"":"px"),p!=="px"&&(f.style(this,i,(o||1)+p),n=(o||1)/j.cur()*n,f.style(this,i,n+p)),m[1]&&(o=(m[1]==="-="?-1:1)*o+n),j.custom(n,o,p)):j.custom(n,h,""));return!0}var 
e=f.speed(b,c,d);if(f.isEmptyObject(a))return this.each(e.complete,[!1]);a=f.extend({},a);return e.queue===!1?this.each(g):this.queue(e.queue,g)},stop:function(a,c,d){typeof a!="string"&&(d=c,c=a,a=b),c&&a!==!1&&this.queue(a||"fx",[]);return this.each(function(){function h(a,b,c){var e=b[c];f.removeData(a,c,!0),e.stop(d)}var b,c=!1,e=f.timers,g=f._data(this);d||f._unmark(!0,this);if(a==null)for(b in g)g[b]&&g[b].stop&&b.indexOf(".run")===b.length-4&&h(this,g,b);else g[b=a+".run"]&&g[b].stop&&h(this,g,b);for(b=e.length;b--;)e[b].elem===this&&(a==null||e[b].queue===a)&&(d?e[b](!0):e[b].saveState(),c=!0,e.splice(b,1));(!d||!c)&&f.dequeue(this,a)})}}),f.each({slideDown:ct("show",1),slideUp:ct("hide",1),slideToggle:ct("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(a,b){f.fn[a]=function(a,c,d){return this.animate(b,a,c,d)}}),f.extend({speed:function(a,b,c){var d=a&&typeof a=="object"?f.extend({},a):{complete:c||!c&&b||f.isFunction(a)&&a,duration:a,easing:c&&b||b&&!f.isFunction(b)&&b};d.duration=f.fx.off?0:typeof d.duration=="number"?d.duration:d.duration in f.fx.speeds?f.fx.speeds[d.duration]:f.fx.speeds._default;if(d.queue==null||d.queue===!0)d.queue="fx";d.old=d.complete,d.complete=function(a){f.isFunction(d.old)&&d.old.call(this),d.queue?f.dequeue(this,d.queue):a!==!1&&f._unmark(this)};return d},easing:{linear:function(a){return a},swing:function(a){return-Math.cos(a*Math.PI)/2+.5}},timers:[],fx:function(a,b,c){this.options=b,this.elem=a,this.prop=c,b.orig=b.orig||{}}}),f.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this),(f.fx.step[this.prop]||f.fx.step._default)(this)},cur:function(){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];var a,b=f.css(this.elem,this.prop);return isNaN(a=parseFloat(b))?!b||b==="auto"?0:b:a},custom:function(a,c,d){function h(a){return e.step(a)}var e=this,g=f.fx;this.startTime=cq||cr(),this.end=c,this.now=this.start=a,this.pos=this.state=0,this.unit=d||this.unit||(f.cssNumber[this.prop]?"":"px"),h.queue=this.options.queue,h.elem=this.elem,h.saveState=function(){f._data(e.elem,"fxshow"+e.prop)===b&&(e.options.hide?f._data(e.elem,"fxshow"+e.prop,e.start):e.options.show&&f._data(e.elem,"fxshow"+e.prop,e.end))},h()&&f.timers.push(h)&&!co&&(co=setInterval(g.tick,g.interval))},show:function(){var a=f._data(this.elem,"fxshow"+this.prop);this.options.orig[this.prop]=a||f.style(this.elem,this.prop),this.options.show=!0,a!==b?this.custom(this.cur(),a):this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur()),f(this.elem).show()},hide:function(){this.options.orig[this.prop]=f._data(this.elem,"fxshow"+this.prop)||f.style(this.elem,this.prop),this.options.hide=!0,this.custom(this.cur(),0)},step:function(a){var b,c,d,e=cq||cr(),g=!0,h=this.elem,i=this.options;if(a||e>=i.duration+this.startTime){this.now=this.end,this.pos=this.state=1,this.update(),i.animatedProperties[this.prop]=!0;for(b in i.animatedProperties)i.animatedProperties[b]!==!0&&(g=!1);if(g){i.overflow!=null&&!f.support.shrinkWrapBlocks&&f.each(["","X","Y"],function(a,b){h.style["overflow"+b]=i.overflow[a]}),i.hide&&f(h).hide();if(i.hide||i.show)for(b in 
i.animatedProperties)f.style(h,b,i.orig[b]),f.removeData(h,"fxshow"+b,!0),f.removeData(h,"toggle"+b,!0);d=i.complete,d&&(i.complete=!1,d.call(h))}return!1}i.duration==Infinity?this.now=e:(c=e-this.startTime,this.state=c/i.duration,this.pos=f.easing[i.animatedProperties[this.prop]](this.state,c,0,1,i.duration),this.now=this.start+(this.end-this.start)*this.pos),this.update();return!0}},f.extend(f.fx,{tick:function(){var a,b=f.timers,c=0;for(;c<b.length;c++)a=b[c],!a()&&b[c]===a&&b.splice(c--,1);b.length||f.fx.stop()},interval:13,stop:function(){clearInterval(co),co=null},speeds:{slow:600,fast:200,_default:400},step:{opacity:function(a){f.style(a.elem,"opacity",a.now)},_default:function(a){a.elem.style&&a.elem.style[a.prop]!=null?a.elem.style[a.prop]=a.now+a.unit:a.elem[a.prop]=a.now}}}),f.each(cp.concat.apply([],cp),function(a,b){b.indexOf("margin")&&(f.fx.step[b]=function(a){f.style(a.elem,b,Math.max(0,a.now)+a.unit)})}),f.expr&&f.expr.filters&&(f.expr.filters.animated=function(a){return f.grep(f.timers,function(b){return a===b.elem}).length});var cv,cw=/^t(?:able|d|h)$/i,cx=/^(?:body|html)$/i;"getBoundingClientRect"in c.documentElement?cv=function(a,b,c,d){try{d=a.getBoundingClientRect()}catch(e){}if(!d||!f.contains(c,a))return d?{top:d.top,left:d.left}:{top:0,left:0};var g=b.body,h=cy(b),i=c.clientTop||g.clientTop||0,j=c.clientLeft||g.clientLeft||0,k=h.pageYOffset||f.support.boxModel&&c.scrollTop||g.scrollTop,l=h.pageXOffset||f.support.boxModel&&c.scrollLeft||g.scrollLeft,m=d.top+k-i,n=d.left+l-j;return{top:m,left:n}}:cv=function(a,b,c){var d,e=a.offsetParent,g=a,h=b.body,i=b.defaultView,j=i?i.getComputedStyle(a,null):a.currentStyle,k=a.offsetTop,l=a.offsetLeft;while((a=a.parentNode)&&a!==h&&a!==c){if(f.support.fixedPosition&&j.position==="fixed")break;d=i?i.getComputedStyle(a,null):a.currentStyle,k-=a.scrollTop,l-=a.scrollLeft,a===e&&(k+=a.offsetTop,l+=a.offsetLeft,f.support.doesNotAddBorder&&(!f.support.doesAddBorderForTableAndCells||!cw.test(a.nodeName))&&(k+=parseFloat(d.borderTopWidth)||0,l+=parseFloat(d.borderLeftWidth)||0),g=e,e=a.offsetParent),f.support.subtractsBorderForOverflowNotVisible&&d.overflow!=="visible"&&(k+=parseFloat(d.borderTopWidth)||0,l+=parseFloat(d.borderLeftWidth)||0),j=d}if(j.position==="relative"||j.position==="static")k+=h.offsetTop,l+=h.offsetLeft;f.support.fixedPosition&&j.position==="fixed"&&(k+=Math.max(c.scrollTop,h.scrollTop),l+=Math.max(c.scrollLeft,h.scrollLeft));return{top:k,left:l}},f.fn.offset=function(a){if(arguments.length)return a===b?this:this.each(function(b){f.offset.setOffset(this,a,b)});var c=this[0],d=c&&c.ownerDocument;if(!d)return null;if(c===d.body)return f.offset.bodyOffset(c);return cv(c,d,d.documentElement)},f.offset={bodyOffset:function(a){var b=a.offsetTop,c=a.offsetLeft;f.support.doesNotIncludeMarginInBodyOffset&&(b+=parseFloat(f.css(a,"marginTop"))||0,c+=parseFloat(f.css(a,"marginLeft"))||0);return{top:b,left:c}},setOffset:function(a,b,c){var d=f.css(a,"position");d==="static"&&(a.style.position="relative");var e=f(a),g=e.offset(),h=f.css(a,"top"),i=f.css(a,"left"),j=(d==="absolute"||d==="fixed")&&f.inArray("auto",[h,i])>-1,k={},l={},m,n;j?(l=e.position(),m=l.top,n=l.left):(m=parseFloat(h)||0,n=parseFloat(i)||0),f.isFunction(b)&&(b=b.call(a,c,g)),b.top!=null&&(k.top=b.top-g.top+m),b.left!=null&&(k.left=b.left-g.left+n),"using"in b?b.using.call(a,k):e.css(k)}},f.fn.extend({position:function(){if(!this[0])return null;var 
a=this[0],b=this.offsetParent(),c=this.offset(),d=cx.test(b[0].nodeName)?{top:0,left:0}:b.offset();c.top-=parseFloat(f.css(a,"marginTop"))||0,c.left-=parseFloat(f.css(a,"marginLeft"))||0,d.top+=parseFloat(f.css(b[0],"borderTopWidth"))||0,d.left+=parseFloat(f.css(b[0],"borderLeftWidth"))||0;return{top:c.top-d.top,left:c.left-d.left}},offsetParent:function(){return this.map(function(){var a=this.offsetParent||c.body;while(a&&!cx.test(a.nodeName)&&f.css(a,"position")==="static")a=a.offsetParent;return a})}}),f.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(a,c){var d=/Y/.test(c);f.fn[a]=function(e){return f.access(this,function(a,e,g){var h=cy(a);if(g===b)return h?c in h?h[c]:f.support.boxModel&&h.document.documentElement[e]||h.document.body[e]:a[e];h?h.scrollTo(d?f(h).scrollLeft():g,d?g:f(h).scrollTop()):a[e]=g},a,e,arguments.length,null)}}),f.each({Height:"height",Width:"width"},function(a,c){var d="client"+a,e="scroll"+a,g="offset"+a;f.fn["inner"+a]=function(){var a=this[0];return a?a.style?parseFloat(f.css(a,c,"padding")):this[c]():null},f.fn["outer"+a]=function(a){var b=this[0];return b?b.style?parseFloat(f.css(b,c,a?"margin":"border")):this[c]():null},f.fn[c]=function(a){return f.access(this,function(a,c,h){var i,j,k,l;if(f.isWindow(a)){i=a.document,j=i.documentElement[d];return f.support.boxModel&&j||i.body&&i.body[d]||j}if(a.nodeType===9){i=a.documentElement;if(i[d]>=i[e])return i[d];return Math.max(a.body[e],i[e],a.body[g],i[g])}if(h===b){k=f.css(a,c),l=parseFloat(k);return f.isNumeric(l)?l:k}f(a).css(c,h)},c,a,arguments.length,null)}}),a.jQuery=a.$=f,typeof define=="function"&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return f})})(window); \ No newline at end of file
+/*!
+ * jQuery JavaScript Library v1.4.2
+ * http://jquery.com/
+ *
+ * Copyright 2010, John Resig
+ * Dual licensed under the MIT or GPL Version 2 licenses.
+ * http://jquery.org/license
+ *
+ * Includes Sizzle.js
+ * http://sizzlejs.com/
+ * Copyright 2010, The Dojo Foundation
+ * Released under the MIT, BSD, and GPL Licenses.
+ *
+ * Date: Sat Feb 13 22:33:48 2010 -0500
+ */
+(function(A,w){function ma(){if(!c.isReady){try{s.documentElement.doScroll("left")}catch(a){setTimeout(ma,1);return}c.ready()}}function Qa(a,b){b.src?c.ajax({url:b.src,async:false,dataType:"script"}):c.globalEval(b.text||b.textContent||b.innerHTML||"");b.parentNode&&b.parentNode.removeChild(b)}function X(a,b,d,f,e,j){var i=a.length;if(typeof b==="object"){for(var o in b)X(a,o,b[o],f,e,d);return a}if(d!==w){f=!j&&f&&c.isFunction(d);for(o=0;o<i;o++)e(a[o],b,f?d.call(a[o],o,e(a[o],b)):d,j);return a}return i?
+e(a[0],b):w}function J(){return(new Date).getTime()}function Y(){return false}function Z(){return true}function na(a,b,d){d[0].type=a;return c.event.handle.apply(b,d)}function oa(a){var b,d=[],f=[],e=arguments,j,i,o,k,n,r;i=c.data(this,"events");if(!(a.liveFired===this||!i||!i.live||a.button&&a.type==="click")){a.liveFired=this;var u=i.live.slice(0);for(k=0;k<u.length;k++){i=u[k];i.origType.replace(O,"")===a.type?f.push(i.selector):u.splice(k--,1)}j=c(a.target).closest(f,a.currentTarget);n=0;for(r=
+j.length;n<r;n++)for(k=0;k<u.length;k++){i=u[k];if(j[n].selector===i.selector){o=j[n].elem;f=null;if(i.preType==="mouseenter"||i.preType==="mouseleave")f=c(a.relatedTarget).closest(i.selector)[0];if(!f||f!==o)d.push({elem:o,handleObj:i})}}n=0;for(r=d.length;n<r;n++){j=d[n];a.currentTarget=j.elem;a.data=j.handleObj.data;a.handleObj=j.handleObj;if(j.handleObj.origHandler.apply(j.elem,e)===false){b=false;break}}return b}}function pa(a,b){return"live."+(a&&a!=="*"?a+".":"")+b.replace(/\./g,"`").replace(/ /g,
+"&")}function qa(a){return!a||!a.parentNode||a.parentNode.nodeType===11}function ra(a,b){var d=0;b.each(function(){if(this.nodeName===(a[d]&&a[d].nodeName)){var f=c.data(a[d++]),e=c.data(this,f);if(f=f&&f.events){delete e.handle;e.events={};for(var j in f)for(var i in f[j])c.event.add(this,j,f[j][i],f[j][i].data)}}})}function sa(a,b,d){var f,e,j;b=b&&b[0]?b[0].ownerDocument||b[0]:s;if(a.length===1&&typeof a[0]==="string"&&a[0].length<512&&b===s&&!ta.test(a[0])&&(c.support.checkClone||!ua.test(a[0]))){e=
+true;if(j=c.fragments[a[0]])if(j!==1)f=j}if(!f){f=b.createDocumentFragment();c.clean(a,b,f,d)}if(e)c.fragments[a[0]]=j?f:1;return{fragment:f,cacheable:e}}function K(a,b){var d={};c.each(va.concat.apply([],va.slice(0,b)),function(){d[this]=a});return d}function wa(a){return"scrollTo"in a&&a.document?a:a.nodeType===9?a.defaultView||a.parentWindow:false}var c=function(a,b){return new c.fn.init(a,b)},Ra=A.jQuery,Sa=A.$,s=A.document,T,Ta=/^[^<]*(<[\w\W]+>)[^>]*$|^#([\w-]+)$/,Ua=/^.[^:#\[\.,]*$/,Va=/\S/,
+Wa=/^(\s|\u00A0)+|(\s|\u00A0)+$/g,Xa=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,P=navigator.userAgent,xa=false,Q=[],L,$=Object.prototype.toString,aa=Object.prototype.hasOwnProperty,ba=Array.prototype.push,R=Array.prototype.slice,ya=Array.prototype.indexOf;c.fn=c.prototype={init:function(a,b){var d,f;if(!a)return this;if(a.nodeType){this.context=this[0]=a;this.length=1;return this}if(a==="body"&&!b){this.context=s;this[0]=s.body;this.selector="body";this.length=1;return this}if(typeof a==="string")if((d=Ta.exec(a))&&
+(d[1]||!b))if(d[1]){f=b?b.ownerDocument||b:s;if(a=Xa.exec(a))if(c.isPlainObject(b)){a=[s.createElement(a[1])];c.fn.attr.call(a,b,true)}else a=[f.createElement(a[1])];else{a=sa([d[1]],[f]);a=(a.cacheable?a.fragment.cloneNode(true):a.fragment).childNodes}return c.merge(this,a)}else{if(b=s.getElementById(d[2])){if(b.id!==d[2])return T.find(a);this.length=1;this[0]=b}this.context=s;this.selector=a;return this}else if(!b&&/^\w+$/.test(a)){this.selector=a;this.context=s;a=s.getElementsByTagName(a);return c.merge(this,
+a)}else return!b||b.jquery?(b||T).find(a):c(b).find(a);else if(c.isFunction(a))return T.ready(a);if(a.selector!==w){this.selector=a.selector;this.context=a.context}return c.makeArray(a,this)},selector:"",jquery:"1.4.2",length:0,size:function(){return this.length},toArray:function(){return R.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this.slice(a)[0]:this[a]},pushStack:function(a,b,d){var f=c();c.isArray(a)?ba.apply(f,a):c.merge(f,a);f.prevObject=this;f.context=this.context;if(b===
+"find")f.selector=this.selector+(this.selector?" ":"")+d;else if(b)f.selector=this.selector+"."+b+"("+d+")";return f},each:function(a,b){return c.each(this,a,b)},ready:function(a){c.bindReady();if(c.isReady)a.call(s,c);else Q&&Q.push(a);return this},eq:function(a){return a===-1?this.slice(a):this.slice(a,+a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(R.apply(this,arguments),"slice",R.call(arguments).join(","))},map:function(a){return this.pushStack(c.map(this,
+function(b,d){return a.call(b,d,b)}))},end:function(){return this.prevObject||c(null)},push:ba,sort:[].sort,splice:[].splice};c.fn.init.prototype=c.fn;c.extend=c.fn.extend=function(){var a=arguments[0]||{},b=1,d=arguments.length,f=false,e,j,i,o;if(typeof a==="boolean"){f=a;a=arguments[1]||{};b=2}if(typeof a!=="object"&&!c.isFunction(a))a={};if(d===b){a=this;--b}for(;b<d;b++)if((e=arguments[b])!=null)for(j in e){i=a[j];o=e[j];if(a!==o)if(f&&o&&(c.isPlainObject(o)||c.isArray(o))){i=i&&(c.isPlainObject(i)||
+c.isArray(i))?i:c.isArray(o)?[]:{};a[j]=c.extend(f,i,o)}else if(o!==w)a[j]=o}return a};c.extend({noConflict:function(a){A.$=Sa;if(a)A.jQuery=Ra;return c},isReady:false,ready:function(){if(!c.isReady){if(!s.body)return setTimeout(c.ready,13);c.isReady=true;if(Q){for(var a,b=0;a=Q[b++];)a.call(s,c);Q=null}c.fn.triggerHandler&&c(s).triggerHandler("ready")}},bindReady:function(){if(!xa){xa=true;if(s.readyState==="complete")return c.ready();if(s.addEventListener){s.addEventListener("DOMContentLoaded",
+L,false);A.addEventListener("load",c.ready,false)}else if(s.attachEvent){s.attachEvent("onreadystatechange",L);A.attachEvent("onload",c.ready);var a=false;try{a=A.frameElement==null}catch(b){}s.documentElement.doScroll&&a&&ma()}}},isFunction:function(a){return $.call(a)==="[object Function]"},isArray:function(a){return $.call(a)==="[object Array]"},isPlainObject:function(a){if(!a||$.call(a)!=="[object Object]"||a.nodeType||a.setInterval)return false;if(a.constructor&&!aa.call(a,"constructor")&&!aa.call(a.constructor.prototype,
+"isPrototypeOf"))return false;var b;for(b in a);return b===w||aa.call(a,b)},isEmptyObject:function(a){for(var b in a)return false;return true},error:function(a){throw a;},parseJSON:function(a){if(typeof a!=="string"||!a)return null;a=c.trim(a);if(/^[\],:{}\s]*$/.test(a.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,"@").replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,"]").replace(/(?:^|:|,)(?:\s*\[)+/g,"")))return A.JSON&&A.JSON.parse?A.JSON.parse(a):(new Function("return "+
+a))();else c.error("Invalid JSON: "+a)},noop:function(){},globalEval:function(a){if(a&&Va.test(a)){var b=s.getElementsByTagName("head")[0]||s.documentElement,d=s.createElement("script");d.type="text/javascript";if(c.support.scriptEval)d.appendChild(s.createTextNode(a));else d.text=a;b.insertBefore(d,b.firstChild);b.removeChild(d)}},nodeName:function(a,b){return a.nodeName&&a.nodeName.toUpperCase()===b.toUpperCase()},each:function(a,b,d){var f,e=0,j=a.length,i=j===w||c.isFunction(a);if(d)if(i)for(f in a){if(b.apply(a[f],
+d)===false)break}else for(;e<j;){if(b.apply(a[e++],d)===false)break}else if(i)for(f in a){if(b.call(a[f],f,a[f])===false)break}else for(d=a[0];e<j&&b.call(d,e,d)!==false;d=a[++e]);return a},trim:function(a){return(a||"").replace(Wa,"")},makeArray:function(a,b){b=b||[];if(a!=null)a.length==null||typeof a==="string"||c.isFunction(a)||typeof a!=="function"&&a.setInterval?ba.call(b,a):c.merge(b,a);return b},inArray:function(a,b){if(b.indexOf)return b.indexOf(a);for(var d=0,f=b.length;d<f;d++)if(b[d]===
+a)return d;return-1},merge:function(a,b){var d=a.length,f=0;if(typeof b.length==="number")for(var e=b.length;f<e;f++)a[d++]=b[f];else for(;b[f]!==w;)a[d++]=b[f++];a.length=d;return a},grep:function(a,b,d){for(var f=[],e=0,j=a.length;e<j;e++)!d!==!b(a[e],e)&&f.push(a[e]);return f},map:function(a,b,d){for(var f=[],e,j=0,i=a.length;j<i;j++){e=b(a[j],j,d);if(e!=null)f[f.length]=e}return f.concat.apply([],f)},guid:1,proxy:function(a,b,d){if(arguments.length===2)if(typeof b==="string"){d=a;a=d[b];b=w}else if(b&&
+!c.isFunction(b)){d=b;b=w}if(!b&&a)b=function(){return a.apply(d||this,arguments)};if(a)b.guid=a.guid=a.guid||b.guid||c.guid++;return b},uaMatch:function(a){a=a.toLowerCase();a=/(webkit)[ \/]([\w.]+)/.exec(a)||/(opera)(?:.*version)?[ \/]([\w.]+)/.exec(a)||/(msie) ([\w.]+)/.exec(a)||!/compatible/.test(a)&&/(mozilla)(?:.*? rv:([\w.]+))?/.exec(a)||[];return{browser:a[1]||"",version:a[2]||"0"}},browser:{}});P=c.uaMatch(P);if(P.browser){c.browser[P.browser]=true;c.browser.version=P.version}if(c.browser.webkit)c.browser.safari=
+true;if(ya)c.inArray=function(a,b){return ya.call(b,a)};T=c(s);if(s.addEventListener)L=function(){s.removeEventListener("DOMContentLoaded",L,false);c.ready()};else if(s.attachEvent)L=function(){if(s.readyState==="complete"){s.detachEvent("onreadystatechange",L);c.ready()}};(function(){c.support={};var a=s.documentElement,b=s.createElement("script"),d=s.createElement("div"),f="script"+J();d.style.display="none";d.innerHTML=" <link/><table></table><a href='/a' style='color:red;float:left;opacity:.55;'>a</a><input type='checkbox'/>";
+var e=d.getElementsByTagName("*"),j=d.getElementsByTagName("a")[0];if(!(!e||!e.length||!j)){c.support={leadingWhitespace:d.firstChild.nodeType===3,tbody:!d.getElementsByTagName("tbody").length,htmlSerialize:!!d.getElementsByTagName("link").length,style:/red/.test(j.getAttribute("style")),hrefNormalized:j.getAttribute("href")==="/a",opacity:/^0.55$/.test(j.style.opacity),cssFloat:!!j.style.cssFloat,checkOn:d.getElementsByTagName("input")[0].value==="on",optSelected:s.createElement("select").appendChild(s.createElement("option")).selected,
+parentNode:d.removeChild(d.appendChild(s.createElement("div"))).parentNode===null,deleteExpando:true,checkClone:false,scriptEval:false,noCloneEvent:true,boxModel:null};b.type="text/javascript";try{b.appendChild(s.createTextNode("window."+f+"=1;"))}catch(i){}a.insertBefore(b,a.firstChild);if(A[f]){c.support.scriptEval=true;delete A[f]}try{delete b.test}catch(o){c.support.deleteExpando=false}a.removeChild(b);if(d.attachEvent&&d.fireEvent){d.attachEvent("onclick",function k(){c.support.noCloneEvent=
+false;d.detachEvent("onclick",k)});d.cloneNode(true).fireEvent("onclick")}d=s.createElement("div");d.innerHTML="<input type='radio' name='radiotest' checked='checked'/>";a=s.createDocumentFragment();a.appendChild(d.firstChild);c.support.checkClone=a.cloneNode(true).cloneNode(true).lastChild.checked;c(function(){var k=s.createElement("div");k.style.width=k.style.paddingLeft="1px";s.body.appendChild(k);c.boxModel=c.support.boxModel=k.offsetWidth===2;s.body.removeChild(k).style.display="none"});a=function(k){var n=
+s.createElement("div");k="on"+k;var r=k in n;if(!r){n.setAttribute(k,"return;");r=typeof n[k]==="function"}return r};c.support.submitBubbles=a("submit");c.support.changeBubbles=a("change");a=b=d=e=j=null}})();c.props={"for":"htmlFor","class":"className",readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing",rowspan:"rowSpan",colspan:"colSpan",tabindex:"tabIndex",usemap:"useMap",frameborder:"frameBorder"};var G="jQuery"+J(),Ya=0,za={};c.extend({cache:{},expando:G,noData:{embed:true,object:true,
+applet:true},data:function(a,b,d){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var f=a[G],e=c.cache;if(!f&&typeof b==="string"&&d===w)return null;f||(f=++Ya);if(typeof b==="object"){a[G]=f;e[f]=c.extend(true,{},b)}else if(!e[f]){a[G]=f;e[f]={}}a=e[f];if(d!==w)a[b]=d;return typeof b==="string"?a[b]:a}},removeData:function(a,b){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var d=a[G],f=c.cache,e=f[d];if(b){if(e){delete e[b];c.isEmptyObject(e)&&c.removeData(a)}}else{if(c.support.deleteExpando)delete a[c.expando];
+else a.removeAttribute&&a.removeAttribute(c.expando);delete f[d]}}}});c.fn.extend({data:function(a,b){if(typeof a==="undefined"&&this.length)return c.data(this[0]);else if(typeof a==="object")return this.each(function(){c.data(this,a)});var d=a.split(".");d[1]=d[1]?"."+d[1]:"";if(b===w){var f=this.triggerHandler("getData"+d[1]+"!",[d[0]]);if(f===w&&this.length)f=c.data(this[0],a);return f===w&&d[1]?this.data(d[0]):f}else return this.trigger("setData"+d[1]+"!",[d[0],b]).each(function(){c.data(this,
+a,b)})},removeData:function(a){return this.each(function(){c.removeData(this,a)})}});c.extend({queue:function(a,b,d){if(a){b=(b||"fx")+"queue";var f=c.data(a,b);if(!d)return f||[];if(!f||c.isArray(d))f=c.data(a,b,c.makeArray(d));else f.push(d);return f}},dequeue:function(a,b){b=b||"fx";var d=c.queue(a,b),f=d.shift();if(f==="inprogress")f=d.shift();if(f){b==="fx"&&d.unshift("inprogress");f.call(a,function(){c.dequeue(a,b)})}}});c.fn.extend({queue:function(a,b){if(typeof a!=="string"){b=a;a="fx"}if(b===
+w)return c.queue(this[0],a);return this.each(function(){var d=c.queue(this,a,b);a==="fx"&&d[0]!=="inprogress"&&c.dequeue(this,a)})},dequeue:function(a){return this.each(function(){c.dequeue(this,a)})},delay:function(a,b){a=c.fx?c.fx.speeds[a]||a:a;b=b||"fx";return this.queue(b,function(){var d=this;setTimeout(function(){c.dequeue(d,b)},a)})},clearQueue:function(a){return this.queue(a||"fx",[])}});var Aa=/[\n\t]/g,ca=/\s+/,Za=/\r/g,$a=/href|src|style/,ab=/(button|input)/i,bb=/(button|input|object|select|textarea)/i,
+cb=/^(a|area)$/i,Ba=/radio|checkbox/;c.fn.extend({attr:function(a,b){return X(this,a,b,true,c.attr)},removeAttr:function(a){return this.each(function(){c.attr(this,a,"");this.nodeType===1&&this.removeAttribute(a)})},addClass:function(a){if(c.isFunction(a))return this.each(function(n){var r=c(this);r.addClass(a.call(this,n,r.attr("class")))});if(a&&typeof a==="string")for(var b=(a||"").split(ca),d=0,f=this.length;d<f;d++){var e=this[d];if(e.nodeType===1)if(e.className){for(var j=" "+e.className+" ",
+i=e.className,o=0,k=b.length;o<k;o++)if(j.indexOf(" "+b[o]+" ")<0)i+=" "+b[o];e.className=c.trim(i)}else e.className=a}return this},removeClass:function(a){if(c.isFunction(a))return this.each(function(k){var n=c(this);n.removeClass(a.call(this,k,n.attr("class")))});if(a&&typeof a==="string"||a===w)for(var b=(a||"").split(ca),d=0,f=this.length;d<f;d++){var e=this[d];if(e.nodeType===1&&e.className)if(a){for(var j=(" "+e.className+" ").replace(Aa," "),i=0,o=b.length;i<o;i++)j=j.replace(" "+b[i]+" ",
+" ");e.className=c.trim(j)}else e.className=""}return this},toggleClass:function(a,b){var d=typeof a,f=typeof b==="boolean";if(c.isFunction(a))return this.each(function(e){var j=c(this);j.toggleClass(a.call(this,e,j.attr("class"),b),b)});return this.each(function(){if(d==="string")for(var e,j=0,i=c(this),o=b,k=a.split(ca);e=k[j++];){o=f?o:!i.hasClass(e);i[o?"addClass":"removeClass"](e)}else if(d==="undefined"||d==="boolean"){this.className&&c.data(this,"__className__",this.className);this.className=
+this.className||a===false?"":c.data(this,"__className__")||""}})},hasClass:function(a){a=" "+a+" ";for(var b=0,d=this.length;b<d;b++)if((" "+this[b].className+" ").replace(Aa," ").indexOf(a)>-1)return true;return false},val:function(a){if(a===w){var b=this[0];if(b){if(c.nodeName(b,"option"))return(b.attributes.value||{}).specified?b.value:b.text;if(c.nodeName(b,"select")){var d=b.selectedIndex,f=[],e=b.options;b=b.type==="select-one";if(d<0)return null;var j=b?d:0;for(d=b?d+1:e.length;j<d;j++){var i=
+e[j];if(i.selected){a=c(i).val();if(b)return a;f.push(a)}}return f}if(Ba.test(b.type)&&!c.support.checkOn)return b.getAttribute("value")===null?"on":b.value;return(b.value||"").replace(Za,"")}return w}var o=c.isFunction(a);return this.each(function(k){var n=c(this),r=a;if(this.nodeType===1){if(o)r=a.call(this,k,n.val());if(typeof r==="number")r+="";if(c.isArray(r)&&Ba.test(this.type))this.checked=c.inArray(n.val(),r)>=0;else if(c.nodeName(this,"select")){var u=c.makeArray(r);c("option",this).each(function(){this.selected=
+c.inArray(c(this).val(),u)>=0});if(!u.length)this.selectedIndex=-1}else this.value=r}})}});c.extend({attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},attr:function(a,b,d,f){if(!a||a.nodeType===3||a.nodeType===8)return w;if(f&&b in c.attrFn)return c(a)[b](d);f=a.nodeType!==1||!c.isXMLDoc(a);var e=d!==w;b=f&&c.props[b]||b;if(a.nodeType===1){var j=$a.test(b);if(b in a&&f&&!j){if(e){b==="type"&&ab.test(a.nodeName)&&a.parentNode&&c.error("type property can't be changed");
+a[b]=d}if(c.nodeName(a,"form")&&a.getAttributeNode(b))return a.getAttributeNode(b).nodeValue;if(b==="tabIndex")return(b=a.getAttributeNode("tabIndex"))&&b.specified?b.value:bb.test(a.nodeName)||cb.test(a.nodeName)&&a.href?0:w;return a[b]}if(!c.support.style&&f&&b==="style"){if(e)a.style.cssText=""+d;return a.style.cssText}e&&a.setAttribute(b,""+d);a=!c.support.hrefNormalized&&f&&j?a.getAttribute(b,2):a.getAttribute(b);return a===null?w:a}return c.style(a,b,d)}});var O=/\.(.*)$/,db=function(a){return a.replace(/[^\w\s\.\|`]/g,
+function(b){return"\\"+b})};c.event={add:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){if(a.setInterval&&a!==A&&!a.frameElement)a=A;var e,j;if(d.handler){e=d;d=e.handler}if(!d.guid)d.guid=c.guid++;if(j=c.data(a)){var i=j.events=j.events||{},o=j.handle;if(!o)j.handle=o=function(){return typeof c!=="undefined"&&!c.event.triggered?c.event.handle.apply(o.elem,arguments):w};o.elem=a;b=b.split(" ");for(var k,n=0,r;k=b[n++];){j=e?c.extend({},e):{handler:d,data:f};if(k.indexOf(".")>-1){r=k.split(".");
+k=r.shift();j.namespace=r.slice(0).sort().join(".")}else{r=[];j.namespace=""}j.type=k;j.guid=d.guid;var u=i[k],z=c.event.special[k]||{};if(!u){u=i[k]=[];if(!z.setup||z.setup.call(a,f,r,o)===false)if(a.addEventListener)a.addEventListener(k,o,false);else a.attachEvent&&a.attachEvent("on"+k,o)}if(z.add){z.add.call(a,j);if(!j.handler.guid)j.handler.guid=d.guid}u.push(j);c.event.global[k]=true}a=null}}},global:{},remove:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){var e,j=0,i,o,k,n,r,u,z=c.data(a),
+C=z&&z.events;if(z&&C){if(b&&b.type){d=b.handler;b=b.type}if(!b||typeof b==="string"&&b.charAt(0)==="."){b=b||"";for(e in C)c.event.remove(a,e+b)}else{for(b=b.split(" ");e=b[j++];){n=e;i=e.indexOf(".")<0;o=[];if(!i){o=e.split(".");e=o.shift();k=new RegExp("(^|\\.)"+c.map(o.slice(0).sort(),db).join("\\.(?:.*\\.)?")+"(\\.|$)")}if(r=C[e])if(d){n=c.event.special[e]||{};for(B=f||0;B<r.length;B++){u=r[B];if(d.guid===u.guid){if(i||k.test(u.namespace)){f==null&&r.splice(B--,1);n.remove&&n.remove.call(a,u)}if(f!=
+null)break}}if(r.length===0||f!=null&&r.length===1){if(!n.teardown||n.teardown.call(a,o)===false)Ca(a,e,z.handle);delete C[e]}}else for(var B=0;B<r.length;B++){u=r[B];if(i||k.test(u.namespace)){c.event.remove(a,n,u.handler,B);r.splice(B--,1)}}}if(c.isEmptyObject(C)){if(b=z.handle)b.elem=null;delete z.events;delete z.handle;c.isEmptyObject(z)&&c.removeData(a)}}}}},trigger:function(a,b,d,f){var e=a.type||a;if(!f){a=typeof a==="object"?a[G]?a:c.extend(c.Event(e),a):c.Event(e);if(e.indexOf("!")>=0){a.type=
+e=e.slice(0,-1);a.exclusive=true}if(!d){a.stopPropagation();c.event.global[e]&&c.each(c.cache,function(){this.events&&this.events[e]&&c.event.trigger(a,b,this.handle.elem)})}if(!d||d.nodeType===3||d.nodeType===8)return w;a.result=w;a.target=d;b=c.makeArray(b);b.unshift(a)}a.currentTarget=d;(f=c.data(d,"handle"))&&f.apply(d,b);f=d.parentNode||d.ownerDocument;try{if(!(d&&d.nodeName&&c.noData[d.nodeName.toLowerCase()]))if(d["on"+e]&&d["on"+e].apply(d,b)===false)a.result=false}catch(j){}if(!a.isPropagationStopped()&&
+f)c.event.trigger(a,b,f,true);else if(!a.isDefaultPrevented()){f=a.target;var i,o=c.nodeName(f,"a")&&e==="click",k=c.event.special[e]||{};if((!k._default||k._default.call(d,a)===false)&&!o&&!(f&&f.nodeName&&c.noData[f.nodeName.toLowerCase()])){try{if(f[e]){if(i=f["on"+e])f["on"+e]=null;c.event.triggered=true;f[e]()}}catch(n){}if(i)f["on"+e]=i;c.event.triggered=false}}},handle:function(a){var b,d,f,e;a=arguments[0]=c.event.fix(a||A.event);a.currentTarget=this;b=a.type.indexOf(".")<0&&!a.exclusive;
+if(!b){d=a.type.split(".");a.type=d.shift();f=new RegExp("(^|\\.)"+d.slice(0).sort().join("\\.(?:.*\\.)?")+"(\\.|$)")}e=c.data(this,"events");d=e[a.type];if(e&&d){d=d.slice(0);e=0;for(var j=d.length;e<j;e++){var i=d[e];if(b||f.test(i.namespace)){a.handler=i.handler;a.data=i.data;a.handleObj=i;i=i.handler.apply(this,arguments);if(i!==w){a.result=i;if(i===false){a.preventDefault();a.stopPropagation()}}if(a.isImmediatePropagationStopped())break}}}return a.result},props:"altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode layerX layerY metaKey newValue offsetX offsetY originalTarget pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target toElement view wheelDelta which".split(" "),
+fix:function(a){if(a[G])return a;var b=a;a=c.Event(b);for(var d=this.props.length,f;d;){f=this.props[--d];a[f]=b[f]}if(!a.target)a.target=a.srcElement||s;if(a.target.nodeType===3)a.target=a.target.parentNode;if(!a.relatedTarget&&a.fromElement)a.relatedTarget=a.fromElement===a.target?a.toElement:a.fromElement;if(a.pageX==null&&a.clientX!=null){b=s.documentElement;d=s.body;a.pageX=a.clientX+(b&&b.scrollLeft||d&&d.scrollLeft||0)-(b&&b.clientLeft||d&&d.clientLeft||0);a.pageY=a.clientY+(b&&b.scrollTop||
+d&&d.scrollTop||0)-(b&&b.clientTop||d&&d.clientTop||0)}if(!a.which&&(a.charCode||a.charCode===0?a.charCode:a.keyCode))a.which=a.charCode||a.keyCode;if(!a.metaKey&&a.ctrlKey)a.metaKey=a.ctrlKey;if(!a.which&&a.button!==w)a.which=a.button&1?1:a.button&2?3:a.button&4?2:0;return a},guid:1E8,proxy:c.proxy,special:{ready:{setup:c.bindReady,teardown:c.noop},live:{add:function(a){c.event.add(this,a.origType,c.extend({},a,{handler:oa}))},remove:function(a){var b=true,d=a.origType.replace(O,"");c.each(c.data(this,
+"events").live||[],function(){if(d===this.origType.replace(O,""))return b=false});b&&c.event.remove(this,a.origType,oa)}},beforeunload:{setup:function(a,b,d){if(this.setInterval)this.onbeforeunload=d;return false},teardown:function(a,b){if(this.onbeforeunload===b)this.onbeforeunload=null}}}};var Ca=s.removeEventListener?function(a,b,d){a.removeEventListener(b,d,false)}:function(a,b,d){a.detachEvent("on"+b,d)};c.Event=function(a){if(!this.preventDefault)return new c.Event(a);if(a&&a.type){this.originalEvent=
+a;this.type=a.type}else this.type=a;this.timeStamp=J();this[G]=true};c.Event.prototype={preventDefault:function(){this.isDefaultPrevented=Z;var a=this.originalEvent;if(a){a.preventDefault&&a.preventDefault();a.returnValue=false}},stopPropagation:function(){this.isPropagationStopped=Z;var a=this.originalEvent;if(a){a.stopPropagation&&a.stopPropagation();a.cancelBubble=true}},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=Z;this.stopPropagation()},isDefaultPrevented:Y,isPropagationStopped:Y,
+isImmediatePropagationStopped:Y};var Da=function(a){var b=a.relatedTarget;try{for(;b&&b!==this;)b=b.parentNode;if(b!==this){a.type=a.data;c.event.handle.apply(this,arguments)}}catch(d){}},Ea=function(a){a.type=a.data;c.event.handle.apply(this,arguments)};c.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(a,b){c.event.special[a]={setup:function(d){c.event.add(this,b,d&&d.selector?Ea:Da,a)},teardown:function(d){c.event.remove(this,b,d&&d.selector?Ea:Da)}}});if(!c.support.submitBubbles)c.event.special.submit=
+{setup:function(){if(this.nodeName.toLowerCase()!=="form"){c.event.add(this,"click.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="submit"||d==="image")&&c(b).closest("form").length)return na("submit",this,arguments)});c.event.add(this,"keypress.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="text"||d==="password")&&c(b).closest("form").length&&a.keyCode===13)return na("submit",this,arguments)})}else return false},teardown:function(){c.event.remove(this,".specialSubmit")}};
+if(!c.support.changeBubbles){var da=/textarea|input|select/i,ea,Fa=function(a){var b=a.type,d=a.value;if(b==="radio"||b==="checkbox")d=a.checked;else if(b==="select-multiple")d=a.selectedIndex>-1?c.map(a.options,function(f){return f.selected}).join("-"):"";else if(a.nodeName.toLowerCase()==="select")d=a.selectedIndex;return d},fa=function(a,b){var d=a.target,f,e;if(!(!da.test(d.nodeName)||d.readOnly)){f=c.data(d,"_change_data");e=Fa(d);if(a.type!=="focusout"||d.type!=="radio")c.data(d,"_change_data",
+e);if(!(f===w||e===f))if(f!=null||e){a.type="change";return c.event.trigger(a,b,d)}}};c.event.special.change={filters:{focusout:fa,click:function(a){var b=a.target,d=b.type;if(d==="radio"||d==="checkbox"||b.nodeName.toLowerCase()==="select")return fa.call(this,a)},keydown:function(a){var b=a.target,d=b.type;if(a.keyCode===13&&b.nodeName.toLowerCase()!=="textarea"||a.keyCode===32&&(d==="checkbox"||d==="radio")||d==="select-multiple")return fa.call(this,a)},beforeactivate:function(a){a=a.target;c.data(a,
+"_change_data",Fa(a))}},setup:function(){if(this.type==="file")return false;for(var a in ea)c.event.add(this,a+".specialChange",ea[a]);return da.test(this.nodeName)},teardown:function(){c.event.remove(this,".specialChange");return da.test(this.nodeName)}};ea=c.event.special.change.filters}s.addEventListener&&c.each({focus:"focusin",blur:"focusout"},function(a,b){function d(f){f=c.event.fix(f);f.type=b;return c.event.handle.call(this,f)}c.event.special[b]={setup:function(){this.addEventListener(a,
+d,true)},teardown:function(){this.removeEventListener(a,d,true)}}});c.each(["bind","one"],function(a,b){c.fn[b]=function(d,f,e){if(typeof d==="object"){for(var j in d)this[b](j,f,d[j],e);return this}if(c.isFunction(f)){e=f;f=w}var i=b==="one"?c.proxy(e,function(k){c(this).unbind(k,i);return e.apply(this,arguments)}):e;if(d==="unload"&&b!=="one")this.one(d,f,e);else{j=0;for(var o=this.length;j<o;j++)c.event.add(this[j],d,i,f)}return this}});c.fn.extend({unbind:function(a,b){if(typeof a==="object"&&
+!a.preventDefault)for(var d in a)this.unbind(d,a[d]);else{d=0;for(var f=this.length;d<f;d++)c.event.remove(this[d],a,b)}return this},delegate:function(a,b,d,f){return this.live(b,d,f,a)},undelegate:function(a,b,d){return arguments.length===0?this.unbind("live"):this.die(b,null,d,a)},trigger:function(a,b){return this.each(function(){c.event.trigger(a,b,this)})},triggerHandler:function(a,b){if(this[0]){a=c.Event(a);a.preventDefault();a.stopPropagation();c.event.trigger(a,b,this[0]);return a.result}},
+toggle:function(a){for(var b=arguments,d=1;d<b.length;)c.proxy(a,b[d++]);return this.click(c.proxy(a,function(f){var e=(c.data(this,"lastToggle"+a.guid)||0)%d;c.data(this,"lastToggle"+a.guid,e+1);f.preventDefault();return b[e].apply(this,arguments)||false}))},hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)}});var Ga={focus:"focusin",blur:"focusout",mouseenter:"mouseover",mouseleave:"mouseout"};c.each(["live","die"],function(a,b){c.fn[b]=function(d,f,e,j){var i,o=0,k,n,r=j||this.selector,
+u=j?this:c(this.context);if(c.isFunction(f)){e=f;f=w}for(d=(d||"").split(" ");(i=d[o++])!=null;){j=O.exec(i);k="";if(j){k=j[0];i=i.replace(O,"")}if(i==="hover")d.push("mouseenter"+k,"mouseleave"+k);else{n=i;if(i==="focus"||i==="blur"){d.push(Ga[i]+k);i+=k}else i=(Ga[i]||i)+k;b==="live"?u.each(function(){c.event.add(this,pa(i,r),{data:f,selector:r,handler:e,origType:i,origHandler:e,preType:n})}):u.unbind(pa(i,r),e)}}return this}});c.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error".split(" "),
+function(a,b){c.fn[b]=function(d){return d?this.bind(b,d):this.trigger(b)};if(c.attrFn)c.attrFn[b]=true});A.attachEvent&&!A.addEventListener&&A.attachEvent("onunload",function(){for(var a in c.cache)if(c.cache[a].handle)try{c.event.remove(c.cache[a].handle.elem)}catch(b){}});(function(){function a(g){for(var h="",l,m=0;g[m];m++){l=g[m];if(l.nodeType===3||l.nodeType===4)h+=l.nodeValue;else if(l.nodeType!==8)h+=a(l.childNodes)}return h}function b(g,h,l,m,q,p){q=0;for(var v=m.length;q<v;q++){var t=m[q];
+if(t){t=t[g];for(var y=false;t;){if(t.sizcache===l){y=m[t.sizset];break}if(t.nodeType===1&&!p){t.sizcache=l;t.sizset=q}if(t.nodeName.toLowerCase()===h){y=t;break}t=t[g]}m[q]=y}}}function d(g,h,l,m,q,p){q=0;for(var v=m.length;q<v;q++){var t=m[q];if(t){t=t[g];for(var y=false;t;){if(t.sizcache===l){y=m[t.sizset];break}if(t.nodeType===1){if(!p){t.sizcache=l;t.sizset=q}if(typeof h!=="string"){if(t===h){y=true;break}}else if(k.filter(h,[t]).length>0){y=t;break}}t=t[g]}m[q]=y}}}var f=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^[\]]*\]|['"][^'"]*['"]|[^[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,
+e=0,j=Object.prototype.toString,i=false,o=true;[0,0].sort(function(){o=false;return 0});var k=function(g,h,l,m){l=l||[];var q=h=h||s;if(h.nodeType!==1&&h.nodeType!==9)return[];if(!g||typeof g!=="string")return l;for(var p=[],v,t,y,S,H=true,M=x(h),I=g;(f.exec(""),v=f.exec(I))!==null;){I=v[3];p.push(v[1]);if(v[2]){S=v[3];break}}if(p.length>1&&r.exec(g))if(p.length===2&&n.relative[p[0]])t=ga(p[0]+p[1],h);else for(t=n.relative[p[0]]?[h]:k(p.shift(),h);p.length;){g=p.shift();if(n.relative[g])g+=p.shift();
+t=ga(g,t)}else{if(!m&&p.length>1&&h.nodeType===9&&!M&&n.match.ID.test(p[0])&&!n.match.ID.test(p[p.length-1])){v=k.find(p.shift(),h,M);h=v.expr?k.filter(v.expr,v.set)[0]:v.set[0]}if(h){v=m?{expr:p.pop(),set:z(m)}:k.find(p.pop(),p.length===1&&(p[0]==="~"||p[0]==="+")&&h.parentNode?h.parentNode:h,M);t=v.expr?k.filter(v.expr,v.set):v.set;if(p.length>0)y=z(t);else H=false;for(;p.length;){var D=p.pop();v=D;if(n.relative[D])v=p.pop();else D="";if(v==null)v=h;n.relative[D](y,v,M)}}else y=[]}y||(y=t);y||k.error(D||
+g);if(j.call(y)==="[object Array]")if(H)if(h&&h.nodeType===1)for(g=0;y[g]!=null;g++){if(y[g]&&(y[g]===true||y[g].nodeType===1&&E(h,y[g])))l.push(t[g])}else for(g=0;y[g]!=null;g++)y[g]&&y[g].nodeType===1&&l.push(t[g]);else l.push.apply(l,y);else z(y,l);if(S){k(S,q,l,m);k.uniqueSort(l)}return l};k.uniqueSort=function(g){if(B){i=o;g.sort(B);if(i)for(var h=1;h<g.length;h++)g[h]===g[h-1]&&g.splice(h--,1)}return g};k.matches=function(g,h){return k(g,null,null,h)};k.find=function(g,h,l){var m,q;if(!g)return[];
+for(var p=0,v=n.order.length;p<v;p++){var t=n.order[p];if(q=n.leftMatch[t].exec(g)){var y=q[1];q.splice(1,1);if(y.substr(y.length-1)!=="\\"){q[1]=(q[1]||"").replace(/\\/g,"");m=n.find[t](q,h,l);if(m!=null){g=g.replace(n.match[t],"");break}}}}m||(m=h.getElementsByTagName("*"));return{set:m,expr:g}};k.filter=function(g,h,l,m){for(var q=g,p=[],v=h,t,y,S=h&&h[0]&&x(h[0]);g&&h.length;){for(var H in n.filter)if((t=n.leftMatch[H].exec(g))!=null&&t[2]){var M=n.filter[H],I,D;D=t[1];y=false;t.splice(1,1);if(D.substr(D.length-
+1)!=="\\"){if(v===p)p=[];if(n.preFilter[H])if(t=n.preFilter[H](t,v,l,p,m,S)){if(t===true)continue}else y=I=true;if(t)for(var U=0;(D=v[U])!=null;U++)if(D){I=M(D,t,U,v);var Ha=m^!!I;if(l&&I!=null)if(Ha)y=true;else v[U]=false;else if(Ha){p.push(D);y=true}}if(I!==w){l||(v=p);g=g.replace(n.match[H],"");if(!y)return[];break}}}if(g===q)if(y==null)k.error(g);else break;q=g}return v};k.error=function(g){throw"Syntax error, unrecognized expression: "+g;};var n=k.selectors={order:["ID","NAME","TAG"],match:{ID:/#((?:[\w\u00c0-\uFFFF-]|\\.)+)/,
+CLASS:/\.((?:[\w\u00c0-\uFFFF-]|\\.)+)/,NAME:/\[name=['"]*((?:[\w\u00c0-\uFFFF-]|\\.)+)['"]*\]/,ATTR:/\[\s*((?:[\w\u00c0-\uFFFF-]|\\.)+)\s*(?:(\S?=)\s*(['"]*)(.*?)\3|)\s*\]/,TAG:/^((?:[\w\u00c0-\uFFFF\*-]|\\.)+)/,CHILD:/:(only|nth|last|first)-child(?:\((even|odd|[\dn+-]*)\))?/,POS:/:(nth|eq|gt|lt|first|last|even|odd)(?:\((\d*)\))?(?=[^-]|$)/,PSEUDO:/:((?:[\w\u00c0-\uFFFF-]|\\.)+)(?:\((['"]?)((?:\([^\)]+\)|[^\(\)]*)+)\2\))?/},leftMatch:{},attrMap:{"class":"className","for":"htmlFor"},attrHandle:{href:function(g){return g.getAttribute("href")}},
+relative:{"+":function(g,h){var l=typeof h==="string",m=l&&!/\W/.test(h);l=l&&!m;if(m)h=h.toLowerCase();m=0;for(var q=g.length,p;m<q;m++)if(p=g[m]){for(;(p=p.previousSibling)&&p.nodeType!==1;);g[m]=l||p&&p.nodeName.toLowerCase()===h?p||false:p===h}l&&k.filter(h,g,true)},">":function(g,h){var l=typeof h==="string";if(l&&!/\W/.test(h)){h=h.toLowerCase();for(var m=0,q=g.length;m<q;m++){var p=g[m];if(p){l=p.parentNode;g[m]=l.nodeName.toLowerCase()===h?l:false}}}else{m=0;for(q=g.length;m<q;m++)if(p=g[m])g[m]=
+l?p.parentNode:p.parentNode===h;l&&k.filter(h,g,true)}},"":function(g,h,l){var m=e++,q=d;if(typeof h==="string"&&!/\W/.test(h)){var p=h=h.toLowerCase();q=b}q("parentNode",h,m,g,p,l)},"~":function(g,h,l){var m=e++,q=d;if(typeof h==="string"&&!/\W/.test(h)){var p=h=h.toLowerCase();q=b}q("previousSibling",h,m,g,p,l)}},find:{ID:function(g,h,l){if(typeof h.getElementById!=="undefined"&&!l)return(g=h.getElementById(g[1]))?[g]:[]},NAME:function(g,h){if(typeof h.getElementsByName!=="undefined"){var l=[];
+h=h.getElementsByName(g[1]);for(var m=0,q=h.length;m<q;m++)h[m].getAttribute("name")===g[1]&&l.push(h[m]);return l.length===0?null:l}},TAG:function(g,h){return h.getElementsByTagName(g[1])}},preFilter:{CLASS:function(g,h,l,m,q,p){g=" "+g[1].replace(/\\/g,"")+" ";if(p)return g;p=0;for(var v;(v=h[p])!=null;p++)if(v)if(q^(v.className&&(" "+v.className+" ").replace(/[\t\n]/g," ").indexOf(g)>=0))l||m.push(v);else if(l)h[p]=false;return false},ID:function(g){return g[1].replace(/\\/g,"")},TAG:function(g){return g[1].toLowerCase()},
+CHILD:function(g){if(g[1]==="nth"){var h=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(g[2]==="even"&&"2n"||g[2]==="odd"&&"2n+1"||!/\D/.test(g[2])&&"0n+"+g[2]||g[2]);g[2]=h[1]+(h[2]||1)-0;g[3]=h[3]-0}g[0]=e++;return g},ATTR:function(g,h,l,m,q,p){h=g[1].replace(/\\/g,"");if(!p&&n.attrMap[h])g[1]=n.attrMap[h];if(g[2]==="~=")g[4]=" "+g[4]+" ";return g},PSEUDO:function(g,h,l,m,q){if(g[1]==="not")if((f.exec(g[3])||"").length>1||/^\w/.test(g[3]))g[3]=k(g[3],null,null,h);else{g=k.filter(g[3],h,l,true^q);l||m.push.apply(m,
+g);return false}else if(n.match.POS.test(g[0])||n.match.CHILD.test(g[0]))return true;return g},POS:function(g){g.unshift(true);return g}},filters:{enabled:function(g){return g.disabled===false&&g.type!=="hidden"},disabled:function(g){return g.disabled===true},checked:function(g){return g.checked===true},selected:function(g){return g.selected===true},parent:function(g){return!!g.firstChild},empty:function(g){return!g.firstChild},has:function(g,h,l){return!!k(l[3],g).length},header:function(g){return/h\d/i.test(g.nodeName)},
+text:function(g){return"text"===g.type},radio:function(g){return"radio"===g.type},checkbox:function(g){return"checkbox"===g.type},file:function(g){return"file"===g.type},password:function(g){return"password"===g.type},submit:function(g){return"submit"===g.type},image:function(g){return"image"===g.type},reset:function(g){return"reset"===g.type},button:function(g){return"button"===g.type||g.nodeName.toLowerCase()==="button"},input:function(g){return/input|select|textarea|button/i.test(g.nodeName)}},
+setFilters:{first:function(g,h){return h===0},last:function(g,h,l,m){return h===m.length-1},even:function(g,h){return h%2===0},odd:function(g,h){return h%2===1},lt:function(g,h,l){return h<l[3]-0},gt:function(g,h,l){return h>l[3]-0},nth:function(g,h,l){return l[3]-0===h},eq:function(g,h,l){return l[3]-0===h}},filter:{PSEUDO:function(g,h,l,m){var q=h[1],p=n.filters[q];if(p)return p(g,l,h,m);else if(q==="contains")return(g.textContent||g.innerText||a([g])||"").indexOf(h[3])>=0;else if(q==="not"){h=
+h[3];l=0;for(m=h.length;l<m;l++)if(h[l]===g)return false;return true}else k.error("Syntax error, unrecognized expression: "+q)},CHILD:function(g,h){var l=h[1],m=g;switch(l){case "only":case "first":for(;m=m.previousSibling;)if(m.nodeType===1)return false;if(l==="first")return true;m=g;case "last":for(;m=m.nextSibling;)if(m.nodeType===1)return false;return true;case "nth":l=h[2];var q=h[3];if(l===1&&q===0)return true;h=h[0];var p=g.parentNode;if(p&&(p.sizcache!==h||!g.nodeIndex)){var v=0;for(m=p.firstChild;m;m=
+m.nextSibling)if(m.nodeType===1)m.nodeIndex=++v;p.sizcache=h}g=g.nodeIndex-q;return l===0?g===0:g%l===0&&g/l>=0}},ID:function(g,h){return g.nodeType===1&&g.getAttribute("id")===h},TAG:function(g,h){return h==="*"&&g.nodeType===1||g.nodeName.toLowerCase()===h},CLASS:function(g,h){return(" "+(g.className||g.getAttribute("class"))+" ").indexOf(h)>-1},ATTR:function(g,h){var l=h[1];g=n.attrHandle[l]?n.attrHandle[l](g):g[l]!=null?g[l]:g.getAttribute(l);l=g+"";var m=h[2];h=h[4];return g==null?m==="!=":m===
+"="?l===h:m==="*="?l.indexOf(h)>=0:m==="~="?(" "+l+" ").indexOf(h)>=0:!h?l&&g!==false:m==="!="?l!==h:m==="^="?l.indexOf(h)===0:m==="$="?l.substr(l.length-h.length)===h:m==="|="?l===h||l.substr(0,h.length+1)===h+"-":false},POS:function(g,h,l,m){var q=n.setFilters[h[2]];if(q)return q(g,l,h,m)}}},r=n.match.POS;for(var u in n.match){n.match[u]=new RegExp(n.match[u].source+/(?![^\[]*\])(?![^\(]*\))/.source);n.leftMatch[u]=new RegExp(/(^(?:.|\r|\n)*?)/.source+n.match[u].source.replace(/\\(\d+)/g,function(g,
+h){return"\\"+(h-0+1)}))}var z=function(g,h){g=Array.prototype.slice.call(g,0);if(h){h.push.apply(h,g);return h}return g};try{Array.prototype.slice.call(s.documentElement.childNodes,0)}catch(C){z=function(g,h){h=h||[];if(j.call(g)==="[object Array]")Array.prototype.push.apply(h,g);else if(typeof g.length==="number")for(var l=0,m=g.length;l<m;l++)h.push(g[l]);else for(l=0;g[l];l++)h.push(g[l]);return h}}var B;if(s.documentElement.compareDocumentPosition)B=function(g,h){if(!g.compareDocumentPosition||
+!h.compareDocumentPosition){if(g==h)i=true;return g.compareDocumentPosition?-1:1}g=g.compareDocumentPosition(h)&4?-1:g===h?0:1;if(g===0)i=true;return g};else if("sourceIndex"in s.documentElement)B=function(g,h){if(!g.sourceIndex||!h.sourceIndex){if(g==h)i=true;return g.sourceIndex?-1:1}g=g.sourceIndex-h.sourceIndex;if(g===0)i=true;return g};else if(s.createRange)B=function(g,h){if(!g.ownerDocument||!h.ownerDocument){if(g==h)i=true;return g.ownerDocument?-1:1}var l=g.ownerDocument.createRange(),m=
+h.ownerDocument.createRange();l.setStart(g,0);l.setEnd(g,0);m.setStart(h,0);m.setEnd(h,0);g=l.compareBoundaryPoints(Range.START_TO_END,m);if(g===0)i=true;return g};(function(){var g=s.createElement("div"),h="script"+(new Date).getTime();g.innerHTML="<a name='"+h+"'/>";var l=s.documentElement;l.insertBefore(g,l.firstChild);if(s.getElementById(h)){n.find.ID=function(m,q,p){if(typeof q.getElementById!=="undefined"&&!p)return(q=q.getElementById(m[1]))?q.id===m[1]||typeof q.getAttributeNode!=="undefined"&&
+q.getAttributeNode("id").nodeValue===m[1]?[q]:w:[]};n.filter.ID=function(m,q){var p=typeof m.getAttributeNode!=="undefined"&&m.getAttributeNode("id");return m.nodeType===1&&p&&p.nodeValue===q}}l.removeChild(g);l=g=null})();(function(){var g=s.createElement("div");g.appendChild(s.createComment(""));if(g.getElementsByTagName("*").length>0)n.find.TAG=function(h,l){l=l.getElementsByTagName(h[1]);if(h[1]==="*"){h=[];for(var m=0;l[m];m++)l[m].nodeType===1&&h.push(l[m]);l=h}return l};g.innerHTML="<a href='#'></a>";
+if(g.firstChild&&typeof g.firstChild.getAttribute!=="undefined"&&g.firstChild.getAttribute("href")!=="#")n.attrHandle.href=function(h){return h.getAttribute("href",2)};g=null})();s.querySelectorAll&&function(){var g=k,h=s.createElement("div");h.innerHTML="<p class='TEST'></p>";if(!(h.querySelectorAll&&h.querySelectorAll(".TEST").length===0)){k=function(m,q,p,v){q=q||s;if(!v&&q.nodeType===9&&!x(q))try{return z(q.querySelectorAll(m),p)}catch(t){}return g(m,q,p,v)};for(var l in g)k[l]=g[l];h=null}}();
+(function(){var g=s.createElement("div");g.innerHTML="<div class='test e'></div><div class='test'></div>";if(!(!g.getElementsByClassName||g.getElementsByClassName("e").length===0)){g.lastChild.className="e";if(g.getElementsByClassName("e").length!==1){n.order.splice(1,0,"CLASS");n.find.CLASS=function(h,l,m){if(typeof l.getElementsByClassName!=="undefined"&&!m)return l.getElementsByClassName(h[1])};g=null}}})();var E=s.compareDocumentPosition?function(g,h){return!!(g.compareDocumentPosition(h)&16)}:
+function(g,h){return g!==h&&(g.contains?g.contains(h):true)},x=function(g){return(g=(g?g.ownerDocument||g:0).documentElement)?g.nodeName!=="HTML":false},ga=function(g,h){var l=[],m="",q;for(h=h.nodeType?[h]:h;q=n.match.PSEUDO.exec(g);){m+=q[0];g=g.replace(n.match.PSEUDO,"")}g=n.relative[g]?g+"*":g;q=0;for(var p=h.length;q<p;q++)k(g,h[q],l);return k.filter(m,l)};c.find=k;c.expr=k.selectors;c.expr[":"]=c.expr.filters;c.unique=k.uniqueSort;c.text=a;c.isXMLDoc=x;c.contains=E})();var eb=/Until$/,fb=/^(?:parents|prevUntil|prevAll)/,
+gb=/,/;R=Array.prototype.slice;var Ia=function(a,b,d){if(c.isFunction(b))return c.grep(a,function(e,j){return!!b.call(e,j,e)===d});else if(b.nodeType)return c.grep(a,function(e){return e===b===d});else if(typeof b==="string"){var f=c.grep(a,function(e){return e.nodeType===1});if(Ua.test(b))return c.filter(b,f,!d);else b=c.filter(b,f)}return c.grep(a,function(e){return c.inArray(e,b)>=0===d})};c.fn.extend({find:function(a){for(var b=this.pushStack("","find",a),d=0,f=0,e=this.length;f<e;f++){d=b.length;
+c.find(a,this[f],b);if(f>0)for(var j=d;j<b.length;j++)for(var i=0;i<d;i++)if(b[i]===b[j]){b.splice(j--,1);break}}return b},has:function(a){var b=c(a);return this.filter(function(){for(var d=0,f=b.length;d<f;d++)if(c.contains(this,b[d]))return true})},not:function(a){return this.pushStack(Ia(this,a,false),"not",a)},filter:function(a){return this.pushStack(Ia(this,a,true),"filter",a)},is:function(a){return!!a&&c.filter(a,this).length>0},closest:function(a,b){if(c.isArray(a)){var d=[],f=this[0],e,j=
+{},i;if(f&&a.length){e=0;for(var o=a.length;e<o;e++){i=a[e];j[i]||(j[i]=c.expr.match.POS.test(i)?c(i,b||this.context):i)}for(;f&&f.ownerDocument&&f!==b;){for(i in j){e=j[i];if(e.jquery?e.index(f)>-1:c(f).is(e)){d.push({selector:i,elem:f});delete j[i]}}f=f.parentNode}}return d}var k=c.expr.match.POS.test(a)?c(a,b||this.context):null;return this.map(function(n,r){for(;r&&r.ownerDocument&&r!==b;){if(k?k.index(r)>-1:c(r).is(a))return r;r=r.parentNode}return null})},index:function(a){if(!a||typeof a===
+"string")return c.inArray(this[0],a?c(a):this.parent().children());return c.inArray(a.jquery?a[0]:a,this)},add:function(a,b){a=typeof a==="string"?c(a,b||this.context):c.makeArray(a);b=c.merge(this.get(),a);return this.pushStack(qa(a[0])||qa(b[0])?b:c.unique(b))},andSelf:function(){return this.add(this.prevObject)}});c.each({parent:function(a){return(a=a.parentNode)&&a.nodeType!==11?a:null},parents:function(a){return c.dir(a,"parentNode")},parentsUntil:function(a,b,d){return c.dir(a,"parentNode",
+d)},next:function(a){return c.nth(a,2,"nextSibling")},prev:function(a){return c.nth(a,2,"previousSibling")},nextAll:function(a){return c.dir(a,"nextSibling")},prevAll:function(a){return c.dir(a,"previousSibling")},nextUntil:function(a,b,d){return c.dir(a,"nextSibling",d)},prevUntil:function(a,b,d){return c.dir(a,"previousSibling",d)},siblings:function(a){return c.sibling(a.parentNode.firstChild,a)},children:function(a){return c.sibling(a.firstChild)},contents:function(a){return c.nodeName(a,"iframe")?
+a.contentDocument||a.contentWindow.document:c.makeArray(a.childNodes)}},function(a,b){c.fn[a]=function(d,f){var e=c.map(this,b,d);eb.test(a)||(f=d);if(f&&typeof f==="string")e=c.filter(f,e);e=this.length>1?c.unique(e):e;if((this.length>1||gb.test(f))&&fb.test(a))e=e.reverse();return this.pushStack(e,a,R.call(arguments).join(","))}});c.extend({filter:function(a,b,d){if(d)a=":not("+a+")";return c.find.matches(a,b)},dir:function(a,b,d){var f=[];for(a=a[b];a&&a.nodeType!==9&&(d===w||a.nodeType!==1||!c(a).is(d));){a.nodeType===
+1&&f.push(a);a=a[b]}return f},nth:function(a,b,d){b=b||1;for(var f=0;a;a=a[d])if(a.nodeType===1&&++f===b)break;return a},sibling:function(a,b){for(var d=[];a;a=a.nextSibling)a.nodeType===1&&a!==b&&d.push(a);return d}});var Ja=/ jQuery\d+="(?:\d+|null)"/g,V=/^\s+/,Ka=/(<([\w:]+)[^>]*?)\/>/g,hb=/^(?:area|br|col|embed|hr|img|input|link|meta|param)$/i,La=/<([\w:]+)/,ib=/<tbody/i,jb=/<|&#?\w+;/,ta=/<script|<object|<embed|<option|<style/i,ua=/checked\s*(?:[^=]|=\s*.checked.)/i,Ma=function(a,b,d){return hb.test(d)?
+a:b+"></"+d+">"},F={option:[1,"<select multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],area:[1,"<map>","</map>"],_default:[0,"",""]};F.optgroup=F.option;F.tbody=F.tfoot=F.colgroup=F.caption=F.thead;F.th=F.td;if(!c.support.htmlSerialize)F._default=[1,"div<div>","</div>"];c.fn.extend({text:function(a){if(c.isFunction(a))return this.each(function(b){var d=
+c(this);d.text(a.call(this,b,d.text()))});if(typeof a!=="object"&&a!==w)return this.empty().append((this[0]&&this[0].ownerDocument||s).createTextNode(a));return c.text(this)},wrapAll:function(a){if(c.isFunction(a))return this.each(function(d){c(this).wrapAll(a.call(this,d))});if(this[0]){var b=c(a,this[0].ownerDocument).eq(0).clone(true);this[0].parentNode&&b.insertBefore(this[0]);b.map(function(){for(var d=this;d.firstChild&&d.firstChild.nodeType===1;)d=d.firstChild;return d}).append(this)}return this},
+wrapInner:function(a){if(c.isFunction(a))return this.each(function(b){c(this).wrapInner(a.call(this,b))});return this.each(function(){var b=c(this),d=b.contents();d.length?d.wrapAll(a):b.append(a)})},wrap:function(a){return this.each(function(){c(this).wrapAll(a)})},unwrap:function(){return this.parent().each(function(){c.nodeName(this,"body")||c(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.appendChild(a)})},
+prepend:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this)});else if(arguments.length){var a=c(arguments[0]);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,
+this.nextSibling)});else if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,c(arguments[0]).toArray());return a}},remove:function(a,b){for(var d=0,f;(f=this[d])!=null;d++)if(!a||c.filter(a,[f]).length){if(!b&&f.nodeType===1){c.cleanData(f.getElementsByTagName("*"));c.cleanData([f])}f.parentNode&&f.parentNode.removeChild(f)}return this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++)for(b.nodeType===1&&c.cleanData(b.getElementsByTagName("*"));b.firstChild;)b.removeChild(b.firstChild);
+return this},clone:function(a){var b=this.map(function(){if(!c.support.noCloneEvent&&!c.isXMLDoc(this)){var d=this.outerHTML,f=this.ownerDocument;if(!d){d=f.createElement("div");d.appendChild(this.cloneNode(true));d=d.innerHTML}return c.clean([d.replace(Ja,"").replace(/=([^="'>\s]+\/)>/g,'="$1">').replace(V,"")],f)[0]}else return this.cloneNode(true)});if(a===true){ra(this,b);ra(this.find("*"),b.find("*"))}return b},html:function(a){if(a===w)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(Ja,
+""):null;else if(typeof a==="string"&&!ta.test(a)&&(c.support.leadingWhitespace||!V.test(a))&&!F[(La.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Ka,Ma);try{for(var b=0,d=this.length;b<d;b++)if(this[b].nodeType===1){c.cleanData(this[b].getElementsByTagName("*"));this[b].innerHTML=a}}catch(f){this.empty().append(a)}}else c.isFunction(a)?this.each(function(e){var j=c(this),i=j.html();j.empty().append(function(){return a.call(this,e,i)})}):this.empty().append(a);return this},replaceWith:function(a){if(this[0]&&
+this[0].parentNode){if(c.isFunction(a))return this.each(function(b){var d=c(this),f=d.html();d.replaceWith(a.call(this,b,f))});if(typeof a!=="string")a=c(a).detach();return this.each(function(){var b=this.nextSibling,d=this.parentNode;c(this).remove();b?c(b).before(a):c(d).append(a)})}else return this.pushStack(c(c.isFunction(a)?a():a),"replaceWith",a)},detach:function(a){return this.remove(a,true)},domManip:function(a,b,d){function f(u){return c.nodeName(u,"table")?u.getElementsByTagName("tbody")[0]||
+u.appendChild(u.ownerDocument.createElement("tbody")):u}var e,j,i=a[0],o=[],k;if(!c.support.checkClone&&arguments.length===3&&typeof i==="string"&&ua.test(i))return this.each(function(){c(this).domManip(a,b,d,true)});if(c.isFunction(i))return this.each(function(u){var z=c(this);a[0]=i.call(this,u,b?z.html():w);z.domManip(a,b,d)});if(this[0]){e=i&&i.parentNode;e=c.support.parentNode&&e&&e.nodeType===11&&e.childNodes.length===this.length?{fragment:e}:sa(a,this,o);k=e.fragment;if(j=k.childNodes.length===
+1?(k=k.firstChild):k.firstChild){b=b&&c.nodeName(j,"tr");for(var n=0,r=this.length;n<r;n++)d.call(b?f(this[n],j):this[n],n>0||e.cacheable||this.length>1?k.cloneNode(true):k)}o.length&&c.each(o,Qa)}return this}});c.fragments={};c.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){c.fn[a]=function(d){var f=[];d=c(d);var e=this.length===1&&this[0].parentNode;if(e&&e.nodeType===11&&e.childNodes.length===1&&d.length===1){d[b](this[0]);
+return this}else{e=0;for(var j=d.length;e<j;e++){var i=(e>0?this.clone(true):this).get();c.fn[b].apply(c(d[e]),i);f=f.concat(i)}return this.pushStack(f,a,d.selector)}}});c.extend({clean:function(a,b,d,f){b=b||s;if(typeof b.createElement==="undefined")b=b.ownerDocument||b[0]&&b[0].ownerDocument||s;for(var e=[],j=0,i;(i=a[j])!=null;j++){if(typeof i==="number")i+="";if(i){if(typeof i==="string"&&!jb.test(i))i=b.createTextNode(i);else if(typeof i==="string"){i=i.replace(Ka,Ma);var o=(La.exec(i)||["",
+""])[1].toLowerCase(),k=F[o]||F._default,n=k[0],r=b.createElement("div");for(r.innerHTML=k[1]+i+k[2];n--;)r=r.lastChild;if(!c.support.tbody){n=ib.test(i);o=o==="table"&&!n?r.firstChild&&r.firstChild.childNodes:k[1]==="<table>"&&!n?r.childNodes:[];for(k=o.length-1;k>=0;--k)c.nodeName(o[k],"tbody")&&!o[k].childNodes.length&&o[k].parentNode.removeChild(o[k])}!c.support.leadingWhitespace&&V.test(i)&&r.insertBefore(b.createTextNode(V.exec(i)[0]),r.firstChild);i=r.childNodes}if(i.nodeType)e.push(i);else e=
+c.merge(e,i)}}if(d)for(j=0;e[j];j++)if(f&&c.nodeName(e[j],"script")&&(!e[j].type||e[j].type.toLowerCase()==="text/javascript"))f.push(e[j].parentNode?e[j].parentNode.removeChild(e[j]):e[j]);else{e[j].nodeType===1&&e.splice.apply(e,[j+1,0].concat(c.makeArray(e[j].getElementsByTagName("script"))));d.appendChild(e[j])}return e},cleanData:function(a){for(var b,d,f=c.cache,e=c.event.special,j=c.support.deleteExpando,i=0,o;(o=a[i])!=null;i++)if(d=o[c.expando]){b=f[d];if(b.events)for(var k in b.events)e[k]?
+c.event.remove(o,k):Ca(o,k,b.handle);if(j)delete o[c.expando];else o.removeAttribute&&o.removeAttribute(c.expando);delete f[d]}}});var kb=/z-?index|font-?weight|opacity|zoom|line-?height/i,Na=/alpha\([^)]*\)/,Oa=/opacity=([^)]*)/,ha=/float/i,ia=/-([a-z])/ig,lb=/([A-Z])/g,mb=/^-?\d+(?:px)?$/i,nb=/^-?\d/,ob={position:"absolute",visibility:"hidden",display:"block"},pb=["Left","Right"],qb=["Top","Bottom"],rb=s.defaultView&&s.defaultView.getComputedStyle,Pa=c.support.cssFloat?"cssFloat":"styleFloat",ja=
+function(a,b){return b.toUpperCase()};c.fn.css=function(a,b){return X(this,a,b,true,function(d,f,e){if(e===w)return c.curCSS(d,f);if(typeof e==="number"&&!kb.test(f))e+="px";c.style(d,f,e)})};c.extend({style:function(a,b,d){if(!a||a.nodeType===3||a.nodeType===8)return w;if((b==="width"||b==="height")&&parseFloat(d)<0)d=w;var f=a.style||a,e=d!==w;if(!c.support.opacity&&b==="opacity"){if(e){f.zoom=1;b=parseInt(d,10)+""==="NaN"?"":"alpha(opacity="+d*100+")";a=f.filter||c.curCSS(a,"filter")||"";f.filter=
+Na.test(a)?a.replace(Na,b):b}return f.filter&&f.filter.indexOf("opacity=")>=0?parseFloat(Oa.exec(f.filter)[1])/100+"":""}if(ha.test(b))b=Pa;b=b.replace(ia,ja);if(e)f[b]=d;return f[b]},css:function(a,b,d,f){if(b==="width"||b==="height"){var e,j=b==="width"?pb:qb;function i(){e=b==="width"?a.offsetWidth:a.offsetHeight;f!=="border"&&c.each(j,function(){f||(e-=parseFloat(c.curCSS(a,"padding"+this,true))||0);if(f==="margin")e+=parseFloat(c.curCSS(a,"margin"+this,true))||0;else e-=parseFloat(c.curCSS(a,
+"border"+this+"Width",true))||0})}a.offsetWidth!==0?i():c.swap(a,ob,i);return Math.max(0,Math.round(e))}return c.curCSS(a,b,d)},curCSS:function(a,b,d){var f,e=a.style;if(!c.support.opacity&&b==="opacity"&&a.currentStyle){f=Oa.test(a.currentStyle.filter||"")?parseFloat(RegExp.$1)/100+"":"";return f===""?"1":f}if(ha.test(b))b=Pa;if(!d&&e&&e[b])f=e[b];else if(rb){if(ha.test(b))b="float";b=b.replace(lb,"-$1").toLowerCase();e=a.ownerDocument.defaultView;if(!e)return null;if(a=e.getComputedStyle(a,null))f=
+a.getPropertyValue(b);if(b==="opacity"&&f==="")f="1"}else if(a.currentStyle){d=b.replace(ia,ja);f=a.currentStyle[b]||a.currentStyle[d];if(!mb.test(f)&&nb.test(f)){b=e.left;var j=a.runtimeStyle.left;a.runtimeStyle.left=a.currentStyle.left;e.left=d==="fontSize"?"1em":f||0;f=e.pixelLeft+"px";e.left=b;a.runtimeStyle.left=j}}return f},swap:function(a,b,d){var f={};for(var e in b){f[e]=a.style[e];a.style[e]=b[e]}d.call(a);for(e in b)a.style[e]=f[e]}});if(c.expr&&c.expr.filters){c.expr.filters.hidden=function(a){var b=
+a.offsetWidth,d=a.offsetHeight,f=a.nodeName.toLowerCase()==="tr";return b===0&&d===0&&!f?true:b>0&&d>0&&!f?false:c.curCSS(a,"display")==="none"};c.expr.filters.visible=function(a){return!c.expr.filters.hidden(a)}}var sb=J(),tb=/<script(.|\s)*?\/script>/gi,ub=/select|textarea/i,vb=/color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week/i,N=/=\?(&|$)/,ka=/\?/,wb=/(\?|&)_=.*?(&|$)/,xb=/^(\w+:)?\/\/([^\/?#]+)/,yb=/%20/g,zb=c.fn.load;c.fn.extend({load:function(a,b,d){if(typeof a!==
+"string")return zb.call(this,a);else if(!this.length)return this;var f=a.indexOf(" ");if(f>=0){var e=a.slice(f,a.length);a=a.slice(0,f)}f="GET";if(b)if(c.isFunction(b)){d=b;b=null}else if(typeof b==="object"){b=c.param(b,c.ajaxSettings.traditional);f="POST"}var j=this;c.ajax({url:a,type:f,dataType:"html",data:b,complete:function(i,o){if(o==="success"||o==="notmodified")j.html(e?c("<div />").append(i.responseText.replace(tb,"")).find(e):i.responseText);d&&j.each(d,[i.responseText,o,i])}});return this},
+serialize:function(){return c.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?c.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||ub.test(this.nodeName)||vb.test(this.type))}).map(function(a,b){a=c(this).val();return a==null?null:c.isArray(a)?c.map(a,function(d){return{name:b.name,value:d}}):{name:b.name,value:a}}).get()}});c.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),
+function(a,b){c.fn[b]=function(d){return this.bind(b,d)}});c.extend({get:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b=null}return c.ajax({type:"GET",url:a,data:b,success:d,dataType:f})},getScript:function(a,b){return c.get(a,null,b,"script")},getJSON:function(a,b,d){return c.get(a,b,d,"json")},post:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b={}}return c.ajax({type:"POST",url:a,data:b,success:d,dataType:f})},ajaxSetup:function(a){c.extend(c.ajaxSettings,a)},ajaxSettings:{url:location.href,
+global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:A.XMLHttpRequest&&(A.location.protocol!=="file:"||!A.ActiveXObject)?function(){return new A.XMLHttpRequest}:function(){try{return new A.ActiveXObject("Microsoft.XMLHTTP")}catch(a){}},accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},etag:{},ajax:function(a){function b(){e.success&&
+e.success.call(k,o,i,x);e.global&&f("ajaxSuccess",[x,e])}function d(){e.complete&&e.complete.call(k,x,i);e.global&&f("ajaxComplete",[x,e]);e.global&&!--c.active&&c.event.trigger("ajaxStop")}function f(q,p){(e.context?c(e.context):c.event).trigger(q,p)}var e=c.extend(true,{},c.ajaxSettings,a),j,i,o,k=a&&a.context||e,n=e.type.toUpperCase();if(e.data&&e.processData&&typeof e.data!=="string")e.data=c.param(e.data,e.traditional);if(e.dataType==="jsonp"){if(n==="GET")N.test(e.url)||(e.url+=(ka.test(e.url)?
+"&":"?")+(e.jsonp||"callback")+"=?");else if(!e.data||!N.test(e.data))e.data=(e.data?e.data+"&":"")+(e.jsonp||"callback")+"=?";e.dataType="json"}if(e.dataType==="json"&&(e.data&&N.test(e.data)||N.test(e.url))){j=e.jsonpCallback||"jsonp"+sb++;if(e.data)e.data=(e.data+"").replace(N,"="+j+"$1");e.url=e.url.replace(N,"="+j+"$1");e.dataType="script";A[j]=A[j]||function(q){o=q;b();d();A[j]=w;try{delete A[j]}catch(p){}z&&z.removeChild(C)}}if(e.dataType==="script"&&e.cache===null)e.cache=false;if(e.cache===
+false&&n==="GET"){var r=J(),u=e.url.replace(wb,"$1_="+r+"$2");e.url=u+(u===e.url?(ka.test(e.url)?"&":"?")+"_="+r:"")}if(e.data&&n==="GET")e.url+=(ka.test(e.url)?"&":"?")+e.data;e.global&&!c.active++&&c.event.trigger("ajaxStart");r=(r=xb.exec(e.url))&&(r[1]&&r[1]!==location.protocol||r[2]!==location.host);if(e.dataType==="script"&&n==="GET"&&r){var z=s.getElementsByTagName("head")[0]||s.documentElement,C=s.createElement("script");C.src=e.url;if(e.scriptCharset)C.charset=e.scriptCharset;if(!j){var B=
+false;C.onload=C.onreadystatechange=function(){if(!B&&(!this.readyState||this.readyState==="loaded"||this.readyState==="complete")){B=true;b();d();C.onload=C.onreadystatechange=null;z&&C.parentNode&&z.removeChild(C)}}}z.insertBefore(C,z.firstChild);return w}var E=false,x=e.xhr();if(x){e.username?x.open(n,e.url,e.async,e.username,e.password):x.open(n,e.url,e.async);try{if(e.data||a&&a.contentType)x.setRequestHeader("Content-Type",e.contentType);if(e.ifModified){c.lastModified[e.url]&&x.setRequestHeader("If-Modified-Since",
+c.lastModified[e.url]);c.etag[e.url]&&x.setRequestHeader("If-None-Match",c.etag[e.url])}r||x.setRequestHeader("X-Requested-With","XMLHttpRequest");x.setRequestHeader("Accept",e.dataType&&e.accepts[e.dataType]?e.accepts[e.dataType]+", */*":e.accepts._default)}catch(ga){}if(e.beforeSend&&e.beforeSend.call(k,x,e)===false){e.global&&!--c.active&&c.event.trigger("ajaxStop");x.abort();return false}e.global&&f("ajaxSend",[x,e]);var g=x.onreadystatechange=function(q){if(!x||x.readyState===0||q==="abort"){E||
+d();E=true;if(x)x.onreadystatechange=c.noop}else if(!E&&x&&(x.readyState===4||q==="timeout")){E=true;x.onreadystatechange=c.noop;i=q==="timeout"?"timeout":!c.httpSuccess(x)?"error":e.ifModified&&c.httpNotModified(x,e.url)?"notmodified":"success";var p;if(i==="success")try{o=c.httpData(x,e.dataType,e)}catch(v){i="parsererror";p=v}if(i==="success"||i==="notmodified")j||b();else c.handleError(e,x,i,p);d();q==="timeout"&&x.abort();if(e.async)x=null}};try{var h=x.abort;x.abort=function(){x&&h.call(x);
+g("abort")}}catch(l){}e.async&&e.timeout>0&&setTimeout(function(){x&&!E&&g("timeout")},e.timeout);try{x.send(n==="POST"||n==="PUT"||n==="DELETE"?e.data:null)}catch(m){c.handleError(e,x,null,m);d()}e.async||g();return x}},handleError:function(a,b,d,f){if(a.error)a.error.call(a.context||a,b,d,f);if(a.global)(a.context?c(a.context):c.event).trigger("ajaxError",[b,a,f])},active:0,httpSuccess:function(a){try{return!a.status&&location.protocol==="file:"||a.status>=200&&a.status<300||a.status===304||a.status===
+1223||a.status===0}catch(b){}return false},httpNotModified:function(a,b){var d=a.getResponseHeader("Last-Modified"),f=a.getResponseHeader("Etag");if(d)c.lastModified[b]=d;if(f)c.etag[b]=f;return a.status===304||a.status===0},httpData:function(a,b,d){var f=a.getResponseHeader("content-type")||"",e=b==="xml"||!b&&f.indexOf("xml")>=0;a=e?a.responseXML:a.responseText;e&&a.documentElement.nodeName==="parsererror"&&c.error("parsererror");if(d&&d.dataFilter)a=d.dataFilter(a,b);if(typeof a==="string")if(b===
+"json"||!b&&f.indexOf("json")>=0)a=c.parseJSON(a);else if(b==="script"||!b&&f.indexOf("javascript")>=0)c.globalEval(a);return a},param:function(a,b){function d(i,o){if(c.isArray(o))c.each(o,function(k,n){b||/\[\]$/.test(i)?f(i,n):d(i+"["+(typeof n==="object"||c.isArray(n)?k:"")+"]",n)});else!b&&o!=null&&typeof o==="object"?c.each(o,function(k,n){d(i+"["+k+"]",n)}):f(i,o)}function f(i,o){o=c.isFunction(o)?o():o;e[e.length]=encodeURIComponent(i)+"="+encodeURIComponent(o)}var e=[];if(b===w)b=c.ajaxSettings.traditional;
+if(c.isArray(a)||a.jquery)c.each(a,function(){f(this.name,this.value)});else for(var j in a)d(j,a[j]);return e.join("&").replace(yb,"+")}});var la={},Ab=/toggle|show|hide/,Bb=/^([+-]=)?([\d+-.]+)(.*)$/,W,va=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];c.fn.extend({show:function(a,b){if(a||a===0)return this.animate(K("show",3),a,b);else{a=0;for(b=this.length;a<b;a++){var d=c.data(this[a],"olddisplay");
+this[a].style.display=d||"";if(c.css(this[a],"display")==="none"){d=this[a].nodeName;var f;if(la[d])f=la[d];else{var e=c("<"+d+" />").appendTo("body");f=e.css("display");if(f==="none")f="block";e.remove();la[d]=f}c.data(this[a],"olddisplay",f)}}a=0;for(b=this.length;a<b;a++)this[a].style.display=c.data(this[a],"olddisplay")||"";return this}},hide:function(a,b){if(a||a===0)return this.animate(K("hide",3),a,b);else{a=0;for(b=this.length;a<b;a++){var d=c.data(this[a],"olddisplay");!d&&d!=="none"&&c.data(this[a],
+"olddisplay",c.css(this[a],"display"))}a=0;for(b=this.length;a<b;a++)this[a].style.display="none";return this}},_toggle:c.fn.toggle,toggle:function(a,b){var d=typeof a==="boolean";if(c.isFunction(a)&&c.isFunction(b))this._toggle.apply(this,arguments);else a==null||d?this.each(function(){var f=d?a:c(this).is(":hidden");c(this)[f?"show":"hide"]()}):this.animate(K("toggle",3),a,b);return this},fadeTo:function(a,b,d){return this.filter(":hidden").css("opacity",0).show().end().animate({opacity:b},a,d)},
+animate:function(a,b,d,f){var e=c.speed(b,d,f);if(c.isEmptyObject(a))return this.each(e.complete);return this[e.queue===false?"each":"queue"](function(){var j=c.extend({},e),i,o=this.nodeType===1&&c(this).is(":hidden"),k=this;for(i in a){var n=i.replace(ia,ja);if(i!==n){a[n]=a[i];delete a[i];i=n}if(a[i]==="hide"&&o||a[i]==="show"&&!o)return j.complete.call(this);if((i==="height"||i==="width")&&this.style){j.display=c.css(this,"display");j.overflow=this.style.overflow}if(c.isArray(a[i])){(j.specialEasing=
+j.specialEasing||{})[i]=a[i][1];a[i]=a[i][0]}}if(j.overflow!=null)this.style.overflow="hidden";j.curAnim=c.extend({},a);c.each(a,function(r,u){var z=new c.fx(k,j,r);if(Ab.test(u))z[u==="toggle"?o?"show":"hide":u](a);else{var C=Bb.exec(u),B=z.cur(true)||0;if(C){u=parseFloat(C[2]);var E=C[3]||"px";if(E!=="px"){k.style[r]=(u||1)+E;B=(u||1)/z.cur(true)*B;k.style[r]=B+E}if(C[1])u=(C[1]==="-="?-1:1)*u+B;z.custom(B,u,E)}else z.custom(B,u,"")}});return true})},stop:function(a,b){var d=c.timers;a&&this.queue([]);
+this.each(function(){for(var f=d.length-1;f>=0;f--)if(d[f].elem===this){b&&d[f](true);d.splice(f,1)}});b||this.dequeue();return this}});c.each({slideDown:K("show",1),slideUp:K("hide",1),slideToggle:K("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"}},function(a,b){c.fn[a]=function(d,f){return this.animate(b,d,f)}});c.extend({speed:function(a,b,d){var f=a&&typeof a==="object"?a:{complete:d||!d&&b||c.isFunction(a)&&a,duration:a,easing:d&&b||b&&!c.isFunction(b)&&b};f.duration=c.fx.off?0:typeof f.duration===
+"number"?f.duration:c.fx.speeds[f.duration]||c.fx.speeds._default;f.old=f.complete;f.complete=function(){f.queue!==false&&c(this).dequeue();c.isFunction(f.old)&&f.old.call(this)};return f},easing:{linear:function(a,b,d,f){return d+f*a},swing:function(a,b,d,f){return(-Math.cos(a*Math.PI)/2+0.5)*f+d}},timers:[],fx:function(a,b,d){this.options=b;this.elem=a;this.prop=d;if(!b.orig)b.orig={}}});c.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this);(c.fx.step[this.prop]||
+c.fx.step._default)(this);if((this.prop==="height"||this.prop==="width")&&this.elem.style)this.elem.style.display="block"},cur:function(a){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];return(a=parseFloat(c.css(this.elem,this.prop,a)))&&a>-10000?a:parseFloat(c.curCSS(this.elem,this.prop))||0},custom:function(a,b,d){function f(j){return e.step(j)}this.startTime=J();this.start=a;this.end=b;this.unit=d||this.unit||"px";this.now=this.start;
+this.pos=this.state=0;var e=this;f.elem=this.elem;if(f()&&c.timers.push(f)&&!W)W=setInterval(c.fx.tick,13)},show:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.show=true;this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur());c(this.elem).show()},hide:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.hide=true;this.custom(this.cur(),0)},step:function(a){var b=J(),d=true;if(a||b>=this.options.duration+this.startTime){this.now=
+this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;for(var f in this.options.curAnim)if(this.options.curAnim[f]!==true)d=false;if(d){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;a=c.data(this.elem,"olddisplay");this.elem.style.display=a?a:this.options.display;if(c.css(this.elem,"display")==="none")this.elem.style.display="block"}this.options.hide&&c(this.elem).hide();if(this.options.hide||this.options.show)for(var e in this.options.curAnim)c.style(this.elem,
+e,this.options.orig[e]);this.options.complete.call(this.elem)}return false}else{e=b-this.startTime;this.state=e/this.options.duration;a=this.options.easing||(c.easing.swing?"swing":"linear");this.pos=c.easing[this.options.specialEasing&&this.options.specialEasing[this.prop]||a](this.state,e,0,1,this.options.duration);this.now=this.start+(this.end-this.start)*this.pos;this.update()}return true}};c.extend(c.fx,{tick:function(){for(var a=c.timers,b=0;b<a.length;b++)a[b]()||a.splice(b--,1);a.length||
+c.fx.stop()},stop:function(){clearInterval(W);W=null},speeds:{slow:600,fast:200,_default:400},step:{opacity:function(a){c.style(a.elem,"opacity",a.now)},_default:function(a){if(a.elem.style&&a.elem.style[a.prop]!=null)a.elem.style[a.prop]=(a.prop==="width"||a.prop==="height"?Math.max(0,a.now):a.now)+a.unit;else a.elem[a.prop]=a.now}}});if(c.expr&&c.expr.filters)c.expr.filters.animated=function(a){return c.grep(c.timers,function(b){return a===b.elem}).length};c.fn.offset="getBoundingClientRect"in s.documentElement?
+function(a){var b=this[0];if(a)return this.each(function(e){c.offset.setOffset(this,a,e)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);var d=b.getBoundingClientRect(),f=b.ownerDocument;b=f.body;f=f.documentElement;return{top:d.top+(self.pageYOffset||c.support.boxModel&&f.scrollTop||b.scrollTop)-(f.clientTop||b.clientTop||0),left:d.left+(self.pageXOffset||c.support.boxModel&&f.scrollLeft||b.scrollLeft)-(f.clientLeft||b.clientLeft||0)}}:function(a){var b=
+this[0];if(a)return this.each(function(r){c.offset.setOffset(this,a,r)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);c.offset.initialize();var d=b.offsetParent,f=b,e=b.ownerDocument,j,i=e.documentElement,o=e.body;f=(e=e.defaultView)?e.getComputedStyle(b,null):b.currentStyle;for(var k=b.offsetTop,n=b.offsetLeft;(b=b.parentNode)&&b!==o&&b!==i;){if(c.offset.supportsFixedPosition&&f.position==="fixed")break;j=e?e.getComputedStyle(b,null):b.currentStyle;
+k-=b.scrollTop;n-=b.scrollLeft;if(b===d){k+=b.offsetTop;n+=b.offsetLeft;if(c.offset.doesNotAddBorder&&!(c.offset.doesAddBorderForTableAndCells&&/^t(able|d|h)$/i.test(b.nodeName))){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=d;d=b.offsetParent}if(c.offset.subtractsBorderForOverflowNotVisible&&j.overflow!=="visible"){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=j}if(f.position==="relative"||f.position==="static"){k+=o.offsetTop;n+=o.offsetLeft}if(c.offset.supportsFixedPosition&&
+f.position==="fixed"){k+=Math.max(i.scrollTop,o.scrollTop);n+=Math.max(i.scrollLeft,o.scrollLeft)}return{top:k,left:n}};c.offset={initialize:function(){var a=s.body,b=s.createElement("div"),d,f,e,j=parseFloat(c.curCSS(a,"marginTop",true))||0;c.extend(b.style,{position:"absolute",top:0,left:0,margin:0,border:0,width:"1px",height:"1px",visibility:"hidden"});b.innerHTML="<div style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;'><div></div></div><table style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;' cellpadding='0' cellspacing='0'><tr><td></td></tr></table>";
+a.insertBefore(b,a.firstChild);d=b.firstChild;f=d.firstChild;e=d.nextSibling.firstChild.firstChild;this.doesNotAddBorder=f.offsetTop!==5;this.doesAddBorderForTableAndCells=e.offsetTop===5;f.style.position="fixed";f.style.top="20px";this.supportsFixedPosition=f.offsetTop===20||f.offsetTop===15;f.style.position=f.style.top="";d.style.overflow="hidden";d.style.position="relative";this.subtractsBorderForOverflowNotVisible=f.offsetTop===-5;this.doesNotIncludeMarginInBodyOffset=a.offsetTop!==j;a.removeChild(b);
+c.offset.initialize=c.noop},bodyOffset:function(a){var b=a.offsetTop,d=a.offsetLeft;c.offset.initialize();if(c.offset.doesNotIncludeMarginInBodyOffset){b+=parseFloat(c.curCSS(a,"marginTop",true))||0;d+=parseFloat(c.curCSS(a,"marginLeft",true))||0}return{top:b,left:d}},setOffset:function(a,b,d){if(/static/.test(c.curCSS(a,"position")))a.style.position="relative";var f=c(a),e=f.offset(),j=parseInt(c.curCSS(a,"top",true),10)||0,i=parseInt(c.curCSS(a,"left",true),10)||0;if(c.isFunction(b))b=b.call(a,
+d,e);d={top:b.top-e.top+j,left:b.left-e.left+i};"using"in b?b.using.call(a,d):f.css(d)}};c.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),d=this.offset(),f=/^body|html$/i.test(b[0].nodeName)?{top:0,left:0}:b.offset();d.top-=parseFloat(c.curCSS(a,"marginTop",true))||0;d.left-=parseFloat(c.curCSS(a,"marginLeft",true))||0;f.top+=parseFloat(c.curCSS(b[0],"borderTopWidth",true))||0;f.left+=parseFloat(c.curCSS(b[0],"borderLeftWidth",true))||0;return{top:d.top-
+f.top,left:d.left-f.left}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent||s.body;a&&!/^body|html$/i.test(a.nodeName)&&c.css(a,"position")==="static";)a=a.offsetParent;return a})}});c.each(["Left","Top"],function(a,b){var d="scroll"+b;c.fn[d]=function(f){var e=this[0],j;if(!e)return null;if(f!==w)return this.each(function(){if(j=wa(this))j.scrollTo(!a?f:c(j).scrollLeft(),a?f:c(j).scrollTop());else this[d]=f});else return(j=wa(e))?"pageXOffset"in j?j[a?"pageYOffset":
+"pageXOffset"]:c.support.boxModel&&j.document.documentElement[d]||j.document.body[d]:e[d]}});c.each(["Height","Width"],function(a,b){var d=b.toLowerCase();c.fn["inner"+b]=function(){return this[0]?c.css(this[0],d,false,"padding"):null};c.fn["outer"+b]=function(f){return this[0]?c.css(this[0],d,false,f?"margin":"border"):null};c.fn[d]=function(f){var e=this[0];if(!e)return f==null?null:this;if(c.isFunction(f))return this.each(function(j){var i=c(this);i[d](f.call(this,j,i[d]()))});return"scrollTo"in
+e&&e.document?e.document.compatMode==="CSS1Compat"&&e.document.documentElement["client"+b]||e.document.body["client"+b]:e.nodeType===9?Math.max(e.documentElement["client"+b],e.body["scroll"+b],e.documentElement["scroll"+b],e.body["offset"+b],e.documentElement["offset"+b]):f===w?c.css(e,d):this.css(d,typeof f==="string"?f:f+"px")}});A.jQuery=A.$=c})(window);
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
index 4e0c9d75c7..4ab99764ce 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
@@ -1,5448 +1,18 @@
-/**
- * @preserve jquery.layout 1.3.0 - Release Candidate 30.5
- * $Date: 2012-04-14 08:00:00 (Sat, 14 Apr 2012) $
- * $Rev: 303005 $
+/*
+ * jquery.layout 1.3.0 - Release Candidate 29.3
*
- * Copyright (c) 2012
+ * Copyright (c) 2010
* Fabrizio Balliano (http://www.fabrizioballiano.net)
* Kevin Dalman (http://allpro.net)
*
* Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html)
* and MIT (http://www.opensource.org/licenses/mit-license.php) licenses.
*
- * Changelog: http://layout.jquery-dev.net/changelog.cfm#1.3.0.rc30.5
- *
* Docs: http://layout.jquery-dev.net/documentation.html
* Tips: http://layout.jquery-dev.net/tips.html
* Help: http://groups.google.com/group/jquery-ui-layout
- */
-
-/* JavaDoc Info: http://code.google.com/closure/compiler/docs/js-for-compiler.html
- * {!Object} non-nullable type (never NULL)
- * {?string} nullable type (sometimes NULL) - default for {Object}
- * {number=} optional parameter
- * {*} ALL types
- */
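For readers unfamiliar with these Closure Compiler annotations, a minimal illustrative JSDoc sketch (hypothetical code, not part of the plugin) would look like:

    /**
     * @param {!Object}  opts      options hash - never null
     * @param {?string}  paneName  may be null
     * @param {number=}  delay     optional parameter
     * @return {*}                 any type
     */
    function applyOptions (opts, paneName, delay) { /* ... */ }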
-
-// NOTE: For best readability, view with a fixed-width font and tabs equal to 4-chars
-
-;(function ($) {
-
-// alias Math methods - used a lot!
-var min = Math.min
-, max = Math.max
-, round = Math.floor
-;
-function isStr (v) { return $.type(v) === "string"; }
-
-function runPluginCallbacks (Instance, a_fn) {
- if ($.isArray(a_fn))
- for (var i=0, c=a_fn.length; i<c; i++) {
- var fn = a_fn[i];
- try {
- if (isStr(fn)) // 'name' of a function
- fn = eval(fn);
- if ($.isFunction(fn))
- fn( Instance );
- } catch (ex) {}
- }
-};
-
-
-
-/*
- * GENERIC $.layout METHODS - used by all layouts
- */
-$.layout = {
-
- version: "1.3.rc30.5"
-, revision: 0.033005 // 1.3.0 final = 1.0300 - major(n+).minor(nn)+patch(nn+)
-
- // LANGUAGE CUSTOMIZATION
-, language: {
- // Tips and messages for resizers, togglers, custom buttons, etc.
- Open: "Open" // eg: "Open Pane"
- , Close: "Close"
- , Resize: "Resize"
- , Slide: "Slide Open"
- , Pin: "Pin"
- , Unpin: "Un-Pin"
- , noRoomToOpenTip: "Not enough room to show this pane."
- , minSizeWarning: "Panel has reached its minimum size"
- , maxSizeWarning: "Panel has reached its maximum size"
- // Developer error messages
- , pane: "pane" // description of "layout pane element"
- , selector: "selector" // description of "jQuery-selector"
- , errButton: "Error Adding Button \n\nInvalid "
- , errContainerMissing: "UI Layout Initialization Error\n\nThe specified layout-container does not exist."
- , errCenterPaneMissing: "UI Layout Initialization Error\n\nThe center-pane element does not exist.\n\nThe center-pane is a required element."
- , errContainerHeight: "UI Layout Initialization Warning\n\nThe layout-container \"CONTAINER\" has no height.\n\nTherefore the layout is 0-height and hence 'invisible'!"
- }
-
- // can update code here if $.browser is phased out
-, browser: {
- mozilla: !!$.browser.mozilla
- , webkit: !!$.browser.webkit || !!$.browser.safari // webkit = jQ 1.4
- , msie: !!$.browser.msie
- , isIE6: !!$.browser.msie && $.browser.version == 6
- , version: $.browser.version // not used in Layout core, but may be used by plugins
- }
-
- // *PREDEFINED* EFFECTS & DEFAULTS
- // MUST list effect here - OR MUST set an fxSettings option (can be an empty hash: {})
-, effects: {
-
- // Pane Open/Close Animations
- slide: {
- all: { duration: "fast" } // eg: duration: 1000, easing: "easeOutBounce"
- , north: { direction: "up" }
- , south: { direction: "down" }
- , east: { direction: "right"}
- , west: { direction: "left" }
- }
- , drop: {
- all: { duration: "slow" }
- , north: { direction: "up" }
- , south: { direction: "down" }
- , east: { direction: "right"}
- , west: { direction: "left" }
- }
- , scale: {
- all: { duration: "fast" }
- }
- // these are not recommended, but can be used
- , blind: {}
- , clip: {}
- , explode: {}
- , fade: {}
- , fold: {}
- , puff: {}
-
- // Pane Resize Animations
- , size: {
- all: { easing: "swing" }
- }
- }
-
- // INTERNAL CONFIG DATA - DO NOT CHANGE THIS!
-, config: {
- optionRootKeys: "effects,panes,north,south,west,east,center".split(",")
- , allPanes: "north,south,west,east,center".split(",")
- , borderPanes: "north,south,west,east".split(",")
- , oppositeEdge: {
- north: "south"
- , south: "north"
- , east: "west"
- , west: "east"
- }
- // offscreen data
- , offscreenCSS: { left: "-99999px", right: "auto" } // used by hide/close if useOffscreenClose=true
- , offscreenReset: "offscreenReset" // key used for data
- // CSS used in multiple places
- , hidden: { visibility: "hidden" }
- , visible: { visibility: "visible" }
- // layout element settings
- , resizers: {
- cssReq: {
- position: "absolute"
- , padding: 0
- , margin: 0
- , fontSize: "1px"
- , textAlign: "left" // to counter-act "center" alignment!
- , overflow: "hidden" // prevent toggler-button from overflowing
- // SEE $.layout.defaults.zIndexes.resizer_normal
- }
- , cssDemo: { // DEMO CSS - applied if: options.PANE.applyDemoStyles=true
- background: "#DDD"
- , border: "none"
- }
- }
- , togglers: {
- cssReq: {
- position: "absolute"
- , display: "block"
- , padding: 0
- , margin: 0
- , overflow: "hidden"
- , textAlign: "center"
- , fontSize: "1px"
- , cursor: "pointer"
- , zIndex: 1
- }
- , cssDemo: { // DEMO CSS - applied if: options.PANE.applyDemoStyles=true
- background: "#AAA"
- }
- }
- , content: {
- cssReq: {
- position: "relative" /* contain floated or positioned elements */
- }
- , cssDemo: { // DEMO CSS - applied if: options.PANE.applyDemoStyles=true
- overflow: "auto"
- , padding: "10px"
- }
- , cssDemoPane: { // DEMO CSS - REMOVE scrolling from 'pane' when it has a content-div
- overflow: "hidden"
- , padding: 0
- }
- }
- , panes: { // defaults for ALL panes - overridden by 'per-pane settings' below
- cssReq: {
- position: "absolute"
- , margin: 0
- // $.layout.defaults.zIndexes.pane_normal
- }
- , cssDemo: { // DEMO CSS - applied if: options.PANE.applyDemoStyles=true
- padding: "10px"
- , background: "#FFF"
- , border: "1px solid #BBB"
- , overflow: "auto"
- }
- }
- , north: {
- side: "Top"
- , sizeType: "Height"
- , dir: "horz"
- , cssReq: {
- top: 0
- , bottom: "auto"
- , left: 0
- , right: 0
- , width: "auto"
- // height: DYNAMIC
- }
- }
- , south: {
- side: "Bottom"
- , sizeType: "Height"
- , dir: "horz"
- , cssReq: {
- top: "auto"
- , bottom: 0
- , left: 0
- , right: 0
- , width: "auto"
- // height: DYNAMIC
- }
- }
- , east: {
- side: "Right"
- , sizeType: "Width"
- , dir: "vert"
- , cssReq: {
- left: "auto"
- , right: 0
- , top: "auto" // DYNAMIC
- , bottom: "auto" // DYNAMIC
- , height: "auto"
- // width: DYNAMIC
- }
- }
- , west: {
- side: "Left"
- , sizeType: "Width"
- , dir: "vert"
- , cssReq: {
- left: 0
- , right: "auto"
- , top: "auto" // DYNAMIC
- , bottom: "auto" // DYNAMIC
- , height: "auto"
- // width: DYNAMIC
- }
- }
- , center: {
- dir: "center"
- , cssReq: {
- left: "auto" // DYNAMIC
- , right: "auto" // DYNAMIC
- , top: "auto" // DYNAMIC
- , bottom: "auto" // DYNAMIC
- , height: "auto"
- , width: "auto"
- }
- }
- }
-
- // CALLBACK FUNCTION NAMESPACE - used to store reusable callback functions
-, callbacks: {}
-
-, getParentPaneElem: function (el) {
- // must pass either a container or pane element
- var $el = $(el)
- , layout = $el.data("layout") || $el.data("parentLayout");
- if (layout) {
- var $cont = layout.container;
- // see if this container is directly-nested inside an outer-pane
- if ($cont.data("layoutPane")) return $cont;
- var $pane = $cont.closest("."+ $.layout.defaults.panes.paneClass);
- // if a pane was found, return it
- if ($pane.data("layoutPane")) return $pane;
- }
- return null;
- }
-
-, getParentPaneInstance: function (el) {
- // must pass either a container or pane element
- var $pane = $.layout.getParentPaneElem(el);
- return $pane ? $pane.data("layoutPane") : null;
- }
-
-, getParentLayoutInstance: function (el) {
- // must pass either a container or pane element
- var $pane = $.layout.getParentPaneElem(el);
- return $pane ? $pane.data("parentLayout") : null;
- }
-
-, getEventObject: function (evt) {
- return typeof evt === "object" && evt.stopPropagation ? evt : null;
- }
-, parsePaneName: function (evt_or_pane) {
- // getEventObject() automatically calls .stopPropagation(), WHICH MUST BE DONE!
- var evt = $.layout.getEventObject( evt_or_pane );
- if (evt) {
- // ALWAYS stop propagation of events triggered in Layout!
- evt.stopPropagation();
- return $(this).data("layoutEdge");
- }
- else
- return evt_or_pane;
- }
-
-
- // LAYOUT-PLUGIN REGISTRATION
- // more plugins can be added beyond this default list
-, plugins: {
- draggable: !!$.fn.draggable // resizing
- , effects: {
- core: !!$.effects // animations (specific effects tested by initOptions)
- , slide: $.effects && $.effects.slide // default effect
- }
- }
-
-// arrays of plugin or other methods to be triggered for events in *each layout* - will be passed 'Instance'
-, onCreate: [] // runs when layout is just starting to be created - right after options are set
-, onLoad: [] // runs after layout container and global events init, but before initPanes is called
-, onReady: [] // runs after initialization *completes* - ie, after initPanes completes successfully
-, onDestroy: [] // runs after layout is destroyed
-, onUnload: [] // runs after layout is destroyed OR when page unloads
-, afterOpen: [] // runs after setAsOpen() completes
-, afterClose: [] // runs after setAsClosed() completes
-
- /*
- * GENERIC UTILITY METHODS
- */
-
- // calculate and return the scrollbar width, as an integer
-, scrollbarWidth: function () { return window.scrollbarWidth || $.layout.getScrollbarSize('width'); }
-, scrollbarHeight: function () { return window.scrollbarHeight || $.layout.getScrollbarSize('height'); }
-, getScrollbarSize: function (dim) {
- var $c = $('<div style="position: absolute; top: -10000px; left: -10000px; width: 100px; height: 100px; overflow: scroll;"></div>').appendTo("body");
- var d = { width: $c.width() - $c[0].clientWidth, height: $c.height() - $c[0].clientHeight };
- $c.remove();
- window.scrollbarWidth = d.width;
- window.scrollbarHeight = d.height;
- return dim.match(/^(width|height)$/) ? d[dim] : d;
- }
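A quick sketch of reading the scrollbar helpers above (assumes jQuery and this plugin are loaded); the measured sizes are cached on window after the first call:

    var sbW = $.layout.scrollbarWidth();    // integer px - eg: 17 on most desktop browsers
    var sbH = $.layout.scrollbarHeight();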
-
-
- /**
-	 * Returns a hash containing the element's original 'display' and 'visibility'
- *
- * @see $.swap() - swaps CSS, runs callback, resets CSS
- */
-, showInvisibly: function ($E, force) {
- if (!$E) return {};
- if (!$E.jquery) $E = $($E);
- var CSS = {
- display: $E.css('display')
- , visibility: $E.css('visibility')
- };
- if (force || CSS.display === "none") { // only if not *already hidden*
- $E.css({ display: "block", visibility: "hidden" }); // show element 'invisibly' so can be measured
- return CSS;
- }
- else return {};
- }
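A small sketch of the measure-while-hidden pattern showInvisibly() supports (assumes jQuery and this plugin are loaded; #sidebar is a hypothetical element that may be display:none):

    var $el = $("#sidebar");
    var css = $.layout.showInvisibly( $el ); // original display/visibility if it was hidden, else {}
    var h   = $el.outerHeight();             // measure while shown 'invisibly'
    $el.css( css );                          // restore the original CSS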
-
- /**
- * Returns data for setting size of an element (container or a pane).
- *
- * @see _create(), onWindowResize() for container, plus others for pane
- * @return JSON Returns a hash of all dimensions: top, bottom, left, right, outerWidth, innerHeight, etc
- */
-, getElementDimensions: function ($E) {
- var
- d = {} // dimensions hash
- , x = d.css = {} // CSS hash
- , i = {} // TEMP insets
- , b, p // TEMP border, padding
- , N = $.layout.cssNum
- , off = $E.offset()
- ;
- d.offsetLeft = off.left;
- d.offsetTop = off.top;
-
- $.each("Left,Right,Top,Bottom".split(","), function (idx, e) { // e = edge
- b = x["border" + e] = $.layout.borderWidth($E, e);
- p = x["padding"+ e] = $.layout.cssNum($E, "padding"+e);
- i[e] = b + p; // total offset of content from outer side
- d["inset"+ e] = p;
- });
-
- d.offsetWidth = $E.innerWidth(); // offsetWidth is used in calc when doing manual resize
- d.offsetHeight = $E.innerHeight(); // ditto
- d.outerWidth = $E.outerWidth();
- d.outerHeight = $E.outerHeight();
- d.innerWidth = max(0, d.outerWidth - i.Left - i.Right);
- d.innerHeight = max(0, d.outerHeight - i.Top - i.Bottom);
-
- x.width = $E.width();
- x.height = $E.height();
- x.top = N($E,"top",true);
- x.bottom = N($E,"bottom",true);
- x.left = N($E,"left",true);
- x.right = N($E,"right",true);
-
- //d.visible = $E.is(":visible");// && x.width > 0 && x.height > 0;
-
- return d;
- }
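For reference, a sketch of reading the dimensions hash returned above (assumes jQuery and this plugin are loaded, and that #container exists and is visible):

    var dims = $.layout.getElementDimensions( $("#container") );
    // outerWidth/outerHeight include border + padding; innerWidth/innerHeight exclude them
    // dims.css holds the raw CSS numbers (top, left, width, borderTop, paddingLeft, etc.)
    console.log( dims.innerWidth, dims.innerHeight, dims.css.paddingLeft );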
-
-, getElementCSS: function ($E, list) {
- var
- CSS = {}
- , style = $E[0].style
- , props = list.split(",")
- , sides = "Top,Bottom,Left,Right".split(",")
- , attrs = "Color,Style,Width".split(",")
- , p, s, a, i, j, k
- ;
- for (i=0; i < props.length; i++) {
- p = props[i];
- if (p.match(/(border|padding|margin)$/))
- for (j=0; j < 4; j++) {
- s = sides[j];
- if (p === "border")
- for (k=0; k < 3; k++) {
- a = attrs[k];
- CSS[p+s+a] = style[p+s+a];
- }
- else
- CSS[p+s] = style[p+s];
- }
- else
- CSS[p] = style[p];
-		}
-		return CSS;
- }
-
- /**
- * Return the innerWidth for the current browser/doctype
- *
- * @see initPanes(), sizeMidPanes(), initHandles(), sizeHandles()
- * @param {Array.<Object>} $E Must pass a jQuery object - first element is processed
- * @param {number=} outerWidth (optional) Can pass a width, allowing calculations BEFORE element is resized
- * @return {number} Returns the innerWidth of the elem by subtracting padding and borders
- */
-, cssWidth: function ($E, outerWidth) {
- var
- b = $.layout.borderWidth
- , n = $.layout.cssNum
- ;
- // a 'calculated' outerHeight can be passed so borders and/or padding are removed if needed
- if (outerWidth <= 0) return 0;
-
- if (!$.support.boxModel) return outerWidth;
-
- // strip border and padding from outerWidth to get CSS Width
- var W = outerWidth
- - b($E, "Left")
- - b($E, "Right")
- - n($E, "paddingLeft")
- - n($E, "paddingRight")
- ;
-
- return max(0,W);
- }
-
- /**
- * Return the innerHeight for the current browser/doctype
- *
- * @see initPanes(), sizeMidPanes(), initHandles(), sizeHandles()
- * @param {Array.<Object>} $E Must pass a jQuery object - first element is processed
-	 * @param {number=} outerHeight (optional) Can pass a height, allowing calculations BEFORE element is resized
- * @return {number} Returns the innerHeight of the elem by subtracting padding and borders
- */
-, cssHeight: function ($E, outerHeight) {
- var
- b = $.layout.borderWidth
- , n = $.layout.cssNum
- ;
- // a 'calculated' outerHeight can be passed so borders and/or padding are removed if needed
- if (outerHeight <= 0) return 0;
-
- if (!$.support.boxModel) return outerHeight;
-
- // strip border and padding from outerHeight to get CSS Height
- var H = outerHeight
- - b($E, "Top")
- - b($E, "Bottom")
- - n($E, "paddingTop")
- - n($E, "paddingBottom")
- ;
-
- return max(0,H);
- }
-
- /**
- * Returns the 'current CSS numeric value' for a CSS property - 0 if property does not exist
- *
- * @see Called by many methods
- * @param {Array.<Object>} $E Must pass a jQuery object - first element is processed
- * @param {string} prop The name of the CSS property, eg: top, width, etc.
- * @param {boolean=} [allowAuto=false] true = return 'auto' if that is value; false = return 0
- * @return {(string|number)} Usually used to get an integer value for position (top, left) or size (height, width)
- */
-, cssNum: function ($E, prop, allowAuto) {
- if (!$E.jquery) $E = $($E);
- var CSS = $.layout.showInvisibly($E)
- , p = $.curCSS($E[0], prop, true)
- , v = allowAuto && p=="auto" ? p : (parseInt(p, 10) || 0);
- $E.css( CSS ); // RESET
- return v;
- }
-
-, borderWidth: function (el, side) {
- if (el.jquery) el = el[0];
- var b = "border"+ side.substr(0,1).toUpperCase() + side.substr(1); // left => Left
- return $.curCSS(el, b+"Style", true) === "none" ? 0 : (parseInt($.curCSS(el, b+"Width", true), 10) || 0);
- }
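A short sketch of the box-model helpers above (assumes jQuery, this plugin, and an existing .ui-layout-west pane):

    var $pane  = $(".ui-layout-west");
    var cssW   = $.layout.cssWidth( $pane, 250 );      // CSS width the pane would need for a 250px outerWidth
    var bTop   = $.layout.borderWidth( $pane, "Top" ); // numeric border-top-width - 0 if style is 'none'
    var posTop = $.layout.cssNum( $pane, "top" );      // numeric 'top' - 0 if not set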
-
- /**
- * Mouse-tracking utility - FUTURE REFERENCE
- *
- * init: if (!window.mouse) {
- * window.mouse = { x: 0, y: 0 };
- * $(document).mousemove( $.layout.trackMouse );
- * }
- *
- * @param {Object} evt
- *
-, trackMouse: function (evt) {
- window.mouse = { x: evt.clientX, y: evt.clientY };
- }
- */
-
- /**
- * SUBROUTINE for preventPrematureSlideClose option
- *
- * @param {Object} evt
- * @param {Object=} el
- */
-, isMouseOverElem: function (evt, el) {
- var
- $E = $(el || this)
- , d = $E.offset()
- , T = d.top
- , L = d.left
- , R = L + $E.outerWidth()
- , B = T + $E.outerHeight()
- , x = evt.pageX // evt.clientX ?
- , y = evt.pageY // evt.clientY ?
- ;
-		// if X & Y are < 0, it probably means the mouse is over an open SELECT
- return ($.layout.browser.msie && x < 0 && y < 0) || ((x >= L && x <= R) && (y >= T && y <= B));
- }
-
- /**
- * Message/Logging Utility
- *
- * @example $.layout.msg("My message"); // log text
- * @example $.layout.msg("My message", true); // alert text
- * @example $.layout.msg({ foo: "bar" }, "Title"); // log hash-data, with custom title
- * @example $.layout.msg({ foo: "bar" }, true, "Title", { sort: false }); -OR-
- * @example $.layout.msg({ foo: "bar" }, "Title", { sort: false, display: true }); // alert hash-data
- *
- * @param {(Object|string)} info String message OR Hash/Array
- * @param {(Boolean|string|Object)=} [popup=false] True means alert-box - can be skipped
- * @param {(Object|string)=} [debugTitle=""] Title for Hash data - can be skipped
-	 * @param {Object=} [debugOpts={}] Extra options for debug output
- */
-, msg: function (info, popup, debugTitle, debugOpts) {
- if ($.isPlainObject(info) && window.debugData) {
- if (typeof popup === "string") {
- debugOpts = debugTitle;
- debugTitle = popup;
- }
- else if (typeof debugTitle === "object") {
- debugOpts = debugTitle;
- debugTitle = null;
- }
- var t = debugTitle || "log( <object> )"
- , o = $.extend({ sort: false, returnHTML: false, display: false }, debugOpts);
- if (popup === true || o.display)
- debugData( info, t, o );
- else if (window.console)
- console.log(debugData( info, t, o ));
- }
- else if (popup)
- alert(info);
- else if (window.console)
- console.log(info);
- else {
- var id = "#layoutLogger"
- , $l = $(id);
- if (!$l.length)
- $l = createLog();
- $l.children("ul").append('<li style="padding: 4px 10px; margin: 0; border-top: 1px solid #CCC;">'+ info.replace(/\</g,"&lt;").replace(/\>/g,"&gt;") +'</li>');
- }
-
- function createLog () {
- var pos = $.support.fixedPosition ? 'fixed' : 'absolute'
- , $e = $('<div id="layoutLogger" style="position: '+ pos +'; top: 5px; z-index: 999999; max-width: 25%; overflow: hidden; border: 1px solid #000; border-radius: 5px; background: #FBFBFB; box-shadow: 0 2px 10px rgba(0,0,0,0.3);">'
- + '<div style="font-size: 13px; font-weight: bold; padding: 5px 10px; background: #F6F6F6; border-radius: 5px 5px 0 0; cursor: move;">'
- + '<span style="float: right; padding-left: 7px; cursor: pointer;" title="Remove Console" onclick="$(this).closest(\'#layoutLogger\').remove()">X</span>Layout console.log</div>'
- + '<ul style="font-size: 13px; font-weight: none; list-style: none; margin: 0; padding: 0 0 2px;"></ul>'
- + '</div>'
- ).appendTo("body");
- $e.css('left', $(window).width() - $e.outerWidth() - 5)
- if ($.ui.draggable) $e.draggable({ handle: ':first-child' });
- return $e;
- };
- }
-
-};
-
-var lang = $.layout.language; // alias used in defaults...
-
-// DEFAULT OPTIONS - CHANGE IF DESIRED
-$.layout.defaults = {
-/*
- * LAYOUT & LAYOUT-CONTAINER OPTIONS
- * - none of these options are applicable to individual panes
- */
- name: "" // Not required, but useful for buttons and used for the state-cookie
-, containerSelector: "" // ONLY used when specifying a childOptions - to find container-element that is NOT directly-nested
-, containerClass: "ui-layout-container" // layout-container element
-, scrollToBookmarkOnLoad: true // after creating a layout, scroll to bookmark in URL (.../page.htm#myBookmark)
-, resizeWithWindow: true // bind thisLayout.resizeAll() to the window.resize event
-, resizeWithWindowDelay: 200 // delay calling resizeAll because makes window resizing very jerky
-, resizeWithWindowMaxDelay: 0 // 0 = none - force resize every XX ms while window is being resized
-, onresizeall_start: null // CALLBACK when resizeAll() STARTS - NOT pane-specific
-, onresizeall_end: null // CALLBACK when resizeAll() ENDS - NOT pane-specific
-, onload_start: null // CALLBACK when Layout inits - after options initialized, but before elements
-, onload_end: null // CALLBACK when Layout inits - after EVERYTHING has been initialized
-, onunload_start: null // CALLBACK when Layout is destroyed OR onWindowUnload
-, onunload_end: null // CALLBACK when Layout is destroyed OR onWindowUnload
-, autoBindCustomButtons: false // search for buttons with ui-layout-button class and auto-bind them
-, initPanes: true // false = DO NOT initialize the panes onLoad - will init later
-, showErrorMessages: true // enables fatal error messages to warn developers of common errors
-, showDebugMessages: false // display console-and-alert debug msgs - IF this Layout version _has_ debugging code!
-// Changing this zIndex value will cause other zIndex values to automatically change
-, zIndex: null // the PANE zIndex - resizers and masks will be +1
-// DO NOT CHANGE the zIndex values below unless you clearly understand their relationships
-, zIndexes: { // set _default_ z-index values here...
- pane_normal: 0 // normal z-index for panes
- , content_mask: 1 // applied to overlays used to mask content INSIDE panes during resizing
- , resizer_normal: 2 // normal z-index for resizer-bars
- , pane_sliding: 100 // applied to *BOTH* the pane and its resizer when a pane is 'slid open'
- , pane_animate: 1000 // applied to the pane when being animated - not applied to the resizer
- , resizer_drag: 10000 // applied to the CLONED resizer-bar when being 'dragged'
- }
-/*
- * PANE DEFAULT SETTINGS
- * - settings under the 'panes' key become the default settings for *all panes*
- * - ALL pane-options can also be set specifically for each panes, which will override these 'default values'
- */
-, panes: { // default options for 'all panes' - will be overridden by 'per-pane settings'
- applyDemoStyles: false // NOTE: renamed from applyDefaultStyles for clarity
- , closable: true // pane can open & close
- , resizable: true // when open, pane can be resized
- , slidable: true // when closed, pane can 'slide open' over other panes - closes on mouse-out
- , initClosed: false // true = init pane as 'closed'
- , initHidden: false // true = init pane as 'hidden' - no resizer-bar/spacing
- // SELECTORS
- //, paneSelector: "" // MUST be pane-specific - jQuery selector for pane
- , contentSelector: ".ui-layout-content" // INNER div/element to auto-size so only it scrolls, not the entire pane!
- , contentIgnoreSelector: ".ui-layout-ignore" // element(s) to 'ignore' when measuring 'content'
- , findNestedContent: false // true = $P.find(contentSelector), false = $P.children(contentSelector)
- // GENERIC ROOT-CLASSES - for auto-generated classNames
- , paneClass: "ui-layout-pane" // Layout Pane
- , resizerClass: "ui-layout-resizer" // Resizer Bar
- , togglerClass: "ui-layout-toggler" // Toggler Button
- , buttonClass: "ui-layout-button" // CUSTOM Buttons - eg: '[ui-layout-button]-toggle/-open/-close/-pin'
- // ELEMENT SIZE & SPACING
-	//,	size: 100			// MUST be pane-specific - initial size of pane
- , minSize: 0 // when manually resizing a pane
- , maxSize: 0 // ditto, 0 = no limit
- , spacing_open: 6 // space between pane and adjacent panes - when pane is 'open'
- , spacing_closed: 6 // ditto - when pane is 'closed'
- , togglerLength_open: 50 // Length = WIDTH of toggler button on north/south sides - HEIGHT on east/west sides
- , togglerLength_closed: 50 // 100% OR -1 means 'full height/width of resizer bar' - 0 means 'hidden'
- , togglerAlign_open: "center" // top/left, bottom/right, center, OR...
- , togglerAlign_closed: "center" // 1 => nn = offset from top/left, -1 => -nn == offset from bottom/right
- , togglerTip_open: lang.Close // Toggler tool-tip (title)
- , togglerTip_closed: lang.Open // ditto
- , togglerContent_open: "" // text or HTML to put INSIDE the toggler
- , togglerContent_closed: "" // ditto
- // RESIZING OPTIONS
- , resizerDblClickToggle: true //
- , autoResize: true // IF size is 'auto' or a percentage, then recalc 'pixel size' whenever the layout resizes
- , autoReopen: true // IF a pane was auto-closed due to noRoom, reopen it when there is room? False = leave it closed
- , resizerDragOpacity: 1 // option for ui.draggable
- //, resizerCursor: "" // MUST be pane-specific - cursor when over resizer-bar
- , maskContents: false // true = add DIV-mask over-or-inside this pane so can 'drag' over IFRAMES
- , maskObjects: false // true = add IFRAME-mask over-or-inside this pane to cover objects/applets - content-mask will overlay this mask
- , maskZindex: null // will override zIndexes.content_mask if specified - not applicable to iframe-panes
- , resizingGrid: false // grid size that the resizers will snap-to during resizing, eg: [20,20]
- , livePaneResizing: false // true = LIVE Resizing as resizer is dragged
- , liveContentResizing: false // true = re-measure header/footer heights as resizer is dragged
- , liveResizingTolerance: 1 // how many px change before pane resizes, to control performance
- // TIPS & MESSAGES - also see lang object
- , noRoomToOpenTip: lang.noRoomToOpenTip
- , resizerTip: lang.Resize // Resizer tool-tip (title)
- , sliderTip: lang.Slide // resizer-bar triggers 'sliding' when pane is closed
- , sliderCursor: "pointer" // cursor when resizer-bar will trigger 'sliding'
- , slideTrigger_open: "click" // click, dblclick, mouseenter
- , slideTrigger_close: "mouseleave"// click, mouseleave
- , slideDelay_open: 300 // applies only for mouseenter event - 0 = instant open
- , slideDelay_close: 300 // applies only for mouseleave event (300ms is the minimum!)
- , hideTogglerOnSlide: false // when pane is slid-open, should the toggler show?
- , preventQuickSlideClose: $.layout.browser.webkit // Chrome triggers slideClosed as it is opening
- , preventPrematureSlideClose: false // handle incorrect mouseleave trigger, like when over a SELECT-list in IE
- // HOT-KEYS & MISC
- , showOverflowOnHover: false // will bind allowOverflow() utility to pane.onMouseOver
- , enableCursorHotkey: true // enabled 'cursor' hotkeys
- //, customHotkey: "" // MUST be pane-specific - EITHER a charCode OR a character
- , customHotkeyModifier: "SHIFT" // either 'SHIFT', 'CTRL' or 'CTRL+SHIFT' - NOT 'ALT'
- // PANE ANIMATION
- // NOTE: fxSss_open, fxSss_close & fxSss_size options (eg: fxName_open) are auto-generated if not passed
- , fxName: "slide" // ('none' or blank), slide, drop, scale -- only relevant to 'open' & 'close', NOT 'size'
- , fxSpeed: null // slow, normal, fast, 200, nnn - if passed, will OVERRIDE fxSettings.duration
- , fxSettings: {} // can be passed, eg: { easing: "easeOutBounce", duration: 1500 }
- , fxOpacityFix: true // tries to fix opacity in IE to restore anti-aliasing after animation
- , animatePaneSizing: false // true = animate resizing after dragging resizer-bar OR sizePane() is called
- /* NOTE: Action-specific FX options are auto-generated from the options above if not specifically set:
- fxName_open: "slide" // 'Open' pane animation
-		fxName_close: "slide"	// 'Close' pane animation
- fxName_size: "slide" // 'Size' pane animation - when animatePaneSizing = true
- fxSpeed_open: null
- fxSpeed_close: null
- fxSpeed_size: null
- fxSettings_open: {}
- fxSettings_close: {}
- fxSettings_size: {}
- */
- // CHILD/NESTED LAYOUTS
- , childOptions: null // Layout-options for nested/child layout - even {} is valid as options
- , initChildLayout: true // true = child layout will be created as soon as _this_ layout completes initialization
- , destroyChildLayout: true // true = destroy child-layout if this pane is destroyed
- , resizeChildLayout: true // true = trigger child-layout.resizeAll() when this pane is resized
- // PANE CALLBACKS
- , triggerEventsOnLoad: false // true = trigger onopen OR onclose callbacks when layout initializes
- , triggerEventsDuringLiveResize: true // true = trigger onresize callback REPEATEDLY if livePaneResizing==true
- , onshow_start: null // CALLBACK when pane STARTS to Show - BEFORE onopen/onhide_start
- , onshow_end: null // CALLBACK when pane ENDS being Shown - AFTER onopen/onhide_end
- , onhide_start: null // CALLBACK when pane STARTS to Close - BEFORE onclose_start
- , onhide_end: null // CALLBACK when pane ENDS being Closed - AFTER onclose_end
- , onopen_start: null // CALLBACK when pane STARTS to Open
- , onopen_end: null // CALLBACK when pane ENDS being Opened
- , onclose_start: null // CALLBACK when pane STARTS to Close
- , onclose_end: null // CALLBACK when pane ENDS being Closed
- , onresize_start: null // CALLBACK when pane STARTS being Resized ***FOR ANY REASON***
- , onresize_end: null // CALLBACK when pane ENDS being Resized ***FOR ANY REASON***
- , onsizecontent_start: null // CALLBACK when sizing of content-element STARTS
- , onsizecontent_end: null // CALLBACK when sizing of content-element ENDS
- , onswap_start: null // CALLBACK when pane STARTS to Swap
- , onswap_end: null // CALLBACK when pane ENDS being Swapped
- , ondrag_start: null // CALLBACK when pane STARTS being ***MANUALLY*** Resized
- , ondrag_end: null // CALLBACK when pane ENDS being ***MANUALLY*** Resized
- }
-/*
- * PANE-SPECIFIC SETTINGS
- * - options listed below MUST be specified per-pane - they CANNOT be set under 'panes'
- * - all options under the 'panes' key can also be set specifically for any pane
- *	-	most options under the 'panes' key apply only to 'border-panes' - NOT the center-pane
- */
-, north: {
- paneSelector: ".ui-layout-north"
- , size: "auto" // eg: "auto", "30%", .30, 200
- , resizerCursor: "n-resize" // custom = url(myCursor.cur)
- , customHotkey: "" // EITHER a charCode (43) OR a character ("o")
- }
-, south: {
- paneSelector: ".ui-layout-south"
- , size: "auto"
- , resizerCursor: "s-resize"
- , customHotkey: ""
- }
-, east: {
- paneSelector: ".ui-layout-east"
- , size: 200
- , resizerCursor: "e-resize"
- , customHotkey: ""
- }
-, west: {
- paneSelector: ".ui-layout-west"
- , size: 200
- , resizerCursor: "w-resize"
- , customHotkey: ""
- }
-, center: {
- paneSelector: ".ui-layout-center"
- , minWidth: 0
- , minHeight: 0
- }
-};
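The defaults above are normally overridden when the layout is created. A hedged sketch, assuming a hypothetical #container element whose children carry the default ui-layout-north/-west/-center classes:

    var myLayout = $("#container").layout({
        west__size:           250      // pane-specific option, flat-format (double underscore)
    ,   west__initClosed:     true
    ,   panes__spacing_open:  8        // default applied to all panes
    ,   north__resizable:     false
    ,   center__onresize_end: function (pane, $Pane, paneState) { /* custom callback */ }
    });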
-
-$.layout.optionsMap = {
- // layout/global options - NOT pane-options
- layout: ("stateManagement,effects,zIndexes,"
- + "name,zIndex,scrollToBookmarkOnLoad,showErrorMessages,"
- + "resizeWithWindow,resizeWithWindowDelay,resizeWithWindowMaxDelay,"
- + "onresizeall,onresizeall_start,onresizeall_end,onload,onunload,autoBindCustomButtons").split(",")
-// borderPanes: [ ALL options that are NOT specified as 'layout' ]
- // default.panes options that apply to the center-pane (most options apply _only_ to border-panes)
-, center: ("paneClass,contentSelector,contentIgnoreSelector,findNestedContent,applyDemoStyles,triggerEventsOnLoad,"
- + "showOverflowOnHover,maskContents,maskObjects,liveContentResizing,"
- + "childOptions,initChildLayout,resizeChildLayout,destroyChildLayout,"
- + "onresize,onresize_start,onresize_end,onsizecontent,onsizecontent_start,onsizecontent_end").split(",")
- // options that MUST be specifically set 'per-pane' - CANNOT set in the panes (defaults) key
-, noDefault: ("paneSelector,resizerCursor,customHotkey").split(",")
-};
-
-/**
- * Processes options passed in, converting flat-format data into subkey (JSON) format
- * In flat-format, subkeys are _currently_ separated with 2 underscores, like north__optName
- * Plugins may also call this method so they can transform their own data
- *
- * @param {!Object} hash Data/options passed by user - may be a single level or nested levels
- * @return {Object} Returns the options hash in nested (subkey) format
- */
-$.layout.transformData = function (hash) {
- var json = { panes: {}, center: {} } // init return object
- , data, branch, optKey, keys, key, val, i, c;
-
- if (typeof hash !== "object") return json; // no options passed
-
- // convert all 'flat-keys' to 'sub-key' format
- for (optKey in hash) {
- branch = json;
- data = $.layout.optionsMap.layout;
- val = hash[ optKey ];
- keys = optKey.split("__"); // eg: west__size or north__fxSettings__duration
- c = keys.length - 1;
- // convert underscore-delimited to subkeys
- for (i=0; i <= c; i++) {
- key = keys[i];
- if (i === c)
- branch[key] = val;
- else if (!branch[key])
- branch[key] = {}; // create the subkey
- // recurse to sub-key for next loop - if not done
- branch = branch[key];
- }
- }
-
- return json;
-}
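A sketch of the flat-to-nested conversion performed by transformData():

    $.layout.transformData({ west__size: 200, north__fxSettings__duration: 500 });
    // returns: { panes: {}, center: {}, west: { size: 200 }, north: { fxSettings: { duration: 500 } } }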
-
-// INTERNAL CONFIG DATA - DO NOT CHANGE THIS!
-$.layout.backwardCompatibility = {
- // data used by renameOldOptions()
- map: {
- // OLD Option Name: NEW Option Name
- applyDefaultStyles: "applyDemoStyles"
- , resizeNestedLayout: "resizeChildLayout"
- , resizeWhileDragging: "livePaneResizing"
- , resizeContentWhileDragging: "liveContentResizing"
- , triggerEventsWhileDragging: "triggerEventsDuringLiveResize"
- , maskIframesOnResize: "maskContents"
- , useStateCookie: "stateManagement.enabled"
- , "cookie.autoLoad": "stateManagement.autoLoad"
- , "cookie.autoSave": "stateManagement.autoSave"
- , "cookie.keys": "stateManagement.stateKeys"
- , "cookie.name": "stateManagement.cookie.name"
- , "cookie.domain": "stateManagement.cookie.domain"
- , "cookie.path": "stateManagement.cookie.path"
- , "cookie.expires": "stateManagement.cookie.expires"
- , "cookie.secure": "stateManagement.cookie.secure"
- }
- /**
- * @param {Object} opts
- */
-, renameOptions: function (opts) {
- var map = $.layout.backwardCompatibility.map
- , oldData, newData, value
- ;
- for (var itemPath in map) {
- oldData = getBranch( itemPath );
-			value	= oldData.branch[ oldData.key ];
-			if (value !== undefined) {
-				newData = getBranch( map[itemPath], true );
- newData.branch[ newData.key ] = value;
- delete oldData.branch[ oldData.key ];
- }
- }
-
- /**
- * @param {string} path
- * @param {boolean=} [create=false] Create path if does not exist
- */
- function getBranch (path, create) {
- var a = path.split(".") // split keys into array
- , c = a.length - 1
- , D = { branch: opts, key: a[c] } // init branch at top & set key (last item)
- , i = 0, k, undef;
- for (; i<c; i++) { // skip the last key (data)
- k = a[i];
- if (D.branch[ k ] == undefined) { // child-key does not exist
- if (create) {
- D.branch = D.branch[ k ] = {}; // create child-branch
- }
- else // can't go any farther
- D.branch = {}; // branch is undefined
- }
- else
- D.branch = D.branch[ k ]; // get child-branch
- }
- return D;
- };
- }
- /**
- * @param {Object} opts
- */
-, renameAllOptions: function (opts) {
- var ren = $.layout.backwardCompatibility.renameOptions;
- // rename root (layout) options
- ren( opts );
- // rename 'defaults' to 'panes'
- if (opts.defaults) {
- if (typeof opts.panes !== "object")
- opts.panes = {};
- $.extend(true, opts.panes, opts.defaults);
- delete opts.defaults;
- }
- // rename options in the the options.panes key
- if (opts.panes) ren( opts.panes );
- // rename options inside *each pane key*, eg: options.west
- $.each($.layout.config.allPanes, function (i, pane) {
- if (opts[pane]) ren( opts[pane] );
- });
- return opts;
- }
-};
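A sketch of the renaming pass, showing how deprecated option names are migrated in place:

    var opts = { applyDefaultStyles: true, west: { resizeWhileDragging: true } };
    $.layout.backwardCompatibility.renameAllOptions( opts );
    // opts is now: { applyDemoStyles: true, west: { livePaneResizing: true } }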
-
-
-
-/* ============================================================
- * BEGIN WIDGET: $( selector ).layout( {options} );
- * ============================================================
- */
-$.fn.layout = function (opts) {
- var
-
- // local aliases to global data
- browser = $.layout.browser
-, lang = $.layout.language // internal alias
-, _c = $.layout.config
-
-	// local aliases to utility methods
-, cssW = $.layout.cssWidth
-, cssH = $.layout.cssHeight
-, elDims = $.layout.getElementDimensions
-, elCSS = $.layout.getElementCSS
-, evtObj = $.layout.getEventObject
-, evtPane = $.layout.parsePaneName
-
-/**
- * options - populated by initOptions()
- */
-, options = $.extend(true, {}, $.layout.defaults)
-, effects = options.effects = $.extend(true, {}, $.layout.effects)
-
-/**
- * layout-state object
- */
-, state = {
- // generate unique ID to use for event.namespace so can unbind only events added by 'this layout'
- id: "layout"+ $.now() // code uses alias: sID
- , initialized: false
- , container: {} // init all keys
- , north: {}
- , south: {}
- , east: {}
- , west: {}
- , center: {}
- }
-
-/**
- * parent/child-layout pointers
- */
-//, hasParentLayout = false - exists ONLY inside Instance so can be set externally
-, children = {
- north: null
- , south: null
- , east: null
- , west: null
- , center: null
- }
-
-/*
- * ###########################
- * INTERNAL HELPER FUNCTIONS
- * ###########################
- */
-
- /**
- * Manages all internal timers
- */
-, timer = {
- data: {}
- , set: function (s, fn, ms) { timer.clear(s); timer.data[s] = setTimeout(fn, ms); }
- , clear: function (s) { var t=timer.data; if (t[s]) {clearTimeout(t[s]); delete t[s];} }
- }
-
-, _log = function (msg, popup) {
- $.layout.msg( options.name +' / '+ msg, (popup && options.showErrorMessages) );
- }
-
- /**
- * Executes a Callback function after a trigger event, like resize, open or close
- *
- * @param {?string} pane This is passed only so we can pass the 'pane object' to the callback
- * @param {(string|function())} fn Accepts a function name, OR a comma-delimited array: [0]=function name, [1]=argument
- */
-, _runCallbacks = function (evtName, pane, skipBoundEvents) {
- var o = pane ? options[pane] : options
-		// names like onopen and onopen_end are interchangeable in options...
- , long = evtName + (evtName.match(/_/) ? "" : "_end")
- , short = long.match(/_end$/) ? long.substr(0, long.length - 4) : ""
- , fn = o[long]
- , retVal = "NC" // NC = No Callback
- , args = []
- ;
- if (!fn && short)
- fn = o[short];
-
- // first trigger the callback set in the options
- if (fn) {
- //try {
- // convert function name (string) to function object
- if (isStr( fn )) {
- if (fn.match(/,/)) {
- // function name cannot contain a comma,
- // so must be a function name AND a parameter to pass
- args = fn.split(",")
- , fn = eval(args[0]);
- }
- else // just the name of an external function?
- fn = eval(fn);
- }
- // execute the callback, if exists
- if ($.isFunction( fn )) {
- if (args.length)
- retVal = fn(args[1]); // pass the argument parsed from 'list'
- else if (pane && $Ps[pane])
- // pass data: pane-name, pane-element, pane-state, pane-options, and layout-name
- retVal = fn( pane, $Ps[pane], state[pane], options[pane], options.name );
- else // must be a layout/container callback - pass suitable info
- retVal = fn( Instance, state, options, options.name );
- }
- //}
- //catch (ex) {}
- }
-
- // trigger additional events bound directly to the pane
- if (!skipBoundEvents && retVal !== false) {
- if (pane) { // PANE events can be bound to each pane-elements
- $Ps[pane].triggerHandler('layoutpane'+ long, [ pane, $Ps[pane], state[pane], options[pane], options.name ]);
- if (short)
- $Ps[pane].triggerHandler('layoutpane'+ short, [ pane, $Ps[pane], state[pane], options[pane], options.name ]);
- }
- else // LAYOUT events can be bound to the container-element
- $N.triggerHandler('layout'+ long, [ pane, $Ps[pane], state[pane], options[pane], options.name ]);
- }
-
- // ALWAYS resizeChildLayout after a resize event - even during initialization
- if (evtName === "onresize_end" || evtName === "onsizecontent_end")
- resizeChildLayout(pane);
-
- return retVal;
- }
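For reference, the argument list that pane-callbacks receive, per the fn(...) call above - a hedged sketch assuming a hypothetical #container layout:

    $("#container").layout({
        west__onresize_end: function (pane, $Pane, paneState, paneOptions, layoutName) {
            console.log( layoutName, pane, paneState.isClosed );
        }
    });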
-
-
- /**
- * cure iframe display issues in IE & other browsers
- */
-, _fixIframe = function (pane) {
- if (browser.mozilla) return; // skip FireFox - it auto-refreshes iframes onShow
- var $P = $Ps[pane];
- // if the 'pane' is an iframe, do it
- if (state[pane].tagName === "IFRAME")
- $P.css(_c.hidden).css(_c.visible);
- else // ditto for any iframes INSIDE the pane
- $P.find('IFRAME').css(_c.hidden).css(_c.visible);
- }
-
- /**
- * @param {string} pane Can accept ONLY a 'pane' (east, west, etc)
- * @param {number=} outerSize (optional) Can pass a width, allowing calculations BEFORE element is resized
- * @return {number} Returns the innerHeight/Width of el by subtracting padding and borders
- */
-, cssSize = function (pane, outerSize) {
- var fn = _c[pane].dir=="horz" ? cssH : cssW;
- return fn($Ps[pane], outerSize);
- }
-
- /**
- * @param {string} pane Can accept ONLY a 'pane' (east, west, etc)
- * @return {Object} Returns hash of minWidth & minHeight
- */
-, cssMinDims = function (pane) {
- // minWidth/Height means CSS width/height = 1px
- var $P = $Ps[pane]
- , dir = _c[pane].dir
- , d = {
- minWidth: 1001 - cssW($P, 1000)
- , minHeight: 1001 - cssH($P, 1000)
- }
- ;
- if (dir === "horz") d.minSize = d.minHeight;
- if (dir === "vert") d.minSize = d.minWidth;
- return d;
- }
-
- // TODO: see if these methods can be made more useful...
- // TODO: *maybe* return cssW/H from these so caller can use this info
-
- /**
- * @param {(string|!Object)} el
- * @param {number=} outerWidth
- * @param {boolean=} [autoHide=false]
- */
-, setOuterWidth = function (el, outerWidth, autoHide) {
- var $E = el, w;
- if (isStr(el)) $E = $Ps[el]; // west
- else if (!el.jquery) $E = $(el);
- w = cssW($E, outerWidth);
- $E.css({ width: w });
- if (w > 0) {
- if (autoHide && $E.data('autoHidden') && $E.innerHeight() > 0) {
- $E.show().data('autoHidden', false);
- if (!browser.mozilla) // FireFox refreshes iframes - IE does not
- // make hidden, then visible to 'refresh' display after animation
- $E.css(_c.hidden).css(_c.visible);
- }
- }
- else if (autoHide && !$E.data('autoHidden'))
- $E.hide().data('autoHidden', true);
- }
-
- /**
- * @param {(string|!Object)} el
- * @param {number=} outerHeight
- * @param {boolean=} [autoHide=false]
- */
-, setOuterHeight = function (el, outerHeight, autoHide) {
- var $E = el, h;
- if (isStr(el)) $E = $Ps[el]; // west
- else if (!el.jquery) $E = $(el);
- h = cssH($E, outerHeight);
- $E.css({ height: h, visibility: "visible" }); // may have been 'hidden' by sizeContent
- if (h > 0 && $E.innerWidth() > 0) {
- if (autoHide && $E.data('autoHidden')) {
- $E.show().data('autoHidden', false);
- if (!browser.mozilla) // FireFox refreshes iframes - IE does not
- $E.css(_c.hidden).css(_c.visible);
- }
- }
- else if (autoHide && !$E.data('autoHidden'))
- $E.hide().data('autoHidden', true);
- }
-
- /**
- * @param {(string|!Object)} el
- * @param {number=} outerSize
- * @param {boolean=} [autoHide=false]
- */
-,	setOuterSize = function (el, outerSize, autoHide) {
-		// determine the pane-name - el may be a pane-name string OR a pane element
-		var pane = isStr(el) ? el : $(el).data("layoutEdge");
-		if (_c[pane].dir=="horz") // pane = north or south
-			setOuterHeight(el, outerSize, autoHide);
-		else // pane = east or west
-			setOuterWidth(el, outerSize, autoHide);
-	}
-
-
- /**
- * Converts any 'size' params to a pixel/integer size, if not already
- * If 'auto' or a decimal/percentage is passed as 'size', a pixel-size is calculated
-	 *
- * @param {string} pane
- * @param {(string|number)=} size
- * @param {string=} [dir]
- * @return {number}
- */
-, _parseSize = function (pane, size, dir) {
- if (!dir) dir = _c[pane].dir;
-
- if (isStr(size) && size.match(/%/))
- size = (size === '100%') ? -1 : parseInt(size, 10) / 100; // convert % to decimal
-
- if (size === 0)
- return 0;
- else if (size >= 1)
- return parseInt(size, 10);
-
- var o = options, avail = 0;
- if (dir=="horz") // north or south or center.minHeight
- avail = sC.innerHeight - ($Ps.north ? o.north.spacing_open : 0) - ($Ps.south ? o.south.spacing_open : 0);
- else if (dir=="vert") // east or west or center.minWidth
- avail = sC.innerWidth - ($Ps.west ? o.west.spacing_open : 0) - ($Ps.east ? o.east.spacing_open : 0);
-
- if (size === -1) // -1 == 100%
- return avail;
- else if (size > 0) // percentage, eg: .25
- return round(avail * size);
- else if (pane=="center")
- return 0;
- else { // size < 0 || size=='auto' || size==Missing || size==Invalid
- // auto-size the pane
- var dim = (dir === "horz" ? "height" : "width")
- , $P = $Ps[pane]
- , $C = dim === 'height' ? $Cs[pane] : false
- , vis = $.layout.showInvisibly($P) // show pane invisibly if hidden
- , szP = $P.css(dim) // SAVE current pane size
- , szC = $C ? $C.css(dim) : 0 // SAVE current content size
- ;
- $P.css(dim, "auto");
- if ($C) $C.css(dim, "auto");
- size = (dim === "height") ? $P.outerHeight() : $P.outerWidth(); // MEASURE
- $P.css(dim, szP).css(vis); // RESET size & visibility
- if ($C) $C.css(dim, szC);
- return size;
- }
- }
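A sketch of the size formats _parseSize() accepts, expressed through the public sizePane() API mentioned in the animatePaneSizing option above (assumes an existing layout instance):

    var myLayout = $("#container").layout();   // hypothetical existing layout
    myLayout.sizePane("west", 250);            // integers >= 1 are treated as pixels
    myLayout.sizePane("west", "25%");          // "25%" / 0.25 = share of the space available to this pane
    // in the options, size may also be "auto" - the pane is then measured via the logic above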
-
- /**
- * Calculates current 'size' (outer-width or outer-height) of a border-pane - optionally with 'pane-spacing' added
- *
- * @param {(string|!Object)} pane
- * @param {boolean=} [inclSpace=false]
- * @return {number} Returns EITHER Width for east/west panes OR Height for north/south panes - adjusted for boxModel & browser
- */
-, getPaneSize = function (pane, inclSpace) {
- var
- $P = $Ps[pane]
- , o = options[pane]
- , s = state[pane]
- , oSp = (inclSpace ? o.spacing_open : 0)
- , cSp = (inclSpace ? o.spacing_closed : 0)
- ;
- if (!$P || s.isHidden)
- return 0;
- else if (s.isClosed || (s.isSliding && inclSpace))
- return cSp;
- else if (_c[pane].dir === "horz")
- return $P.outerHeight() + oSp;
- else // dir === "vert"
- return $P.outerWidth() + oSp;
- }
-
- /**
- * Calculate min/max pane dimensions and limits for resizing
- *
- * @param {string} pane
- * @param {boolean=} [slide=false]
- */
-, setSizeLimits = function (pane, slide) {
- if (!isInitialized()) return;
- var
- o = options[pane]
- , s = state[pane]
- , c = _c[pane]
- , dir = c.dir
- , side = c.side.toLowerCase()
- , type = c.sizeType.toLowerCase()
- , isSliding = (slide != undefined ? slide : s.isSliding) // only open() passes 'slide' param
- , $P = $Ps[pane]
- , paneSpacing = o.spacing_open
- // measure the pane on the *opposite side* from this pane
- , altPane = _c.oppositeEdge[pane]
- , altS = state[altPane]
- , $altP = $Ps[altPane]
- , altPaneSize = (!$altP || altS.isVisible===false || altS.isSliding ? 0 : (dir=="horz" ? $altP.outerHeight() : $altP.outerWidth()))
- , altPaneSpacing = ((!$altP || altS.isHidden ? 0 : options[altPane][ altS.isClosed !== false ? "spacing_closed" : "spacing_open" ]) || 0)
- // limitSize prevents this pane from 'overlapping' opposite pane
- , containerSize = (dir=="horz" ? sC.innerHeight : sC.innerWidth)
- , minCenterDims = cssMinDims("center")
- , minCenterSize = dir=="horz" ? max(options.center.minHeight, minCenterDims.minHeight) : max(options.center.minWidth, minCenterDims.minWidth)
- // if pane is 'sliding', then ignore center and alt-pane sizes - because 'overlays' them
- , limitSize = (containerSize - paneSpacing - (isSliding ? 0 : (_parseSize("center", minCenterSize, dir) + altPaneSize + altPaneSpacing)))
- , minSize = s.minSize = max( _parseSize(pane, o.minSize), cssMinDims(pane).minSize )
- , maxSize = s.maxSize = min( (o.maxSize ? _parseSize(pane, o.maxSize) : 100000), limitSize )
- , r = s.resizerPosition = {} // used to set resizing limits
- , top = sC.insetTop
- , left = sC.insetLeft
- , W = sC.innerWidth
- , H = sC.innerHeight
- , rW = o.spacing_open // subtract resizer-width to get top/left position for south/east
- ;
- switch (pane) {
- case "north": r.min = top + minSize;
- r.max = top + maxSize;
- break;
- case "west": r.min = left + minSize;
- r.max = left + maxSize;
- break;
- case "south": r.min = top + H - maxSize - rW;
- r.max = top + H - minSize - rW;
- break;
- case "east": r.min = left + W - maxSize - rW;
- r.max = left + W - minSize - rW;
- break;
- };
- }
-
- /**
- * Returns data for setting the size/position of center pane. Also used to set Height for east/west panes
- *
- * @return JSON Returns a hash of all dimensions: top, bottom, left, right, (outer) width and (outer) height
- */
-, calcNewCenterPaneDims = function () {
- var d = {
- top: getPaneSize("north", true) // true = include 'spacing' value for pane
- , bottom: getPaneSize("south", true)
- , left: getPaneSize("west", true)
- , right: getPaneSize("east", true)
- , width: 0
- , height: 0
- };
-
- // NOTE: sC = state.container
- // calc center-pane outer dimensions
- d.width = sC.innerWidth - d.left - d.right; // outerWidth
- d.height = sC.innerHeight - d.bottom - d.top; // outerHeight
- // add the 'container border/padding' to get final positions relative to the container
- d.top += sC.insetTop;
- d.bottom += sC.insetBottom;
- d.left += sC.insetLeft;
- d.right += sC.insetRight;
-
- return d;
- }
-
-
- /**
- * @param {!Object} el
- * @param {boolean=} [allStates=false]
- */
-, getHoverClasses = function (el, allStates) {
- var
- $El = $(el)
- , type = $El.data("layoutRole")
- , pane = $El.data("layoutEdge")
- , o = options[pane]
- , root = o[type +"Class"]
- , _pane = "-"+ pane // eg: "-west"
- , _open = "-open"
- , _closed = "-closed"
- , _slide = "-sliding"
- , _hover = "-hover " // NOTE the trailing space
- , _state = $El.hasClass(root+_closed) ? _closed : _open
- , _alt = _state === _closed ? _open : _closed
- , classes = (root+_hover) + (root+_pane+_hover) + (root+_state+_hover) + (root+_pane+_state+_hover)
- ;
- if (allStates) // when 'removing' classes, also remove alternate-state classes
- classes += (root+_alt+_hover) + (root+_pane+_alt+_hover);
-
- if (type=="resizer" && $El.hasClass(root+_slide))
- classes += (root+_slide+_hover) + (root+_pane+_slide+_hover);
-
- return $.trim(classes);
- }
-, addHover = function (evt, el) {
- var $E = $(el || this);
- if (evt && $E.data("layoutRole") === "toggler")
- evt.stopPropagation(); // prevent triggering 'slide' on Resizer-bar
- $E.addClass( getHoverClasses($E) );
- }
-, removeHover = function (evt, el) {
- var $E = $(el || this);
- $E.removeClass( getHoverClasses($E, true) );
- }
-
-, onResizerEnter = function (evt) { // ALSO called by toggler.mouseenter
- if ($.fn.disableSelection)
- $("body").disableSelection();
- }
-, onResizerLeave = function (evt, el) {
- var
- e = el || this // el is only passed when called by the timer
- , pane = $(e).data("layoutEdge")
- , name = pane +"ResizerLeave"
- ;
- timer.clear(pane+"_openSlider"); // cancel slideOpen timer, if set
- timer.clear(name); // cancel enableSelection timer - may re/set below
- // this method calls itself on a timer because it needs to allow
- // enough time for dragging to kick-in and set the isResizing flag
- // dragging has a 100ms delay set, so this delay must be >100
- if (!el) // 1st call - mouseleave event
- timer.set(name, function(){ onResizerLeave(evt, e); }, 200);
- // if user is resizing, then dragStop will enableSelection(), so can skip it here
- else if (!state[pane].isResizing && $.fn.enableSelection) // 2nd call - by timer
- $("body").enableSelection();
- }
-
-/*
- * ###########################
- * INITIALIZATION METHODS
- * ###########################
- */
-
- /**
- * Initialize the layout - called automatically whenever an instance of layout is created
- *
- * @see none - triggered onInit
- * @return mixed true = fully initialized | false = panes not initialized (yet) | 'cancel' = abort
- */
-, _create = function () {
- // initialize config/options
- initOptions();
- var o = options;
-
- // TEMP state so isInitialized returns true during init process
- state.creatingLayout = true;
-
- // init plugins for this layout, if there are any (eg: stateManagement)
- runPluginCallbacks( Instance, $.layout.onCreate );
-
- // options & state have been initialized, so now run beforeLoad callback
- // onload will CANCEL layout creation if it returns false
- if (false === _runCallbacks("onload_start"))
- return 'cancel';
-
- // initialize the container element
- _initContainer();
-
- // bind hotkey function - keyDown - if required
- initHotkeys();
-
- // bind window.onunload
- $(window).bind("unload."+ sID, unload);
-
- // init plugins for this layout, if there are any (eg: customButtons)
- runPluginCallbacks( Instance, $.layout.onLoad );
-
- // if layout elements are hidden, then layout WILL NOT complete initialization!
- // initLayoutElements will set initialized=true and run the onload callback IF successful
- if (o.initPanes) _initLayoutElements();
-
- delete state.creatingLayout;
-
- return state.initialized;
- }
-
- /**
- * Initialize the layout IF not already
- *
- * @see All methods in Instance run this test
- * @return boolean true = layoutElements have been initialized | false = panes are not initialized (yet)
- */
-, isInitialized = function () {
- if (state.initialized || state.creatingLayout) return true; // already initialized
- else return _initLayoutElements(); // try to init panes NOW
- }
-
- /**
- * Initialize the layout - called automatically whenever an instance of layout is created
- *
- * @see _create() & isInitialized
- * @return An object pointer to the instance created
- */
-, _initLayoutElements = function (retry) {
- // initialize config/options
- var o = options;
-
- // CANNOT init panes inside a hidden container!
- if (!$N.is(":visible")) {
- // handle Chrome bug where popup window 'has no height'
- // if layout is BODY element, try again in 50ms
- // SEE: http://layout.jquery-dev.net/samples/test_popup_window.html
- if ( !retry && browser.webkit && $N[0].tagName === "BODY" )
- setTimeout(function(){ _initLayoutElements(true); }, 50);
- return false;
- }
-
- // a center pane is required, so make sure it exists
- if (!getPane("center").length) {
- if (options.showErrorMessages)
- _log( lang.errCenterPaneMissing, true );
- return false;
- }
-
- // TEMP state so isInitialized returns true during init process
- state.creatingLayout = true;
-
- // update Container dims
- $.extend(sC, elDims( $N ));
-
- // initialize all layout elements
- initPanes(); // size & position panes - calls initHandles() - which calls initResizable()
-
- if (o.scrollToBookmarkOnLoad) {
- var l = self.location;
- if (l.hash) l.replace( l.hash ); // scrollTo Bookmark
- }
-
- // check to see if this layout 'nested' inside a pane
- if (Instance.hasParentLayout)
- o.resizeWithWindow = false;
- // bind resizeAll() for 'this layout instance' to window.resize event
- else if (o.resizeWithWindow)
- $(window).bind("resize."+ sID, windowResize);
-
- delete state.creatingLayout;
- state.initialized = true;
-
- // init plugins for this layout, if there are any
- runPluginCallbacks( Instance, $.layout.onReady );
-
- // now run the onload callback, if exists
- _runCallbacks("onload_end");
-
- return true; // elements initialized successfully
- }
-
- /**
- * Initialize nested layouts - called when _initLayoutElements completes
- *
- * NOT CURRENTLY USED
- *
- * @see _initLayoutElements
- * @return An object pointer to the instance created
- */
-, _initChildLayouts = function () {
- $.each(_c.allPanes, function (idx, pane) {
- if (options[pane].initChildLayout)
- createChildLayout( pane );
- });
- }
-
- /**
- * Initialize nested layouts for a specific pane - can optionally pass layout-options
- *
- * @see _initChildLayouts
- * @param {string} pane The pane being opened, ie: north, south, east, or west
- * @param {Object=}	[opts] Layout-options - if passed, will OVERRIDE options[pane].childOptions
- * @return An object pointer to the layout instance created - or null
- */
-, createChildLayout = function (evt_or_pane, opts) {
- var pane = evtPane.call(this, evt_or_pane)
- , $P = $Ps[pane]
- , C = children
- ;
- if ($P) {
- var $C = $Cs[pane]
- , o = opts || options[pane].childOptions
- , d = "layout"
- // determine which element is supposed to be the 'child container'
- // if pane has a 'containerSelector' OR a 'content-div', use those instead of the pane
- , $Cont = o.containerSelector ? $P.find( o.containerSelector ) : ($C || $P)
- , containerFound = $Cont.length
- // see if a child-layout ALREADY exists on this element
- , child = containerFound ? (C[pane] = $Cont.data(d) || null) : null
- ;
- // if no layout exists, but childOptions are set, try to create the layout now
- if (!child && containerFound && o)
- child = C[pane] = $Cont.eq(0).layout(o) || null;
- if (child)
- child.hasParentLayout = true; // set parent-flag in child
- }
- Instance[pane].child = C[pane]; // ALWAYS set pane-object pointer, even if null
- }
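A hedged sketch of declaring a nested layout through childOptions (assumes jQuery, this plugin, and a hypothetical #container whose west pane itself contains north/center children):

    $("#container").layout({
        west__childOptions: {           // creates a child-layout inside the west pane
            north__size:       80
        ,   north__initClosed: true
        }
    });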
-
-, windowResize = function () {
- var delay = Number(options.resizeWithWindowDelay);
- if (delay < 10) delay = 100; // MUST have a delay!
-		// resizing uses a delay-loop because the resize event fires repeatedly - except in FF, but delay anyway
- timer.clear("winResize"); // if already running
- timer.set("winResize", function(){
- timer.clear("winResize");
- timer.clear("winResizeRepeater");
- var dims = elDims( $N );
- // only trigger resizeAll() if container has changed size
- if (dims.innerWidth !== sC.innerWidth || dims.innerHeight !== sC.innerHeight)
- resizeAll();
- }, delay);
- // ALSO set fixed-delay timer, if not already running
- if (!timer.data["winResizeRepeater"]) setWindowResizeRepeater();
- }
-
-, setWindowResizeRepeater = function () {
- var delay = Number(options.resizeWithWindowMaxDelay);
- if (delay > 0)
- timer.set("winResizeRepeater", function(){ setWindowResizeRepeater(); resizeAll(); }, delay);
- }
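A sketch of tuning the window-resize throttling handled by the two functions above (assumes a full-page layout on BODY):

    $("body").layout({
        resizeWithWindowDelay:    250   // wait 250ms after the last window.resize event
    ,   resizeWithWindowMaxDelay: 1000  // but force a resizeAll() at least every 1000ms while resizing
    });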
-
-, unload = function () {
- var o = options;
-
- _runCallbacks("onunload_start");
-
-		// trigger plugin callbacks for this layout (eg: stateManagement)
- runPluginCallbacks( Instance, $.layout.onUnload );
-
- _runCallbacks("onunload_end");
- }
-
- /**
- * Validate and initialize container CSS and events
- *
- * @see _create()
- */
-, _initContainer = function () {
- var
- N = $N[0]
- , tag = sC.tagName = N.tagName
- , id = sC.id = N.id
- , cls = sC.className = N.className
- , o = options
- , name = o.name
- , fullPage= (tag === "BODY")
- , props = "overflow,position,margin,padding,border"
- , css = "layoutCSS"
- , CSS = {}
- , hid = "hidden" // used A LOT!
- // see if this container is a 'pane' inside an outer-layout
- , parent = $N.data("parentLayout") // parent-layout Instance
- , pane = $N.data("layoutEdge") // pane-name in parent-layout
- , isChild = parent && pane
- ;
- // sC -> state.container
- sC.selector = $N.selector.split(".slice")[0];
- sC.ref = (o.name ? o.name +' layout / ' : '') + tag + (id ? "#"+id : cls ? '.['+cls+']' : ''); // used in messages
-
- $N .data({
- layout: Instance
- , layoutContainer: sID // FLAG to indicate this is a layout-container - contains unique internal ID
- })
- .addClass(o.containerClass)
- ;
- var layoutMethods = {
- destroy: ''
- , initPanes: ''
- , resizeAll: 'resizeAll'
- , resize: 'resizeAll'
- }
- , name;
- // loop hash and bind all methods - include layoutID namespacing
- for (name in layoutMethods) {
- $N.bind("layout"+ name.toLowerCase() +"."+ sID, Instance[ layoutMethods[name] || name ]);
- }
-
- // if this container is another layout's 'pane', then set child/parent pointers
- if (isChild) {
- // update parent flag
- Instance.hasParentLayout = true;
- // set pointers to THIS child-layout (Instance) in parent-layout
- // NOTE: parent.PANE.child is an ALIAS to parent.children.PANE
- parent[pane].child = parent.children[pane] = $N.data("layout");
- }
-
- // SAVE original container CSS for use in destroy()
- if (!$N.data(css)) {
- // handle props like overflow different for BODY & HTML - has 'system default' values
- if (fullPage) {
- CSS = $.extend( elCSS($N, props), {
- height: $N.css("height")
- , overflow: $N.css("overflow")
- , overflowX: $N.css("overflowX")
- , overflowY: $N.css("overflowY")
- });
- // ALSO SAVE <HTML> CSS
- var $H = $("html");
- $H.data(css, {
- height: "auto" // FF would return a fixed px-size!
- , overflow: $H.css("overflow")
- , overflowX: $H.css("overflowX")
- , overflowY: $H.css("overflowY")
- });
- }
- else // handle props normally for non-body elements
- CSS = elCSS($N, props+",top,bottom,left,right,width,height,overflow,overflowX,overflowY");
-
- $N.data(css, CSS);
- }
-
- try { // format html/body if this is a full page layout
- if (fullPage) {
- $("html").css({
- height: "100%"
- , overflow: hid
- , overflowX: hid
- , overflowY: hid
- });
- $("body").css({
- position: "relative"
- , height: "100%"
- , overflow: hid
- , overflowX: hid
- , overflowY: hid
- , margin: 0
- , padding: 0 // TODO: test whether body-padding could be handled?
- , border: "none" // a body-border creates problems because it cannot be measured!
- });
-
- // set current layout-container dimensions
- $.extend(sC, elDims( $N ));
- }
- else { // set required CSS for overflow and position
- // ENSURE container will not 'scroll'
-				CSS	= { overflow: hid, overflowX: hid, overflowY: hid };
- var
- p = $N.css("position")
- , h = $N.css("height")
- ;
- // if this is a NESTED layout, then container/outer-pane ALREADY has position and height
- if (!isChild) {
- if (!p || !p.match(/fixed|absolute|relative/))
- CSS.position = "relative"; // container MUST have a 'position'
- /*
- if (!h || h=="auto")
- CSS.height = "100%"; // container MUST have a 'height'
- */
- }
- $N.css( CSS );
-
- // set current layout-container dimensions
- if ( $N.is(":visible") ) {
- $.extend(sC, elDims( $N ));
- if (o.showErrorMessages && sC.innerHeight < 1)
- _log( lang.errContainerHeight.replace(/CONTAINER/, sC.ref), true );
- }
- }
- } catch (ex) {}
- }
-
- /**
- * Bind layout hotkeys - if options enabled
- *
- * @see _create() and addPane()
- * @param {string=} [panes=""] The edge(s) to process
- */
-, initHotkeys = function (panes) {
- panes = panes ? panes.split(",") : _c.borderPanes;
- // bind keyDown to capture hotkeys, if option enabled for ANY pane
- $.each(panes, function (i, pane) {
- var o = options[pane];
- if (o.enableCursorHotkey || o.customHotkey) {
- $(document).bind("keydown."+ sID, keyDown); // only need to bind this ONCE
- return false; // BREAK - binding was done
- }
- });
- }
-
- /**
- * Build final OPTIONS data
- *
- * @see _create()
- */
-, initOptions = function () {
- var data, d, pane, key, val, i, c, o;
-
- // reprocess user's layout-options to have correct options sub-key structure
- opts = $.layout.transformData( opts ); // panes = default subkey
-
- // auto-rename old options for backward compatibility
- opts = $.layout.backwardCompatibility.renameAllOptions( opts );
-
- // if user-options has 'panes' key (pane-defaults), process it...
- if (!$.isEmptyObject(opts.panes)) {
- // REMOVE any pane-defaults that MUST be set per-pane
- data = $.layout.optionsMap.noDefault;
- for (i=0, c=data.length; i<c; i++) {
- key = data[i];
- delete opts.panes[key]; // OK if does not exist
- }
- // REMOVE any layout-options specified under opts.panes
- data = $.layout.optionsMap.layout;
- for (i=0, c=data.length; i<c; i++) {
- key = data[i];
- delete opts.panes[key]; // OK if does not exist
- }
- }
-
- // MOVE any NON-layout-options to opts.panes
- data = $.layout.optionsMap.layout;
- var rootKeys = $.layout.config.optionRootKeys;
- for (key in opts) {
- val = opts[key];
- if ($.inArray(key, rootKeys) < 0 && $.inArray(key, data) < 0) {
- if (!opts.panes[key])
- opts.panes[key] = $.isPlainObject(val) ? $.extend(true, {}, val) : val;
- delete opts[key]
- }
- }
-
- // START by updating ALL options from opts
- $.extend(true, options, opts);
-
- // CREATE final options (and config) for EACH pane
- $.each(_c.allPanes, function (i, pane) {
-
- // apply 'pane-defaults' to CONFIG.[PANE]
- _c[pane] = $.extend( true, {}, _c.panes, _c[pane] );
-
- d = options.panes;
- o = options[pane];
-
- // center-pane uses SOME keys in defaults.panes branch
- if (pane === 'center') {
- // ONLY copy keys from opts.panes listed in: $.layout.optionsMap.center
- data = $.layout.optionsMap.center; // list of 'center-pane keys'
- for (i=0, c=data.length; i<c; i++) { // loop the list...
- key = data[i];
- // only need to use pane-default if pane-specific value not set
- if (!opts.center[key] && (opts.panes[key] || !o[key]))
- o[key] = d[key]; // pane-default
- }
- }
- else {
- // border-panes use ALL keys in defaults.panes branch
- o = options[pane] = $.extend({}, d, o); // re-apply pane-specific opts AFTER pane-defaults
- createFxOptions( pane );
- // ensure all border-pane-specific base-classes exist
- if (!o.resizerClass) o.resizerClass = "ui-layout-resizer";
- if (!o.togglerClass) o.togglerClass = "ui-layout-toggler";
- }
- // ensure we have base pane-class (ALL panes)
- if (!o.paneClass) o.paneClass = "ui-layout-pane";
- });
-
- // update options.zIndexes if a zIndex-option specified
- var zo = opts.zIndex
- , z = options.zIndexes;
- if (zo > 0) {
- z.pane_normal = zo;
- z.content_mask = max(zo+1, z.content_mask); // MIN = +1
- z.resizer_normal = max(zo+2, z.resizer_normal); // MIN = +2
- }
-
- function createFxOptions ( pane ) {
- var o = options[pane]
- , d = options.panes;
- // ensure fxSettings key to avoid errors
- if (!o.fxSettings) o.fxSettings = {};
- if (!d.fxSettings) d.fxSettings = {};
-
- $.each(["_open","_close","_size"], function (i,n) {
- var
- sName = "fxName"+ n
- , sSpeed = "fxSpeed"+ n
- , sSettings = "fxSettings"+ n
- // recalculate fxName according to specificity rules
- , fxName = o[sName] =
- o[sName] // options.west.fxName_open
- || d[sName] // options.panes.fxName_open
- || o.fxName // options.west.fxName
- || d.fxName // options.panes.fxName
- || "none" // MEANS $.layout.defaults.panes.fxName == "" || false || null || 0
- ;
- // validate fxName to ensure is valid effect - MUST have effect-config data in options.effects
- if (fxName === "none" || !$.effects || !$.effects[fxName] || !options.effects[fxName])
- fxName = o[sName] = "none"; // effect not loaded OR unrecognized fxName
-
- // set vars for effects subkeys to simplify logic
- var fx = options.effects[fxName] || {} // effects.slide
- , fx_all = fx.all || null // effects.slide.all
- , fx_pane = fx[pane] || null // effects.slide.west
- ;
- // create fxSpeed[_open|_close|_size]
- o[sSpeed] =
- o[sSpeed] // options.west.fxSpeed_open
- || d[sSpeed] // options.west.fxSpeed_open
- || o.fxSpeed // options.west.fxSpeed
- || d.fxSpeed // options.panes.fxSpeed
- || null // DEFAULT - let fxSetting.duration control speed
- ;
- // create fxSettings[_open|_close|_size]
- o[sSettings] = $.extend(
- {}
- , fx_all // effects.slide.all
- , fx_pane // effects.slide.west
- , d.fxSettings // options.panes.fxSettings
- , o.fxSettings // options.west.fxSettings
- , d[sSettings] // options.panes.fxSettings_open
- , o[sSettings] // options.west.fxSettings_open
- );
- });
-
- // DONE creating action-specific-settings for this pane,
- // so DELETE generic options - are no longer meaningful
- delete o.fxName;
- delete o.fxSpeed;
- delete o.fxSettings;
- }
-
- // DELETE 'panes' key now that we are done - values were copied to EACH pane
- delete options.panes;
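-		// NOTE: after this point options.panes no longer exists - pane-code must read the merged options[pane] instead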
- }
-
-	/**
-	 * Find the DOM element for the specified pane - subroutine of initPanes()
-	 *
-	 * @see  initPanes()
-	 * @param {string}	pane	The pane to find, eg: north, south, east, west or center
-	 */
-, getPane = function (pane) {
- var sel = options[pane].paneSelector
- if (sel.substr(0,1)==="#") // ID selector
- // NOTE: elements selected 'by ID' DO NOT have to be 'children'
- return $N.find(sel).eq(0);
- else { // class or other selector
- var $P = $N.children(sel).eq(0);
- // look for the pane nested inside a 'form' element
- return $P.length ? $P : $N.children("form:first").children(sel).eq(0);
- }
- }
-
-, initPanes = function () {
- // NOTE: do north & south FIRST so we can measure their height - do center LAST
- $.each(_c.allPanes, function (idx, pane) {
- addPane( pane, true );
- });
-
- // init the pane-handles NOW in case we have to hide or close the pane below
- initHandles();
-
- // now that all panes have been initialized and initially-sized,
- // make sure there is really enough space available for each pane
- $.each(_c.borderPanes, function (i, pane) {
- if ($Ps[pane] && state[pane].isVisible) { // pane is OPEN
- setSizeLimits(pane);
- makePaneFit(pane); // pane may be Closed, Hidden or Resized by makePaneFit()
- }
- });
- // size center-pane AGAIN in case we 'closed' a border-pane in loop above
- sizeMidPanes("center");
-
- // Chrome/Webkit sometimes fires callbacks BEFORE it completes resizing!
- // Before RC30.3, there was a 10ms delay here, but that caused layout
-		// to load asynchronously, which is BAD, so try skipping the delay for now
-
- // process pane contents and callbacks, and init/resize child-layout if exists
- $.each(_c.allPanes, function (i, pane) {
- var o = options[pane];
- if ($Ps[pane]) {
- if (state[pane].isVisible) { // pane is OPEN
- sizeContent(pane);
- // trigger pane.onResize if triggerEventsOnLoad = true
- if (o.triggerEventsOnLoad)
- _runCallbacks("onresize_end", pane);
-					else	// resizeChildLayout runs automatically from the onresize callback, otherwise call it directly
- // resize child - IF inner-layout already exists (created before this layout)
- resizeChildLayout(pane);
- }
- // init childLayout - even if pane is not visible
- if (o.initChildLayout && o.childOptions)
- createChildLayout(pane);
- }
- });
- }
-
- /**
- * Add a pane to the layout - subroutine of initPanes()
- *
- * @see initPanes()
- * @param {string} pane The pane to process
-	 * @param {boolean=}	[force=false]	Add the pane even if the layout is not yet initialized
- */
-, addPane = function (pane, force) {
- if (!force && !isInitialized()) return;
- var
- o = options[pane]
- , s = state[pane]
- , c = _c[pane]
- , fx = s.fx
- , dir = c.dir
- , spacing = o.spacing_open || 0
- , isCenter = (pane === "center")
- , CSS = {}
- , $P = $Ps[pane]
- , size, minSize, maxSize
- ;
- // if pane-pointer already exists, remove the old one first
- if ($P)
- removePane( pane, false, true, false );
- else
- $Cs[pane] = false; // init
-
- $P = $Ps[pane] = getPane(pane);
- if (!$P.length) {
- $Ps[pane] = false; // logic
- return;
- }
-
- // SAVE original Pane CSS
- if (!$P.data("layoutCSS")) {
- var props = "position,top,left,bottom,right,width,height,overflow,zIndex,display,backgroundColor,padding,margin,border";
- $P.data("layoutCSS", elCSS($P, props));
- }
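-		// NOTE: this saved 'layoutCSS' data is what removePane()/destroy() restore when the layout is torn down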
-
- // create alias for pane data in Instance - initHandles will add more
- Instance[pane] = { name: pane, pane: $Ps[pane], content: $Cs[pane], options: options[pane], state: state[pane], child: children[pane] };
-
- // add classes, attributes & events
- $P .data({
- parentLayout: Instance // pointer to Layout Instance
- , layoutPane: Instance[pane] // NEW pointer to pane-alias-object
- , layoutEdge: pane
- , layoutRole: "pane"
- })
- .css(c.cssReq).css("zIndex", options.zIndexes.pane_normal)
- .css(o.applyDemoStyles ? c.cssDemo : {}) // demo styles
- .addClass( o.paneClass +" "+ o.paneClass+"-"+pane ) // default = "ui-layout-pane ui-layout-pane-west" - may be a dupe of 'paneSelector'
- .bind("mouseenter."+ sID, addHover )
- .bind("mouseleave."+ sID, removeHover )
- ;
- var paneMethods = {
- hide: ''
- , show: ''
- , toggle: ''
- , close: ''
- , open: ''
- , slideOpen: ''
- , slideClose: ''
- , slideToggle: ''
- , size: 'manualSizePane'
- , sizePane: 'manualSizePane'
- , sizeContent: ''
- , sizeHandles: ''
- , enableClosable: ''
- , disableClosable: ''
- , enableSlideable: ''
- , disableSlideable: ''
- , enableResizable: ''
- , disableResizable: ''
- , swapPanes: 'swapPanes'
- , swap: 'swapPanes'
- , move: 'swapPanes'
- , removePane: 'removePane'
- , remove: 'removePane'
- , createChildLayout: ''
- , resizeChildLayout: ''
- , resizeAll: 'resizeAll'
- , resizeLayout: 'resizeAll'
- }
- , name;
- // loop hash and bind all methods - include layoutID namespacing
- for (name in paneMethods) {
- $P.bind("layoutpane"+ name.toLowerCase() +"."+ sID, Instance[ paneMethods[name] || name ]);
- }
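-		// NOTE: these bindings let layout-methods be triggered as events on the pane element,
-		// eg: $Ps.west.trigger("layoutpaneclose") should invoke Instance.close for the west pane
-		// (an un-namespaced trigger still fires the namespaced "."+ sID handlers)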
-
- // see if this pane has a 'scrolling-content element'
- initContent(pane, false); // false = do NOT sizeContent() - called later
-
- if (!isCenter) {
- // call _parseSize AFTER applying pane classes & styles - but before making visible (if hidden)
- // if o.size is auto or not valid, then MEASURE the pane and use that as its 'size'
- size = s.size = _parseSize(pane, o.size);
- minSize = _parseSize(pane,o.minSize) || 1;
- maxSize = _parseSize(pane,o.maxSize) || 100000;
- if (size > 0) size = max(min(size, maxSize), minSize);
-
- // state for border-panes
- s.isClosed = false; // true = pane is closed
- s.isSliding = false; // true = pane is currently open by 'sliding' over adjacent panes
- s.isResizing= false; // true = pane is in process of being resized
- s.isHidden = false; // true = pane is hidden - no spacing, resizer or toggler is visible!
-
- // array for 'pin buttons' whose classNames are auto-updated on pane-open/-close
- if (!s.pins) s.pins = [];
- }
- // states common to ALL panes
- s.tagName = $P[0].tagName;
-		s.edge		= pane;		// useful if pane is (or about to be) 'swapped' - easy to find out where it is (or is going)
- s.noRoom = false; // true = pane 'automatically' hidden due to insufficient room - will unhide automatically
- s.isVisible = true; // false = pane is invisible - closed OR hidden - simplify logic
-
- // set css-position to account for container borders & padding
- switch (pane) {
- case "north": CSS.top = sC.insetTop;
- CSS.left = sC.insetLeft;
- CSS.right = sC.insetRight;
- break;
- case "south": CSS.bottom = sC.insetBottom;
- CSS.left = sC.insetLeft;
- CSS.right = sC.insetRight;
- break;
- case "west": CSS.left = sC.insetLeft; // top, bottom & height set by sizeMidPanes()
- break;
- case "east": CSS.right = sC.insetRight; // ditto
- break;
- case "center": // top, left, width & height set by sizeMidPanes()
- }
-
- if (dir === "horz") // north or south pane
- CSS.height = cssH($P, size);
- else if (dir === "vert") // east or west pane
- CSS.width = cssW($P, size);
- //else if (isCenter) {}
-
- $P.css(CSS); // apply size -- top, bottom & height will be set by sizeMidPanes
- if (dir != "horz") sizeMidPanes(pane, true); // true = skipCallback
-
- // close or hide the pane if specified in settings
- if (o.initClosed && o.closable && !o.initHidden)
- close(pane, true, true); // true, true = force, noAnimation
- else if (o.initHidden || o.initClosed)
- hide(pane); // will be completely invisible - no resizer or spacing
- else if (!s.noRoom)
- // make the pane visible - in case was initially hidden
- $P.css("display","block");
- // ELSE setAsOpen() - called later by initHandles()
-
- // RESET visibility now - pane will appear IF display:block
- $P.css("visibility","visible");
-
- // check option for auto-handling of pop-ups & drop-downs
- if (o.showOverflowOnHover)
- $P.hover( allowOverflow, resetOverflow );
-
- // if manually adding a pane AFTER layout initialization, then...
- if (state.initialized) {
- initHandles( pane );
- initHotkeys( pane );
- resizeAll(); // will sizeContent if pane is visible
- if (s.isVisible) { // pane is OPEN
- if (o.triggerEventsOnLoad)
- _runCallbacks("onresize_end", pane);
-				else	// resizeChildLayout runs automatically from the onresize callback, otherwise call it directly
- // resize child - IF inner-layout already exists (created before this layout)
- resizeChildLayout(pane); // a previously existing childLayout
- }
- if (o.initChildLayout && o.childOptions)
- createChildLayout(pane);
- }
- }
-
- /**
- * Initialize module objects, styling, size and position for all resize bars and toggler buttons
- *
- * @see _create()
- * @param {string=} [panes=""] The edge(s) to process
- */
-, initHandles = function (panes) {
- panes = panes ? panes.split(",") : _c.borderPanes;
-
- // create toggler DIVs for each pane, and set object pointers for them, eg: $R.north = north toggler DIV
- $.each(panes, function (i, pane) {
- var $P = $Ps[pane];
- $Rs[pane] = false; // INIT
- $Ts[pane] = false;
- if (!$P) return; // pane does not exist - skip
-
- var
- o = options[pane]
- , s = state[pane]
- , c = _c[pane]
- , rClass = o.resizerClass
- , tClass = o.togglerClass
- , side = c.side.toLowerCase()
- , spacing = (s.isVisible ? o.spacing_open : o.spacing_closed)
- , _pane = "-"+ pane // used for classNames
- , _state = (s.isVisible ? "-open" : "-closed") // used for classNames
- , I = Instance[pane]
- // INIT RESIZER BAR
- , $R = I.resizer = $Rs[pane] = $("<div></div>")
- // INIT TOGGLER BUTTON
- , $T = I.toggler = (o.closable ? $Ts[pane] = $("<div></div>") : false)
- ;
-
- //if (s.isVisible && o.resizable) ... handled by initResizable
- if (!s.isVisible && o.slidable)
- $R.attr("title", o.sliderTip).css("cursor", o.sliderCursor);
-
- $R // if paneSelector is an ID, then create a matching ID for the resizer, eg: "#paneLeft" => "paneLeft-resizer"
- .attr("id", (o.paneSelector.substr(0,1)=="#" ? o.paneSelector.substr(1) + "-resizer" : ""))
- .data({
- parentLayout: Instance
- , layoutPane: Instance[pane] // NEW pointer to pane-alias-object
- , layoutEdge: pane
- , layoutRole: "resizer"
- })
- .css(_c.resizers.cssReq).css("zIndex", options.zIndexes.resizer_normal)
- .css(o.applyDemoStyles ? _c.resizers.cssDemo : {}) // add demo styles
- .addClass(rClass +" "+ rClass+_pane)
- .hover(addHover, removeHover) // ALWAYS add hover-classes, even if resizing is not enabled - handle with CSS instead
- .hover(onResizerEnter, onResizerLeave) // ALWAYS NEED resizer.mouseleave to balance toggler.mouseenter
- .appendTo($N) // append DIV to container
- ;
-
- if ($T) {
- $T // if paneSelector is an ID, then create a matching ID for the resizer, eg: "#paneLeft" => "#paneLeft-toggler"
- .attr("id", (o.paneSelector.substr(0,1)=="#" ? o.paneSelector.substr(1) + "-toggler" : ""))
- .data({
- parentLayout: Instance
- , layoutPane: Instance[pane] // NEW pointer to pane-alias-object
- , layoutEdge: pane
- , layoutRole: "toggler"
- })
- .css(_c.togglers.cssReq) // add base/required styles
- .css(o.applyDemoStyles ? _c.togglers.cssDemo : {}) // add demo styles
- .addClass(tClass +" "+ tClass+_pane)
- .hover(addHover, removeHover) // ALWAYS add hover-classes, even if toggling is not enabled - handle with CSS instead
- .bind("mouseenter", onResizerEnter) // NEED toggler.mouseenter because mouseenter MAY NOT fire on resizer
- .appendTo($R) // append SPAN to resizer DIV
- ;
- // ADD INNER-SPANS TO TOGGLER
- if (o.togglerContent_open) // ui-layout-open
- $("<span>"+ o.togglerContent_open +"</span>")
- .data({
- layoutEdge: pane
- , layoutRole: "togglerContent"
- })
- .addClass("content content-open")
- .css("display","none")
- .appendTo( $T )
- //.hover( addHover, removeHover ) // use ui-layout-toggler-west-hover .content-open instead!
- ;
- if (o.togglerContent_closed) // ui-layout-closed
- $("<span>"+ o.togglerContent_closed +"</span>")
- .data({
- layoutEdge: pane
- , layoutRole: "togglerContent"
- })
- .addClass("content content-closed")
- .css("display","none")
- .appendTo( $T )
- //.hover( addHover, removeHover ) // use ui-layout-toggler-west-hover .content-closed instead!
- ;
- // ADD TOGGLER.click/.hover
- enableClosable(pane);
- }
-
- // add Draggable events
- initResizable(pane);
-
- // ADD CLASSNAMES & SLIDE-BINDINGS - eg: class="resizer resizer-west resizer-open"
- if (s.isVisible)
- setAsOpen(pane); // onOpen will be called, but NOT onResize
- else {
- setAsClosed(pane); // onClose will be called
- bindStartSlidingEvent(pane, true); // will enable events IF option is set
- }
-
- });
-
- // SET ALL HANDLE DIMENSIONS
- sizeHandles();
- }
-
-
- /**
- * Initialize scrolling ui-layout-content div - if exists
- *
-	 * @see  addPane() - or called externally after an Ajax injection
- * @param {string} [pane] The pane to process
- * @param {boolean=} [resize=true] Size content after init
- */
-, initContent = function (pane, resize) {
- if (!isInitialized()) return;
- var
- o = options[pane]
- , sel = o.contentSelector
- , I = Instance[pane]
- , $P = $Ps[pane]
- , $C
- ;
- if (sel) $C = I.content = $Cs[pane] = (o.findNestedContent)
- ? $P.find(sel).eq(0) // match 1-element only
- : $P.children(sel).eq(0)
- ;
- if ($C && $C.length) {
- $C.data("layoutRole", "content");
- // SAVE original Pane CSS
- if (!$C.data("layoutCSS"))
- $C.data("layoutCSS", elCSS($C, "height"));
- $C.css( _c.content.cssReq );
- if (o.applyDemoStyles) {
- $C.css( _c.content.cssDemo ); // add padding & overflow: auto to content-div
- $P.css( _c.content.cssDemoPane ); // REMOVE padding/scrolling from pane
- }
- state[pane].content = {}; // init content state
- if (resize !== false) sizeContent(pane);
- // sizeContent() is called AFTER init of all elements
- }
- else
- I.content = $Cs[pane] = false;
- }
-
-
- /**
- * Add resize-bars to all panes that specify it in options
-	 * - dependency: $.fn.draggable - will skip if not found
- *
- * @see _create()
- * @param {string=} [panes=""] The edge(s) to process
- */
-, initResizable = function (panes) {
- var draggingAvailable = $.layout.plugins.draggable
- ;
- panes = panes ? panes.split(",") : _c.borderPanes;
-
- $.each(panes, function (idx, pane) {
- var o = options[pane];
- if (!draggingAvailable || !$Ps[pane] || !o.resizable) {
- o.resizable = false;
- return true; // skip to next
- }
-
- var s = state[pane]
- , z = options.zIndexes
- , c = _c[pane]
- , side = c.dir=="horz" ? "top" : "left"
- , opEdge = _c.oppositeEdge[pane]
- , masks = pane +",center,"+ opEdge + (c.dir=="horz" ? ",west,east" : "")
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- , base = o.resizerClass
- , lastPos = 0 // used when live-resizing
- , r, live // set in start because may change
- // 'drag' classes are applied to the ORIGINAL resizer-bar while dragging is in process
- , resizerClass = base+"-drag" // resizer-drag
- , resizerPaneClass = base+"-"+pane+"-drag" // resizer-north-drag
- // 'helper' class is applied to the CLONED resizer-bar while it is being dragged
- , helperClass = base+"-dragging" // resizer-dragging
- , helperPaneClass = base+"-"+pane+"-dragging" // resizer-north-dragging
- , helperLimitClass = base+"-dragging-limit" // resizer-drag
- , helperPaneLimitClass = base+"-"+pane+"-dragging-limit" // resizer-north-drag
- , helperClassesSet = false // logic var
- ;
-
- if (!s.isClosed)
- $R.attr("title", o.resizerTip)
- .css("cursor", o.resizerCursor); // n-resize, s-resize, etc
-
- $R.draggable({
- containment: $N[0] // limit resizing to layout container
- , axis: (c.dir=="horz" ? "y" : "x") // limit resizing to horz or vert axis
- , delay: 0
- , distance: 1
- , grid: o.resizingGrid
- // basic format for helper - style it using class: .ui-draggable-dragging
- , helper: "clone"
- , opacity: o.resizerDragOpacity
- , addClasses: false // avoid ui-state-disabled class when disabled
- //, iframeFix: o.draggableIframeFix // TODO: consider using when bug is fixed
- , zIndex: z.resizer_drag
-
- , start: function (e, ui) {
- // REFRESH options & state pointers in case we used swapPanes
- o = options[pane];
- s = state[pane];
- // re-read options
- live = o.livePaneResizing;
-
-				// ondrag_start callback - will CANCEL the drag if returns false
- // TODO: dragging CANNOT be cancelled like this, so see if there is a way?
- if (false === _runCallbacks("ondrag_start", pane)) return false;
-
- s.isResizing = true; // prevent pane from closing while resizing
- timer.clear(pane+"_closeSlider"); // just in case already triggered
-
- // SET RESIZER LIMITS - used in drag()
- setSizeLimits(pane); // update pane/resizer state
- r = s.resizerPosition;
- lastPos = ui.position[ side ]
-
- $R.addClass( resizerClass +" "+ resizerPaneClass ); // add drag classes
- helperClassesSet = false; // reset logic var - see drag()
-
- // DISABLE TEXT SELECTION (probably already done by resizer.mouseOver)
- $('body').disableSelection();
-
- // MASK PANES CONTAINING IFRAMES, APPLETS OR OTHER TROUBLESOME ELEMENTS
- showMasks( masks );
- }
-
- , drag: function (e, ui) {
- if (!helperClassesSet) { // can only add classes after clone has been added to the DOM
- //$(".ui-draggable-dragging")
- ui.helper
- .addClass( helperClass +" "+ helperPaneClass ) // add helper classes
- .css({ right: "auto", bottom: "auto" }) // fix dir="rtl" issue
- .children().css("visibility","hidden") // hide toggler inside dragged resizer-bar
- ;
- helperClassesSet = true;
- // draggable bug!? RE-SET zIndex to prevent E/W resize-bar showing through N/S pane!
- if (s.isSliding) $Ps[pane].css("zIndex", z.pane_sliding);
- }
- // CONTAIN RESIZER-BAR TO RESIZING LIMITS
- var limit = 0;
- if (ui.position[side] < r.min) {
- ui.position[side] = r.min;
- limit = -1;
- }
- else if (ui.position[side] > r.max) {
- ui.position[side] = r.max;
- limit = 1;
- }
- // ADD/REMOVE dragging-limit CLASS
- if (limit) {
- ui.helper.addClass( helperLimitClass +" "+ helperPaneLimitClass ); // at dragging-limit
- window.defaultStatus = (limit>0 && pane.match(/north|west/)) || (limit<0 && pane.match(/south|east/)) ? lang.maxSizeWarning : lang.minSizeWarning;
- }
- else {
- ui.helper.removeClass( helperLimitClass +" "+ helperPaneLimitClass ); // not at dragging-limit
- window.defaultStatus = "";
- }
- // DYNAMICALLY RESIZE PANES IF OPTION ENABLED
- // won't trigger unless resizer has actually moved!
- if (live && Math.abs(ui.position[side] - lastPos) >= o.liveResizingTolerance) {
- lastPos = ui.position[side];
- resizePanes(e, ui, pane)
- }
- }
-
- , stop: function (e, ui) {
- $('body').enableSelection(); // RE-ENABLE TEXT SELECTION
- window.defaultStatus = ""; // clear 'resizing limit' message from statusbar
- $R.removeClass( resizerClass +" "+ resizerPaneClass ); // remove drag classes from Resizer
- s.isResizing = false;
- resizePanes(e, ui, pane, true, masks); // true = resizingDone
- }
-
- });
- });
-
- /**
- * resizePanes
- *
- * Sub-routine called from stop() - and drag() if livePaneResizing
- *
- * @param {!Object} evt
- * @param {!Object} ui
- * @param {string} pane
- * @param {boolean=} [resizingDone=false]
- */
- var resizePanes = function (evt, ui, pane, resizingDone, masks) {
- var dragPos = ui.position
- , c = _c[pane]
- , o = options[pane]
- , s = state[pane]
- , resizerPos
- ;
- switch (pane) {
- case "north": resizerPos = dragPos.top; break;
- case "west": resizerPos = dragPos.left; break;
- case "south": resizerPos = sC.offsetHeight - dragPos.top - o.spacing_open; break;
- case "east": resizerPos = sC.offsetWidth - dragPos.left - o.spacing_open; break;
- };
- // remove container margin from resizer position to get the pane size
- var newSize = resizerPos - sC["inset"+ c.side];
-
- // Disable OR Resize Mask(s) created in drag.start
- if (!resizingDone) {
- // ensure we meet liveResizingTolerance criteria
- if (Math.abs(newSize - s.size) < o.liveResizingTolerance)
- return; // SKIP resize this time
- // resize the pane
- manualSizePane(pane, newSize, false, true); // true = noAnimation
- sizeMasks(); // resize all visible masks
- }
- else { // resizingDone
- // ondrag_end callback
- if (false !== _runCallbacks("ondrag_end", pane))
- manualSizePane(pane, newSize, false, true); // true = noAnimation
- hideMasks(); // hide all masks, which include panes with 'content/iframe-masks'
- if (s.isSliding && masks) // RE-SHOW only 'object-masks' so objects won't show through sliding pane
- showMasks( masks, true ); // true = onlyForObjects
- }
- };
- }
-
- /**
- * sizeMask
- *
- * Needed to overlay a DIV over an IFRAME-pane because mask CANNOT be *inside* the pane
- * Called when mask created, and during livePaneResizing
- */
-, sizeMask = function () {
- var $M = $(this)
- , pane = $M.data("layoutMask") // eg: "west"
- , s = state[pane]
- ;
- // only masks over an IFRAME-pane need manual resizing
- if (s.tagName == "IFRAME" && s.isVisible) // no need to mask closed/hidden panes
- $M.css({
- top: s.offsetTop
- , left: s.offsetLeft
- , width: s.outerWidth
- , height: s.outerHeight
- });
- /* ALT Method...
- var $P = $Ps[pane];
- $M.css( $P.position() ).css({ width: $P[0].offsetWidth, height: $P[0].offsetHeight });
- */
- }
-, sizeMasks = function () {
- $Ms.each( sizeMask ); // resize all 'visible' masks
- }
-
-, showMasks = function (panes, onlyForObjects) {
- var a = panes ? panes.split(",") : $.layout.config.allPanes
- , z = options.zIndexes
- , o, s;
- $.each(a, function(i,p){
- s = state[p];
- o = options[p];
- if (s.isVisible && ( (!onlyForObjects && o.maskContents) || o.maskObjects )) {
- getMasks(p).each(function(){
- sizeMask.call(this);
- this.style.zIndex = s.isSliding ? z.pane_sliding+1 : z.pane_normal+1
- this.style.display = "block";
- });
- }
- });
- }
-
-, hideMasks = function () {
- // ensure no pane is resizing - could be a timing issue
- var skip;
- $.each( $.layout.config.borderPanes, function(i,p){
- if (state[p].isResizing) {
- skip = true;
- return false; // BREAK
- }
- });
- if (!skip)
- $Ms.hide(); // hide ALL masks
- }
-
-, getMasks = function (pane) {
- var $Masks = $([])
- , $M, i = 0, c = $Ms.length
- ;
- for (; i<c; i++) {
- $M = $Ms.eq(i);
- if ($M.data("layoutMask") === pane)
- $Masks = $Masks.add( $M );
- }
- if ($Masks.length)
- return $Masks;
- else
- return createMasks(pane);
- }
-
- /**
- * createMasks
- *
- * Generates both DIV (ALWAYS used) and IFRAME (optional) elements as masks
- * An IFRAME mask is created *under* the DIV when maskObjects=true, because a DIV cannot mask an applet
- */
-, createMasks = function (pane) {
- var
- $P = $Ps[pane]
- , s = state[pane]
- , o = options[pane]
- , z = options.zIndexes
- //, objMask = o.maskObjects && s.tagName != "IFRAME" // check for option
- , $Masks = $([])
- , isIframe, el, $M, css, i
- ;
- if (!o.maskContents && !o.maskObjects) return $Masks;
- // if o.maskObjects=true, then loop TWICE to create BOTH kinds of mask, else only create a DIV
- for (i=0; i < (o.maskObjects ? 2 : 1); i++) {
- isIframe = o.maskObjects && i==0;
- el = document.createElement( isIframe ? "iframe" : "div" );
- $M = $(el).data("layoutMask", pane); // add data to relate mask to pane
- el.className = "ui-layout-mask ui-layout-mask-"+ pane; // for user styling
- css = el.style;
- // styles common to both DIVs and IFRAMES
- css.display = "block";
- css.position = "absolute";
- if (isIframe) { // IFRAME-only props
-				el.frameBorder = 0;
- el.src = "about:blank";
- css.opacity = 0;
- css.filter = "Alpha(Opacity='0')";
- css.border = 0;
- }
- // if pane is an IFRAME, then must mask the pane itself
- if (s.tagName == "IFRAME") {
- // NOTE sizing done by a subroutine so can be called during live-resizing
- css.zIndex = z.pane_normal+1; // 1-higher than pane
- $N.append( el ); // append to LAYOUT CONTAINER
- }
- // otherwise put masks *inside the pane* to mask its contents
- else {
- $M.addClass("ui-layout-mask-inside-pane");
- css.zIndex = o.maskZindex || z.content_mask; // usually 1, but customizable
- css.top = 0;
- css.left = 0;
- css.width = "100%";
- css.height = "100%";
- $P.append( el ); // append INSIDE pane element
- }
- // add to return object
- $Masks = $Masks.add( el );
- // add Mask to cached array so can be resized & reused
- $Ms = $Ms.add( el );
- }
- return $Masks;
- }
-
-
- /**
- * Destroy this layout and reset all elements
- *
-	 * @param {boolean=}	[destroyChildren=false]		Destroy Child-Layouts first?
- */
-, destroy = function (destroyChildren) {
- // UNBIND layout events and remove global object
- $(window).unbind("."+ sID); // resize & unload
- $(document).unbind("."+ sID); // keyDown (hotkeys)
-
- // need to look for parent layout BEFORE we remove the container data, else skips a level
- //var parentPane = Instance.hasParentLayout ? $.layout.getParentPaneInstance( $N ) : null;
-
- // reset layout-container
- $N .clearQueue()
- .removeData("layout")
- .removeData("layoutContainer")
- .removeClass(options.containerClass)
- .unbind("."+ sID) // remove ALL Layout events
- ;
-
- // remove all mask elements that have been created
- $Ms.remove();
-
- // loop all panes to remove layout classes, attributes and bindings
- $.each(_c.allPanes, function (i, pane) {
- removePane( pane, false, true, destroyChildren ); // true = skipResize
- });
-
-		// do NOT reset container CSS if it is a 'pane' (or 'content') in an outer-layout - ie, THIS layout is 'nested'
- var css = "layoutCSS";
- if ($N.data(css) && !$N.data("layoutRole")) // RESET CSS
- $N.css( $N.data(css) ).removeData(css);
-
- // for full-page layouts, also reset the <HTML> CSS
- if (sC.tagName === "BODY" && ($N = $("html")).data(css)) // RESET <HTML> CSS
- $N.css( $N.data(css) ).removeData(css);
-
- // trigger plugins for this layout, if there are any
- runPluginCallbacks( Instance, $.layout.onDestroy );
-
- // trigger state-management and onunload callback
- unload();
-
- // clear the Instance of everything except for container & options (so could recreate)
- // RE-CREATE: myLayout = myLayout.container.layout( myLayout.options );
-		for (var n in Instance)
- if (!n.match(/^(container|options)$/)) delete Instance[ n ];
- // add a 'destroyed' flag to make it easy to check
- Instance.destroyed = true;
-
- // if this is a child layout, CLEAR the child-pointer in the parent
- /* for now the pointer REMAINS, but with only container, options and destroyed keys
- if (parentPane) {
- var layout = parentPane.pane.data("parentLayout");
- parentPane.child = layout.children[ parentPane.name ] = null;
- }
- */
-
- return Instance; // for coding convenience
- }
-
- /**
- * Remove a pane from the layout - subroutine of destroy()
- *
- * @see destroy()
- * @param {string} pane The pane to process
- * @param {boolean=} [remove=false] Remove the DOM element?
-	 * @param {boolean=}	[skipResize=false]	Skip calling resizeAll()?
-	 * @param {boolean=}	[destroyChild]		Destroy a child-layout, if one exists? (default = options.destroyChildLayout)
-	 */
-, removePane = function (evt_or_pane, remove, skipResize, destroyChild) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $P = $Ps[pane]
- , $C = $Cs[pane]
- , $R = $Rs[pane]
- , $T = $Ts[pane]
- ;
- //alert( '$P.length = '+ $P.length );
- // NOTE: elements can still exist even after remove()
-		// so check for missing data(), which is cleared by remove()
- if ($P && $.isEmptyObject( $P.data() )) $P = false;
- if ($C && $.isEmptyObject( $C.data() )) $C = false;
- if ($R && $.isEmptyObject( $R.data() )) $R = false;
- if ($T && $.isEmptyObject( $T.data() )) $T = false;
-
- if ($P) $P.stop(true, true);
-
- // check for a child layout
- var o = options[pane]
- , s = state[pane]
- , d = "layout"
- , css = "layoutCSS"
- , child = children[pane] || ($P ? $P.data(d) : 0) || ($C ? $C.data(d) : 0) || null
- , destroy = destroyChild !== undefined ? destroyChild : o.destroyChildLayout
- ;
-
- // FIRST destroy the child-layout(s)
- if (destroy && child && !child.destroyed) {
- child.destroy(true); // tell child-layout to destroy ALL its child-layouts too
- if (child.destroyed) // destroy was successful
- child = null; // clear pointer for logic below
- }
-
- if ($P && remove && !child)
- $P.remove();
- else if ($P && $P[0]) {
- // create list of ALL pane-classes that need to be removed
- var root = o.paneClass // default="ui-layout-pane"
- , pRoot = root +"-"+ pane // eg: "ui-layout-pane-west"
- , _open = "-open"
- , _sliding= "-sliding"
- , _closed = "-closed"
- , classes = [ root, root+_open, root+_closed, root+_sliding, // generic classes
- pRoot, pRoot+_open, pRoot+_closed, pRoot+_sliding ] // pane-specific classes
- ;
- $.merge(classes, getHoverClasses($P, true)); // ADD hover-classes
- // remove all Layout classes from pane-element
- $P .removeClass( classes.join(" ") ) // remove ALL pane-classes
- .removeData("parentLayout")
- .removeData("layoutPane")
- .removeData("layoutRole")
- .removeData("layoutEdge")
- .removeData("autoHidden") // in case set
- .unbind("."+ sID) // remove ALL Layout events
- // TODO: remove these extra unbind commands when jQuery is fixed
- //.unbind("mouseenter"+ sID)
- //.unbind("mouseleave"+ sID)
- ;
- // do NOT reset CSS if this pane/content is STILL the container of a nested layout!
- // the nested layout will reset its 'container' CSS when/if it is destroyed
- if ($C && $C.data(d)) {
- // a content-div may not have a specific width, so give it one to contain the Layout
- $C.width( $C.width() );
- child.resizeAll(); // now resize the Layout
- }
- else if ($C)
- $C.css( $C.data(css) ).removeData(css).removeData("layoutRole");
- // remove pane AFTER content in case there was a nested layout
- if (!$P.data(d))
- $P.css( $P.data(css) ).removeData(css);
- }
-
- // REMOVE pane resizer and toggler elements
- if ($T) $T.remove();
- if ($R) $R.remove();
-
- // CLEAR all pointers and state data
- Instance[pane] = $Ps[pane] = $Cs[pane] = $Rs[pane] = $Ts[pane] = children[pane] = false;
- s = { removed: true };
-
- if (!skipResize)
- resizeAll();
- }
-
-
-/*
- * ###########################
- * ACTION METHODS
- * ###########################
- */
-
-, _hidePane = function (pane) {
- var $P = $Ps[pane]
- , o = options[pane]
- , s = $P[0].style
- ;
- if (o.useOffscreenClose) {
- if (!$P.data(_c.offscreenReset))
- $P.data(_c.offscreenReset, { left: s.left, right: s.right });
- $P.css( _c.offscreenCSS );
- }
- else
- $P.hide().removeData(_c.offscreenReset);
- }
-
-, _showPane = function (pane) {
- var $P = $Ps[pane]
- , o = options[pane]
- , off = _c.offscreenCSS
- , old = $P.data(_c.offscreenReset)
- , s = $P[0].style
- ;
- $P .show() // ALWAYS show, just in case
- .removeData(_c.offscreenReset);
- if (o.useOffscreenClose && old) {
- if (s.left == off.left)
- s.left = old.left;
- if (s.right == off.right)
- s.right = old.right;
- }
- }
-
-
- /**
- * Completely 'hides' a pane, including its spacing - as if it does not exist
-	 * The pane is not actually 'removed' from the DOM, so 'show' can be used to un-hide it
- *
- * @param {string} pane The pane being hidden, ie: north, south, east, or west
- * @param {boolean=} [noAnimation=false]
- */
-, hide = function (evt_or_pane, noAnimation) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , o = options[pane]
- , s = state[pane]
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- ;
- if (!$P || s.isHidden) return; // pane does not exist OR is already hidden
-
- // onhide_start callback - will CANCEL hide if returns false
- if (state.initialized && false === _runCallbacks("onhide_start", pane)) return;
-
- s.isSliding = false; // just in case
-
- // now hide the elements
- if ($R) $R.hide(); // hide resizer-bar
- if (!state.initialized || s.isClosed) {
- s.isClosed = true; // to trigger open-animation on show()
- s.isHidden = true;
- s.isVisible = false;
- if (!state.initialized)
- _hidePane(pane); // no animation when loading page
- sizeMidPanes(_c[pane].dir === "horz" ? "" : "center");
- if (state.initialized || o.triggerEventsOnLoad)
- _runCallbacks("onhide_end", pane);
- }
- else {
- s.isHiding = true; // used by onclose
- close(pane, false, noAnimation); // adjust all panes to fit
- }
- }
-
- /**
- * Show a hidden pane - show as 'closed' by default unless openPane = true
- *
- * @param {string} pane The pane being opened, ie: north, south, east, or west
- * @param {boolean=} [openPane=false]
- * @param {boolean=} [noAnimation=false]
- * @param {boolean=} [noAlert=false]
- */
-, show = function (evt_or_pane, openPane, noAnimation, noAlert) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , o = options[pane]
- , s = state[pane]
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- ;
- if (!$P || !s.isHidden) return; // pane does not exist OR is not hidden
-
- // onshow_start callback - will CANCEL show if returns false
- if (false === _runCallbacks("onshow_start", pane)) return;
-
- s.isSliding = false; // just in case
- s.isShowing = true; // used by onopen/onclose
- //s.isHidden = false; - will be set by open/close - if not cancelled
-
- // now show the elements
- //if ($R) $R.show(); - will be shown by open/close
- if (openPane === false)
- close(pane, true); // true = force
- else
- open(pane, false, noAnimation, noAlert); // adjust all panes to fit
- }
-
-
- /**
- * Toggles a pane open/closed by calling either open or close
- *
- * @param {string} pane The pane being toggled, ie: north, south, east, or west
- * @param {boolean=} [slide=false]
- */
-, toggle = function (evt_or_pane, slide) {
- if (!isInitialized()) return;
- var evt = evtObj(evt_or_pane)
- , pane = evtPane.call(this, evt_or_pane)
- , s = state[pane]
- ;
-		if (evt) // called from $R.dblclick OR triggerPaneEvent
- evt.stopImmediatePropagation();
- if (s.isHidden)
- show(pane); // will call 'open' after unhiding it
- else if (s.isClosed)
- open(pane, !!slide);
- else
- close(pane);
- }
-
-
- /**
- * Utility method used during init or other auto-processes
- *
- * @param {string} pane The pane being closed
- * @param {boolean=} [setHandles=false]
- */
-, _closePane = function (pane, setHandles) {
- var
- $P = $Ps[pane]
- , s = state[pane]
- ;
- _hidePane(pane);
- s.isClosed = true;
- s.isVisible = false;
- // UNUSED: if (setHandles) setAsClosed(pane, true); // true = force
- }
-
- /**
- * Close the specified pane (animation optional), and resize all other panes as needed
- *
- * @param {string} pane The pane being closed, ie: north, south, east, or west
- * @param {boolean=} [force=false]
- * @param {boolean=} [noAnimation=false]
- * @param {boolean=} [skipCallback=false]
- */
-, close = function (evt_or_pane, force, noAnimation, skipCallback) {
- var pane = evtPane.call(this, evt_or_pane);
- // if pane has been initialized, but NOT the complete layout, close pane instantly
- if (!state.initialized && $Ps[pane]) {
- _closePane(pane); // INIT pane as closed
- return;
- }
- if (!isInitialized()) return;
-
- var
- $P = $Ps[pane]
- , $R = $Rs[pane]
- , $T = $Ts[pane]
- , o = options[pane]
- , s = state[pane]
- , c = _c[pane]
- , doFX, isShowing, isHiding, wasSliding;
-
- // QUEUE in case another action/animation is in progress
- $N.queue(function( queueNext ){
-
- if ( !$P
- || (!o.closable && !s.isShowing && !s.isHiding) // invalid request // (!o.resizable && !o.closable) ???
- || (!force && s.isClosed && !s.isShowing) // already closed
- ) return queueNext();
-
-			// onclose_start callback - will CANCEL close if returns false
- // SKIP if just 'showing' a hidden pane as 'closed'
- var abort = !s.isShowing && false === _runCallbacks("onclose_start", pane);
-
- // transfer logic vars to temp vars
- isShowing = s.isShowing;
- isHiding = s.isHiding;
- wasSliding = s.isSliding;
- // now clear the logic vars (REQUIRED before aborting)
- delete s.isShowing;
- delete s.isHiding;
-
- if (abort) return queueNext();
-
- doFX = !noAnimation && !s.isClosed && (o.fxName_close != "none");
- s.isMoving = true;
- s.isClosed = true;
- s.isVisible = false;
- // update isHidden BEFORE sizing panes
- if (isHiding) s.isHidden = true;
- else if (isShowing) s.isHidden = false;
-
- if (s.isSliding) // pane is being closed, so UNBIND trigger events
- bindStopSlidingEvents(pane, false); // will set isSliding=false
- else // resize panes adjacent to this one
- sizeMidPanes(_c[pane].dir === "horz" ? "" : "center", false); // false = NOT skipCallback
-
- // if this pane has a resizer bar, move it NOW - before animation
- setAsClosed(pane);
-
- // CLOSE THE PANE
- if (doFX) { // animate the close
- // mask panes with objects
- var masks = "center"+ (c.dir=="horz" ? ",west,east" : "");
- showMasks( masks, true ); // true = ONLY mask panes with maskObjects=true
- lockPaneForFX(pane, true); // need to set left/top so animation will work
- $P.hide( o.fxName_close, o.fxSettings_close, o.fxSpeed_close, function () {
- lockPaneForFX(pane, false); // undo
- if (s.isClosed) close_2();
- queueNext();
- });
- }
- else { // hide the pane without animation
- _hidePane(pane);
- close_2();
- queueNext();
- };
- });
-
- // SUBROUTINE
- function close_2 () {
- s.isMoving = false;
- bindStartSlidingEvent(pane, true); // will enable if o.slidable = true
-
- // if opposite-pane was autoClosed, see if it can be autoOpened now
- var altPane = _c.oppositeEdge[pane];
- if (state[ altPane ].noRoom) {
- setSizeLimits( altPane );
- makePaneFit( altPane );
- }
-
- // hide any masks shown while closing
- hideMasks();
-
- if (!skipCallback && (state.initialized || o.triggerEventsOnLoad)) {
- // onclose callback - UNLESS just 'showing' a hidden pane as 'closed'
- if (!isShowing) _runCallbacks("onclose_end", pane);
- // onhide OR onshow callback
- if (isShowing) _runCallbacks("onshow_end", pane);
- if (isHiding) _runCallbacks("onhide_end", pane);
- }
- }
- }
-
- /**
- * @param {string} pane The pane just closed, ie: north, south, east, or west
- */
-, setAsClosed = function (pane) {
- var
- $P = $Ps[pane]
- , $R = $Rs[pane]
- , $T = $Ts[pane]
- , o = options[pane]
- , s = state[pane]
- , side = _c[pane].side.toLowerCase()
- , inset = "inset"+ _c[pane].side
- , rClass = o.resizerClass
- , tClass = o.togglerClass
- , _pane = "-"+ pane // used for classNames
- , _open = "-open"
- , _sliding= "-sliding"
- , _closed = "-closed"
- ;
- $R
- .css(side, sC[inset]) // move the resizer
- .removeClass( rClass+_open +" "+ rClass+_pane+_open )
- .removeClass( rClass+_sliding +" "+ rClass+_pane+_sliding )
- .addClass( rClass+_closed +" "+ rClass+_pane+_closed )
- .unbind("dblclick."+ sID)
- ;
- // DISABLE 'resizing' when closed - do this BEFORE bindStartSlidingEvent?
- if (o.resizable && $.layout.plugins.draggable)
- $R
- .draggable("disable")
- .removeClass("ui-state-disabled") // do NOT apply disabled styling - not suitable here
- .css("cursor", "default")
- .attr("title","")
- ;
-
- // if pane has a toggler button, adjust that too
- if ($T) {
- $T
- .removeClass( tClass+_open +" "+ tClass+_pane+_open )
- .addClass( tClass+_closed +" "+ tClass+_pane+_closed )
- .attr("title", o.togglerTip_closed) // may be blank
- ;
- // toggler-content - if exists
- $T.children(".content-open").hide();
- $T.children(".content-closed").css("display","block");
- }
-
- // sync any 'pin buttons'
- syncPinBtns(pane, false);
-
- if (state.initialized) {
- // resize 'length' and position togglers for adjacent panes
- sizeHandles();
- }
- }
-
- /**
- * Open the specified pane (animation optional), and resize all other panes as needed
- *
- * @param {string} pane The pane being opened, ie: north, south, east, or west
- * @param {boolean=} [slide=false]
- * @param {boolean=} [noAnimation=false]
- * @param {boolean=} [noAlert=false]
- */
-, open = function (evt_or_pane, slide, noAnimation, noAlert) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- , $T = $Ts[pane]
- , o = options[pane]
- , s = state[pane]
- , c = _c[pane]
- , doFX, isShowing
- ;
- // QUEUE in case another action/animation is in progress
- $N.queue(function( queueNext ){
-
- if ( !$P
- || (!o.resizable && !o.closable && !s.isShowing) // invalid request
- || (s.isVisible && !s.isSliding) // already open
- ) return queueNext();
-
- // pane can ALSO be unhidden by just calling show(), so handle this scenario
- if (s.isHidden && !s.isShowing) {
- queueNext(); // call before show() because it needs the queue free
- show(pane, true);
- return;
- }
-
- if (o.autoResize && s.size != o.size) // resize pane to original size set in options
- sizePane(pane, o.size, true, true, true); // true=skipCallback/forceResize/noAnimation
- else
- // make sure there is enough space available to open the pane
- setSizeLimits(pane, slide);
-
- // onopen_start callback - will CANCEL open if returns false
- var cbReturn = _runCallbacks("onopen_start", pane);
-
- if (cbReturn === "abort")
- return queueNext();
-
- // update pane-state again in case options were changed in onopen_start
- if (cbReturn !== "NC") // NC = "No Callback"
- setSizeLimits(pane, slide);
-
- if (s.minSize > s.maxSize) { // INSUFFICIENT ROOM FOR PANE TO OPEN!
- syncPinBtns(pane, false); // make sure pin-buttons are reset
- if (!noAlert && o.noRoomToOpenTip)
- alert(o.noRoomToOpenTip);
- return queueNext(); // ABORT
- }
-
- if (slide) // START Sliding - will set isSliding=true
- bindStopSlidingEvents(pane, true); // BIND trigger events to close sliding-pane
- else if (s.isSliding) // PIN PANE (stop sliding) - open pane 'normally' instead
- bindStopSlidingEvents(pane, false); // UNBIND trigger events - will set isSliding=false
- else if (o.slidable)
- bindStartSlidingEvent(pane, false); // UNBIND trigger events
-
- s.noRoom = false; // will be reset by makePaneFit if 'noRoom'
- makePaneFit(pane);
-
- // transfer logic var to temp var
- isShowing = s.isShowing;
- // now clear the logic var
- delete s.isShowing;
-
- doFX = !noAnimation && s.isClosed && (o.fxName_open != "none");
- s.isMoving = true;
- s.isVisible = true;
- s.isClosed = false;
- // update isHidden BEFORE sizing panes - WHY??? Old?
- if (isShowing) s.isHidden = false;
-
- if (doFX) { // ANIMATE
- // mask panes with objects
- var masks = "center"+ (c.dir=="horz" ? ",west,east" : "");
- if (s.isSliding) masks += ","+ _c.oppositeEdge[pane];
- showMasks( masks, true ); // true = ONLY mask panes with maskObjects=true
- lockPaneForFX(pane, true); // need to set left/top so animation will work
- $P.show( o.fxName_open, o.fxSettings_open, o.fxSpeed_open, function() {
- lockPaneForFX(pane, false); // undo
- if (s.isVisible) open_2(); // continue
- queueNext();
- });
- }
- else { // no animation
- _showPane(pane);// just show pane and...
- open_2(); // continue
- queueNext();
- };
- });
-
- // SUBROUTINE
- function open_2 () {
- s.isMoving = false;
-
- // cure iframe display issues
- _fixIframe(pane);
-
- // NOTE: if isSliding, then other panes are NOT 'resized'
- if (!s.isSliding) { // resize all panes adjacent to this one
- hideMasks(); // remove any masks shown while opening
- sizeMidPanes(_c[pane].dir=="vert" ? "center" : "", false); // false = NOT skipCallback
- }
-
- // set classes, position handles and execute callbacks...
- setAsOpen(pane);
- };
-
- }
-
- /**
- * @param {string} pane The pane just opened, ie: north, south, east, or west
- * @param {boolean=} [skipCallback=false]
- */
-, setAsOpen = function (pane, skipCallback) {
- var
- $P = $Ps[pane]
- , $R = $Rs[pane]
- , $T = $Ts[pane]
- , o = options[pane]
- , s = state[pane]
- , side = _c[pane].side.toLowerCase()
- , inset = "inset"+ _c[pane].side
- , rClass = o.resizerClass
- , tClass = o.togglerClass
- , _pane = "-"+ pane // used for classNames
- , _open = "-open"
- , _closed = "-closed"
- , _sliding= "-sliding"
- ;
- $R
- .css(side, sC[inset] + getPaneSize(pane)) // move the resizer
- .removeClass( rClass+_closed +" "+ rClass+_pane+_closed )
- .addClass( rClass+_open +" "+ rClass+_pane+_open )
- ;
- if (s.isSliding)
- $R.addClass( rClass+_sliding +" "+ rClass+_pane+_sliding )
- else // in case 'was sliding'
- $R.removeClass( rClass+_sliding +" "+ rClass+_pane+_sliding )
-
- if (o.resizerDblClickToggle)
- $R.bind("dblclick", toggle );
- removeHover( 0, $R ); // remove hover classes
- if (o.resizable && $.layout.plugins.draggable)
- $R .draggable("enable")
- .css("cursor", o.resizerCursor)
- .attr("title", o.resizerTip);
- else if (!s.isSliding)
-			$R.css("cursor", "default"); // reset from the resize-cursor (n-resize, s-resize, etc)
-
- // if pane also has a toggler button, adjust that too
- if ($T) {
- $T .removeClass( tClass+_closed +" "+ tClass+_pane+_closed )
- .addClass( tClass+_open +" "+ tClass+_pane+_open )
- .attr("title", o.togglerTip_open); // may be blank
- removeHover( 0, $T ); // remove hover classes
- // toggler-content - if exists
- $T.children(".content-closed").hide();
- $T.children(".content-open").css("display","block");
- }
-
- // sync any 'pin buttons'
- syncPinBtns(pane, !s.isSliding);
-
- // update pane-state dimensions - BEFORE resizing content
- $.extend(s, elDims($P));
-
- if (state.initialized) {
- // resize resizer & toggler sizes for all panes
- sizeHandles();
- // resize content every time pane opens - to be sure
- sizeContent(pane, true); // true = remeasure headers/footers, even if 'pane.isMoving'
- }
-
- if (!skipCallback && (state.initialized || o.triggerEventsOnLoad) && $P.is(":visible")) {
- // onopen callback
- _runCallbacks("onopen_end", pane);
- // onshow callback - TODO: should this be here?
- if (s.isShowing) _runCallbacks("onshow_end", pane);
-
- // ALSO call onresize because layout-size *may* have changed while pane was closed
- if (state.initialized)
- _runCallbacks("onresize_end", pane);
- }
-
- // TODO: Somehow sizePane("north") is being called after this point???
- }
-
-
- /**
- * slideOpen / slideClose / slideToggle
- *
-	 * Pass-through methods for sliding
- */
-, slideOpen = function (evt_or_pane) {
- if (!isInitialized()) return;
- var evt = evtObj(evt_or_pane)
- , pane = evtPane.call(this, evt_or_pane)
- , s = state[pane]
- , delay = options[pane].slideDelay_open
- ;
- // prevent event from triggering on NEW resizer binding created below
- if (evt) evt.stopImmediatePropagation();
-
- if (s.isClosed && evt && evt.type === "mouseenter" && delay > 0)
- // trigger = mouseenter - use a delay
- timer.set(pane+"_openSlider", open_NOW, delay);
- else
- open_NOW(); // will unbind events if is already open
-
- /**
- * SUBROUTINE for timed open
- */
- function open_NOW () {
- if (!s.isClosed) // skip if no longer closed!
- bindStopSlidingEvents(pane, true); // BIND trigger events to close sliding-pane
- else if (!s.isMoving)
- open(pane, true); // true = slide - open() will handle binding
- };
- }
-
-, slideClose = function (evt_or_pane) {
- if (!isInitialized()) return;
- var evt = evtObj(evt_or_pane)
- , pane = evtPane.call(this, evt_or_pane)
- , o = options[pane]
- , s = state[pane]
- , delay = s.isMoving ? 1000 : 300 // MINIMUM delay - option may override
- ;
- if (s.isClosed || s.isResizing)
- return; // skip if already closed OR in process of resizing
- else if (o.slideTrigger_close === "click")
- close_NOW(); // close immediately onClick
- else if (o.preventQuickSlideClose && s.isMoving)
- return; // handle Chrome quick-close on slide-open
- else if (o.preventPrematureSlideClose && evt && $.layout.isMouseOverElem(evt, $Ps[pane]))
- return; // handle incorrect mouseleave trigger, like when over a SELECT-list in IE
- else if (evt) // trigger = mouseleave - use a delay
- // 1 sec delay if 'opening', else .3 sec
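-			// eg: slideDelay_close=450 while the pane is still animating should wait max(450, 1000) = 1000ms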
- timer.set(pane+"_closeSlider", close_NOW, max(o.slideDelay_close, delay));
-		else // called programmatically
- close_NOW();
-
- /**
- * SUBROUTINE for timed close
- */
- function close_NOW () {
- if (s.isClosed) // skip 'close' if already closed!
- bindStopSlidingEvents(pane, false); // UNBIND trigger events - TODO: is this needed here?
- else if (!s.isMoving)
- close(pane); // close will handle unbinding
- };
- }
-
- /**
-	 * @param {string}	pane   The pane being toggled, ie: north, south, east, or west
- */
-, slideToggle = function (evt_or_pane) {
- var pane = evtPane.call(this, evt_or_pane);
- toggle(pane, true);
- }
-
-
- /**
- * Must set left/top on East/South panes so animation will work properly
- *
- * @param {string} pane The pane to lock, 'east' or 'south' - any other is ignored!
- * @param {boolean} doLock true = set left/top, false = remove
- */
-, lockPaneForFX = function (pane, doLock) {
- var $P = $Ps[pane]
- , s = state[pane]
- , o = options[pane]
- , z = options.zIndexes
- ;
- if (doLock) {
- $P.css({ zIndex: z.pane_animate }); // overlay all elements during animation
- if (pane=="south")
- $P.css({ top: sC.insetTop + sC.innerHeight - $P.outerHeight() });
- else if (pane=="east")
- $P.css({ left: sC.insetLeft + sC.innerWidth - $P.outerWidth() });
- }
- else { // animation DONE - RESET CSS
- // TODO: see if this can be deleted. It causes a quick-close when sliding in Chrome
- $P.css({ zIndex: (s.isSliding ? z.pane_sliding : z.pane_normal) });
- if (pane=="south")
- $P.css({ top: "auto" });
- // if pane is positioned 'off-screen', then DO NOT screw with it!
- else if (pane=="east" && !$P.css("left").match(/\-99999/))
- $P.css({ left: "auto" });
- // fix anti-aliasing in IE - only needed for animations that change opacity
- if (browser.msie && o.fxOpacityFix && o.fxName_open != "slide" && $P.css("filter") && $P.css("opacity") == 1)
- $P[0].style.removeAttribute('filter');
- }
- }
-
-
- /**
-	 * Toggle sliding functionality of a specific pane on/off by adding/removing the 'slide open' trigger
- *
- * @see open(), close()
- * @param {string} pane The pane to enable/disable, 'north', 'south', etc.
- * @param {boolean} enable Enable or Disable sliding?
- */
-, bindStartSlidingEvent = function (pane, enable) {
- var o = options[pane]
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- , evtName = o.slideTrigger_open.toLowerCase()
- ;
- if (!$R || (enable && !o.slidable)) return;
-
- // make sure we have a valid event
- if (evtName.match(/mouseover/))
- evtName = o.slideTrigger_open = "mouseenter";
- else if (!evtName.match(/click|dblclick|mouseenter/))
- evtName = o.slideTrigger_open = "click";
-
- $R
- // add or remove event
- [enable ? "bind" : "unbind"](evtName +'.'+ sID, slideOpen)
- // set the appropriate cursor & title/tip
- .css("cursor", enable ? o.sliderCursor : "default")
- .attr("title", enable ? o.sliderTip : "")
- ;
- }
-
- /**
- * Add or remove 'mouseleave' events to 'slide close' when pane is 'sliding' open or closed
- * Also increases zIndex when pane is sliding open
- * See bindStartSlidingEvent for code to control 'slide open'
- *
- * @see slideOpen(), slideClose()
- * @param {string} pane The pane to process, 'north', 'south', etc.
- * @param {boolean} enable Enable or Disable events?
- */
-, bindStopSlidingEvents = function (pane, enable) {
- var o = options[pane]
- , s = state[pane]
- , c = _c[pane]
- , z = options.zIndexes
- , evtName = o.slideTrigger_close.toLowerCase()
- , action = (enable ? "bind" : "unbind")
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- ;
- s.isSliding = enable; // logic
- timer.clear(pane+"_closeSlider"); // just in case
-
- // remove 'slideOpen' event from resizer
- // ALSO will raise the zIndex of the pane & resizer
- if (enable) bindStartSlidingEvent(pane, false);
-
- // RE/SET zIndex - increases when pane is sliding-open, resets to normal when not
- $P.css("zIndex", enable ? z.pane_sliding : z.pane_normal);
- $R.css("zIndex", enable ? z.pane_sliding+2 : z.resizer_normal); // NOTE: mask = pane_sliding+1
-
- // make sure we have a valid event
- if (!evtName.match(/click|mouseleave/))
- evtName = o.slideTrigger_close = "mouseleave"; // also catches 'mouseout'
-
- // add/remove slide triggers
- $R[action](evtName, slideClose); // base event on resize
- // need extra events for mouseleave
- if (evtName === "mouseleave") {
- // also close on pane.mouseleave
- $P[action]("mouseleave."+ sID, slideClose);
- // cancel timer when mouse moves between 'pane' and 'resizer'
- $R[action]("mouseenter."+ sID, cancelMouseOut);
- $P[action]("mouseenter."+ sID, cancelMouseOut);
- }
-
- if (!enable)
- timer.clear(pane+"_closeSlider");
- else if (evtName === "click" && !o.resizable) {
- // IF pane is not resizable (which already has a cursor and tip)
-			// then set a cursor & title/tip on the resizer when sliding
- $R.css("cursor", enable ? o.sliderCursor : "default");
- $R.attr("title", enable ? o.togglerTip_open : ""); // use Toggler-tip, eg: "Close Pane"
- }
-
- // SUBROUTINE for mouseleave timer clearing
- function cancelMouseOut (evt) {
- timer.clear(pane+"_closeSlider");
- evt.stopPropagation();
- }
- }
-
-
- /**
- * Hides/closes a pane if there is insufficient room - reverses this when there is room again
- * MUST have already called setSizeLimits() before calling this method
- *
- * @param {string} pane The pane being resized
- * @param {boolean=} [isOpening=false] Called from onOpen?
- * @param {boolean=} [skipCallback=false] Should the onresize callback be run?
- * @param {boolean=} [force=false]
- */
-, makePaneFit = function (pane, isOpening, skipCallback, force) {
- var
- o = options[pane]
- , s = state[pane]
- , c = _c[pane]
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- , isSidePane = c.dir==="vert"
- , hasRoom = false
- ;
- // special handling for center & east/west panes
- if (pane === "center" || (isSidePane && s.noVerticalRoom)) {
- // see if there is enough room to display the pane
- // ERROR: hasRoom = s.minHeight <= s.maxHeight && (isSidePane || s.minWidth <= s.maxWidth);
- hasRoom = (s.maxHeight >= 0);
- if (hasRoom && s.noRoom) { // previously hidden due to noRoom, so show now
- _showPane(pane);
- if ($R) $R.show();
- s.isVisible = true;
- s.noRoom = false;
- if (isSidePane) s.noVerticalRoom = false;
- _fixIframe(pane);
- }
- else if (!hasRoom && !s.noRoom) { // not currently hidden, so hide now
- _hidePane(pane);
- if ($R) $R.hide();
- s.isVisible = false;
- s.noRoom = true;
- }
- }
-
- // see if there is enough room to fit the border-pane
- if (pane === "center") {
- // ignore center in this block
- }
- else if (s.minSize <= s.maxSize) { // pane CAN fit
- hasRoom = true;
- if (s.size > s.maxSize) // pane is too big - shrink it
- sizePane(pane, s.maxSize, skipCallback, force, true); // true = noAnimation
- else if (s.size < s.minSize) // pane is too small - enlarge it
- sizePane(pane, s.minSize, skipCallback, force, true);
- // need s.isVisible because new pseudoClose method keeps pane visible, but off-screen
- else if ($R && s.isVisible && $P.is(":visible")) {
- // make sure resizer-bar is positioned correctly
- // handles situation where nested layout was 'hidden' when initialized
- var side = c.side.toLowerCase()
- , pos = s.size + sC["inset"+ c.side]
- ;
- if ($.layout.cssNum($R, side) != pos) $R.css( side, pos );
- }
-
- // if was previously hidden due to noRoom, then RESET because NOW there is room
- if (s.noRoom) {
- // s.noRoom state will be set by open or show
- if (s.wasOpen && o.closable) {
- if (o.autoReopen)
- open(pane, false, true, true); // true = noAnimation, true = noAlert
- else // leave the pane closed, so just update state
- s.noRoom = false;
- }
- else
- show(pane, s.wasOpen, true, true); // true = noAnimation, true = noAlert
- }
- }
- else { // !hasRoom - pane CANNOT fit
- if (!s.noRoom) { // pane not set as noRoom yet, so hide or close it now...
- s.noRoom = true; // update state
- s.wasOpen = !s.isClosed && !s.isSliding;
- if (s.isClosed){} // SKIP
- else if (o.closable) // 'close' if possible
- close(pane, true, true); // true = force, true = noAnimation
- else // 'hide' pane if cannot just be closed
- hide(pane, true); // true = noAnimation
- }
- }
- }
-
-
- /**
- * sizePane / manualSizePane
- * sizePane is called only by internal methods whenever a pane needs to be resized
- * manualSizePane is an exposed flow-through method allowing extra code when pane is 'manually resized'
- *
- * @param {string} pane The pane being resized
- * @param {number} size The *desired* new size for this pane - will be validated
- * @param {boolean=} [skipCallback=false] Should the onresize callback be run?
- * @param {boolean=} [noAnimation=false]
- */
-, manualSizePane = function (evt_or_pane, size, skipCallback, noAnimation) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , o = options[pane]
- , s = state[pane]
- // if resizing callbacks have been delayed and resizing is now DONE, force resizing to complete...
- , forceResize = o.livePaneResizing && !s.isResizing
- ;
- // ANY call to manualSizePane disables autoResize - ie, percentage sizing
- o.autoResize = false;
- // flow-through...
- sizePane(pane, size, skipCallback, forceResize, noAnimation); // will animate resize if option enabled
- }
-
- /**
- * @param {string} pane The pane being resized
- * @param {number} size The *desired* new size for this pane - will be validated
- * @param {boolean=} [skipCallback=false] Should the onresize callback be run?
- * @param {boolean=} [force=false] Force resizing even if does not seem necessary
- * @param {boolean=} [noAnimation=false]
- */
-, sizePane = function (evt_or_pane, size, skipCallback, force, noAnimation) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane) // probably NEVER called from event?
- , o = options[pane]
- , s = state[pane]
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- , side = _c[pane].side.toLowerCase()
- , dimName = _c[pane].sizeType.toLowerCase()
- , inset = "inset"+ _c[pane].side
- , skipResizeWhileDragging = s.isResizing && !o.triggerEventsDuringLiveResize
- , doFX = noAnimation !== true && o.animatePaneSizing
- , oldSize, newSize
- ;
- // QUEUE in case another action/animation is in progress
- $N.queue(function( queueNext ){
- // calculate 'current' min/max sizes
- setSizeLimits(pane); // update pane-state
- oldSize = s.size;
- size = _parseSize(pane, size); // handle percentages & auto
- size = max(size, _parseSize(pane, o.minSize));
- size = min(size, s.maxSize);
- if (size < s.minSize) { // not enough room for pane!
- queueNext(); // call before makePaneFit() because it needs the queue free
- makePaneFit(pane, false, skipCallback); // will hide or close pane
- return;
- }
-
- // IF newSize is same as oldSize, then nothing to do - abort
- if (!force && size === oldSize)
- return queueNext();
-
- // onresize_start callback CANNOT cancel resizing because this would break the layout!
- if (!skipCallback && state.initialized && s.isVisible)
- _runCallbacks("onresize_start", pane);
-
- // resize the pane, and make sure its visible
- newSize = cssSize(pane, size);
-
- if (doFX && $P.is(":visible")) { // ANIMATE
- var fx = $.layout.effects.size[pane] || $.layout.effects.size.all
- , easing = o.fxSettings_size.easing || fx.easing
- , z = options.zIndexes
- , props = {};
- props[ dimName ] = newSize +'px';
- s.isMoving = true;
- // overlay all elements during animation
- $P.css({ zIndex: z.pane_animate })
- .show().animate( props, o.fxSpeed_size, easing, function(){
- // reset zIndex after animation
- $P.css({ zIndex: (s.isSliding ? z.pane_sliding : z.pane_normal) });
- s.isMoving = false;
- sizePane_2(); // continue
- queueNext();
- });
- }
- else { // no animation
- $P.css( dimName, newSize ); // resize pane
- // if pane is visible, then
- if ($P.is(":visible"))
- sizePane_2(); // continue
- else {
- // pane is NOT VISIBLE, so just update state data...
- // when pane is *next opened*, it will have the new size
- s.size = size; // update state.size
- $.extend(s, elDims($P)); // update state dimensions
- }
- queueNext();
- };
-
- });
-
- // SUBROUTINE
- function sizePane_2 () {
- /* Panes are sometimes not sized precisely in some browsers!?
- * This code will resize the pane up to 3 times to nudge the pane to the correct size
- */
- var actual = dimName==='width' ? $P.outerWidth() : $P.outerHeight()
- , tries = [{
- pane: pane
- , count: 1
- , target: size
- , actual: actual
- , correct: (size === actual)
- , attempt: size
- , cssSize: newSize
- }]
- , lastTry = tries[0]
-					, msg = 'Inaccurate size after resizing the '+ pane +'-pane.'
-					, thisTry // declared here so the retry loop below does not create an implicit global
-					;
- while ( !lastTry.correct ) {
- thisTry = { pane: pane, count: lastTry.count+1, target: size };
-
- if (lastTry.actual > size)
- thisTry.attempt = max(0, lastTry.attempt - (lastTry.actual - size));
- else // lastTry.actual < size
- thisTry.attempt = max(0, lastTry.attempt + (size - lastTry.actual));
-
- thisTry.cssSize = cssSize(pane, thisTry.attempt);
- $P.css( dimName, thisTry.cssSize );
-
- thisTry.actual = dimName=='width' ? $P.outerWidth() : $P.outerHeight();
- thisTry.correct = (size === thisTry.actual);
-
- // if showDebugMessages, log attempts and alert the user of this *non-fatal error*
- if (options.showDebugMessages) {
- if ( tries.length === 1) {
- _log(msg, false);
- _log(lastTry, false);
- }
- _log(thisTry, false);
- }
-
-				// after 4 tries, it's as close as it's gonna get!
- if (tries.length > 3) break;
-
- tries.push( thisTry );
- lastTry = tries[ tries.length - 1 ];
- }
- // END TESTING CODE
-
- // update pane-state dimensions
- s.size = size;
- $.extend(s, elDims($P));
-
- if (s.isVisible && $P.is(":visible")) {
- // reposition the resizer-bar
- if ($R) $R.css( side, size + sC[inset] );
- // resize the content-div
- sizeContent(pane);
- }
-
- if (!skipCallback && !skipResizeWhileDragging && state.initialized && s.isVisible)
- _runCallbacks("onresize_end", pane);
-
- // resize all the adjacent panes, and adjust their toggler buttons
- // when skipCallback passed, it means the controlling method will handle 'other panes'
- if (!skipCallback) {
- // also no callback if live-resize is in progress and NOT triggerEventsDuringLiveResize
- if (!s.isSliding) sizeMidPanes(_c[pane].dir=="horz" ? "" : "center", skipResizeWhileDragging, force);
- sizeHandles();
- }
-
- // if opposite-pane was autoClosed, see if it can be autoOpened now
- var altPane = _c.oppositeEdge[pane];
- if (size < oldSize && state[ altPane ].noRoom) {
- setSizeLimits( altPane );
- makePaneFit( altPane, false, skipCallback );
- }
-
- // DEBUG - ALERT user/developer so they know there was a sizing problem
- if (options.showDebugMessages && tries.length > 1)
- _log(msg +'\nSee the Error Console for details.', true);
- }
- }
-
- /**
- * @see initPanes(), sizePane(), resizeAll(), open(), close(), hide()
-	 * @param {string}	panes			The pane(s) being resized, comma-delimited string
- * @param {boolean=} [skipCallback=false] Should the onresize callback be run?
- * @param {boolean=} [force=false]
- */
-, sizeMidPanes = function (panes, skipCallback, force) {
- panes = (panes ? panes : "east,west,center").split(",");
-
- $.each(panes, function (i, pane) {
- if (!$Ps[pane]) return; // NO PANE - skip
- var
- o = options[pane]
- , s = state[pane]
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- , isCenter= (pane=="center")
- , hasRoom = true
- , CSS = {}
- , newCenter = calcNewCenterPaneDims()
- ;
- // update pane-state dimensions
- $.extend(s, elDims($P));
-
- if (pane === "center") {
- if (!force && s.isVisible && newCenter.width === s.outerWidth && newCenter.height === s.outerHeight)
- return true; // SKIP - pane already the correct size
- // set state for makePaneFit() logic
- $.extend(s, cssMinDims(pane), {
- maxWidth: newCenter.width
- , maxHeight: newCenter.height
- });
- CSS = newCenter;
- // convert OUTER width/height to CSS width/height
- CSS.width = cssW($P, CSS.width);
- // NEW - allow pane to extend 'below' visible area rather than hide it
- CSS.height = cssH($P, CSS.height);
- hasRoom = CSS.width >= 0 && CSS.height >= 0; // height >= 0 = ALWAYS TRUE NOW
- // during layout init, try to shrink east/west panes to make room for center
- if (!state.initialized && o.minWidth > s.outerWidth) {
- var
- reqPx = o.minWidth - s.outerWidth
- , minE = options.east.minSize || 0
- , minW = options.west.minSize || 0
- , sizeE = state.east.size
- , sizeW = state.west.size
- , newE = sizeE
- , newW = sizeW
- ;
- if (reqPx > 0 && state.east.isVisible && sizeE > minE) {
- newE = max( sizeE-minE, sizeE-reqPx );
- reqPx -= sizeE-newE;
- }
- if (reqPx > 0 && state.west.isVisible && sizeW > minW) {
- newW = max( sizeW-minW, sizeW-reqPx );
- reqPx -= sizeW-newW;
- }
- // IF we found enough extra space, then resize the border panes as calculated
- if (reqPx === 0) {
- if (sizeE != minE)
- sizePane('east', newE, true, force, true); // true = skipCallback/noAnimation - initPanes will handle when done
- if (sizeW != minW)
- sizePane('west', newW, true, force, true);
- // now start over!
- sizeMidPanes('center', skipCallback, force);
- return; // abort this loop
- }
- }
- }
- else { // for east and west, set only the height, which is same as center height
- // set state.min/maxWidth/Height for makePaneFit() logic
- if (s.isVisible && !s.noVerticalRoom)
-					$.extend(s, elDims($P), cssMinDims(pane));
- if (!force && !s.noVerticalRoom && newCenter.height === s.outerHeight)
- return true; // SKIP - pane already the correct size
- // east/west have same top, bottom & height as center
- CSS.top = newCenter.top;
- CSS.bottom = newCenter.bottom;
- // NEW - allow pane to extend 'below' visible area rather than hide it
- CSS.height = cssH($P, newCenter.height);
- s.maxHeight = CSS.height;
- hasRoom = (s.maxHeight >= 0); // ALWAYS TRUE NOW
- if (!hasRoom) s.noVerticalRoom = true; // makePaneFit() logic
- }
-
- if (hasRoom) {
- // resizeAll passes skipCallback because it triggers callbacks after ALL panes are resized
- if (!skipCallback && state.initialized)
- _runCallbacks("onresize_start", pane);
-
- $P.css(CSS); // apply the CSS to pane
- sizeHandles(pane); // also update resizer length
- if (s.noRoom && !s.isClosed && !s.isHidden)
- makePaneFit(pane); // will re-open/show auto-closed/hidden pane
- if (s.isVisible) {
- $.extend(s, elDims($P)); // update pane dimensions
- if (state.initialized) sizeContent(pane); // also resize the contents, if exists
- }
- }
- else if (!s.noRoom && s.isVisible) // no room for pane
- makePaneFit(pane); // will hide or close pane
-
- if (!s.isVisible)
- return true; // DONE - next pane
-
- /*
- * Extra CSS for IE6 or IE7 in Quirks-mode - add 'width' to NORTH/SOUTH panes
- * Normally these panes have only 'left' & 'right' positions so pane auto-sizes
-			 * ALSO required when pane is an IFRAME because it will NOT default to 'full width'
- */
- if (pane === "center") { // finished processing midPanes
- var b = $.layout.browser;
- var fix = b.isIE6 || (b.msie && !$.support.boxModel);
- if ($Ps.north && (fix || state.north.tagName=="IFRAME"))
- $Ps.north.css("width", cssW($Ps.north, sC.innerWidth));
- if ($Ps.south && (fix || state.south.tagName=="IFRAME"))
- $Ps.south.css("width", cssW($Ps.south, sC.innerWidth));
- }
-
- // resizeAll passes skipCallback because it triggers callbacks after ALL panes are resized
- if (!skipCallback && state.initialized)
- _runCallbacks("onresize_end", pane);
- });
- }
-
-
- /**
- * @see window.onresize(), callbacks or custom code
- */
-, resizeAll = function () {
- if (!state.initialized) {
- _initLayoutElements();
- return; // no need to resize since we just initialized!
- }
- var oldW = sC.innerWidth
- , oldH = sC.innerHeight
- ;
- // cannot size layout when 'container' is hidden or collapsed
-		if (!$N.is(":visible") ) return;
- $.extend( state.container, elDims( $N ) ); // UPDATE container dimensions
- if (!sC.outerHeight) return;
-
- // onresizeall_start will CANCEL resizing if returns false
-		// state.container has already been set, so user can access this info for calculations
- if (false === _runCallbacks("onresizeall_start")) return false;
-
- var // see if container is now 'smaller' than before
- shrunkH = (sC.innerHeight < oldH)
- , shrunkW = (sC.innerWidth < oldW)
- , $P, o, s, dir
- ;
- // NOTE special order for sizing: S-N-E-W
- $.each(["south","north","east","west"], function (i, pane) {
- if (!$Ps[pane]) return; // no pane - SKIP
- s = state[pane];
- o = options[pane];
- dir = _c[pane].dir;
-
- if (o.autoResize && s.size != o.size) // resize pane to original size set in options
- sizePane(pane, o.size, true, true, true); // true=skipCallback/forceResize/noAnimation
- else {
- setSizeLimits(pane);
- makePaneFit(pane, false, true, true); // true=skipCallback/forceResize
- }
- });
-
- sizeMidPanes("", true, true); // true=skipCallback, true=forceResize
- sizeHandles(); // reposition the toggler elements
-
- // trigger all individual pane callbacks AFTER layout has finished resizing
- o = options; // reuse alias
- $.each(_c.allPanes, function (i, pane) {
- $P = $Ps[pane];
- if (!$P) return; // SKIP
- if (state[pane].isVisible) // undefined for non-existent panes
- _runCallbacks("onresize_end", pane); // callback - if exists
- });
-
- _runCallbacks("onresizeall_end");
- //_triggerLayoutEvent(pane, 'resizeall');
- }
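-
-	/* resizeAll is exposed on the Instance and is useful after the layout container has been
-	 * resized or un-hidden by code the layout cannot detect. A hedged sketch - the names
-	 * `myLayout` and `#container` are assumptions, not part of the plugin.
-	 * @example $("#container").show(); // container was display:none when something else resized it
-	 * @example myLayout.resizeAll();   // re-measure the container and resize all panes
-	 */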
-
- /**
- * Whenever a pane resizes or opens that has a nested layout, trigger resizeAll
- *
- * @param {string} pane The pane just resized or opened
- */
-, resizeChildLayout = function (evt_or_pane) {
- var pane = evtPane.call(this, evt_or_pane);
- if (!options[pane].resizeChildLayout) return;
- var $P = $Ps[pane]
- , $C = $Cs[pane]
- , d = "layout"
- , P = Instance[pane]
- , L = children[pane]
- ;
- // user may have manually set EITHER instance pointer, so handle that
- if (P.child && !L) {
- // have to reverse the pointers!
- var el = P.child.container;
- L = children[pane] = (el ? el.data(d) : 0) || null; // set pointer _directly_ to layout instance
- }
-
- // if a layout-pointer exists, see if child has been destroyed
- if (L && L.destroyed)
- L = children[pane] = null; // clear child pointers
- // no child layout pointer is set - see if there is a child layout NOW
- if (!L) L = children[pane] = $P.data(d) || ($C ? $C.data(d) : 0) || null; // set/update child pointers
-
- // ALWAYS refresh the pane.child alias
- P.child = children[pane];
-
- if (L) L.resizeAll();
- }
-
-
- /**
- * IF pane has a content-div, then resize all elements inside pane to fit pane-height
- *
- * @param {string=} [panes=""] The pane(s) being resized
- * @param {boolean=} [remeasure=false] Should the content (header/footer) be remeasured?
- */
-, sizeContent = function (evt_or_panes, remeasure) {
- if (!isInitialized()) return;
-
- var panes = evtPane.call(this, evt_or_panes);
- panes = panes ? panes.split(",") : _c.allPanes;
-
- $.each(panes, function (idx, pane) {
- var
- $P = $Ps[pane]
- , $C = $Cs[pane]
- , o = options[pane]
- , s = state[pane]
- , m = s.content // m = measurements
- ;
- if (!$P || !$C || !$P.is(":visible")) return true; // NOT VISIBLE - skip
-
- // if content-element was REMOVED, update OR remove the pointer
- if (!$C.length) {
- initContent(pane, false); // false = do NOT sizeContent() - already there!
-				if (!$C) return; // no replacement element found - pointer has been removed
- }
-
- // onsizecontent_start will CANCEL resizing if returns false
- if (false === _runCallbacks("onsizecontent_start", pane)) return;
-
- // skip re-measuring offsets if live-resizing
- if ((!s.isMoving && !s.isResizing) || o.liveContentResizing || remeasure || m.top == undefined) {
- _measure();
- // if any footers are below pane-bottom, they may not measure correctly,
- // so allow pane overflow and re-measure
- if (m.hiddenFooters > 0 && $P.css("overflow") === "hidden") {
- $P.css("overflow", "visible");
- _measure(); // remeasure while overflowing
- $P.css("overflow", "hidden");
- }
- }
- // NOTE: spaceAbove/Below *includes* the pane paddingTop/Bottom, but not pane.borders
- var newH = s.innerHeight - (m.spaceAbove - s.css.paddingTop) - (m.spaceBelow - s.css.paddingBottom);
-
- if (!$C.is(":visible") || m.height != newH) {
- // size the Content element to fit new pane-size - will autoHide if not enough room
- setOuterHeight($C, newH, true); // true=autoHide
- m.height = newH; // save new height
- };
-
- if (state.initialized)
- _runCallbacks("onsizecontent_end", pane);
-
- function _below ($E) {
- return max(s.css.paddingBottom, (parseInt($E.css("marginBottom"), 10) || 0));
- };
-
- function _measure () {
- var
- ignore = options[pane].contentIgnoreSelector
- , $Fs = $C.nextAll().not(ignore || ':lt(0)') // not :lt(0) = ALL
- , $Fs_vis = $Fs.filter(':visible')
- , $F = $Fs_vis.filter(':last')
- ;
- m = {
- top: $C[0].offsetTop
- , height: $C.outerHeight()
- , numFooters: $Fs.length
- , hiddenFooters: $Fs.length - $Fs_vis.length
- , spaceBelow: 0 // correct if no content footer ($E)
- }
- m.spaceAbove = m.top; // just for state - not used in calc
- m.bottom = m.top + m.height;
- if ($F.length)
-					//spaceBelow = (LastFooter.top + LastFooter.height) [footerBottom] - Content.bottom + max(LastFooter.marginBottom, pane.paddingBottom)
- m.spaceBelow = ($F[0].offsetTop + $F.outerHeight()) - m.bottom + _below($F);
- else // no footer - check marginBottom on Content element itself
- m.spaceBelow = _below($C);
- };
- });
- }
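-
-	/* sizeContent is exposed on the Instance; call it after injecting markup that changes a
-	 * pane's header/footer heights. Illustrative sketch - the selector, `newRows` and `myLayout`
-	 * names are assumptions based on the default contentSelector (".ui-layout-content").
-	 * @example $("div.ui-layout-west > .ui-layout-content").append( newRows );
-	 * @example myLayout.sizeContent("west"); // re-measure and resize the content div
-	 */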
-
-
- /**
- * Called every time a pane is opened, closed, or resized to slide the togglers to 'center' and adjust their length if necessary
- *
- * @see initHandles(), open(), close(), resizeAll()
- * @param {string=} [panes=""] The pane(s) being resized
- */
-, sizeHandles = function (evt_or_panes) {
-		var panes = evtPane.call(this, evt_or_panes);
- panes = panes ? panes.split(",") : _c.borderPanes;
-
- $.each(panes, function (i, pane) {
- var
- o = options[pane]
- , s = state[pane]
- , $P = $Ps[pane]
- , $R = $Rs[pane]
- , $T = $Ts[pane]
- , $TC
- ;
- if (!$P || !$R) return;
-
- var
- dir = _c[pane].dir
- , _state = (s.isClosed ? "_closed" : "_open")
- , spacing = o["spacing"+ _state]
- , togAlign = o["togglerAlign"+ _state]
- , togLen = o["togglerLength"+ _state]
- , paneLen
- , left
- , offset
- , CSS = {}
- ;
-
- if (spacing === 0) {
- $R.hide();
- return;
- }
- else if (!s.noRoom && !s.isHidden) // skip if resizer was hidden for any reason
- $R.show(); // in case was previously hidden
-
- // Resizer Bar is ALWAYS same width/height of pane it is attached to
- if (dir === "horz") { // north/south
- //paneLen = $P.outerWidth(); // s.outerWidth ||
- paneLen = sC.innerWidth; // handle offscreen-panes
- s.resizerLength = paneLen;
-				left = $.layout.cssNum($P, "left");
- $R.css({
- width: cssW($R, paneLen) // account for borders & padding
- , height: cssH($R, spacing) // ditto
- , left: left > -9999 ? left : sC.insetLeft // handle offscreen-panes
- });
- }
- else { // east/west
- paneLen = $P.outerHeight(); // s.outerHeight ||
- s.resizerLength = paneLen;
- $R.css({
- height: cssH($R, paneLen) // account for borders & padding
- , width: cssW($R, spacing) // ditto
- , top: sC.insetTop + getPaneSize("north", true) // TODO: what if no North pane?
- //, top: $.layout.cssNum($Ps["center"], "top")
- });
- }
-
- // remove hover classes
- removeHover( o, $R );
-
- if ($T) {
- if (togLen === 0 || (s.isSliding && o.hideTogglerOnSlide)) {
- $T.hide(); // always HIDE the toggler when 'sliding'
- return;
- }
- else
- $T.show(); // in case was previously hidden
-
- if (!(togLen > 0) || togLen === "100%" || togLen > paneLen) {
- togLen = paneLen;
- offset = 0;
- }
- else { // calculate 'offset' based on options.PANE.togglerAlign_open/closed
- if (isStr(togAlign)) {
- switch (togAlign) {
- case "top":
- case "left": offset = 0;
- break;
- case "bottom":
- case "right": offset = paneLen - togLen;
- break;
- case "middle":
- case "center":
- default: offset = round((paneLen - togLen) / 2); // 'default' catches typos
- }
- }
- else { // togAlign = number
- var x = parseInt(togAlign, 10); //
- if (togAlign >= 0) offset = x;
- else offset = paneLen - togLen + x; // NOTE: x is negative!
- }
- }
-
- if (dir === "horz") { // north/south
- var width = cssW($T, togLen);
- $T.css({
- width: width // account for borders & padding
- , height: cssH($T, spacing) // ditto
- , left: offset // TODO: VERIFY that toggler positions correctly for ALL values
- , top: 0
- });
- // CENTER the toggler content SPAN
- $T.children(".content").each(function(){
- $TC = $(this);
- $TC.css("marginLeft", round((width-$TC.outerWidth())/2)); // could be negative
- });
- }
- else { // east/west
- var height = cssH($T, togLen);
- $T.css({
- height: height // account for borders & padding
- , width: cssW($T, spacing) // ditto
- , top: offset // POSITION the toggler
- , left: 0
- });
- // CENTER the toggler content SPAN
- $T.children(".content").each(function(){
- $TC = $(this);
- $TC.css("marginTop", round((height-$TC.outerHeight())/2)); // could be negative
- });
- }
-
- // remove ALL hover classes
- removeHover( 0, $T );
- }
-
- // DONE measuring and sizing this resizer/toggler, so can be 'hidden' now
- if (!state.initialized && (o.initHidden || s.noRoom)) {
- $R.hide();
- if ($T) $T.hide();
- }
- });
- }
-
-
- /**
- * @param {string} pane
- */
-, enableClosable = function (evt_or_pane) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $T = $Ts[pane]
- , o = options[pane]
- ;
- if (!$T) return;
- o.closable = true;
- $T .bind("click."+ sID, function(evt){ evt.stopPropagation(); toggle(pane); })
- .css("visibility", "visible")
- .css("cursor", "pointer")
- .attr("title", state[pane].isClosed ? o.togglerTip_closed : o.togglerTip_open) // may be blank
- .show();
- }
- /**
- * @param {string} pane
- * @param {boolean=} [hide=false]
- */
-, disableClosable = function (evt_or_pane, hide) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $T = $Ts[pane]
- ;
- if (!$T) return;
- options[pane].closable = false;
-		// if closable is disabled, then the pane MUST be open!
- if (state[pane].isClosed) open(pane, false, true);
- $T .unbind("."+ sID)
- .css("visibility", hide ? "hidden" : "visible") // instead of hide(), which creates logic issues
- .css("cursor", "default")
- .attr("title", "");
- }
-
-
- /**
- * @param {string} pane
- */
-, enableSlidable = function (evt_or_pane) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $R = $Rs[pane]
- ;
- if (!$R || !$R.data('draggable')) return;
- options[pane].slidable = true;
-		if (state[pane].isClosed)
- bindStartSlidingEvent(pane, true);
- }
- /**
- * @param {string} pane
- */
-, disableSlidable = function (evt_or_pane) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $R = $Rs[pane]
- ;
- if (!$R) return;
- options[pane].slidable = false;
- if (state[pane].isSliding)
- close(pane, false, true);
- else {
- bindStartSlidingEvent(pane, false);
- $R .css("cursor", "default")
- .attr("title", "");
- removeHover(null, $R[0]); // in case currently hovered
- }
- }
-
-
- /**
- * @param {string} pane
- */
-, enableResizable = function (evt_or_pane) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $R = $Rs[pane]
- , o = options[pane]
- ;
- if (!$R || !$R.data('draggable')) return;
- o.resizable = true;
- $R.draggable("enable");
- if (!state[pane].isClosed)
- $R .css("cursor", o.resizerCursor)
- .attr("title", o.resizerTip);
- }
- /**
- * @param {string} pane
- */
-, disableResizable = function (evt_or_pane) {
- if (!isInitialized()) return;
- var pane = evtPane.call(this, evt_or_pane)
- , $R = $Rs[pane]
- ;
- if (!$R || !$R.data('draggable')) return;
- options[pane].resizable = false;
- $R .draggable("disable")
- .css("cursor", "default")
- .attr("title", "");
- removeHover(null, $R[0]); // in case currently hovered
- }
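-
-	/* The enable/disable methods above are exposed on the Instance so pane behavior can be
-	 * toggled at runtime. A minimal sketch - `myLayout` is an assumed instance name.
-	 * @example myLayout.disableClosable("north", true); // true = also hide the toggler
-	 * @example myLayout.enableResizable("west");
-	 * @example myLayout.disableSlidable("east");
-	 */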
-
-
- /**
- * Move a pane from source-side (eg, west) to target-side (eg, east)
- * If pane exists on target-side, move that to source-side, ie, 'swap' the panes
- *
- * @param {string} pane1 The pane/edge being swapped
- * @param {string} pane2 ditto
- */
-, swapPanes = function (evt_or_pane1, pane2) {
- if (!isInitialized()) return;
- var pane1 = evtPane.call(this, evt_or_pane1);
- // change state.edge NOW so callbacks can know where pane is headed...
- state[pane1].edge = pane2;
- state[pane2].edge = pane1;
- // run these even if NOT state.initialized
- if (false === _runCallbacks("onswap_start", pane1)
- || false === _runCallbacks("onswap_start", pane2)
- ) {
- state[pane1].edge = pane1; // reset
- state[pane2].edge = pane2;
- return;
- }
-
- var
- oPane1 = copy( pane1 )
- , oPane2 = copy( pane2 )
- , sizes = {}
- ;
- sizes[pane1] = oPane1 ? oPane1.state.size : 0;
- sizes[pane2] = oPane2 ? oPane2.state.size : 0;
-
- // clear pointers & state
- $Ps[pane1] = false;
- $Ps[pane2] = false;
- state[pane1] = {};
- state[pane2] = {};
-
- // ALWAYS remove the resizer & toggler elements
- if ($Ts[pane1]) $Ts[pane1].remove();
- if ($Ts[pane2]) $Ts[pane2].remove();
- if ($Rs[pane1]) $Rs[pane1].remove();
- if ($Rs[pane2]) $Rs[pane2].remove();
- $Rs[pane1] = $Rs[pane2] = $Ts[pane1] = $Ts[pane2] = false;
-
- // transfer element pointers and data to NEW Layout keys
- move( oPane1, pane2 );
- move( oPane2, pane1 );
-
- // cleanup objects
- oPane1 = oPane2 = sizes = null;
-
- // make panes 'visible' again
- if ($Ps[pane1]) $Ps[pane1].css(_c.visible);
- if ($Ps[pane2]) $Ps[pane2].css(_c.visible);
-
- // fix any size discrepancies caused by swap
- resizeAll();
-
- // run these even if NOT state.initialized
- _runCallbacks("onswap_end", pane1);
- _runCallbacks("onswap_end", pane2);
-
- return;
-
- function copy (n) { // n = pane
- var
- $P = $Ps[n]
- , $C = $Cs[n]
- ;
- return !$P ? false : {
- pane: n
- , P: $P ? $P[0] : false
- , C: $C ? $C[0] : false
- , state: $.extend(true, {}, state[n])
- , options: $.extend(true, {}, options[n])
- }
- };
-
- function move (oPane, pane) {
- if (!oPane) return;
- var
- P = oPane.P
- , C = oPane.C
- , oldPane = oPane.pane
- , c = _c[pane]
- , side = c.side.toLowerCase()
- , inset = "inset"+ c.side
- // save pane-options that should be retained
- , s = $.extend({}, state[pane])
- , o = options[pane]
- // RETAIN side-specific FX Settings - more below
- , fx = { resizerCursor: o.resizerCursor }
- , re, size, pos
- ;
- $.each("fxName,fxSpeed,fxSettings".split(","), function (i, k) {
- fx[k +"_open"] = o[k +"_open"];
- fx[k +"_close"] = o[k +"_close"];
- fx[k +"_size"] = o[k +"_size"];
- });
-
- // update object pointers and attributes
- $Ps[pane] = $(P)
- .data({
- layoutPane: Instance[pane] // NEW pointer to pane-alias-object
- , layoutEdge: pane
- })
- .css(_c.hidden)
- .css(c.cssReq)
- ;
- $Cs[pane] = C ? $(C) : false;
-
- // set options and state
- options[pane] = $.extend({}, oPane.options, fx);
- state[pane] = $.extend({}, oPane.state);
-
- // change classNames on the pane, eg: ui-layout-pane-east ==> ui-layout-pane-west
- re = new RegExp(o.paneClass +"-"+ oldPane, "g");
- P.className = P.className.replace(re, o.paneClass +"-"+ pane);
-
- // ALWAYS regenerate the resizer & toggler elements
- initHandles(pane); // create the required resizer & toggler
-
- // if moving to different orientation, then keep 'target' pane size
- if (c.dir != _c[oldPane].dir) {
- size = sizes[pane] || 0;
- setSizeLimits(pane); // update pane-state
- size = max(size, state[pane].minSize);
- // use manualSizePane to disable autoResize - not useful after panes are swapped
- manualSizePane(pane, size, true, true); // true/true = skipCallback/noAnimation
- }
- else // move the resizer here
- $Rs[pane].css(side, sC[inset] + (state[pane].isVisible ? getPaneSize(pane) : 0));
-
-
- // ADD CLASSNAMES & SLIDE-BINDINGS
- if (oPane.state.isVisible && !s.isVisible)
- setAsOpen(pane, true); // true = skipCallback
- else {
- setAsClosed(pane);
- bindStartSlidingEvent(pane, true); // will enable events IF option is set
- }
-
- // DESTROY the object
- oPane = null;
- };
- }
-
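-	/* swapPanes is exposed on the Instance and swaps two edge panes, eg, to move a navigation
-	 * pane from one side to the other. Illustrative only - `myLayout` is an assumed instance name.
-	 * @example myLayout.swapPanes("west", "east");
-	 * @example myLayout.swapPanes("north", "south");
-	 */
-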
-
- /**
- * INTERNAL method to sync pin-buttons when pane is opened or closed
- * Unpinned means the pane is 'sliding' - ie, over-top of the adjacent panes
- *
- * @see open(), setAsOpen(), setAsClosed()
- * @param {string} pane These are the params returned to callbacks by layout()
- * @param {boolean} doPin True means set the pin 'down', False means 'up'
- */
-, syncPinBtns = function (pane, doPin) {
- if ($.layout.plugins.buttons)
- $.each(state[pane].pins, function (i, selector) {
- $.layout.buttons.setPinState(Instance, $(selector), pane, doPin);
- });
- }
-
-; // END var DECLARATIONS
-
- /**
- * Capture keys when enableCursorHotkey - toggle pane if hotkey pressed
- *
- * @see document.keydown()
- */
- function keyDown (evt) {
- if (!evt) return true;
- var code = evt.keyCode;
- if (code < 33) return true; // ignore special keys: ENTER, TAB, etc
-
- var
- PANE = {
- 38: "north" // Up Cursor - $.ui.keyCode.UP
- , 40: "south" // Down Cursor - $.ui.keyCode.DOWN
- , 37: "west" // Left Cursor - $.ui.keyCode.LEFT
- , 39: "east" // Right Cursor - $.ui.keyCode.RIGHT
- }
-			, ALT   = evt.altKey // not used - ALT-based hotkeys do not work
- , SHIFT = evt.shiftKey
- , CTRL = evt.ctrlKey
- , CURSOR = (CTRL && code >= 37 && code <= 40)
- , o, k, m, pane
- ;
-
- if (CURSOR && options[PANE[code]].enableCursorHotkey) // valid cursor-hotkey
- pane = PANE[code];
- else if (CTRL || SHIFT) // check to see if this matches a custom-hotkey
- $.each(_c.borderPanes, function (i, p) { // loop each pane to check its hotkey
- o = options[p];
- k = o.customHotkey;
- m = o.customHotkeyModifier; // if missing or invalid, treated as "CTRL+SHIFT"
- if ((SHIFT && m=="SHIFT") || (CTRL && m=="CTRL") || (CTRL && SHIFT)) { // Modifier matches
- if (k && code === (isNaN(k) || k <= 9 ? k.toUpperCase().charCodeAt(0) : k)) { // Key matches
- pane = p;
- return false; // BREAK
- }
- }
- });
-
- // validate pane
- if (!pane || !$Ps[pane] || !options[pane].closable || state[pane].isHidden)
- return true;
-
- toggle(pane);
-
- evt.stopPropagation();
- evt.returnValue = false; // CANCEL key
- return false;
- };
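-
-	/* Hotkey handling is driven entirely by pane options. A hedged config sketch - the
-	 * container selector and hotkey value are assumptions, not defaults.
-	 * @example $("body").layout({
-	 *	west: {
-	 *		enableCursorHotkey: true	// CTRL + Left-arrow toggles the west pane
-	 *	,	customHotkey: "W"			// pressed with the customHotkeyModifier (SHIFT and/or CTRL) also toggles it
-	 *	}
-	 * });
-	 */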
-
-
-/*
- * ######################################
- * UTILITY METHODS
- * called externally or by initButtons
- * ######################################
- */
-
- /**
- * Change/reset a pane overflow setting & zIndex to allow popups/drop-downs to work
- *
- * @param {Object=} [el] (optional) Can also be 'bound' to a click, mouseOver, or other event
- */
- function allowOverflow (el) {
- if (!isInitialized()) return;
- if (this && this.tagName) el = this; // BOUND to element
- var $P;
- if (isStr(el))
- $P = $Ps[el];
- else if ($(el).data("layoutRole"))
- $P = $(el);
- else
- $(el).parents().each(function(){
- if ($(this).data("layoutRole")) {
- $P = $(this);
- return false; // BREAK
- }
- });
- if (!$P || !$P.length) return; // INVALID
-
- var
- pane = $P.data("layoutEdge")
- , s = state[pane]
- ;
-
- // if pane is already raised, then reset it before doing it again!
- // this would happen if allowOverflow is attached to BOTH the pane and an element
- if (s.cssSaved)
- resetOverflow(pane); // reset previous CSS before continuing
-
- // if pane is raised by sliding or resizing, or its closed, then abort
- if (s.isSliding || s.isResizing || s.isClosed) {
- s.cssSaved = false;
- return;
- }
-
- var
- newCSS = { zIndex: (options.zIndexes.resizer_normal + 1) }
- , curCSS = {}
- , of = $P.css("overflow")
- , ofX = $P.css("overflowX")
- , ofY = $P.css("overflowY")
- ;
- // determine which, if any, overflow settings need to be changed
- if (of != "visible") {
- curCSS.overflow = of;
- newCSS.overflow = "visible";
- }
- if (ofX && !ofX.match(/visible|auto/)) {
- curCSS.overflowX = ofX;
- newCSS.overflowX = "visible";
- }
- if (ofY && !ofY.match(/visible|auto/)) {
-			curCSS.overflowY = ofY;
- newCSS.overflowY = "visible";
- }
-
- // save the current overflow settings - even if blank!
- s.cssSaved = curCSS;
-
- // apply new CSS to raise zIndex and, if necessary, make overflow 'visible'
- $P.css( newCSS );
-
- // make sure the zIndex of all other panes is normal
- $.each(_c.allPanes, function(i, p) {
- if (p != pane) resetOverflow(p);
- });
-
- };
- /**
- * @param {Object=} [el] (optional) Can also be 'bound' to a click, mouseOver, or other event
- */
- function resetOverflow (el) {
- if (!isInitialized()) return;
- if (this && this.tagName) el = this; // BOUND to element
- var $P;
- if (isStr(el))
- $P = $Ps[el];
- else if ($(el).data("layoutRole"))
- $P = $(el);
- else
- $(el).parents().each(function(){
- if ($(this).data("layoutRole")) {
- $P = $(this);
- return false; // BREAK
- }
- });
- if (!$P || !$P.length) return; // INVALID
-
- var
- pane = $P.data("layoutEdge")
- , s = state[pane]
- , CSS = s.cssSaved || {}
- ;
- // reset the zIndex
- if (!s.isSliding && !s.isResizing)
- $P.css("zIndex", options.zIndexes.pane_normal);
-
- // reset Overflow - if necessary
- $P.css( CSS );
-
- // clear var
- s.cssSaved = false;
- };
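-
-	/* allowOverflow/resetOverflow are exposed as utilities so drop-downs inside a pane can
-	 * overlap adjacent panes while in use. Sketch only - the selector and `myLayout` are assumptions.
-	 * @example $("div.ui-layout-west").hover( myLayout.allowOverflow, myLayout.resetOverflow );
-	 * @example myLayout.allowOverflow("west"); // or pass the pane name directly
-	 */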
-
-/*
- * #####################
- * CREATE/RETURN LAYOUT
- * #####################
- */
-
- // validate that container exists
- var $N = $(this).eq(0); // FIRST matching Container element
- if (!$N.length) {
- if (options.showErrorMessages)
- _log( lang.errContainerMissing, true );
- return null;
- };
-
- // Users retrieve Instance of a layout with: $N.layout() OR $N.data("layout")
- // return the Instance-pointer if layout has already been initialized
- if ($N.data("layoutContainer") && $N.data("layout"))
- return $N.data("layout"); // cached pointer
-
- // init global vars
- var
- $Ps = {} // Panes x5 - set in initPanes()
- , $Cs = {} // Content x5 - set in initPanes()
- , $Rs = {} // Resizers x4 - set in initHandles()
- , $Ts = {} // Togglers x4 - set in initHandles()
- , $Ms = $([]) // Masks - up to 2 masks per pane (IFRAME + DIV)
- // aliases for code brevity
- , sC = state.container // alias for easy access to 'container dimensions'
- , sID = state.id // alias for unique layout ID/namespace - eg: "layout435"
- ;
-
- // create Instance object to expose data & option Properties, and primary action Methods
- var Instance = {
- // layout data
- options: options // property - options hash
- , state: state // property - dimensions hash
- // object pointers
- , container: $N // property - object pointers for layout container
- , panes: $Ps // property - object pointers for ALL Panes: panes.north, panes.center
- , contents: $Cs // property - object pointers for ALL Content: contents.north, contents.center
- , resizers: $Rs // property - object pointers for ALL Resizers, eg: resizers.north
- , togglers: $Ts // property - object pointers for ALL Togglers, eg: togglers.north
- // border-pane open/close
- , hide: hide // method - ditto
- , show: show // method - ditto
- , toggle: toggle // method - pass a 'pane' ("north", "west", etc)
- , open: open // method - ditto
- , close: close // method - ditto
- , slideOpen: slideOpen // method - ditto
- , slideClose: slideClose // method - ditto
- , slideToggle: slideToggle // method - ditto
- // pane actions
- , setSizeLimits: setSizeLimits // method - pass a 'pane' - update state min/max data
-	,	_sizePane:		sizePane	// method - intended for use by plugins only!
- , sizePane: manualSizePane // method - pass a 'pane' AND an 'outer-size' in pixels or percent, or 'auto'
- , sizeContent: sizeContent // method - pass a 'pane'
- , swapPanes: swapPanes // method - pass TWO 'panes' - will swap them
- // pane element methods
- , initContent: initContent // method - ditto
- , addPane: addPane // method - pass a 'pane'
- , removePane: removePane // method - pass a 'pane' to remove from layout, add 'true' to delete the pane-elem
-	,	createChildLayout: createChildLayout	// method - pass a 'pane' and (optional) layout-options (OVERRIDES options[pane].childOptions)
- // special pane option setting
- , enableClosable: enableClosable // method - pass a 'pane'
- , disableClosable: disableClosable // method - ditto
- , enableSlidable: enableSlidable // method - ditto
- , disableSlidable: disableSlidable // method - ditto
- , enableResizable: enableResizable // method - ditto
- , disableResizable: disableResizable// method - ditto
- // utility methods for panes
- , allowOverflow: allowOverflow // utility - pass calling element (this)
- , resetOverflow: resetOverflow // utility - ditto
- // layout control
- , destroy: destroy // method - no parameters
- , initPanes: isInitialized // method - no parameters
- , resizeAll: resizeAll // method - no parameters
- // callback triggering
- , runCallbacks: _runCallbacks // method - pass evtName & pane (if a pane-event), eg: trigger("onopen", "west")
- // alias collections of options, state and children - created in addPane and extended elsewhere
- , hasParentLayout: false // set by initContainer()
- , children: children // pointers to child-layouts, eg: Instance.children["west"]
- , north: false // alias group: { name: pane, pane: $Ps[pane], options: options[pane], state: state[pane], child: children[pane] }
- , south: false // ditto
- , west: false // ditto
- , east: false // ditto
- , center: false // ditto
- };
-
- // create the border layout NOW
- if (_create() === 'cancel') // onload_start callback returned false to CANCEL layout creation
- return null;
- else // true OR false -- if layout-elements did NOT init (hidden or do not exist), can auto-init later
- return Instance; // return the Instance object
-
-}
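-
-/* Creating a layout and later retrieving the cached Instance (see the notes above the
- * Instance declaration). Sketch only - the selector and option values are assumptions.
- * @example var myLayout = $("body").layout({ west: { size: 250 } }); // create & init
- * @example var sameInst = $("body").layout();        // returns the cached Instance
- * @example var sameToo  = $("body").data("layout");  // same pointer
- */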
-
-
-
-
-/**
- * jquery.layout.state 1.0
- * $Date: 2011-07-16 08:00:00 (Sat, 16 July 2011) $
- *
- * Copyright (c) 2010
- * Kevin Dalman (http://allpro.net)
- *
- * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html)
- * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses.
*
- * @dependencies: UI Layout 1.3.0.rc30.1 or higher
- * @dependencies: $.ui.cookie (below)
- *
- * @support: http://groups.google.com/group/jquery-ui-layout
- */
-/*
- * State-management options stored in options.stateManagement, which includes a .cookie hash
- * The default options save ALL KEYS for ALL PANES, ie: pane.size, pane.isClosed, pane.isHidden
- *
- * // STATE/COOKIE OPTIONS
- * @example $(el).layout({
- stateManagement: {
- enabled: true
- , stateKeys: "east.size,west.size,east.isClosed,west.isClosed"
- , cookie: { name: "appLayout", path: "/" }
- }
- })
- * @example $(el).layout({ stateManagement__enabled: true }) // enable auto-state-management using cookies
- * @example $(el).layout({ stateManagement__cookie: { name: "appLayout", path: "/" } })
- * @example $(el).layout({ stateManagement__cookie__name: "appLayout", stateManagement__cookie__path: "/" })
- *
- * // STATE/COOKIE METHODS
- * @example myLayout.saveCookie( "west.isClosed,north.size,south.isHidden", {expires: 7} );
- * @example myLayout.loadCookie();
- * @example myLayout.deleteCookie();
- * @example var JSON = myLayout.readState(); // CURRENT Layout State
- * @example var JSON = myLayout.readCookie(); // SAVED Layout State (from cookie)
- * @example var JSON = myLayout.state.stateData; // LAST LOADED Layout State (cookie saved in layout.state hash)
- *
- * CUSTOM STATE-MANAGEMENT (eg, saved in a database)
- * @example var JSON = myLayout.readState( "west.isClosed,north.size,south.isHidden" );
- * @example myLayout.loadState( JSON );
- */
-
-/**
- * UI COOKIE UTILITY
- *
- * A $.cookie OR $.ui.cookie namespace *should be standard*, but until then...
- * This creates $.ui.cookie so Layout does not need the cookie.jquery.js plugin
- * NOTE: This utility is REQUIRED by the layout.state plugin
- *
- * Cookie methods in Layout are created as part of State Management
- */
-if (!$.ui) $.ui = {};
-$.ui.cookie = {
-
-	// cookieEnabled is not in DOM specs, but DOES work in all browsers, including IE6
- acceptsCookies: !!navigator.cookieEnabled
-
-, read: function (name) {
- var
- c = document.cookie
- , cs = c ? c.split(';') : []
- , pair // loop var
- ;
- for (var i=0, n=cs.length; i < n; i++) {
- pair = $.trim(cs[i]).split('='); // name=value pair
- if (pair[0] == name) // found the layout cookie
- return decodeURIComponent(pair[1]);
-
- }
- return null;
- }
-
-, write: function (name, val, cookieOpts) {
- var
- params = ''
- , date = ''
- , clear = false
- , o = cookieOpts || {}
- , x = o.expires
- ;
- if (x && x.toUTCString)
- date = x;
- else if (x === null || typeof x === 'number') {
- date = new Date();
- if (x > 0)
- date.setDate(date.getDate() + x);
- else {
- date.setFullYear(1970);
- clear = true;
- }
- }
- if (date) params += ';expires='+ date.toUTCString();
- if (o.path) params += ';path='+ o.path;
- if (o.domain) params += ';domain='+ o.domain;
- if (o.secure) params += ';secure';
- document.cookie = name +'='+ (clear ? "" : encodeURIComponent( val )) + params; // write or clear cookie
- }
-
-, clear: function (name) {
- $.ui.cookie.write(name, '', {expires: -1});
- }
-
-};
-// if cookie.jquery.js is not loaded, create an alias to replicate it
-// this may be useful to other plugins or code dependent on that plugin
-if (!$.cookie) $.cookie = function (k, v, o) {
- var C = $.ui.cookie;
- if (v === null)
- C.clear(k);
- else if (v === undefined)
- return C.read(k);
- else
- C.write(k, v, o);
-};
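-
-/* The $.ui.cookie utility above can also be called directly. Sketch only - the key and
- * value names are assumptions.
- * @example $.ui.cookie.write("myKey", "myValue", { expires: 7, path: "/" });
- * @example var val = $.ui.cookie.read("myKey");  // "myValue", or null if not found
- * @example $.ui.cookie.clear("myKey");           // expire/remove the cookie
- */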
-
-
-// tell Layout that the state plugin is available
-$.layout.plugins.stateManagement = true;
-
-// Add State-Management options to layout.defaults
-$.layout.config.optionRootKeys.push("stateManagement");
-$.layout.defaults.stateManagement = {
- enabled: false // true = enable state-management, even if not using cookies
-, autoSave: true // Save a state-cookie when page exits?
-, autoLoad: true // Load the state-cookie when Layout inits?
- // List state-data to save - must be pane-specific
-, stateKeys: "north.size,south.size,east.size,west.size,"+
- "north.isClosed,south.isClosed,east.isClosed,west.isClosed,"+
- "north.isHidden,south.isHidden,east.isHidden,west.isHidden"
-, cookie: {
- name: "" // If not specified, will use Layout.name, else just "Layout"
- , domain: "" // blank = current domain
- , path: "" // blank = current page, '/' = entire website
- , expires: "" // 'days' to keep cookie - leave blank for 'session cookie'
- , secure: false
- }
-};
-// Set stateManagement as a layout-option, NOT a pane-option
-$.layout.optionsMap.layout.push("stateManagement");
-
-/*
- * State Management methods
- */
-$.layout.state = {
-
- /**
- * Get the current layout state and save it to a cookie
- *
- * myLayout.saveCookie( keys, cookieOpts )
- *
- * @param {Object} inst
- * @param {(string|Array)=} keys
- * @param {Object=} opts
- */
- saveCookie: function (inst, keys, cookieOpts) {
- var o = inst.options
- , oS = o.stateManagement
- , oC = $.extend(true, {}, oS.cookie, cookieOpts || null)
- , data = inst.state.stateData = inst.readState( keys || oS.stateKeys ) // read current panes-state
- ;
- $.ui.cookie.write( oC.name || o.name || "Layout", $.layout.state.encodeJSON(data), oC );
-		return $.extend(true, {}, data); // return a COPY of state.stateData
- }
-
- /**
- * Remove the state cookie
- *
- * @param {Object} inst
- */
-, deleteCookie: function (inst) {
- var o = inst.options;
- $.ui.cookie.clear( o.stateManagement.cookie.name || o.name || "Layout" );
- }
-
- /**
- * Read & return data from the cookie - as JSON
- *
- * @param {Object} inst
- */
-, readCookie: function (inst) {
- var o = inst.options;
- var c = $.ui.cookie.read( o.stateManagement.cookie.name || o.name || "Layout" );
- // convert cookie string back to a hash and return it
- return c ? $.layout.state.decodeJSON(c) : {};
- }
-
- /**
- * Get data from the cookie and USE IT to loadState
- *
- * @param {Object} inst
- */
-, loadCookie: function (inst) {
- var c = $.layout.state.readCookie(inst); // READ the cookie
- if (c) {
- inst.state.stateData = $.extend(true, {}, c); // SET state.stateData
- inst.loadState(c); // LOAD the retrieved state
- }
- return c;
- }
-
- /**
- * Update layout options from the cookie, if one exists
- *
- * @param {Object} inst
- * @param {Object=} stateData
- * @param {boolean=} animate
- */
-, loadState: function (inst, stateData, animate) {
- stateData = $.layout.transformData( stateData ); // panes = default subkey
- if ($.isEmptyObject( stateData )) return;
- $.extend(true, inst.options, stateData); // update layout options
- // if layout has already been initialized, then UPDATE layout state
- if (inst.state.initialized) {
-			var pane, vis, o, s, h, c, state // 'state' declared locally so the loop below does not create an implicit global
- , noAnimate = (animate===false)
- ;
- $.each($.layout.config.borderPanes, function (idx, pane) {
- state = inst.state[pane];
- o = stateData[ pane ];
- if (typeof o != 'object') return; // no key, continue
- s = o.size;
- c = o.initClosed;
- h = o.initHidden;
- vis = state.isVisible;
- // resize BEFORE opening
- if (!vis)
- inst.sizePane(pane, s, false, false);
- if (h === true) inst.hide(pane, noAnimate);
- else if (c === false) inst.open (pane, false, noAnimate);
- else if (c === true) inst.close(pane, false, noAnimate);
- else if (h === false) inst.show (pane, false, noAnimate);
- // resize AFTER any other actions
- if (vis)
- inst.sizePane(pane, s, false, noAnimate); // animate resize if option passed
- });
- };
- }
-
- /**
- * Get the *current layout state* and return it as a hash
- *
- * @param {Object=} inst
- * @param {(string|Array)=} keys
- */
-, readState: function (inst, keys) {
- var
- data = {}
- , alt = { isClosed: 'initClosed', isHidden: 'initHidden' }
- , state = inst.state
- , panes = $.layout.config.allPanes
- , pair, pane, key, val
- ;
- if (!keys) keys = inst.options.stateManagement.stateKeys; // if called by user
- if ($.isArray(keys)) keys = keys.join(",");
- // convert keys to an array and change delimiters from '__' to '.'
- keys = keys.replace(/__/g, ".").split(',');
- // loop keys and create a data hash
- for (var i=0, n=keys.length; i < n; i++) {
- pair = keys[i].split(".");
- pane = pair[0];
- key = pair[1];
- if ($.inArray(pane, panes) < 0) continue; // bad pane!
- val = state[ pane ][ key ];
- if (val == undefined) continue;
- if (key=="isClosed" && state[pane]["isSliding"])
- val = true; // if sliding, then *really* isClosed
- ( data[pane] || (data[pane]={}) )[ alt[key] ? alt[key] : key ] = val;
- }
- return data;
- }
-
- /**
-	 * Stringify a JSON hash so it can be saved in a cookie or db-field
- */
-, encodeJSON: function (JSON) {
- return parse(JSON);
- function parse (h) {
- var D=[], i=0, k, v, t; // k = key, v = value
- for (k in h) {
- v = h[k];
- t = typeof v;
- if (t == 'string') // STRING - add quotes
- v = '"'+ v +'"';
- else if (t == 'object') // SUB-KEY - recurse into it
- v = parse(v);
- D[i++] = '"'+ k +'":'+ v;
- }
- return '{'+ D.join(',') +'}';
- };
- }
-
- /**
- * Convert stringified JSON back to a hash object
-	 * @see $.parseJSON(), added in jQuery 1.4.1
- */
-, decodeJSON: function (str) {
- try { return $.parseJSON ? $.parseJSON(str) : window["eval"]("("+ str +")") || {}; }
- catch (e) { return {}; }
- }
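-
-	/* encodeJSON/decodeJSON round-trip the layout-state hash for cookie or db storage.
-	 * Sketch only - `myLayout` is an assumed instance name; the methods are attached in _create() below.
-	 * @example var json = myLayout.encodeJSON( myLayout.readState() ); // state as a string
-	 * @example var data = myLayout.decodeJSON( json );                 // back to a hash
-	 * @example myLayout.loadState( data );                             // re-apply the state
-	 */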
-
-
-, _create: function (inst) {
- var _ = $.layout.state;
- // ADD State-Management plugin methods to inst
- $.extend( inst, {
- // readCookie - update options from cookie - returns hash of cookie data
- readCookie: function () { return _.readCookie(inst); }
- // deleteCookie
- , deleteCookie: function () { _.deleteCookie(inst); }
- // saveCookie - optionally pass keys-list and cookie-options (hash)
- , saveCookie: function (keys, cookieOpts) { return _.saveCookie(inst, keys, cookieOpts); }
- // loadCookie - readCookie and use to loadState() - returns hash of cookie data
- , loadCookie: function () { return _.loadCookie(inst); }
- // loadState - pass a hash of state to use to update options
- , loadState: function (stateData, animate) { _.loadState(inst, stateData, animate); }
- // readState - returns hash of current layout-state
- , readState: function (keys) { return _.readState(inst, keys); }
- // add JSON utility methods too...
- , encodeJSON: _.encodeJSON
- , decodeJSON: _.decodeJSON
- });
-
- // init state.stateData key, even if plugin is initially disabled
- inst.state.stateData = {};
-
- // read and load cookie-data per options
- var oS = inst.options.stateManagement;
- if (oS.enabled) {
- if (oS.autoLoad) // update the options from the cookie
- inst.loadCookie();
- else // don't modify options - just store cookie data in state.stateData
- inst.state.stateData = inst.readCookie();
- }
- }
-
-, _unload: function (inst) {
- var oS = inst.options.stateManagement;
- if (oS.enabled) {
- if (oS.autoSave) // save a state-cookie automatically
- inst.saveCookie();
- else // don't save a cookie, but do store state-data in state.stateData key
- inst.state.stateData = inst.readState();
- }
- }
-
-};
-
-// add state initialization method to Layout's onCreate array of functions
-$.layout.onCreate.push( $.layout.state._create );
-$.layout.onUnload.push( $.layout.state._unload );
-
-
-
-
-/**
- * jquery.layout.buttons 1.0
- * $Date: 2011-07-16 08:00:00 (Sat, 16 July 2011) $
- *
- * Copyright (c) 2010
- * Kevin Dalman (http://allpro.net)
- *
- * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html)
- * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses.
- *
- * @dependencies: UI Layout 1.3.0.rc30.1 or higher
- *
- * @support: http://groups.google.com/group/jquery-ui-layout
- *
- * Docs: [ to come ]
- * Tips: [ to come ]
- */
-
-// tell Layout that the buttons plugin is available
-$.layout.plugins.buttons = true;
-
-// Add buttons options to layout.defaults
-$.layout.defaults.autoBindCustomButtons = false;
-// Specify autoBindCustomButtons as a layout-option, NOT a pane-option
-$.layout.optionsMap.layout.push("autoBindCustomButtons");
-
-var lang = $.layout.language;
-
-/*
- * Button methods
- */
-$.layout.buttons = {
-
- /**
- * Searches for .ui-layout-button-xxx elements and auto-binds them as layout-buttons
- *
- * @see _create()
- *
- * @param {Object} inst Layout Instance object
- */
- init: function (inst) {
- var pre = "ui-layout-button-"
- , layout = inst.options.name || ""
- , name;
- $.each("toggle,open,close,pin,toggle-slide,open-slide".split(","), function (i, action) {
- $.each($.layout.config.borderPanes, function (ii, pane) {
- $("."+pre+action+"-"+pane).each(function(){
- // if button was previously 'bound', data.layoutName was set, but is blank if layout has no 'name'
- name = $(this).data("layoutName") || $(this).attr("layoutName");
- if (name == undefined || name === layout)
- inst.bindButton(this, action, pane);
- });
- });
- });
- }
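-
-	/* Auto-binding is driven purely by class names. Sketch only - the markup below is an
-	 * assumption, not part of the plugin.
-	 * @example <button class="ui-layout-button-toggle-west">Toggle West</button>
-	 * @example $("body").layout({ autoBindCustomButtons: true }); // init() binds the button above
-	 */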
-
- /**
- * Helper function to validate params received by addButton utilities
- *
- * Two classes are added to the element, based on the buttonClass...
- * The type of button is appended to create the 2nd className:
- * - ui-layout-button-pin // action btnClass
- * - ui-layout-button-pin-west // action btnClass + pane
- * - ui-layout-button-toggle
- * - ui-layout-button-open
- * - ui-layout-button-close
- *
- * @param {Object} inst Layout Instance object
- * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
- * @param {string} pane Name of the pane the button is for: 'north', 'south', etc.
- *
-	 * @return {Array.<Object>}	If both params are valid, the element matching 'selector' in a jQuery wrapper - otherwise an empty jQuery object
- */
-, get: function (inst, selector, pane, action) {
- var $E = $(selector)
- , o = inst.options
- , err = o.showErrorMessages
- ;
- if (!$E.length) { // element not found
- if (err) $.layout.msg(lang.errButton + lang.selector +": "+ selector, true);
- }
-		else if ($.inArray(pane, $.layout.config.borderPanes) < 0) { // invalid 'pane' specified
- if (err) $.layout.msg(lang.errButton + lang.pane +": "+ pane, true);
- $E = $(""); // NO BUTTON
- }
- else { // VALID
- var btn = o[pane].buttonClass +"-"+ action;
- $E .addClass( btn +" "+ btn +"-"+ pane )
- .data("layoutName", o.name); // add layout identifier - even if blank!
- }
- return $E;
- }
-
-
- /**
- * NEW syntax for binding layout-buttons - will eventually replace addToggle, addOpen, etc.
- *
- * @param {Object} inst Layout Instance object
- * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
- * @param {string} action
- * @param {string} pane
- */
-, bind: function (inst, selector, action, pane) {
- var _ = $.layout.buttons;
- switch (action.toLowerCase()) {
- case "toggle": _.addToggle (inst, selector, pane); break;
- case "open": _.addOpen (inst, selector, pane); break;
- case "close": _.addClose (inst, selector, pane); break;
- case "pin": _.addPin (inst, selector, pane); break;
- case "toggle-slide": _.addToggle (inst, selector, pane, true); break;
- case "open-slide": _.addOpen (inst, selector, pane, true); break;
- }
- return inst;
- }
-
- /**
- * Add a custom Toggler button for a pane
- *
- * @param {Object} inst Layout Instance object
- * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
- * @param {string} pane Name of the pane the button is for: 'north', 'south', etc.
- * @param {boolean=} slide true = slide-open, false = pin-open
- */
-, addToggle: function (inst, selector, pane, slide) {
- $.layout.buttons.get(inst, selector, pane, "toggle")
- .click(function(evt){
- inst.toggle(pane, !!slide);
- evt.stopPropagation();
- });
- return inst;
- }
-
- /**
- * Add a custom Open button for a pane
- *
- * @param {Object} inst Layout Instance object
- * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
- * @param {string} pane Name of the pane the button is for: 'north', 'south', etc.
- * @param {boolean=} slide true = slide-open, false = pin-open
- */
-, addOpen: function (inst, selector, pane, slide) {
- $.layout.buttons.get(inst, selector, pane, "open")
- .attr("title", lang.Open)
- .click(function (evt) {
- inst.open(pane, !!slide);
- evt.stopPropagation();
- });
- return inst;
- }
-
- /**
- * Add a custom Close button for a pane
- *
- * @param {Object} inst Layout Instance object
- * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
- * @param {string} pane Name of the pane the button is for: 'north', 'south', etc.
- */
-, addClose: function (inst, selector, pane) {
- $.layout.buttons.get(inst, selector, pane, "close")
- .attr("title", lang.Close)
- .click(function (evt) {
- inst.close(pane);
- evt.stopPropagation();
- });
- return inst;
- }
-
- /**
- * Add a custom Pin button for a pane
- *
- * Four classes are added to the element, based on the paneClass for the associated pane...
- * Assuming the default paneClass and the pin is 'up', these classes are added for a west-pane pin:
- * - ui-layout-pane-pin
- * - ui-layout-pane-west-pin
- * - ui-layout-pane-pin-up
- * - ui-layout-pane-west-pin-up
- *
- * @param {Object} inst Layout Instance object
- * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
- * @param {string} pane Name of the pane the pin is for: 'north', 'south', etc.
- */
-, addPin: function (inst, selector, pane) {
- var _ = $.layout.buttons
- , $E = _.get(inst, selector, pane, "pin");
- if ($E.length) {
- var s = inst.state[pane];
- $E.click(function (evt) {
- _.setPinState(inst, $(this), pane, (s.isSliding || s.isClosed));
- if (s.isSliding || s.isClosed) inst.open( pane ); // change from sliding to open
- else inst.close( pane ); // slide-closed
- evt.stopPropagation();
- });
- // add up/down pin attributes and classes
- _.setPinState(inst, $E, pane, (!s.isClosed && !s.isSliding));
- // add this pin to the pane data so we can 'sync it' automatically
- // PANE.pins key is an array so we can store multiple pins for each pane
- s.pins.push( selector ); // just save the selector string
- }
- return inst;
- }
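-
-	/* Pin buttons are normally added via the Instance methods created in _load() below.
-	 * Sketch only - the selector and instance names are assumptions.
-	 * @example myLayout.bindButton("#pinWest", "pin", "west"); // preferred syntax
-	 * @example myLayout.addPinBtn("#pinWest", "west");         // deprecated alias
-	 */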
-
- /**
- * Change the class of the pin button to make it look 'up' or 'down'
- *
- * @see addPin(), syncPins()
- *
- * @param {Object} inst Layout Instance object
- * @param {Array.<Object>} $Pin The pin-span element in a jQuery wrapper
- * @param {string} pane These are the params returned to callbacks by layout()
- * @param {boolean} doPin true = set the pin 'down', false = set it 'up'
- */
-, setPinState: function (inst, $Pin, pane, doPin) {
- var updown = $Pin.attr("pin");
- if (updown && doPin === (updown=="down")) return; // already in correct state
- var
- pin = inst.options[pane].buttonClass +"-pin"
- , side = pin +"-"+ pane
- , UP = pin +"-up "+ side +"-up"
- , DN = pin +"-down "+side +"-down"
- ;
- $Pin
- .attr("pin", doPin ? "down" : "up") // logic
- .attr("title", doPin ? lang.Unpin : lang.Pin)
- .removeClass( doPin ? UP : DN )
- .addClass( doPin ? DN : UP )
- ;
- }
-
- /**
- * INTERNAL function to sync 'pin buttons' when pane is opened or closed
- * Unpinned means the pane is 'sliding' - ie, over-top of the adjacent panes
- *
- * @see open(), close()
- *
- * @param {Object} inst Layout Instance object
- * @param {string} pane These are the params returned to callbacks by layout()
- * @param {boolean} doPin True means set the pin 'down', False means 'up'
- */
-, syncPinBtns: function (inst, pane, doPin) {
- // REAL METHOD IS _INSIDE_ LAYOUT - THIS IS HERE JUST FOR REFERENCE
- $.each(state[pane].pins, function (i, selector) {
- $.layout.buttons.setPinState(inst, $(selector), pane, doPin);
- });
- }
-
-
-, _load: function (inst) {
- var _ = $.layout.buttons;
- // ADD Button methods to Layout Instance
- // Note: sel = jQuery Selector string
- $.extend( inst, {
- bindButton: function (sel, action, pane) { return _.bind(inst, sel, action, pane); }
- // DEPRECATED METHODS
- , addToggleBtn: function (sel, pane, slide) { return _.addToggle(inst, sel, pane, slide); }
- , addOpenBtn: function (sel, pane, slide) { return _.addOpen(inst, sel, pane, slide); }
- , addCloseBtn: function (sel, pane) { return _.addClose(inst, sel, pane); }
- , addPinBtn: function (sel, pane) { return _.addPin(inst, sel, pane); }
- });
-
- // init state array to hold pin-buttons
- for (var i=0; i<4; i++) {
- var pane = $.layout.config.borderPanes[i];
- inst.state[pane].pins = [];
- }
-
- // auto-init buttons onLoad if option is enabled
- if ( inst.options.autoBindCustomButtons )
- _.init(inst);
- }
-
-, _unload: function (inst) {
- // TODO: unbind all buttons???
- }
-
-};
-
-// add initialization method to Layout's onLoad array of functions
-$.layout.onLoad.push( $.layout.buttons._load );
-//$.layout.onUnload.push( $.layout.buttons._unload );
-
-
-
-/**
- * jquery.layout.browserZoom 1.0
- * $Date: 2011-12-29 08:00:00 (Thu, 29 Dec 2011) $
- *
- * Copyright (c) 2012
- * Kevin Dalman (http://allpro.net)
- *
- * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html)
- * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses.
- *
- * @dependencies: UI Layout 1.3.0.rc30.1 or higher
- *
- * @support: http://groups.google.com/group/jquery-ui-layout
- *
- * @todo: Extend logic to handle other problematic zooming in browsers
- * @todo: Add hotkey/mousewheel bindings to _instantly_ respond to these zoom events
- */
-
-// tell Layout that the plugin is available
-$.layout.plugins.browserZoom = true;
-
-$.layout.defaults.browserZoomCheckInterval = 1000;
-$.layout.optionsMap.layout.push("browserZoomCheckInterval");
-
-/*
- * browserZoom methods
+ * $Date: 2010-07-13 08:00:00 (Wed, 14 July 2010) $
+ * $Rev: 30293 $
*/
-$.layout.browserZoom = {
-
- _init: function (inst) {
- // abort if browser does not need this check
- if ($.layout.browserZoom.ratio() !== false)
- $.layout.browserZoom._setTimer(inst);
- }
-
-, _setTimer: function (inst) {
- // abort if layout destroyed or browser does not need this check
- if (inst.destroyed) return;
- var o = inst.options
- , s = inst.state
- // no check needed if inst has a parentLayout, but check occasionally in case the parent was destroyed!
- // MINIMUM 100ms interval, for performance
- , ms = inst.hasParentLayout ? 5000 : Math.max( o.browserZoomCheckInterval, 100 )
- ;
- // set the timer
- setTimeout(function(){
- if (inst.destroyed || !o.resizeWithWindow) return;
- var d = $.layout.browserZoom.ratio();
- if (d !== s.browserZoom) {
- s.browserZoom = d;
- inst.resizeAll();
- }
- // set a NEW timeout
- $.layout.browserZoom._setTimer(inst);
- }
- , ms );
- }
-
-, ratio: function () {
- var w = window
- , s = screen
- , d = document
- , dE = d.documentElement || d.body
- , b = $.layout.browser
- , v = b.version
- , r, sW, cW
- ;
- // we can ignore all browsers that fire window.resize event onZoom
- if ((b.msie && v > 8)
- || !b.msie
- ) return false; // don't need to track zoom
-
- if (s.deviceXDPI)
- return calc(s.deviceXDPI, s.systemXDPI);
- // everything below is just for future reference!
- if (b.webkit && (r = d.body.getBoundingClientRect))
- return calc((r.left - r.right), d.body.offsetWidth);
- if (b.webkit && (sW = w.outerWidth))
- return calc(sW, w.innerWidth);
- if ((sW = s.width) && (cW = dE.clientWidth))
- return calc(sW, cW);
- return false; // no match, so cannot - or don't need to - track zoom
-
- function calc (x,y) { return (parseInt(x,10) / parseInt(y,10) * 100).toFixed(); }
- }
-
-};
-// add initialization method to Layout's onLoad array of functions
-$.layout.onReady.push( $.layout.browserZoom._init );
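For a concrete sense of the zoom check above, a small worked example of the calc() helper used by ratio(): IE reports screen.deviceXDPI and screen.systemXDPI, and the helper turns their ratio into a whole-number percentage string. The DPI values below are illustrative only (96 DPI is the Windows default, 120 DPI corresponds to 125% zoom).

    // same helper as in ratio() above
    function calc (x, y) { return (parseInt(x, 10) / parseInt(y, 10) * 100).toFixed(); }

    calc(120, 96);   // "125" -> differs from the stored state.browserZoom, so _setTimer() calls inst.resizeAll()
    calc(96, 96);    // "100" -> unchanged, the timer simply re-arms itself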
-
-
-
-})( jQuery ); \ No newline at end of file
+(function($){$.fn.layout=function(opts){var lang={Pane:"Pane",Open:"Open",Close:"Close",Resize:"Resize",Slide:"Slide Open",Pin:"Pin",Unpin:"Un-Pin",selector:"selector",msgNoRoom:"Not enough room to show this pane.",errContainerMissing:"UI Layout Initialization Error\n\nThe specified layout-container does not exist.",errCenterPaneMissing:"UI Layout Initialization Error\n\nThe center-pane element does not exist.\n\nThe center-pane is a required element.",errContainerHeight:"UI Layout Initialization Warning\n\nThe layout-container \"CONTAINER\" has no height.\n\nTherefore the layout is 0-height and hence 'invisible'!",errButton:"Error Adding Button \n\nInvalid "};var options={name:"",scrollToBookmarkOnLoad:true,resizeWithWindow:true,resizeWithWindowDelay:200,resizeWithWindowMaxDelay:0,onresizeall_start:null,onresizeall_end:null,onload:null,onunload:null,autoBindCustomButtons:false,zIndex:null,defaults:{applyDemoStyles:false,closable:true,resizable:true,slidable:true,initClosed:false,initHidden:false,contentSelector:".ui-layout-content",contentIgnoreSelector:".ui-layout-ignore",findNestedContent:false,paneClass:"ui-layout-pane",resizerClass:"ui-layout-resizer",togglerClass:"ui-layout-toggler",buttonClass:"ui-layout-button",minSize:0,maxSize:0,spacing_open:6,spacing_closed:6,togglerLength_open:50,togglerLength_closed:50,togglerAlign_open:"center",togglerAlign_closed:"center",togglerTip_open:lang.Close,togglerTip_closed:lang.Open,togglerContent_open:"",togglerContent_closed:"",resizerDblClickToggle:true,autoResize:true,autoReopen:true,resizerDragOpacity:1,maskIframesOnResize:true,resizeNestedLayout:true,resizeWhileDragging:false,resizeContentWhileDragging:false,noRoomToOpenTip:lang.msgNoRoom,resizerTip:lang.Resize,sliderTip:lang.Slide,sliderCursor:"pointer",slideTrigger_open:"click",slideTrigger_close:"mouseleave",hideTogglerOnSlide:false,preventQuickSlideClose:!!($.browser.webkit||$.browser.safari),showOverflowOnHover:false,enableCursorHotkey:true,customHotkeyModifier:"SHIFT",fxName:"slide",fxSpeed:null,fxSettings:{},fxOpacityFix:true,triggerEventsOnLoad:false,triggerEventsWhileDragging:true,onshow_start:null,onshow_end:null,onhide_start:null,onhide_end:null,onopen_start:null,onopen_end:null,onclose_start:null,onclose_end:null,onresize_start:null,onresize_end:null,onsizecontent_start:null,onsizecontent_end:null,onswap_start:null,onswap_end:null,ondrag_start:null,ondrag_end:null},north:{paneSelector:".ui-layout-north",size:"auto",resizerCursor:"n-resize",customHotkey:""},south:{paneSelector:".ui-layout-south",size:"auto",resizerCursor:"s-resize",customHotkey:""},east:{paneSelector:".ui-layout-east",size:200,resizerCursor:"e-resize",customHotkey:""},west:{paneSelector:".ui-layout-west",size:200,resizerCursor:"w-resize",customHotkey:""},center:{paneSelector:".ui-layout-center",minWidth:0,minHeight:0},useStateCookie:false,cookie:{name:"",autoSave:true,autoLoad:true,domain:"",path:"",expires:"",secure:false,keys:"north.size,south.size,east.size,west.size,north.isClosed,south.isClosed,east.isClosed,west.isClosed,north.isHidden,south.isHidden,east.isHidden,west.isHidden"}};var effects={slide:{all:{duration:"fast"},north:{direction:"up"},south:{direction:"down"},east:{direction:"right"},west:{direction:"left"}},drop:{all:{duration:"slow"},north:{direction:"up"},south:{direction:"down"},east:{direction:"right"},west:{direction:"left"}},scale:{all:{duration:"fast"}}};var state={id:"layout"+new Date().getTime(),initialized:false,container:{},north:{},south:{},east:{},west:{},center:{},cookie:{}};var 
_c={allPanes:"north,south,west,east,center",borderPanes:"north,south,west,east",altSide:{north:"south",south:"north",east:"west",west:"east"},hidden:{visibility:"hidden"},visible:{visibility:"visible"},zIndex:{pane_normal:1,resizer_normal:2,iframe_mask:2,pane_sliding:100,pane_animate:1000,resizer_drag:10000},resizers:{cssReq:{position:"absolute",padding:0,margin:0,fontSize:"1px",textAlign:"left",overflow:"hidden"},cssDemo:{background:"#DDD",border:"none"}},togglers:{cssReq:{position:"absolute",display:"block",padding:0,margin:0,overflow:"hidden",textAlign:"center",fontSize:"1px",cursor:"pointer",zIndex:1},cssDemo:{background:"#AAA"}},content:{cssReq:{position:"relative"},cssDemo:{overflow:"auto",padding:"10px"},cssDemoPane:{overflow:"hidden",padding:0}},panes:{cssReq:{position:"absolute",margin:0},cssDemo:{padding:"10px",background:"#FFF",border:"1px solid #BBB",overflow:"auto"}},north:{side:"Top",sizeType:"Height",dir:"horz",cssReq:{top:0,bottom:"auto",left:0,right:0,width:"auto"},pins:[]},south:{side:"Bottom",sizeType:"Height",dir:"horz",cssReq:{top:"auto",bottom:0,left:0,right:0,width:"auto"},pins:[]},east:{side:"Right",sizeType:"Width",dir:"vert",cssReq:{left:"auto",right:0,top:"auto",bottom:"auto",height:"auto"},pins:[]},west:{side:"Left",sizeType:"Width",dir:"vert",cssReq:{left:0,right:"auto",top:"auto",bottom:"auto",height:"auto"},pins:[]},center:{dir:"center",cssReq:{left:"auto",right:"auto",top:"auto",bottom:"auto",height:"auto",width:"auto"}}};var timer={data:{},set:function(s,fn,ms){timer.clear(s);timer.data[s]=setTimeout(fn,ms)},clear:function(s){var t=timer.data;if(t[s]){clearTimeout(t[s]);delete t[s]}}};var isStr=function(o){try{return typeof o=="string"||(typeof o=="object"&&o.constructor.toString().match(/string/i)!==null)}catch(e){return false}};var str=function(o){return isStr(o)?$.trim(o):o==undefined||o==null?"":o};var min=function(x,y){return Math.min(x,y)};var max=function(x,y){return Math.max(x,y)};var _transformData=function(d){var a,json={cookie:{},defaults:{fxSettings:{}},north:{fxSettings:{}},south:{fxSettings:{}},east:{fxSettings:{}},west:{fxSettings:{}},center:{fxSettings:{}}};d=d||{};if(d.effects||d.cookie||d.defaults||d.north||d.south||d.west||d.east||d.center){json=$.extend(true,json,d)}else{$.each(d,function(key,val){a=key.split("__");if(!a[1]||json[a[0]]){json[a[1]?a[0]:"defaults"][a[1]?a[1]:a[0]]=val}})}return json};var _queue=function(action,pane,param){var tried=[];$.each(_c.borderPanes.split(","),function(i,p){if(_c[p].isMoving){bindCallback(p);return false}});function bindCallback(p){var c=_c[p];if(!c.doCallback){c.doCallback=true;c.callback=action+","+pane+","+(param?1:0)}else{tried.push(p);var cbPane=c.callback.split(",")[1];if(cbPane!=pane&&!$.inArray(cbPane,tried)>=0){bindCallback(cbPane)}}}};var _dequeue=function(pane){var c=_c[pane];_c.isLayoutBusy=false;delete c.isMoving;if(!c.doCallback||!c.callback){return}c.doCallback=false;var cb=c.callback.split(","),param=(cb[2]>0?true:false);if(cb[0]=="open"){open(cb[1],param)}else{if(cb[0]=="close"){close(cb[1],param)}}if(!c.doCallback){c.callback=null}};var _execCallback=function(pane,v_fn){if(!v_fn){return}var fn;try{if(typeof v_fn=="function"){fn=v_fn}else{if(!isStr(v_fn)){return}else{if(v_fn.match(/,/)){var args=v_fn.split(","),fn=eval(args[0]);if(typeof fn=="function"&&args.length>1){return fn(args[1])}}else{fn=eval(v_fn)}}}if(typeof fn=="function"){if(pane&&$Ps[pane]){return fn(pane,$Ps[pane],$.extend({},state[pane]),options[pane],options.name)}else{return 
fn(Instance,$.extend({},state),options,options.name)}}}catch(ex){}};var _showInvisibly=function($E,force){if(!$E){return{}}if(!$E.jquery){$E=$($E)}var CSS={display:$E.css("display"),visibility:$E.css("visibility")};if(force||CSS.display=="none"){$E.css({display:"block",visibility:"hidden"});return CSS}else{return{}}};var _fixIframe=function(pane){if(state.browser.mozilla){return}var $P=$Ps[pane];if(state[pane].tagName=="IFRAME"){$P.css(_c.hidden).css(_c.visible)}else{$P.find("IFRAME").css(_c.hidden).css(_c.visible)}};var _cssNum=function($E,prop){if(!$E.jquery){$E=$($E)}var CSS=_showInvisibly($E);var val=parseInt($.curCSS($E[0],prop,true),10)||0;$E.css(CSS);return val};var _borderWidth=function(E,side){if(E.jquery){E=E[0]}var b="border"+side.substr(0,1).toUpperCase()+side.substr(1);return $.curCSS(E,b+"Style",true)=="none"?0:(parseInt($.curCSS(E,b+"Width",true),10)||0)};var cssW=function(el,outerWidth){var str=isStr(el),$E=str?$Ps[el]:$(el);if(isNaN(outerWidth)){outerWidth=str?getPaneSize(el):$E.outerWidth()}if(outerWidth<=0){return 0}if(!state.browser.boxModel){return outerWidth}var W=outerWidth-_borderWidth($E,"Left")-_borderWidth($E,"Right")-_cssNum($E,"paddingLeft")-_cssNum($E,"paddingRight");return W>0?W:0};var cssH=function(el,outerHeight){var str=isStr(el),$E=str?$Ps[el]:$(el);if(isNaN(outerHeight)){outerHeight=str?getPaneSize(el):$E.outerHeight()}if(outerHeight<=0){return 0}if(!state.browser.boxModel){return outerHeight}var H=outerHeight-_borderWidth($E,"Top")-_borderWidth($E,"Bottom")-_cssNum($E,"paddingTop")-_cssNum($E,"paddingBottom");return H>0?H:0};var cssSize=function(pane,outerSize){if(_c[pane].dir=="horz"){return cssH(pane,outerSize)}else{return cssW(pane,outerSize)}};var cssMinDims=function(pane){var dir=_c[pane].dir,d={minWidth:1001-cssW(pane,1000),minHeight:1001-cssH(pane,1000)};if(dir=="horz"){d.minSize=d.minHeight}if(dir=="vert"){d.minSize=d.minWidth}return d};var setOuterWidth=function(el,outerWidth,autoHide){var $E=el,w;if(isStr(el)){$E=$Ps[el]}else{if(!el.jquery){$E=$(el)}}w=cssW($E,outerWidth);$E.css({width:w});if(w>0){if(autoHide&&$E.data("autoHidden")&&$E.innerHeight()>0){$E.show().data("autoHidden",false);if(!state.browser.mozilla){$E.css(_c.hidden).css(_c.visible)}}}else{if(autoHide&&!$E.data("autoHidden")){$E.hide().data("autoHidden",true)}}};var setOuterHeight=function(el,outerHeight,autoHide){var $E=el,h;if(isStr(el)){$E=$Ps[el]}else{if(!el.jquery){$E=$(el)}}h=cssH($E,outerHeight);$E.css({height:h,visibility:"visible"});if(h>0&&$E.innerWidth()>0){if(autoHide&&$E.data("autoHidden")){$E.show().data("autoHidden",false);if(!state.browser.mozilla){$E.css(_c.hidden).css(_c.visible)}}}else{if(autoHide&&!$E.data("autoHidden")){$E.hide().data("autoHidden",true)}}};var setOuterSize=function(el,outerSize,autoHide){if(_c[pane].dir=="horz"){setOuterHeight(el,outerSize,autoHide)}else{setOuterWidth(el,outerSize,autoHide)}};var _parseSize=function(pane,size,dir){if(!dir){dir=_c[pane].dir}if(isStr(size)&&size.match(/%/)){size=parseInt(size)/100}if(size===0){return 0}else{if(size>=1){return parseInt(size,10)}else{if(size>0){var o=options,avail;if(dir=="horz"){avail=sC.innerHeight-($Ps.north?o.north.spacing_open:0)-($Ps.south?o.south.spacing_open:0)}else{if(dir=="vert"){avail=sC.innerWidth-($Ps.west?o.west.spacing_open:0)-($Ps.east?o.east.spacing_open:0)}}return Math.floor(avail*size)}else{if(pane=="center"){return 0}else{var 
$P=$Ps[pane],dim=(dir=="horz"?"height":"width"),vis=_showInvisibly($P),s=$P.css(dim);$P.css(dim,"auto");size=(dim=="height")?$P.outerHeight():$P.outerWidth();$P.css(dim,s).css(vis);return size}}}}};var getPaneSize=function(pane,inclSpace){var $P=$Ps[pane],o=options[pane],s=state[pane],oSp=(inclSpace?o.spacing_open:0),cSp=(inclSpace?o.spacing_closed:0);if(!$P||s.isHidden){return 0}else{if(s.isClosed||(s.isSliding&&inclSpace)){return cSp}else{if(_c[pane].dir=="horz"){return $P.outerHeight()+oSp}else{return $P.outerWidth()+oSp}}}};var setSizeLimits=function(pane,slide){var o=options[pane],s=state[pane],c=_c[pane],dir=c.dir,side=c.side.toLowerCase(),type=c.sizeType.toLowerCase(),isSliding=(slide!=undefined?slide:s.isSliding),$P=$Ps[pane],paneSpacing=o.spacing_open,altPane=_c.altSide[pane],altS=state[altPane],$altP=$Ps[altPane],altPaneSize=(!$altP||altS.isVisible===false||altS.isSliding?0:(dir=="horz"?$altP.outerHeight():$altP.outerWidth())),altPaneSpacing=((!$altP||altS.isHidden?0:options[altPane][altS.isClosed!==false?"spacing_closed":"spacing_open"])||0),containerSize=(dir=="horz"?sC.innerHeight:sC.innerWidth),minCenterDims=cssMinDims("center"),minCenterSize=dir=="horz"?max(options.center.minHeight,minCenterDims.minHeight):max(options.center.minWidth,minCenterDims.minWidth),limitSize=(containerSize-paneSpacing-(isSliding?0:(_parseSize("center",minCenterSize,dir)+altPaneSize+altPaneSpacing))),minSize=s.minSize=max(_parseSize(pane,o.minSize),cssMinDims(pane).minSize),maxSize=s.maxSize=min((o.maxSize?_parseSize(pane,o.maxSize):100000),limitSize),r=s.resizerPosition={},top=sC.insetTop,left=sC.insetLeft,W=sC.innerWidth,H=sC.innerHeight,rW=o.spacing_open;switch(pane){case"north":r.min=top+minSize;r.max=top+maxSize;break;case"west":r.min=left+minSize;r.max=left+maxSize;break;case"south":r.min=top+H-maxSize-rW;r.max=top+H-minSize-rW;break;case"east":r.min=left+W-maxSize-rW;r.max=left+W-minSize-rW;break}};var calcNewCenterPaneDims=function(){var d={top:getPaneSize("north",true),bottom:getPaneSize("south",true),left:getPaneSize("west",true),right:getPaneSize("east",true),width:0,height:0};with(d){width=sC.innerWidth-left-right;height=sC.innerHeight-bottom-top;top+=sC.insetTop;bottom+=sC.insetBottom;left+=sC.insetLeft;right+=sC.insetRight}return d};var getElemDims=function($E){var d={},x=d.css={},i={},b,p,off=$E.offset();d.offsetLeft=off.left;d.offsetTop=off.top;$.each("Left,Right,Top,Bottom".split(","),function(idx,e){b=x["border"+e]=_borderWidth($E,e);p=x["padding"+e]=_cssNum($E,"padding"+e);i[e]=b+p;d["inset"+e]=p});d.offsetWidth=$E.innerWidth(true);d.offsetHeight=$E.innerHeight(true);d.outerWidth=$E.outerWidth();d.outerHeight=$E.outerHeight();d.innerWidth=d.outerWidth-i.Left-i.Right;d.innerHeight=d.outerHeight-i.Top-i.Bottom;x.width=$E.width();x.height=$E.height();return d};var getElemCSS=function($E,list){var CSS={},style=$E[0].style,props=list.split(","),sides="Top,Bottom,Left,Right".split(","),attrs="Color,Style,Width".split(","),p,s,a,i,j,k;for(i=0;i<props.length;i++){p=props[i];if(p.match(/(border|padding|margin)$/)){for(j=0;j<4;j++){s=sides[j];if(p=="border"){for(k=0;k<3;k++){a=attrs[k];CSS[p+s+a]=style[p+s+a]}}else{CSS[p+s]=style[p+s]}}}else{CSS[p]=style[p]}}return CSS};var getHoverClasses=function(el,allStates){var $El=$(el),type=$El.data("layoutRole"),pane=$El.data("layoutEdge"),o=options[pane],root=o[type+"Class"],_pane="-"+pane,_open="-open",_closed="-closed",_slide="-sliding",_hover="-hover 
",_state=$El.hasClass(root+_closed)?_closed:_open,_alt=_state==_closed?_open:_closed,classes=(root+_hover)+(root+_pane+_hover)+(root+_state+_hover)+(root+_pane+_state+_hover);if(allStates){classes+=(root+_alt+_hover)+(root+_pane+_alt+_hover)}if(type=="resizer"&&$El.hasClass(root+_slide)){classes+=(root+_slide+_hover)+(root+_pane+_slide+_hover)}return $.trim(classes)};var addHover=function(evt,el){var e=el||this;$(e).addClass(getHoverClasses(e))};var removeHover=function(evt,el){var e=el||this;$(e).removeClass(getHoverClasses(e,true))};var onResizerEnter=function(evt){$("body").disableSelection();addHover(evt,this)};var onResizerLeave=function(evt,el){var e=el||this,pane=$(e).data("layoutEdge"),name=pane+"ResizerLeave";timer.clear(name);if(!el){removeHover(evt,this);timer.set(name,function(){onResizerLeave(evt,e)},200)}else{if(!state[pane].isResizing){$("body").enableSelection()}}};var _create=function(){initOptions();var o=options;if(false===_execCallback(null,o.onload)){return false}if(!getPane("center").length){alert(lang.errCenterPaneMissing);return null}if(o.useStateCookie&&o.cookie.autoLoad){loadCookie()}state.browser={mozilla:$.browser.mozilla,webkit:$.browser.webkit||$.browser.safari,msie:$.browser.msie,isIE6:$.browser.msie&&$.browser.version==6,boxModel:$.support.boxModel};initContainer();initPanes();initResizable();sizeContent();if(o.scrollToBookmarkOnLoad){with(self.location){if(hash){replace(hash)}}}if(o.autoBindCustomButtons){initButtons()}initHotkeys();if(o.resizeWithWindow&&!$Container.data("layoutRole")){$(window).bind("resize."+sID,windowResize)}$(window).bind("unload."+sID,unload);state.initialized=true};var windowResize=function(){var delay=Number(options.resizeWithWindowDelay)||100;if(delay>0){timer.clear("winResize");timer.set("winResize",function(){timer.clear("winResize");timer.clear("winResizeRepeater");resizeAll()},delay);if(!timer.data.winResizeRepeater){setWindowResizeRepeater()}}};var setWindowResizeRepeater=function(){var delay=Number(options.resizeWithWindowMaxDelay);if(delay>0){timer.set("winResizeRepeater",function(){setWindowResizeRepeater();resizeAll()},delay)}};var unload=function(){var o=options;state.cookie=getState();if(o.useStateCookie&&o.cookie.autoSave){saveCookie()}_execCallback(null,o.onunload)};var initContainer=function(){var $C=$Container,tag=sC.tagName=$C.attr("tagName"),fullPage=(tag=="BODY"),props="position,margin,padding,border",CSS={};sC.selector=$C.selector.split(".slice")[0];sC.ref=tag+"/"+sC.selector;$C.data("layoutContainer",sID).data("layoutName",options.name);if(!$C.data("layoutCSS")){if(fullPage){CSS=$.extend(getElemCSS($C,props),{height:$C.css("height"),overflow:$C.css("overflow"),overflowX:$C.css("overflowX"),overflowY:$C.css("overflowY")});var $H=$("html");$H.data("layoutCSS",{height:"auto",overflow:$H.css("overflow"),overflowX:$H.css("overflowX"),overflowY:$H.css("overflowY")})}else{CSS=getElemCSS($C,props+",top,bottom,left,right,width,height,overflow,overflowX,overflowY")}$C.data("layoutCSS",CSS)}try{if(fullPage){$("html").css({height:"100%",overflow:"hidden",overflowX:"hidden",overflowY:"hidden"});$("body").css({position:"relative",height:"100%",overflow:"hidden",overflowX:"hidden",overflowY:"hidden",margin:0,padding:0,border:"none"})}else{var 
CSS={overflow:"hidden"},p=$C.css("position"),h=$C.css("height");if(!$C.data("layoutRole")){if(!p||!p.match(/fixed|absolute|relative/)){CSS.position="relative"}}$C.css(CSS);if($C.is(":visible")&&$C.innerHeight()<2){alert(lang.errContainerHeight.replace(/CONTAINER/,sC.ref))}}}catch(ex){}$.extend(state.container,getElemDims($C))};var initHotkeys=function(){$.each(_c.borderPanes.split(","),function(i,pane){var o=options[pane];if(o.enableCursorHotkey||o.customHotkey){$(document).bind("keydown."+sID,keyDown);return false}})};var initOptions=function(){opts=_transformData(opts);var newOpts={applyDefaultStyles:"applyDemoStyles"};renameOpts(opts.defaults);$.each(_c.allPanes.split(","),function(i,pane){renameOpts(opts[pane])});if(opts.effects){$.extend(effects,opts.effects);delete opts.effects}$.extend(options.cookie,opts.cookie);var globals="name,zIndex,scrollToBookmarkOnLoad,resizeWithWindow,resizeWithWindowDelay,resizeWithWindowMaxDelay,onresizeall,onresizeall_start,onresizeall_end,onload,onunload,autoBindCustomButtons,useStateCookie";$.each(globals.split(","),function(i,key){if(opts[key]!==undefined){options[key]=opts[key]}else{if(opts.defaults[key]!==undefined){options[key]=opts.defaults[key];delete opts.defaults[key]}}});$.each("paneSelector,resizerCursor,customHotkey".split(","),function(i,key){delete opts.defaults[key]});$.extend(true,options.defaults,opts.defaults);_c.center=$.extend(true,{},_c.panes,_c.center);var z=options.zIndex;if(z===0||z>0){_c.zIndex.pane_normal=z;_c.zIndex.resizer_normal=z+1;_c.zIndex.iframe_mask=z+1}$.extend(options.center,opts.center);var o_Center=$.extend(true,{},options.defaults,opts.defaults,options.center);var optionsCenter=("paneClass,contentSelector,applyDemoStyles,triggerEventsOnLoad,showOverflowOnHover,onresize,onresize_start,onresize_end,resizeNestedLayout,resizeContentWhileDragging,onsizecontent,onsizecontent_start,onsizecontent_end").split(",");$.each(optionsCenter,function(i,key){options.center[key]=o_Center[key]});var o,defs=options.defaults;$.each(_c.borderPanes.split(","),function(i,pane){_c[pane]=$.extend(true,{},_c.panes,_c[pane]);o=options[pane]=$.extend(true,{},options.defaults,options[pane],opts.defaults,opts[pane]);if(!o.paneClass){o.paneClass="ui-layout-pane"}if(!o.resizerClass){o.resizerClass="ui-layout-resizer"}if(!o.togglerClass){o.togglerClass="ui-layout-toggler"}$.each(["_open","_close",""],function(i,n){var sName="fxName"+n,sSpeed="fxSpeed"+n,sSettings="fxSettings"+n;o[sName]=opts[pane][sName]||opts[pane].fxName||opts.defaults[sName]||opts.defaults.fxName||o[sName]||o.fxName||defs[sName]||defs.fxName||"none";var fxName=o[sName];if(fxName=="none"||!$.effects||!$.effects[fxName]||(!effects[fxName]&&!o[sSettings]&&!o.fxSettings)){fxName=o[sName]="none"}var fx=effects[fxName]||{},fx_all=fx.all||{},fx_pane=fx[pane]||{};o[sSettings]=$.extend({},fx_all,fx_pane,defs.fxSettings||{},defs[sSettings]||{},o.fxSettings,o[sSettings],opts.defaults.fxSettings,opts.defaults[sSettings]||{},opts[pane].fxSettings,opts[pane][sSettings]||{});o[sSpeed]=opts[pane][sSpeed]||opts[pane].fxSpeed||opts.defaults[sSpeed]||opts.defaults.fxSpeed||o[sSpeed]||o[sSettings].duration||o.fxSpeed||o.fxSettings.duration||defs.fxSpeed||defs.fxSettings.duration||fx_pane.duration||fx_all.duration||"normal"})});function renameOpts(O){for(var key in newOpts){if(O[key]!=undefined){O[newOpts[key]]=O[key];delete O[key]}}}};var getPane=function(pane){var sel=options[pane].paneSelector;if(sel.substr(0,1)==="#"){return $Container.find(sel).eq(0)}else{var 
$P=$Container.children(sel).eq(0);return $P.length?$P:$Container.children("form:first").children(sel).eq(0)}};var initPanes=function(){$.each(_c.allPanes.split(","),function(idx,pane){var o=options[pane],s=state[pane],c=_c[pane],fx=s.fx,dir=c.dir,spacing=o.spacing_open||0,isCenter=(pane=="center"),CSS={},$P,$C,size,minSize,maxSize;$Cs[pane]=false;$P=$Ps[pane]=getPane(pane);if(!$P.length){$Ps[pane]=false;return true}if(!$P.data("layoutCSS")){var props="position,top,left,bottom,right,width,height,overflow,zIndex,display,backgroundColor,padding,margin,border";$P.data("layoutCSS",getElemCSS($P,props))}$P.data("layoutName",options.name).data("layoutRole","pane").data("layoutEdge",pane).css(c.cssReq).css("zIndex",_c.zIndex.pane_normal).css(o.applyDemoStyles?c.cssDemo:{}).addClass(o.paneClass+" "+o.paneClass+"-"+pane).bind("mouseenter."+sID,addHover).bind("mouseleave."+sID,removeHover);initContent(pane,false);if(!isCenter){size=s.size=_parseSize(pane,o.size);minSize=_parseSize(pane,o.minSize)||1;maxSize=_parseSize(pane,o.maxSize)||100000;if(size>0){size=max(min(size,maxSize),minSize)}}s.tagName=$P.attr("tagName");s.edge=pane;s.noRoom=false;s.isVisible=true;if(!isCenter){s.isClosed=false;s.isSliding=false;s.isResizing=false;s.isHidden=false}switch(pane){case"north":CSS.top=sC.insetTop;CSS.left=sC.insetLeft;CSS.right=sC.insetRight;break;case"south":CSS.bottom=sC.insetBottom;CSS.left=sC.insetLeft;CSS.right=sC.insetRight;break;case"west":CSS.left=sC.insetLeft;break;case"east":CSS.right=sC.insetRight;break;case"center":}if(dir=="horz"){CSS.height=max(1,cssH(pane,size))}else{if(dir=="vert"){CSS.width=max(1,cssW(pane,size))}}$P.css(CSS);if(dir!="horz"){sizeMidPanes(pane,true)}$P.css({visibility:"visible",display:"block"});if(o.initClosed&&o.closable){close(pane,true,true)}else{if(o.initHidden||o.initClosed){hide(pane)}}if(o.showOverflowOnHover){$P.hover(allowOverflow,resetOverflow)}});initHandles();$.each(_c.borderPanes.split(","),function(i,pane){if($Ps[pane]&&state[pane].isVisible){setSizeLimits(pane);makePaneFit(pane)}});sizeMidPanes("center");$.each(_c.allPanes.split(","),function(i,pane){var o=options[pane];if($Ps[pane]&&o.triggerEventsOnLoad&&state[pane].isVisible){_execCallback(pane,o.onresize_end||o.onresize)}});if($Container.innerHeight()<2){alert(lang.errContainerHeight.replace(/CONTAINER/,sC.ref))}};var initHandles=function(panes){if(!panes||panes=="all"){panes=_c.borderPanes}$.each(panes.split(","),function(i,pane){var $P=$Ps[pane];$Rs[pane]=false;$Ts[pane]=false;if(!$P){return}var o=options[pane],s=state[pane],c=_c[pane],rClass=o.resizerClass,tClass=o.togglerClass,side=c.side.toLowerCase(),spacing=(s.isVisible?o.spacing_open:o.spacing_closed),_pane="-"+pane,_state=(s.isVisible?"-open":"-closed"),$R=$Rs[pane]=$("<div></div>"),$T=(o.closable?$Ts[pane]=$("<div></div>"):false);if(s.isVisible&&o.resizable){}else{if(!s.isVisible&&o.slidable){$R.attr("title",o.sliderTip).css("cursor",o.sliderCursor)}}$R.attr("id",(o.paneSelector.substr(0,1)=="#"?o.paneSelector.substr(1)+"-resizer":"")).data("layoutRole","resizer").data("layoutEdge",pane).css(_c.resizers.cssReq).css("zIndex",_c.zIndex.resizer_normal).css(o.applyDemoStyles?_c.resizers.cssDemo:{}).addClass(rClass+" "+rClass+_pane).appendTo($Container);if($T){$T.attr("id",(o.paneSelector.substr(0,1)=="#"?o.paneSelector.substr(1)+"-toggler":"")).data("layoutRole","toggler").data("layoutEdge",pane).css(_c.togglers.cssReq).css(o.applyDemoStyles?_c.togglers.cssDemo:{}).addClass(tClass+" 
"+tClass+_pane).appendTo($R).click(function(evt){toggle(pane);evt.stopPropagation()}).hover(addHover,removeHover);if(o.togglerContent_open){$("<span>"+o.togglerContent_open+"</span>").data("layoutRole","togglerContent").data("layoutEdge",pane).addClass("content content-open").css("display","none").appendTo($T).hover(addHover,removeHover)}if(o.togglerContent_closed){$("<span>"+o.togglerContent_closed+"</span>").data("layoutRole","togglerContent").data("layoutEdge",pane).addClass("content content-closed").css("display","none").appendTo($T).hover(addHover,removeHover)}}if(s.isVisible){setAsOpen(pane)}else{setAsClosed(pane);bindStartSlidingEvent(pane,true)}});sizeHandles("all")};var initContent=function(pane,resize){var o=options[pane],sel=o.contentSelector,$P=$Ps[pane],$C;if(sel){$C=$Cs[pane]=(o.findNestedContent)?$P.find(sel).eq(0):$P.children(sel).eq(0)}if($C&&$C.length){$C.css(_c.content.cssReq);if(o.applyDemoStyles){$C.css(_c.content.cssDemo);$P.css(_c.content.cssDemoPane)}state[pane].content={};if(resize!==false){sizeContent(pane)}}else{$Cs[pane]=false}};var initButtons=function(){var pre="ui-layout-button-",name;$.each("toggle,open,close,pin,toggle-slide,open-slide".split(","),function(i,action){$.each(_c.borderPanes.split(","),function(ii,pane){$("."+pre+action+"-"+pane).each(function(){name=$(this).data("layoutName")||$(this).attr("layoutName");if(name==undefined||name==options.name){if(action.substr("-slide")>0){bindButton(this,action.split("-")[0],pane,true)}else{bindButton(this,action,pane)}}})})})};var initResizable=function(panes){var draggingAvailable=(typeof $.fn.draggable=="function"),$Frames,side;if(!panes||panes=="all"){panes=_c.borderPanes}$.each(panes.split(","),function(idx,pane){var o=options[pane],s=state[pane],c=_c[pane],side=(c.dir=="horz"?"top":"left"),r,live;if(!draggingAvailable||!$Ps[pane]||!o.resizable){o.resizable=false;return true}var $P=$Ps[pane],$R=$Rs[pane],base=o.resizerClass,resizerClass=base+"-drag",resizerPaneClass=base+"-"+pane+"-drag",helperClass=base+"-dragging",helperPaneClass=base+"-"+pane+"-dragging",helperLimitClass=base+"-dragging-limit",helperClassesSet=false;if(!s.isClosed){$R.attr("title",o.resizerTip).css("cursor",o.resizerCursor)}$R.hover(onResizerEnter,onResizerLeave);$R.draggable({containment:$Container[0],axis:(c.dir=="horz"?"y":"x"),delay:0,distance:1,helper:"clone",opacity:o.resizerDragOpacity,addClasses:false,zIndex:_c.zIndex.resizer_drag,start:function(e,ui){o=options[pane];s=state[pane];live=o.resizeWhileDragging;if(false===_execCallback(pane,o.ondrag_start)){return false}_c.isLayoutBusy=true;s.isResizing=true;timer.clear(pane+"_closeSlider");setSizeLimits(pane);r=s.resizerPosition;$R.addClass(resizerClass+" "+resizerPaneClass);helperClassesSet=false;$Frames=$(o.maskIframesOnResize===true?"iframe":o.maskIframesOnResize).filter(":visible");var id,i=0;$Frames.each(function(){id="ui-layout-mask-"+(++i);$(this).data("layoutMaskID",id);$('<div id="'+id+'" class="ui-layout-mask ui-layout-mask-'+pane+'"/>').css({background:"#fff",opacity:"0.001",zIndex:_c.zIndex.iframe_mask,position:"absolute",width:this.offsetWidth+"px",height:this.offsetHeight+"px"}).css($(this).position()).appendTo(this.parentNode)});$("body").disableSelection()},drag:function(e,ui){if(!helperClassesSet){ui.helper.addClass(helperClass+" "+helperPaneClass).children().css("visibility","hidden");helperClassesSet=true;if(s.isSliding){$Ps[pane].css("zIndex",_c.zIndex.pane_sliding)}}var 
limit=0;if(ui.position[side]<r.min){ui.position[side]=r.min;limit=-1}else{if(ui.position[side]>r.max){ui.position[side]=r.max;limit=1}}if(limit){ui.helper.addClass(helperLimitClass);window.defaultStatus="Panel has reached its "+((limit>0&&pane.match(/north|west/))||(limit<0&&pane.match(/south|east/))?"maximum":"minimum")+" size"}else{ui.helper.removeClass(helperLimitClass);window.defaultStatus=""}if(live){resizePanes(e,ui,pane)}},stop:function(e,ui){$("body").enableSelection();window.defaultStatus="";$R.removeClass(resizerClass+" "+resizerPaneClass+" "+helperLimitClass);s.isResizing=false;_c.isLayoutBusy=false;resizePanes(e,ui,pane,true)}});var resizePanes=function(e,ui,pane,resizingDone){var dragPos=ui.position,c=_c[pane],resizerPos,newSize,i=0;switch(pane){case"north":resizerPos=dragPos.top;break;case"west":resizerPos=dragPos.left;break;case"south":resizerPos=sC.offsetHeight-dragPos.top-o.spacing_open;break;case"east":resizerPos=sC.offsetWidth-dragPos.left-o.spacing_open;break}if(resizingDone){$("div.ui-layout-mask").each(function(){this.parentNode.removeChild(this)});if(false===_execCallback(pane,o.ondrag_end||o.ondrag)){return false}}else{$Frames.each(function(){$("#"+$(this).data("layoutMaskID")).css($(this).position()).css({width:this.offsetWidth+"px",height:this.offsetHeight+"px"})})}newSize=resizerPos-sC["inset"+c.side];manualSizePane(pane,newSize)}})};var destroy=function(){$(window).unbind("."+sID);$(document).unbind("."+sID);window[sID]=null;var fullPage=(sC.tagName=="BODY"),_open="-open",_sliding="-sliding",_closed="-closed",$P,root,pRoot,pClasses;$.each(_c.allPanes.split(","),function(i,pane){$P=$Ps[pane];if(!$P){return true}if(pane!="center"){if($Ts[pane]){$Ts[pane].remove()}$Rs[pane].remove()}root=options[pane].paneClass;pRoot=root+"-"+pane;pClasses=[root,root+_open,root+_closed,root+_sliding,pRoot,pRoot+_open,pRoot+_closed,pRoot+_sliding];$.merge(pClasses,getHoverClasses($P,true));$P.removeClass(pClasses.join(" ")).removeData("layoutRole").removeData("layoutEdge").unbind("."+sID).unbind("mouseenter").unbind("mouseleave");if(!$P.data("layoutContainer")){$P.css($P.data("layoutCSS"))}});$Container.removeData("layoutContainer");if(!$Container.data("layoutEdge")){$Container.css($Container.data("layoutCSS"))}if(fullPage){$("html").css($("html").data("layoutCSS"))}unload();var n=options.name;if(n&&window[n]){window[n]=null}};var hide=function(pane,noAnimation){var o=options[pane],s=state[pane],$P=$Ps[pane],$R=$Rs[pane];if(!$P||s.isHidden){return}if(state.initialized&&false===_execCallback(pane,o.onhide_start)){return}s.isSliding=false;if($R){$R.hide()}if(!state.initialized||s.isClosed){s.isClosed=true;s.isHidden=true;s.isVisible=false;$P.hide();sizeMidPanes(_c[pane].dir=="horz"?"all":"center");if(state.initialized||o.triggerEventsOnLoad){_execCallback(pane,o.onhide_end||o.onhide)}}else{s.isHiding=true;close(pane,false,noAnimation)}};var show=function(pane,openPane,noAnimation,noAlert){var o=options[pane],s=state[pane],$P=$Ps[pane],$R=$Rs[pane];if(!$P||!s.isHidden){return}if(false===_execCallback(pane,o.onshow_start)){return}s.isSliding=false;s.isShowing=true;if(openPane===false){close(pane,true)}else{open(pane,false,noAnimation,noAlert)}};var toggle=function(pane,slide){if(!isStr(pane)){pane.stopImmediatePropagation();pane=$(this).data("layoutEdge")}var s=state[str(pane)];if(s.isHidden){show(pane)}else{if(s.isClosed){open(pane,!!slide)}else{close(pane)}}};var _closePane=function(pane,setHandles){var $P=$Ps[pane],s=state[pane];$P.hide();s.isClosed=true;s.isVisible=false};var 
close=function(pane,force,noAnimation,skipCallback){if(!state.initialized){_closePane(pane);return}var $P=$Ps[pane],$R=$Rs[pane],$T=$Ts[pane],o=options[pane],s=state[pane],doFX=!noAnimation&&!s.isClosed&&(o.fxName_close!="none"),isShowing=s.isShowing,isHiding=s.isHiding,wasSliding=s.isSliding;delete s.isShowing;delete s.isHiding;if(!$P||!o.closable){return}else{if(!force&&s.isClosed&&!isShowing){return}}if(_c.isLayoutBusy){_queue("close",pane,force);return}if(!isShowing&&false===_execCallback(pane,o.onclose_start)){return}_c[pane].isMoving=true;_c.isLayoutBusy=true;s.isClosed=true;s.isVisible=false;if(isHiding){s.isHidden=true}else{if(isShowing){s.isHidden=false}}if(s.isSliding){bindStopSlidingEvents(pane,false)}else{sizeMidPanes(_c[pane].dir=="horz"?"all":"center",false)}setAsClosed(pane);if(doFX){lockPaneForFX(pane,true);$P.hide(o.fxName_close,o.fxSettings_close,o.fxSpeed_close,function(){lockPaneForFX(pane,false);close_2()})}else{$P.hide();close_2()}function close_2(){if(s.isClosed){bindStartSlidingEvent(pane,true);var altPane=_c.altSide[pane];if(state[altPane].noRoom){setSizeLimits(altPane);makePaneFit(altPane)}if(!skipCallback&&(state.initialized||o.triggerEventsOnLoad)){if(!isShowing&&!wasSliding){_execCallback(pane,o.onclose_end||o.onclose)}if(isShowing){_execCallback(pane,o.onshow_end||o.onshow)}if(isHiding){_execCallback(pane,o.onhide_end||o.onhide)}}}_dequeue(pane)}};var setAsClosed=function(pane){var $P=$Ps[pane],$R=$Rs[pane],$T=$Ts[pane],o=options[pane],s=state[pane],side=_c[pane].side.toLowerCase(),inset="inset"+_c[pane].side,rClass=o.resizerClass,tClass=o.togglerClass,_pane="-"+pane,_open="-open",_sliding="-sliding",_closed="-closed";$R.css(side,sC[inset]).removeClass(rClass+_open+" "+rClass+_pane+_open).removeClass(rClass+_sliding+" "+rClass+_pane+_sliding).addClass(rClass+_closed+" "+rClass+_pane+_closed).unbind("dblclick."+sID);if(o.resizable&&typeof $.fn.draggable=="function"){$R.draggable("disable").removeClass("ui-state-disabled").css("cursor","default").attr("title","")}if($T){$T.removeClass(tClass+_open+" "+tClass+_pane+_open).addClass(tClass+_closed+" "+tClass+_pane+_closed).attr("title",o.togglerTip_closed);$T.children(".content-open").hide();$T.children(".content-closed").css("display","block")}syncPinBtns(pane,false);if(state.initialized){sizeHandles("all")}};var open=function(pane,slide,noAnimation,noAlert){var $P=$Ps[pane],$R=$Rs[pane],$T=$Ts[pane],o=options[pane],s=state[pane],doFX=!noAnimation&&s.isClosed&&(o.fxName_open!="none"),isShowing=s.isShowing;delete s.isShowing;if(!$P||(!o.resizable&&!o.closable)){return}else{if(s.isVisible&&!s.isSliding){return}}if(s.isHidden&&!isShowing){show(pane,true);return}if(_c.isLayoutBusy){_queue("open",pane,slide);return}if(false===_execCallback(pane,o.onopen_start)){return}setSizeLimits(pane,slide);if(s.minSize>s.maxSize){syncPinBtns(pane,false);if(!noAlert&&o.noRoomToOpenTip){alert(o.noRoomToOpenTip)}return}_c[pane].isMoving=true;_c.isLayoutBusy=true;if(slide){bindStopSlidingEvents(pane,true)}else{if(s.isSliding){bindStopSlidingEvents(pane,false)}else{if(o.slidable){bindStartSlidingEvent(pane,false)}}}s.noRoom=false;makePaneFit(pane);s.isVisible=true;s.isClosed=false;if(isShowing){s.isHidden=false}if(doFX){lockPaneForFX(pane,true);$P.show(o.fxName_open,o.fxSettings_open,o.fxSpeed_open,function(){lockPaneForFX(pane,false);open_2()})}else{$P.show();open_2()}function open_2(){if(s.isVisible){_fixIframe(pane);if(!s.isSliding){sizeMidPanes(_c[pane].dir=="vert"?"center":"all",false)}setAsOpen(pane)}_dequeue(pane)}};var 
setAsOpen=function(pane,skipCallback){var $P=$Ps[pane],$R=$Rs[pane],$T=$Ts[pane],o=options[pane],s=state[pane],side=_c[pane].side.toLowerCase(),inset="inset"+_c[pane].side,rClass=o.resizerClass,tClass=o.togglerClass,_pane="-"+pane,_open="-open",_closed="-closed",_sliding="-sliding";$R.css(side,sC[inset]+getPaneSize(pane)).removeClass(rClass+_closed+" "+rClass+_pane+_closed).addClass(rClass+_open+" "+rClass+_pane+_open);if(s.isSliding){$R.addClass(rClass+_sliding+" "+rClass+_pane+_sliding)}else{$R.removeClass(rClass+_sliding+" "+rClass+_pane+_sliding)}if(o.resizerDblClickToggle){$R.bind("dblclick",toggle)}removeHover(0,$R);if(o.resizable&&typeof $.fn.draggable=="function"){$R.draggable("enable").css("cursor",o.resizerCursor).attr("title",o.resizerTip)}else{if(!s.isSliding){$R.css("cursor","default")}}if($T){$T.removeClass(tClass+_closed+" "+tClass+_pane+_closed).addClass(tClass+_open+" "+tClass+_pane+_open).attr("title",o.togglerTip_open);removeHover(0,$T);$T.children(".content-closed").hide();$T.children(".content-open").css("display","block")}syncPinBtns(pane,!s.isSliding);$.extend(s,getElemDims($P));if(state.initialized){sizeHandles("all");sizeContent(pane,true)}if(!skipCallback&&(state.initialized||o.triggerEventsOnLoad)&&$P.is(":visible")){_execCallback(pane,o.onopen_end||o.onopen);if(s.isShowing){_execCallback(pane,o.onshow_end||o.onshow)}if(state.initialized){_execCallback(pane,o.onresize_end||o.onresize)}}};var slideOpen=function(evt_or_pane){var type=typeof evt_or_pane,pane=(type=="string"?evt_or_pane:$(this).data("layoutEdge"));if(type=="object"){evt_or_pane.stopImmediatePropagation()}if(state[pane].isClosed){open(pane,true)}else{bindStopSlidingEvents(pane,true)}};var slideClose=function(evt_or_pane){var $E=(isStr(evt_or_pane)?$Ps[evt_or_pane]:$(this)),pane=$E.data("layoutEdge"),o=options[pane],s=state[pane],$P=$Ps[pane];if(s.isClosed||s.isResizing){return}else{if(o.slideTrigger_close=="click"){close_NOW()}else{if(o.preventQuickSlideClose&&_c.isLayoutBusy){return}else{timer.set(pane+"_closeSlider",close_NOW,300)}}}function close_NOW(e){if(s.isClosed){bindStopSlidingEvents(pane,false)}else{close(pane)}}};var slideToggle=function(pane){toggle(pane,true)};var lockPaneForFX=function(pane,doLock){var $P=$Ps[pane];if(doLock){$P.css({zIndex:_c.zIndex.pane_animate});if(pane=="south"){$P.css({top:sC.insetTop+sC.innerHeight-$P.outerHeight()})}else{if(pane=="east"){$P.css({left:sC.insetLeft+sC.innerWidth-$P.outerWidth()})}}}else{if(pane=="south"){$P.css({top:"auto"})}else{if(pane=="east"){$P.css({left:"auto"})}}var o=options[pane];if(state.browser.msie&&o.fxOpacityFix&&o.fxName_open!="slide"&&$P.css("filter")&&$P.css("opacity")==1){$P[0].style.removeAttribute("filter")}}};var bindStartSlidingEvent=function(pane,enable){var o=options[pane],z=_c.zIndex,$P=$Ps[pane],$R=$Rs[pane],trigger=o.slideTrigger_open;if(!$R||!o.slidable){return}if(trigger.match(/mouseover/)){trigger=o.slideTrigger_open="mouseenter"}else{if(!trigger.match(/click|dblclick|mouseenter/)){trigger=o.slideTrigger_open="click"}}$R.css("zIndex",!enable?z.pane_sliding:z.resizer_normal);$P.css("zIndex",!enable?z.pane_sliding:z.pane_normal);$R[enable?"bind":"unbind"](trigger+"."+sID,slideOpen).css("cursor",enable?o.sliderCursor:"default").attr("title",enable?o.sliderTip:"")};var bindStopSlidingEvents=function(pane,enable){var 
o=options[pane],s=state[pane],trigger=o.slideTrigger_close,action=(enable?"bind":"unbind"),$P=$Ps[pane],$R=$Rs[pane];s.isSliding=enable;timer.clear(pane+"_closeSlider");if(enable){bindStartSlidingEvent(pane,false)}if(!trigger.match(/click|mouseleave/)){trigger=o.slideTrigger_close="mouseleave"}$R[action](trigger,slideClose);if(trigger=="mouseleave"){$P[action]("mouseleave."+sID,slideClose);$R[action]("mouseenter."+sID,cancelMouseOut);$P[action]("mouseenter."+sID,cancelMouseOut)}if(!enable){timer.clear(pane+"_closeSlider")}else{if(trigger=="click"&&!o.resizable){$R.css("cursor",enable?o.sliderCursor:"default");$R.attr("title",enable?o.togglerTip_open:"")}}function cancelMouseOut(evt){timer.clear(pane+"_closeSlider");evt.stopPropagation()}};var makePaneFit=function(pane,isOpening,skipCallback,force){var o=options[pane],s=state[pane],c=_c[pane],$P=$Ps[pane],$R=$Rs[pane],isSidePane=c.dir=="vert",hasRoom=false;if(pane=="center"||(isSidePane&&s.noVerticalRoom)){hasRoom=s.minHeight<=s.maxHeight&&(isSidePane||s.minWidth<=s.maxWidth);if(hasRoom&&s.noRoom){$P.show();if($R){$R.show()}s.isVisible=true;s.noRoom=false;if(isSidePane){s.noVerticalRoom=false}_fixIframe(pane)}else{if(!hasRoom&&!s.noRoom){$P.hide();if($R){$R.hide()}s.isVisible=false;s.noRoom=true}}}if(pane=="center"){}else{if(s.minSize<=s.maxSize){hasRoom=true;if(s.size>s.maxSize){sizePane(pane,s.maxSize,skipCallback,force)}else{if(s.size<s.minSize){sizePane(pane,s.minSize,skipCallback,force)}else{if($R&&$P.is(":visible")){var side=c.side.toLowerCase(),pos=s.size+sC["inset"+c.side];if(_cssNum($R,side)!=pos){$R.css(side,pos)}}}}if(s.noRoom){if(s.wasOpen&&o.closable){if(o.autoReopen){open(pane,false,true,true)}else{s.noRoom=false}}else{show(pane,s.wasOpen,true,true)}}}else{if(!s.noRoom){s.noRoom=true;s.wasOpen=!s.isClosed&&!s.isSliding;if(o.closable){close(pane,true,true)}else{hide(pane,true)}}}}};var manualSizePane=function(pane,size,skipCallback){var o=options[pane],forceResize=o.resizeWhileDragging&&!_c.isLayoutBusy;o.autoResize=false;sizePane(pane,size,skipCallback,forceResize)};var sizePane=function(pane,size,skipCallback,force){var o=options[pane],s=state[pane],$P=$Ps[pane],$R=$Rs[pane],side=_c[pane].side.toLowerCase(),inset="inset"+_c[pane].side,skipResizeWhileDragging=_c.isLayoutBusy&&!o.triggerEventsWhileDragging,oldSize;setSizeLimits(pane);oldSize=s.size;size=_parseSize(pane,size);size=max(size,_parseSize(pane,o.minSize));size=min(size,s.maxSize);if(size<s.minSize){makePaneFit(pane,false,skipCallback);return}if(!force&&size==oldSize){return}if(!skipCallback&&state.initialized&&s.isVisible){_execCallback(pane,o.onresize_start)}$P.css(_c[pane].sizeType.toLowerCase(),max(1,cssSize(pane,size)));s.size=size;$.extend(s,getElemDims($P));if($R&&$P.is(":visible")){$R.css(side,size+sC[inset])}sizeContent(pane);if(!skipCallback&&!skipResizeWhileDragging&&state.initialized&&s.isVisible){_execCallback(pane,o.onresize_end||o.onresize);if(o.resizeNestedLayout&&$P.data("layoutContainer")){$P.layout().resizeAll()}}if(!skipCallback){if(!s.isSliding){sizeMidPanes(_c[pane].dir=="horz"?"all":"center",skipResizeWhileDragging,force)}sizeHandles("all")}var altPane=_c.altSide[pane];if(size<oldSize&&state[altPane].noRoom){setSizeLimits(altPane);makePaneFit(altPane,false,skipCallback)}};var sizeMidPanes=function(panes,skipCallback,force){if(!panes||panes=="all"){panes="east,west,center"}$.each(panes.split(","),function(i,pane){if(!$Ps[pane]){return}var 
o=options[pane],s=state[pane],$P=$Ps[pane],$R=$Rs[pane],isCenter=(pane=="center"),hasRoom=true,CSS={},d=calcNewCenterPaneDims();$.extend(s,getElemDims($P));if(pane=="center"){if(!force&&s.isVisible&&d.width==s.outerWidth&&d.height==s.outerHeight){return true}$.extend(s,cssMinDims(pane),{maxWidth:d.width,maxHeight:d.height});CSS=d;CSS.width=cssW(pane,d.width);CSS.height=cssH(pane,d.height);hasRoom=CSS.width>0&&CSS.height>0;if(!hasRoom&&!state.initialized&&o.minWidth>0){var reqPx=o.minWidth-s.outerWidth,minE=options.east.minSize||0,minW=options.west.minSize||0,sizeE=state.east.size,sizeW=state.west.size,newE=sizeE,newW=sizeW;if(reqPx>0&&state.east.isVisible&&sizeE>minE){newE=max(sizeE-minE,sizeE-reqPx);reqPx-=sizeE-newE}if(reqPx>0&&state.west.isVisible&&sizeW>minW){newW=max(sizeW-minW,sizeW-reqPx);reqPx-=sizeW-newW}if(reqPx==0){if(sizeE!=minE){sizePane("east",newE,true)}if(sizeW!=minW){sizePane("west",newW,true)}sizeMidPanes("center",skipCallback,force);return}}}else{$.extend(s,getElemDims($P),cssMinDims(pane));if(!force&&!s.noVerticalRoom&&d.height==s.outerHeight){return true}CSS.top=d.top;CSS.bottom=d.bottom;CSS.height=cssH(pane,d.height);s.maxHeight=max(0,CSS.height);hasRoom=(s.maxHeight>0);if(!hasRoom){s.noVerticalRoom=true}}if(hasRoom){if(!skipCallback&&state.initialized){_execCallback(pane,o.onresize_start)}$P.css(CSS);$.extend(s,getElemDims($P));if(s.noRoom){makePaneFit(pane)}if(state.initialized){sizeContent(pane)}}else{if(!s.noRoom&&s.isVisible){makePaneFit(pane)}}if(pane=="center"){var b=state.browser;var fix=b.isIE6||(b.msie&&!b.boxModel);if($Ps.north&&(fix||state.north.tagName=="IFRAME")){$Ps.north.css("width",cssW($Ps.north,sC.innerWidth))}if($Ps.south&&(fix||state.south.tagName=="IFRAME")){$Ps.south.css("width",cssW($Ps.south,sC.innerWidth))}}if(!skipCallback&&state.initialized&&s.isVisible){_execCallback(pane,o.onresize_end||o.onresize);if(o.resizeNestedLayout&&$P.data("layoutContainer")){$P.layout().resizeAll()}}})};var resizeAll=function(){var oldW=sC.innerWidth,oldH=sC.innerHeight;$.extend(state.container,getElemDims($Container));if(!sC.outerHeight){return}if(false===_execCallback(null,options.onresizeall_start)){return false}var shrunkH=(sC.innerHeight<oldH),shrunkW=(sC.innerWidth<oldW),$P,o,s,dir;$.each(["south","north","east","west"],function(i,pane){if(!$Ps[pane]){return}s=state[pane];o=options[pane];dir=_c[pane].dir;if(o.autoResize&&s.size!=o.size){sizePane(pane,o.size,true,true)}else{setSizeLimits(pane);makePaneFit(pane,false,true,true)}});sizeMidPanes("all",true,true);sizeHandles("all");o=options;$.each(_c.allPanes.split(","),function(i,pane){$P=$Ps[pane];if(!$P){return}if(state[pane].isVisible){_execCallback(pane,o[pane].onresize_end||o[pane].onresize)}if(o[pane].resizeNestedLayout&&$P.data("layoutContainer")){$P.layout().resizeAll()}});_execCallback(null,o.onresizeall_end||o.onresizeall)};var sizeContent=function(panes,remeasure){if(!panes||panes=="all"){panes=_c.allPanes}$.each(panes.split(","),function(idx,pane){var $P=$Ps[pane],$C=$Cs[pane],o=options[pane],s=state[pane],m=s.content;if(!$P||!$C||!$P.is(":visible")){return true}if(false===_execCallback(null,o.onsizecontent_start)){return}if(!_c.isLayoutBusy||m.top==undefined||remeasure||o.resizeContentWhileDragging){_measure();if(m.hiddenFooters>0&&$P.css("overflow")=="hidden"){$P.css("overflow","visible");_measure();$P.css("overflow","hidden")}}var 
newH=s.innerHeight-(m.spaceAbove-s.css.paddingTop)-(m.spaceBelow-s.css.paddingBottom);if(!$C.is(":visible")||m.height!=newH){setOuterHeight($C,newH,true);m.height=newH}if(state.initialized){_execCallback(pane,o.onsizecontent_end||o.onsizecontent);if(o.resizeNestedLayout&&$C.data("layoutContainer")){$C.layout().resizeAll()}}function _below($E){return max(s.css.paddingBottom,(parseInt($E.css("marginBottom"))||0))}function _measure(){var ignore=options[pane].contentIgnoreSelector,$Fs=$C.nextAll().not(ignore||":lt(0)"),$Fs_vis=$Fs.filter(":visible"),$F=$Fs_vis.filter(":last");m={top:$C[0].offsetTop,height:$C.outerHeight(),numFooters:$Fs.length,hiddenFooters:$Fs.length-$Fs_vis.length,spaceBelow:0};m.spaceAbove=m.top;m.bottom=m.top+m.height;if($F.length){m.spaceBelow=($F[0].offsetTop+$F.outerHeight())-m.bottom+_below($F)}else{m.spaceBelow=_below($C)}}})};var sizeHandles=function(panes){if(!panes||panes=="all"){panes=_c.borderPanes}$.each(panes.split(","),function(i,pane){var o=options[pane],s=state[pane],$P=$Ps[pane],$R=$Rs[pane],$T=$Ts[pane],$TC;if(!$P||!$R){return}var dir=_c[pane].dir,_state=(s.isClosed?"_closed":"_open"),spacing=o["spacing"+_state],togAlign=o["togglerAlign"+_state],togLen=o["togglerLength"+_state],paneLen,offset,CSS={};if(spacing==0){$R.hide();return}else{if(!s.noRoom&&!s.isHidden){$R.show()}}if(dir=="horz"){paneLen=$P.outerWidth();s.resizerLength=paneLen;$R.css({width:max(1,cssW($R,paneLen)),height:max(0,cssH($R,spacing)),left:_cssNum($P,"left")})}else{paneLen=$P.outerHeight();s.resizerLength=paneLen;$R.css({height:max(1,cssH($R,paneLen)),width:max(0,cssW($R,spacing)),top:sC.insetTop+getPaneSize("north",true)})}removeHover(o,$R);if($T){if(togLen==0||(s.isSliding&&o.hideTogglerOnSlide)){$T.hide();return}else{$T.show()}if(!(togLen>0)||togLen=="100%"||togLen>paneLen){togLen=paneLen;offset=0}else{if(isStr(togAlign)){switch(togAlign){case"top":case"left":offset=0;break;case"bottom":case"right":offset=paneLen-togLen;break;case"middle":case"center":default:offset=Math.floor((paneLen-togLen)/2)}}else{var x=parseInt(togAlign);if(togAlign>=0){offset=x}else{offset=paneLen-togLen+x}}}if(dir=="horz"){var width=cssW($T,togLen);$T.css({width:max(0,width),height:max(1,cssH($T,spacing)),left:offset,top:0});$T.children(".content").each(function(){$TC=$(this);$TC.css("marginLeft",Math.floor((width-$TC.outerWidth())/2))})}else{var height=cssH($T,togLen);$T.css({height:max(0,height),width:max(1,cssW($T,spacing)),top:offset,left:0});$T.children(".content").each(function(){$TC=$(this);$TC.css("marginTop",Math.floor((height-$TC.outerHeight())/2))})}removeHover(0,$T)}if(!state.initialized&&o.initHidden){$R.hide();if($T){$T.hide()}}})};var swapPanes=function(pane1,pane2){state[pane1].edge=pane2;state[pane2].edge=pane1;var cancelled=false;if(false===_execCallback(pane1,options[pane1].onswap_start)){cancelled=true}if(!cancelled&&false===_execCallback(pane2,options[pane2].onswap_start)){cancelled=true}if(cancelled){state[pane1].edge=pane1;state[pane2].edge=pane2;return}var 
oPane1=copy(pane1),oPane2=copy(pane2),sizes={};sizes[pane1]=oPane1?oPane1.state.size:0;sizes[pane2]=oPane2?oPane2.state.size:0;$Ps[pane1]=false;$Ps[pane2]=false;state[pane1]={};state[pane2]={};if($Ts[pane1]){$Ts[pane1].remove()}if($Ts[pane2]){$Ts[pane2].remove()}if($Rs[pane1]){$Rs[pane1].remove()}if($Rs[pane2]){$Rs[pane2].remove()}$Rs[pane1]=$Rs[pane2]=$Ts[pane1]=$Ts[pane2]=false;move(oPane1,pane2);move(oPane2,pane1);oPane1=oPane2=sizes=null;if($Ps[pane1]){$Ps[pane1].css(_c.visible)}if($Ps[pane2]){$Ps[pane2].css(_c.visible)}resizeAll();_execCallback(pane1,options[pane1].onswap_end||options[pane1].onswap);_execCallback(pane2,options[pane2].onswap_end||options[pane2].onswap);return;function copy(n){var $P=$Ps[n],$C=$Cs[n];return !$P?false:{pane:n,P:$P?$P[0]:false,C:$C?$C[0]:false,state:$.extend({},state[n]),options:$.extend({},options[n])}}function move(oPane,pane){if(!oPane){return}var P=oPane.P,C=oPane.C,oldPane=oPane.pane,c=_c[pane],side=c.side.toLowerCase(),inset="inset"+c.side,s=$.extend({},state[pane]),o=options[pane],fx={resizerCursor:o.resizerCursor},re,size,pos;$.each("fxName,fxSpeed,fxSettings".split(","),function(i,k){fx[k]=o[k];fx[k+"_open"]=o[k+"_open"];fx[k+"_close"]=o[k+"_close"]});$Ps[pane]=$(P).data("layoutEdge",pane).css(_c.hidden).css(c.cssReq);$Cs[pane]=C?$(C):false;options[pane]=$.extend({},oPane.options,fx);state[pane]=$.extend({},oPane.state);re=new RegExp(o.paneClass+"-"+oldPane,"g");P.className=P.className.replace(re,o.paneClass+"-"+pane);initHandles(pane);initResizable(pane);if(c.dir!=_c[oldPane].dir){size=sizes[pane]||0;setSizeLimits(pane);size=max(size,state[pane].minSize);manualSizePane(pane,size,true)}else{$Rs[pane].css(side,sC[inset]+(state[pane].isVisible?getPaneSize(pane):0))}if(oPane.state.isVisible&&!s.isVisible){setAsOpen(pane,true)}else{setAsClosed(pane,true);bindStartSlidingEvent(pane,true)}oPane=null}};function keyDown(evt){if(!evt){return true}var code=evt.keyCode;if(code<33){return true}var PANE={38:"north",40:"south",37:"west",39:"east"},ALT=evt.altKey,SHIFT=evt.shiftKey,CTRL=evt.ctrlKey,CURSOR=(CTRL&&code>=37&&code<=40),o,k,m,pane;if(CURSOR&&options[PANE[code]].enableCursorHotkey){pane=PANE[code]}else{if(CTRL||SHIFT){$.each(_c.borderPanes.split(","),function(i,p){o=options[p];k=o.customHotkey;m=o.customHotkeyModifier;if((SHIFT&&m=="SHIFT")||(CTRL&&m=="CTRL")||(CTRL&&SHIFT)){if(k&&code==(isNaN(k)||k<=9?k.toUpperCase().charCodeAt(0):k)){pane=p;return false}}})}}if(!pane||!$Ps[pane]||!options[pane].closable||state[pane].isHidden){return true}toggle(pane);evt.stopPropagation();evt.returnValue=false;return false}function allowOverflow(el){if(this&&this.tagName){el=this}var $P;if(isStr(el)){$P=$Ps[el]}else{if($(el).data("layoutRole")){$P=$(el)}else{$(el).parents().each(function(){if($(this).data("layoutRole")){$P=$(this);return false}})}}if(!$P||!$P.length){return}var pane=$P.data("layoutEdge"),s=state[pane];if(s.cssSaved){resetOverflow(pane)}if(s.isSliding||s.isResizing||s.isClosed){s.cssSaved=false;return}var newCSS={zIndex:(_c.zIndex.pane_normal+2)},curCSS={},of=$P.css("overflow"),ofX=$P.css("overflowX"),ofY=$P.css("overflowY");if(of!="visible"){curCSS.overflow=of;newCSS.overflow="visible"}if(ofX&&!ofX.match(/visible|auto/)){curCSS.overflowX=ofX;newCSS.overflowX="visible"}if(ofY&&!ofY.match(/visible|auto/)){curCSS.overflowY=ofX;newCSS.overflowY="visible"}s.cssSaved=curCSS;$P.css(newCSS);$.each(_c.allPanes.split(","),function(i,p){if(p!=pane){resetOverflow(p)}})}function resetOverflow(el){if(this&&this.tagName){el=this}var 
$P;if(isStr(el)){$P=$Ps[el]}else{if($(el).data("layoutRole")){$P=$(el)}else{$(el).parents().each(function(){if($(this).data("layoutRole")){$P=$(this);return false}})}}if(!$P||!$P.length){return}var pane=$P.data("layoutEdge"),s=state[pane],CSS=s.cssSaved||{};if(!s.isSliding&&!s.isResizing){$P.css("zIndex",_c.zIndex.pane_normal)}$P.css(CSS);s.cssSaved=false}function getBtn(selector,pane,action){var $E=$(selector);if(!$E.length){alert(lang.errButton+lang.selector+": "+selector)}else{if(_c.borderPanes.indexOf(pane)==-1){alert(lang.errButton+lang.Pane.toLowerCase()+": "+pane)}else{var btn=options[pane].buttonClass+"-"+action;$E.addClass(btn+" "+btn+"-"+pane).data("layoutName",options.name);return $E}}return false}function bindButton(selector,action,pane){switch(action.toLowerCase()){case"toggle":addToggleBtn(selector,pane);break;case"open":addOpenBtn(selector,pane);break;case"close":addCloseBtn(selector,pane);break;case"pin":addPinBtn(selector,pane);break;case"toggle-slide":addToggleBtn(selector,pane,true);break;case"open-slide":addOpenBtn(selector,pane,true);break}}function addToggleBtn(selector,pane,slide){var $E=getBtn(selector,pane,"toggle");if($E){$E.click(function(evt){toggle(pane,!!slide);evt.stopPropagation()})}}function addOpenBtn(selector,pane,slide){var $E=getBtn(selector,pane,"open");if($E){$E.attr("title",lang.Open).click(function(evt){open(pane,!!slide);evt.stopPropagation()})}}function addCloseBtn(selector,pane){var $E=getBtn(selector,pane,"close");if($E){$E.attr("title",lang.Close).click(function(evt){close(pane);evt.stopPropagation()})}}function addPinBtn(selector,pane){var $E=getBtn(selector,pane,"pin");if($E){var s=state[pane];$E.click(function(evt){setPinState($(this),pane,(s.isSliding||s.isClosed));if(s.isSliding||s.isClosed){open(pane)}else{close(pane)}evt.stopPropagation()});setPinState($E,pane,(!s.isClosed&&!s.isSliding));_c[pane].pins.push(selector)}}function syncPinBtns(pane,doPin){$.each(_c[pane].pins,function(i,selector){setPinState($(selector),pane,doPin)})}function setPinState($Pin,pane,doPin){var updown=$Pin.attr("pin");if(updown&&doPin==(updown=="down")){return}var pin=options[pane].buttonClass+"-pin",side=pin+"-"+pane,UP=pin+"-up "+side+"-up",DN=pin+"-down "+side+"-down";$Pin.attr("pin",doPin?"down":"up").attr("title",doPin?lang.Unpin:lang.Pin).removeClass(doPin?UP:DN).addClass(doPin?DN:UP)}function isCookiesEnabled(){return(navigator.cookieEnabled!=0)}function getCookie(opts){var o=$.extend({},options.cookie,opts||{}),name=o.name||options.name||"Layout",c=document.cookie,cs=c?c.split(";"):[],pair;for(var i=0,n=cs.length;i<n;i++){pair=$.trim(cs[i]).split("=");if(pair[0]==name){return decodeJSON(decodeURIComponent(pair[1]))}}return""}function saveCookie(keys,opts){var o=$.extend({},options.cookie,opts||{}),name=o.name||options.name||"Layout",params="",date="",clear=false;if(o.expires.toUTCString){date=o.expires}else{if(typeof o.expires=="number"){date=new Date();if(o.expires>0){date.setDate(date.getDate()+o.expires)}else{date.setYear(1970);clear=true}}}if(date){params+=";expires="+date.toUTCString()}if(o.path){params+=";path="+o.path}if(o.domain){params+=";domain="+o.domain}if(o.secure){params+=";secure"}if(clear){state.cookie={};document.cookie=name+"="+params}else{state.cookie=getState(keys||o.keys);document.cookie=name+"="+encodeURIComponent(encodeJSON(state.cookie))+params}return $.extend({},state.cookie)}function deleteCookie(){saveCookie("",{expires:-1})}function loadCookie(opts){var o=getCookie(opts);if(o){state.cookie=$.extend({},o);loadState(o)}return 
o}function loadState(opts){$.extend(true,options,opts)}function getState(keys){var data={},alt={isClosed:"initClosed",isHidden:"initHidden"},pair,pane,key,val;if(!keys){keys=options.cookie.keys}if($.isArray(keys)){keys=keys.join(",")}keys=keys.replace(/__/g,".").split(",");for(var i=0,n=keys.length;i<n;i++){pair=keys[i].split(".");pane=pair[0];key=pair[1];if(_c.allPanes.indexOf(pane)<0){continue}val=state[pane][key];if(val==undefined){continue}if(key=="isClosed"&&state[pane]["isSliding"]){val=true}(data[pane]||(data[pane]={}))[alt[key]?alt[key]:key]=val}return data}function encodeJSON(JSON){return parse(JSON);function parse(h){var D=[],i=0,k,v,t;for(k in h){v=h[k];t=typeof v;if(t=="string"){v='"'+v+'"'}else{if(t=="object"){v=parse(v)}}D[i++]='"'+k+'":'+v}return"{"+D.join(",")+"}"}}function decodeJSON(str){try{return window["eval"]("("+str+")")||{}}catch(e){return{}}}var $Container=$(this).eq(0);if(!$Container.length){return null}if($Container.data("layoutContainer")){return $.extend({},window[$Container.data("layoutContainer")])}var $Ps={},$Cs={},$Rs={},$Ts={},sC=state.container,sID=state.id;_create();var Instance={options:options,state:state,container:$Container,panes:$Ps,contents:$Cs,resizers:$Rs,togglers:$Ts,toggle:toggle,hide:hide,show:show,open:open,close:close,slideOpen:slideOpen,slideClose:slideClose,slideToggle:slideToggle,initContent:initContent,sizeContent:sizeContent,sizePane:manualSizePane,swapPanes:swapPanes,resizeAll:resizeAll,destroy:destroy,setSizeLimits:setSizeLimits,bindButton:bindButton,addToggleBtn:addToggleBtn,addOpenBtn:addOpenBtn,addCloseBtn:addCloseBtn,addPinBtn:addPinBtn,allowOverflow:allowOverflow,resetOverflow:resetOverflow,encodeJSON:encodeJSON,decodeJSON:decodeJSON,getState:getState,getCookie:getCookie,saveCookie:saveCookie,deleteCookie:deleteCookie,loadCookie:loadCookie,loadState:loadState,cssWidth:cssW,cssHeight:cssH};window[sID]=Instance;return Instance}})(jQuery);
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
index afd0293fe1..6d1caf6d50 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
@@ -2,14 +2,29 @@
// code by Gilles Dubochet with contributions by Pedro Furlanetto
$(document).ready(function(){
+
+ // Escapes special characters and returns a valid jQuery selector
+ function escapeJquery(str){
+ return str.replace(/([;&,\.\+\*\~':"\!\^#$%@\[\]\(\)=>\|])/g, '\\$1');
+ }
+
+ // highlight and jump to selected member
+ if (window.location.hash) {
+ var temp = window.location.hash.replace('#', '');
+ var elem = '#'+escapeJquery(temp);
+
+ window.scrollTo(0, 0);
+ $(elem).parent().effect("highlight", {color: "#FFCC85"}, 3000);
+ $('html,body').animate({scrollTop:$(elem).parent().offset().top}, 1000);
+ }
+
var isHiddenClass = function (name) {
return name == 'scala.Any' ||
- name == 'scala.AnyRef' ||
- name == 'scala.Predef.any2stringfmt' ||
- name == 'scala.Predef.any2stringadd' ||
- name == 'scala.Predef.any2ArrowAssoc' ||
- name == 'scala.Predef.any2Ensuring' ||
- name == 'scala.collection.TraversableOnce.alternateImplicit'
+ name == 'scala.AnyRef';
+ };
+
+ var isHidden = function (elem) {
+ return $(elem).attr("data-hidden") == 'true';
};
$("#linearization li:gt(0)").filter(function(){
@@ -17,7 +32,7 @@ $(document).ready(function(){
}).removeClass("in").addClass("out");
$("#implicits li").filter(function(){
- return isHiddenClass($(this).attr("name"));
+ return isHidden(this);
}).removeClass("in").addClass("out");
// Pre-filter members
@@ -102,6 +117,12 @@ $(document).ready(function(){
$("#linearization li.in").removeClass("in").addClass("out");
$("#linearization li:first").removeClass("out").addClass("in");
$("#implicits li.in").removeClass("in").addClass("out");
+
+ if ($(this).hasClass("out") && $("#mbrsel > div[id=ancestors] > ol > li.showall").hasClass("in")) {
+ $(this).removeClass("out").addClass("in");
+ $("#mbrsel > div[id=ancestors] > ol > li.showall").removeClass("in").addClass("out");
+ }
+
filter();
})
$("#mbrsel > div[id=ancestors] > ol > li.showall").click(function() {
@@ -113,10 +134,15 @@ $(document).ready(function(){
var filteredImplicits =
$("#implicits li.out").filter(function() {
- return ! isHiddenClass($(this).attr("name"));
+ return ! isHidden(this);
});
filteredImplicits.removeClass("out").addClass("in");
+ if ($(this).hasClass("out") && $("#mbrsel > div[id=ancestors] > ol > li.hideall").hasClass("in")) {
+ $(this).removeClass("out").addClass("in");
+ $("#mbrsel > div[id=ancestors] > ol > li.hideall").removeClass("in").addClass("out");
+ }
+
filter();
});
$("#visbl > ol > li.public").click(function() {
diff --git a/src/compiler/scala/tools/nsc/doc/model/Entity.scala b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
index 46b2a11d4a..a63849e3f6 100644
--- a/src/compiler/scala/tools/nsc/doc/model/Entity.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
@@ -196,6 +196,9 @@ trait MemberEntity extends Entity {
/** The identity of this member, used for linking */
def signature: String
+ /** Compatibility signature; will be removed in future versions */
+ def signatureCompat: String
+
/** Indicates whether the member is inherited by implicit conversion */
def isImplicitlyInherited: Boolean
@@ -530,8 +533,8 @@ trait ImplicitConversion {
/** The members inherited by this implicit conversion */
def members: List[MemberEntity]
- /** Is this a common implicit conversion (aka conversion that affects all classes, in Predef?) */
- def isCommonConversion: Boolean
+ /** Is this a hidden implicit conversion (as specified in the settings) */
+ def isHiddenConversion: Boolean
}
/** Shadowing captures the information that the member is shadowed by some other members
@@ -625,4 +628,4 @@ trait UpperBoundedTypeParamConstraint extends TypeParamConstraint {
/** toString for debugging */
override def toString = typeParamName + " is a subclass of " + upperBound.name + " (" + typeParamName + " <: " +
upperBound.name + ")"
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/doc/model/LinkTo.scala b/src/compiler/scala/tools/nsc/doc/model/LinkTo.scala
index 664567872e..737c6a7577 100644
--- a/src/compiler/scala/tools/nsc/doc/model/LinkTo.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/LinkTo.scala
@@ -9,10 +9,10 @@ package model
import scala.collection._
abstract sealed class LinkTo
-case class LinkToTpl(tpl: DocTemplateEntity) extends LinkTo
-case class LinkToMember(mbr: MemberEntity, inTpl: DocTemplateEntity) extends LinkTo
-case class Tooltip(name: String) extends LinkTo { def this(tpl: TemplateEntity) = this(tpl.qualifiedName) }
-// case class LinkToExternal(name: String, url: String) extends LinkTo // for SI-191, whenever Manohar will have time
+final case class LinkToTpl(tpl: DocTemplateEntity) extends LinkTo
+final case class LinkToMember(mbr: MemberEntity, inTpl: DocTemplateEntity) extends LinkTo
+final case class Tooltip(name: String) extends LinkTo { def this(tpl: TemplateEntity) = this(tpl.qualifiedName) }
+final case class LinkToExternal(name: String, url: String) extends LinkTo
case object NoLink extends LinkTo // you should use Tooltip if you have a name from the user, this is only in case all fails
object LinkToTpl {
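LinkTo above is a small sealed hierarchy, now extended with LinkToExternal and with its cases marked final. A self-contained sketch, using simplified stand-ins for DocTemplateEntity and MemberEntity (those stand-ins are assumptions of the sketch, not the real model types), of how a renderer can match on it exhaustively:

    object LinkToSketch {
      final case class DocTemplateEntity(qualifiedName: String)
      final case class MemberEntity(signature: String)

      sealed abstract class LinkTo
      final case class LinkToTpl(tpl: DocTemplateEntity) extends LinkTo
      final case class LinkToMember(mbr: MemberEntity, inTpl: DocTemplateEntity) extends LinkTo
      final case class LinkToExternal(name: String, url: String) extends LinkTo
      final case class Tooltip(name: String) extends LinkTo
      case object NoLink extends LinkTo

      // Because the hierarchy is sealed, the compiler can check this match is exhaustive.
      def render(link: LinkTo): String = link match {
        case LinkToTpl(tpl)            => s"<a href='${tpl.qualifiedName}.html'>${tpl.qualifiedName}</a>"
        case LinkToMember(mbr, inTpl)  => s"<a href='${inTpl.qualifiedName}.html#${mbr.signature}'>${mbr.signature}</a>"
        case LinkToExternal(name, url) => s"<a href='$url'>$name</a>"
        case Tooltip(name)             => s"<span title='$name'>$name</span>"
        case NoLink                    => ""
      }
    }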
diff --git a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala b/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala
index ab14498a7c..7ab73cceff 100644
--- a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala
@@ -11,6 +11,7 @@ trait MemberLookup {
thisFactory: ModelFactory =>
import global._
+ import rootMirror.RootPackage, rootMirror.EmptyPackage
def makeEntityLink(title: Inline, pos: Position, query: String, inTplOpt: Option[DocTemplateImpl]) =
new EntityLink(title) { lazy val link = memberLookup(pos, query, inTplOpt) }
@@ -21,23 +22,44 @@ trait MemberLookup {
var members = breakMembers(query)
//println(query + " => " + members)
- // (1) Lookup in the root package, as most of the links are qualified
- var linkTo: List[LinkTo] = lookupInRootPackage(pos, members)
+ // (1) First look in the root package, as most of the links are qualified
+ val fromRoot = lookupInRootPackage(pos, members)
- // (2) Recursively go into each
- if (inTplOpt.isDefined) {
- var currentTpl = inTplOpt.get
- while (currentTpl != null && !currentTpl.isRootPackage && (linkTo.isEmpty)) {
- linkTo = lookupInTemplate(pos, members, currentTpl)
- currentTpl = currentTpl.inTemplate
- }
+ // (2) Or recursively go into each containing template.
+ val fromParents = inTplOpt.fold(Stream.empty[DocTemplateImpl]) { tpl =>
+ Stream.iterate(tpl)(_.inTemplate)
+ }.takeWhile (tpl => tpl != null && !tpl.isRootPackage).map { tpl =>
+ lookupInTemplate(pos, members, tpl.asInstanceOf[EntityImpl].sym)
}
- // (3) Look at external links
- if (linkTo.isEmpty) {
- // TODO: IF THIS IS THE ROOT PACKAGE, LOOK AT EXTERNAL LINKS
+ val syms = (fromRoot +: fromParents) find (!_.isEmpty) getOrElse Nil
+ val linkTo = createLinks(syms) match {
+ case Nil if !syms.isEmpty =>
+ // (3) Look at external links
+ syms.flatMap { case (sym, owner) =>
+
+ // reconstruct the original link
+ def linkName(sym: Symbol) = {
+ def isRoot(s: Symbol) = s.isRootSymbol || s.isEmptyPackage || s.isEmptyPackageClass
+ def nameString(s: Symbol) = s.nameString + (if ((s.isModule || s.isModuleClass) && !s.isPackage) "$" else "")
+ val packageSuffix = if (sym.isPackage) ".package" else ""
+
+ sym.ownerChain.reverse.filterNot(isRoot(_)).map(nameString(_)).mkString(".") + packageSuffix
+ }
+
+ if (sym.isClass || sym.isModule || sym.isTrait || sym.isPackage)
+ findExternalLink(linkName(sym))
+ else if (owner.isClass || owner.isModule || owner.isTrait || owner.isPackage)
+ findExternalLink(linkName(owner) + "@" + externalSignature(sym))
+ else
+ None
+ }
+ case links => links
}
+ //println(createLinks(syms))
+ //println(linkTo)
+
// (4) if we still haven't found anything, create a tooltip, if we found too many, report
if (linkTo.isEmpty){
if (!settings.docNoLinkWarnings.value)
@@ -97,9 +119,23 @@ trait MemberLookup {
private object OnlyType extends SearchStrategy
private object OnlyTerm extends SearchStrategy
- private def lookupInRootPackage(pos: Position, members: List[String]) = lookupInTemplate(pos, members, makeRootPackage)
+ private def lookupInRootPackage(pos: Position, members: List[String]) =
+ if (members.length == 1)
+ lookupInTemplate(pos, members, EmptyPackage) ::: lookupInTemplate(pos, members, RootPackage)
+ else
+ lookupInTemplate(pos, members, RootPackage)
- private def lookupInTemplate(pos: Position, members: List[String], inTpl: DocTemplateImpl): List[LinkTo] = {
+ private def createLinks(syms: List[(Symbol, Symbol)]): List[LinkTo] =
+ syms.flatMap { case (sym, owner) =>
+ if (sym.isClass || sym.isModule || sym.isTrait || sym.isPackage)
+ findTemplateMaybe(sym) map (LinkToTpl(_))
+ else
+ findTemplateMaybe(owner) flatMap { inTpl =>
+ inTpl.members find (_.asInstanceOf[EntityImpl].sym == sym) map (LinkToMember(_, inTpl))
+ }
+ }
+
+ private def lookupInTemplate(pos: Position, members: List[String], container: Symbol): List[(Symbol, Symbol)] = {
// Maintaining compatibility with previous links is a bit tricky here:
// we have a preference for term names for all terms except for the last, where we prefer a class:
// How to do this:
@@ -108,53 +144,56 @@ trait MemberLookup {
// * we look for terms with the last member's name
// * we look for types with the same name, all the way up
val result = members match {
- case Nil =>
- Nil
+ case Nil => Nil
case mbrName::Nil =>
- var members = lookupInTemplate(pos, mbrName, inTpl, OnlyType)
- if (members.isEmpty)
- members = lookupInTemplate(pos, mbrName, inTpl, OnlyTerm)
-
- members.map(_ match {
- case tpl: DocTemplateEntity => LinkToTpl(tpl)
- case mbr => LinkToMember(mbr, inTpl)
- })
+ var syms = lookupInTemplate(pos, mbrName, container, OnlyType) map ((_, container))
+ if (syms.isEmpty)
+ syms = lookupInTemplate(pos, mbrName, container, OnlyTerm) map ((_, container))
+ syms
case tplName::rest =>
+ def completeSearch(syms: List[Symbol]) =
+ syms filter {sym => sym.isPackage || sym.isClass || sym.isModule} flatMap (lookupInTemplate(pos, rest, _))
- def completeSearch(mbrs: List[MemberImpl]) =
- mbrs.collect({case d:DocTemplateImpl => d}).flatMap(tpl => lookupInTemplate(pos, rest, tpl))
-
- var members = completeSearch(lookupInTemplate(pos, tplName, inTpl, OnlyTerm))
- if (members.isEmpty)
- members = completeSearch(lookupInTemplate(pos, tplName, inTpl, OnlyType))
-
- members
+ completeSearch(lookupInTemplate(pos, tplName, container, OnlyTerm)) match {
+ case Nil => completeSearch(lookupInTemplate(pos, tplName, container, OnlyType))
+ case syms => syms
+ }
}
- //println("lookupInTemplate(" + members + ", " + inTpl + ") => " + result)
+ //println("lookupInTemplate(" + members + ", " + container + ") => " + result)
result
}
- private def lookupInTemplate(pos: Position, member: String, inTpl: DocTemplateImpl, strategy: SearchStrategy): List[MemberImpl] = {
+ private def lookupInTemplate(pos: Position, member: String, container: Symbol, strategy: SearchStrategy): List[Symbol] = {
val name = member.stripSuffix("$").stripSuffix("!").stripSuffix("*")
+ def signatureMatch(sym: Symbol): Boolean = externalSignature(sym).startsWith(name)
+
+ // We need to cleanup the bogus classes created by the .class file parser. For example, [[scala.Predef]] resolves
+ // to (bogus) class scala.Predef loaded by the class loader -- which we need to eliminate by looking at the info
+ // and removing NoType classes
+ def cleanupBogusClasses(syms: List[Symbol]) = { syms.filter(_.info != NoType) }
+
+ def syms(name: Name) = container.info.nonPrivateMember(name).alternatives
+ def termSyms = cleanupBogusClasses(syms(newTermName(name)))
+ def typeSyms = cleanupBogusClasses(syms(newTypeName(name)))
+
val result = if (member.endsWith("$"))
- inTpl.members.filter(mbr => (mbr.name == name) && (mbr.isTerm))
+ termSyms
else if (member.endsWith("!"))
- inTpl.members.filter(mbr => (mbr.name == name) && (mbr.isType))
+ typeSyms
else if (member.endsWith("*"))
- inTpl.members.filter(mbr => (mbr.signature.startsWith(name)))
- else {
+ cleanupBogusClasses(container.info.nonPrivateDecls) filter signatureMatch
+ else
if (strategy == BothTypeAndTerm)
- inTpl.members.filter(_.name == name)
+ termSyms ::: typeSyms
else if (strategy == OnlyType)
- inTpl.members.filter(mbr => (mbr.name == name) && (mbr.isType))
+ typeSyms
else if (strategy == OnlyTerm)
- inTpl.members.filter(mbr => (mbr.name == name) && (mbr.isTerm))
+ termSyms
else
Nil
- }
- //println("lookupInTemplate(" + member + ", " + inTpl + ") => " + result)
+ //println("lookupInTemplate(" + member + ", " + container + ") => " + result)
result
}
@@ -170,7 +209,11 @@ trait MemberLookup {
if ((query.charAt(index) == '.' || query.charAt(index) == '#') &&
((index == 0) || (query.charAt(index-1) != '\\'))) {
- members ::= query.substring(last_index, index).replaceAll("\\\\([#\\.])", "$1")
+ val member = query.substring(last_index, index).replaceAll("\\\\([#\\.])", "$1")
+ // we want to allow javadoc-style links [[#member]] -- which requires us to remove empty members from the first
+ // element in the list
+ if ((member != "") || (!members.isEmpty))
+ members ::= member
last_index = index + 1
}
index += 1
@@ -184,4 +227,4 @@ trait MemberLookup {
object MemberLookup {
private[this] var _showExplanation = true
def showExplanation: Boolean = if (_showExplanation) { _showExplanation = false; true } else false
-} \ No newline at end of file
+}
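The rewritten lookup above tries the root package first, then walks outwards through the enclosing templates, and keeps the first scope that yields symbols; because the candidates form a Stream, later scopes are never searched once a hit is found. A minimal sketch of that first-non-empty-wins pattern, with plain strings standing in for compiler symbols (a simplification assumed here):

    object LookupSketch {
      type Result = List[String]

      def lookIn(scope: String): Result = {
        println("looking in " + scope)                 // shows which scopes are actually searched
        if (scope == "enclosing class") List("member") else Nil
      }

      def main(args: Array[String]): Unit = {
        // #:: keeps later scopes unevaluated until an earlier one comes up empty.
        val candidates: Stream[Result] =
          lookIn("root package") #:: lookIn("enclosing class") #:: lookIn("enclosing package") #:: Stream.empty
        val hit = candidates find (_.nonEmpty) getOrElse Nil
        println(hit)                                   // List(member); "enclosing package" is never searched
      }
    }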
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index a962ec4007..86bf1f1efd 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -154,7 +154,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
if (!sym.isTrait && (sym hasFlag Flags.ABSTRACT)) fgs += Paragraph(Text("abstract"))
/* Resetting the DEFERRED flag is a little trick here for refined types: (example from scala.collections)
* {{{
- * implicit def traversable2ops[T](t: collection.GenTraversableOnce[T]) = new TraversableOps[T] {
+ * implicit def traversable2ops[T](t: scala.collection.GenTraversableOnce[T]) = new TraversableOps[T] {
* def isParallel = ...
* }}}
* the type the method returns is TraversableOps, which has all-abstract symbols. But in reality, it couldn't have
@@ -209,7 +209,8 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
((!sym.isTrait && ((sym hasFlag Flags.ABSTRACT) || (sym hasFlag Flags.DEFERRED)) && (!isImplicitlyInherited)) ||
sym.isAbstractClass || sym.isAbstractType) && !sym.isSynthetic
def isTemplate = false
- lazy val signature = {
+ def signature = externalSignature(sym)
+ lazy val signatureCompat = {
def defParams(mbr: Any): String = mbr match {
case d: MemberEntity with Def =>
@@ -402,7 +403,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
* This is the final point in the core model creation: no DocTemplates are created after the model has finished, but
* inherited templates and implicit members are added to the members at this point.
*/
- def completeModel: Unit = {
+ def completeModel(): Unit = {
// DFS completion
// since alias types and abstract types have no own members, there's no reason for them to call completeModel
if (!sym.isAliasType && !sym.isAbstractType)
@@ -894,9 +895,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
/** */
- def makeAnnotation(annot: AnnotationInfo): Annotation = {
+ def makeAnnotation(annot: AnnotationInfo): scala.tools.nsc.doc.model.Annotation = {
val aSym = annot.symbol
- new EntityImpl(aSym, makeTemplate(aSym.owner)) with Annotation {
+ new EntityImpl(aSym, makeTemplate(aSym.owner)) with scala.tools.nsc.doc.model.Annotation {
lazy val annotationClass =
makeTemplate(annot.symbol)
val arguments = { // lazy
@@ -1075,12 +1076,24 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def classExcluded(clazz: TemplateEntity): Boolean = settings.hardcoded.isExcluded(clazz.qualifiedName)
// the implicit conversions that are excluded from the pages should not appear in the diagram
- def implicitExcluded(convertorMethod: String): Boolean = settings.hardcoded.commonConversionTargets.contains(convertorMethod)
+ def implicitExcluded(convertorMethod: String): Boolean = settings.hiddenImplicits(convertorMethod)
// whether or not to create a page for an {abstract,alias} type
def typeShouldDocument(bSym: Symbol, inTpl: DocTemplateImpl) =
(settings.docExpandAllTypes.value && (bSym.sourceFile != null)) ||
{ val rawComment = global.expandedDocComment(bSym, inTpl.sym)
rawComment.contains("@template") || rawComment.contains("@documentable") }
+
+ def findExternalLink(name: String): Option[LinkTo] =
+ settings.extUrlMapping find {
+ case (pkg, _) => name startsWith pkg
+ } map {
+ case (_, url) => LinkToExternal(name, url + "#" + name)
+ }
+
+ def externalSignature(sym: Symbol) = {
+ sym.info // force it, otherwise we see lazy types
+ (sym.nameString + sym.signatureString).replaceAll("\\s", "")
+ }
}
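findExternalLink above resolves a fully qualified name against settings.extUrlMapping by prefix and wraps the match in a LinkToExternal pointing into the external documentation. A rough sketch of that prefix lookup; the mapping contents and package names below are invented for illustration:

    object ExternalLinkSketch {
      final case class LinkToExternal(name: String, url: String)

      val extUrlMapping: Map[String, String] = Map(
        "com.example.lib" -> "http://example.com/api/index.html"
      )

      // Pick the first mapping whose package prefix matches the qualified name.
      def findExternalLink(name: String): Option[LinkToExternal] =
        extUrlMapping collectFirst {
          case (pkg, url) if name startsWith pkg => LinkToExternal(name, url + "#" + name)
        }

      def main(args: Array[String]): Unit = {
        println(findExternalLink("com.example.lib.Widget"))  // Some(LinkToExternal(com.example.lib.Widget, ...))
        println(findExternalLink("org.other.Thing"))         // None
      }
    }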
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
index 5a0cc602e5..89195020c4 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
@@ -113,9 +113,9 @@ trait ModelFactoryImplicitSupport {
conversions = conversions.filter((ic: ImplicitConversionImpl) =>
hardcoded.valueClassFilter(sym.nameString, ic.conversionQualifiedName))
- // Put the class-specific conversions in front
+ // Put the visible conversions in front
val (ownConversions, commonConversions) =
- conversions.partition(!_.isCommonConversion)
+ conversions.partition(!_.isHiddenConversion)
ownConversions ::: commonConversions
}
@@ -176,7 +176,7 @@ trait ModelFactoryImplicitSupport {
val appliedTree = new ApplyImplicitView(viewTree, List(Ident("<argument>") setType viewTree.tpe.paramTypes.head))
val appliedTreeTyped: Tree = {
val newContext = context.makeImplicit(context.ambiguousErrors)
- newContext.macrosEnabled = false // [Eugene] I assume you want macro signature, not macro expansion
+ newContext.macrosEnabled = false
val newTyper = global.analyzer.newTyper(newContext)
newTyper.silent(_.typed(appliedTree, global.analyzer.EXPRmode, WildcardType), false) match {
@@ -416,7 +416,7 @@ trait ModelFactoryImplicitSupport {
lazy val members: List[MemberEntity] = memberImpls
- def isCommonConversion = hardcoded.commonConversionTargets.contains(conversionQualifiedName)
+ def isHiddenConversion = settings.hiddenImplicits(conversionQualifiedName)
override def toString = "Implcit conversion from " + sym.tpe + " to " + toType + " done by " + convSym
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
index 6c80c85efe..c67a398bb7 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
@@ -30,7 +30,7 @@ trait ModelFactoryTypeSupport {
import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass }
import rootMirror.{ RootPackage, RootClass, EmptyPackage }
- protected var typeCache = new mutable.LinkedHashMap[Type, TypeEntity]
+ protected val typeCache = new mutable.LinkedHashMap[Type, TypeEntity]
/** */
def makeType(aType: Type, inTpl: TemplateImpl): TypeEntity = {
@@ -77,7 +77,8 @@ trait ModelFactoryTypeSupport {
// ===> in such cases we have two options:
// (0) if there's no inheritance taking place (Enum#Value) we can link to the template directly
// (1) if we generate the doc template for Day, we can link to the correct member
- // (2) if we don't generate the doc template, we should at least indicate the correct prefix in the tooltip
+ // (2) If the symbol comes from an external library for which we know the documentation URL, point to it.
+ // (3) if we don't generate the doc template, we should at least indicate the correct prefix in the tooltip
val bSym = normalizeTemplate(aSym)
val owner =
if ((preSym != NoSymbol) && /* it needs a prefix */
@@ -87,20 +88,27 @@ trait ModelFactoryTypeSupport {
else
bSym.owner
- val bTpl = findTemplateMaybe(bSym)
val link =
- if (owner == bSym.owner && bTpl.isDefined)
- // (0) the owner's class is linked AND has a template - lovely
- LinkToTpl(bTpl.get)
- else {
- val oTpl = findTemplateMaybe(owner)
- val bMbr = oTpl.map(findMember(bSym, _))
- if (oTpl.isDefined && bMbr.isDefined && bMbr.get.isDefined)
- // (1) the owner's class
- LinkToMember(bMbr.get.get, oTpl.get) //ugh
- else
- // (2) if we still couldn't find the owner, show a tooltip with the qualified name
- Tooltip(makeQualifiedName(bSym))
+ findTemplateMaybe(bSym) match {
+ case Some(bTpl) if owner == bSym.owner =>
+ // (0) the owner's class is linked AND has a template - lovely
+ LinkToTpl(bTpl)
+ case _ =>
+ val oTpl = findTemplateMaybe(owner)
+ (oTpl, oTpl flatMap (findMember(bSym, _))) match {
+ case (Some(oTpl), Some(bMbr)) =>
+ // (1) the owner's class
+ LinkToMember(bMbr, oTpl)
+ case _ =>
+ val name = makeQualifiedName(bSym)
+ if (!bSym.owner.isPackage)
+ Tooltip(name)
+ else
+ findExternalLink(name).getOrElse (
+ // (3) if we could find neither the owner nor an external URL to link to, show a tooltip with the qualified name
+ Tooltip(name)
+ )
+ }
}
// SI-4360 Showing prefixes when necessary
@@ -308,16 +316,8 @@ trait ModelFactoryTypeSupport {
// SI-4360: Entity caching depends on both the type AND the template it's in, as the prefixes might change for the
// same type based on the template the type is shown in.
- if (settings.docNoPrefixes.value) {
- val cached = typeCache.get(aType)
- cached match {
- case Some(typeEntity) =>
- typeEntity
- case None =>
- val typeEntity = createTypeEntity
- typeCache += aType -> typeEntity
- typeEntity
- }
- } else createTypeEntity
+ if (settings.docNoPrefixes.value)
+ typeCache.getOrElseUpdate(aType, createTypeEntity)
+ else createTypeEntity
}
-} \ No newline at end of file
+}
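The typeCache change above replaces the hand-rolled get/put logic with getOrElseUpdate, which computes the entity once per key and reuses it afterwards. A small sketch of that caching idiom; the string "entity" payload is invented purely for the example:

    import scala.collection.mutable

    object TypeCacheSketch {
      val typeCache = new mutable.LinkedHashMap[String, String]

      def createTypeEntity(tpe: String): String = {
        println(s"building entity for $tpe")     // runs only on a cache miss
        tpe.toUpperCase
      }

      def makeType(tpe: String): String =
        typeCache.getOrElseUpdate(tpe, createTypeEntity(tpe))

      def main(args: Array[String]): Unit = {
        makeType("List[Int]")   // miss: builds and stores the entity
        makeType("List[Int]")   // hit: reuses the stored entity, no rebuild
        println(typeCache)
      }
    }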
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
index 59cdf05957..1baa7f9831 100644
--- a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
@@ -13,7 +13,7 @@ import scala.collection._
import scala.util.matching.Regex
import scala.annotation.switch
import scala.reflect.internal.util.{NoPosition, Position}
-import language.postfixOps
+import scala.language.postfixOps
/** The comment parser transforms raw comment strings into `Comment` objects.
* Call `parse` to run the parser. Note that the parser is stateless and
@@ -760,7 +760,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member
}
def link(): Inline = {
- val SchemeUri = """([^:]+:.*)""".r
+ val SchemeUri = """([a-z]+:.*)""".r
jump("[[")
var parens = 1
readUntil { parens += 1; !check("[") }
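Tightening SchemeUri from ([^:]+:.*) to ([a-z]+:.*) means only an all-lowercase scheme followed by ':' is matched as a URL inside [[...]]; everything else falls through to the entity-link path. A small sketch of the classification, with example targets that are assumptions of the sketch:

    object SchemeUriSketch {
      val SchemeUri = """([a-z]+:.*)""".r

      def classify(target: String): String = target match {
        case SchemeUri(uri) => "external URL: " + uri
        case other          => "entity link: " + other
      }

      def main(args: Array[String]): Unit = {
        println(classify("http://docs.scala-lang.org/"))   // external URL
        println(classify("scala.collection.Seq#map"))      // entity link
      }
    }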
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
index 2645d8fd14..db2d0c0175 100644
--- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
@@ -4,7 +4,7 @@ package diagram
import model._
import comment.CommentFactory
-import collection.mutable
+import scala.collection.mutable
// statistics
import html.page.diagram.DiagramStats
@@ -150,7 +150,7 @@ trait DiagramFactory extends DiagramDirectiveParser {
if (nodesShown.isEmpty)
None
else {
- val nodes = nodesAll.filter(nodesShown.contains(_)).map(mapNodes(_))
+ val nodes = nodesAll.filter(nodesShown.contains(_)).flatMap(mapNodes.get(_))
val edges = edgesAll.map(pair => (mapNodes(pair._1), pair._2.map(mapNodes(_)))).filterNot(pair => pair._2.isEmpty)
val diagram =
// TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to change the diagrams.
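The DiagramFactory change above swaps map(mapNodes(_)) for flatMap(mapNodes.get(_)), so shown nodes without an entry in mapNodes are dropped instead of triggering a NoSuchElementException. The same idiom in isolation, with invented node names:

    object NodeLookupSketch {
      def main(args: Array[String]): Unit = {
        val mapNodes   = Map("A" -> 1, "B" -> 2)
        val nodesShown = List("A", "B", "C")             // "C" has no mapped node

        val safe = nodesShown.flatMap(mapNodes.get(_))   // List(1, 2) -- "C" is skipped
        println(safe)

        // The previous formulation would blow up on the unmapped key:
        // nodesShown.map(mapNodes(_))                   // NoSuchElementException: key not found: C
      }
    }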
diff --git a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
index e31c5eda31..7067daec26 100644
--- a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
@@ -13,7 +13,7 @@ import scala.reflect.internal.util.FakePos
import dependencies._
import io.AbstractFile
-import language.implicitConversions
+import scala.language.implicitConversions
trait BuildManager {
diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
index 6acd6d2382..3de2359ce3 100644
--- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
@@ -257,12 +257,18 @@ trait CompilerControl { self: Global =>
*/
def askForResponse[A](op: () => A): Response[A] = {
val r = new Response[A]
- val ir = scheduler askDoQuickly op
- ir onComplete {
- case Left(result) => r set result
- case Right(exc) => r raise exc
+ if (self.onCompilerThread) {
+ try { r set op() }
+ catch { case exc: Throwable => r raise exc }
+ r
+ } else {
+ val ir = scheduler askDoQuickly op
+ ir onComplete {
+ case Left(result) => r set result
+ case Right(exc) => r raise exc
+ }
+ r
}
- r
}
def onCompilerThread = Thread.currentThread == compileRunner
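The askForResponse change above runs the operation inline when the request already comes from the compiler thread, since scheduling onto that same thread and then waiting for the answer would deadlock. A simplified, runnable sketch of that pattern; the Response class and the single-thread executor below are toy stand-ins for the presentation compiler's own types:

    import java.util.concurrent.{Executors, LinkedBlockingQueue}

    object AskSketch {
      class Response[A] {
        private val q = new LinkedBlockingQueue[Either[A, Throwable]](1)
        def set(a: A): Unit = q.put(Left(a))
        def raise(t: Throwable): Unit = q.put(Right(t))
        def get: Either[A, Throwable] = q.take()
      }

      private val worker = Executors.newSingleThreadExecutor()
      @volatile private var workerThread: Thread = null
      worker.execute(new Runnable { def run() = workerThread = Thread.currentThread })

      def onWorkerThread = Thread.currentThread == workerThread

      def askForResponse[A](op: () => A): Response[A] = {
        val r = new Response[A]
        if (onWorkerThread) {
          // already on the worker: run directly, never enqueue behind ourselves
          try { r set op() } catch { case exc: Throwable => r raise exc }
        } else worker.execute(new Runnable {
          def run() = try { r set op() } catch { case exc: Throwable => r raise exc }
        })
        r
      }

      def main(args: Array[String]): Unit = {
        // The nested ask runs on the worker, detects that, and executes inline,
        // so the single-threaded scheduler does not deadlock on itself.
        val outer = askForResponse { () => askForResponse(() => 21).get.left.get * 2 }
        println(outer.get)   // Left(42)
        worker.shutdown()
      }
    }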
diff --git a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala b/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala
index 4a60211254..a906d1454c 100644
--- a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala
+++ b/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala
@@ -5,7 +5,7 @@
package scala.tools.nsc
package interactive
-import collection.mutable.ArrayBuffer
+import scala.collection.mutable.ArrayBuffer
import scala.reflect.internal.util.Position
trait ContextTrees { self: Global =>
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
index cb2e7d641b..5514983d98 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Global.scala
@@ -20,7 +20,7 @@ import scala.tools.nsc.io.Pickler._
import scala.tools.nsc.typechecker.DivergentImplicit
import scala.annotation.tailrec
import symtab.Flags.{ACCESSOR, PARAMACCESSOR}
-import language.implicitConversions
+import scala.language.implicitConversions
/** The main class of the presentation compiler in an interactive environment such as an IDE
*/
diff --git a/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala b/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala
index f57786578a..104a69897d 100644
--- a/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala
+++ b/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala
@@ -5,7 +5,7 @@
package scala.tools.nsc
package interactive
-import collection.mutable.ArrayBuffer
+import scala.collection.mutable.ArrayBuffer
import scala.reflect.internal.util.Position
import reporters.Reporter
diff --git a/src/compiler/scala/tools/nsc/interactive/Picklers.scala b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
index f4ec03bb47..8ed7a67058 100644
--- a/src/compiler/scala/tools/nsc/interactive/Picklers.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
@@ -13,7 +13,7 @@ import util.EmptyAction
import scala.reflect.internal.util.{Position, RangePosition, NoPosition, OffsetPosition, TransparentPosition}
import io.{Pickler, CondPickler}
import io.Pickler._
-import collection.mutable
+import scala.collection.mutable
import mutable.ListBuffer
trait Picklers { self: Global =>
diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/compiler/scala/tools/nsc/interactive/REPL.scala
index 2d93c77ca4..afac5828e5 100644
--- a/src/compiler/scala/tools/nsc/interactive/REPL.scala
+++ b/src/compiler/scala/tools/nsc/interactive/REPL.scala
@@ -133,50 +133,6 @@ object REPL {
iSourceName
}
- /** Compile instrumented source file
- * @param iSourceName The name of the instrumented source file
- * @param arguments Further argumenrs to pass to the compiler
- * @return Optionallu, if no -d option is given, the virtual directory
- * contained the generated bytecode classes
- def compileInstrumented(iSourceName: String, arguments: List[String]): Option[AbstractFile] = {
- println("compiling "+iSourceName)
- val command = new CompilerCommand(iSourceName :: arguments, reporter.error(scala.reflect.internal.util.NoPosition, _))
- val virtualDirectoryOpt =
- if (arguments contains "-d")
- None
- else {
- val vdir = new VirtualDirectory("(memory)", None)
- command.settings.outputDirs setSingleOutput vdir
- Some(vdir)
- }
- val compiler = new scala.tools.nsc.Global(command.settings, reporter)
- val run = new compiler.Run()
- println("compiling: "+command.files)
- run compile command.files
- virtualDirectoryOpt
- }
-
- /** Run instrumented bytecode file
- * @param vdir Optionally, the virtual directory containing the generated bytecode classes
- * @param iFullName The full name of the generated object
- * @param stripped The contents original source file without any right hand column comments.
- * @return The generated file content containing original source in the left column
- * and outputs in the right column
- */
- def runInstrumented(vdirOpt: Option[AbstractFile], iFullName: String, stripped: Array[Char]): Array[Char] = {
- val defaultClassLoader = getClass.getClassLoader
- val classLoader = vdirOpt match {
- case Some(vdir) => new AbstractFileClassLoader(vdir, defaultClassLoader)
- case None => defaultClassLoader
- }
- println("running "+iFullName)
- val si = new SourceInserter(stripped)
- Executor.execute(iFullName, si, classLoader)
- println("done")
- si.currentContents
- }
- */
-
/** The method for implementing worksheet functionality.
* @param arguments a file name, followed by optional command line arguments that are passed
* to the compiler that processes the instrumented source.
@@ -191,7 +147,7 @@ object REPL {
// strip right hand side comment column and any trailing spaces from all lines
val strippedContents = SourceInserter.stripRight(source.content)
val strippedSource = new BatchSourceFile(source.file, strippedContents)
- println("stripped source = "+strippedSource)
+ println("stripped source = "+strippedSource+":"+strippedContents.mkString)
comp.askReload(List(strippedSource), reloadResult)
comp.askInstrumented(strippedSource, line, instrumentedResult)
using(instrumentedResult) {
diff --git a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
index de6974cbb2..40982c62f0 100644
--- a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
@@ -220,7 +220,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
}
/** Return the set of source files that are invalidated by the given changes. */
- def invalidated(files: Set[AbstractFile], changesOf: collection.Map[Symbol, List[Change]],
+ def invalidated(files: Set[AbstractFile], changesOf: scala.collection.Map[Symbol, List[Change]],
processed: Set[AbstractFile] = Set.empty):
Set[AbstractFile] = {
val buf = new mutable.HashSet[AbstractFile]
diff --git a/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala b/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala
index 0f52258b7e..ca5bdd632f 100644
--- a/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala
@@ -6,7 +6,7 @@ package scala.tools.nsc
package interactive
import scala.reflect.internal.util.{SourceFile, Position, NoPosition}
-import collection.mutable.ArrayBuffer
+import scala.collection.mutable.ArrayBuffer
trait RichCompilationUnits { self: Global =>
diff --git a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala b/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala
index efc393c812..7d332d9f7f 100644
--- a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala
+++ b/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala
@@ -2,8 +2,9 @@ package scala.tools.nsc
package interactive
import scala.reflect.internal.util.{SourceFile, BatchSourceFile, RangePosition}
-import collection.mutable.ArrayBuffer
-import reflect.internal.Chars.{isLineBreakChar, isWhitespace}
+import scala.collection.mutable.ArrayBuffer
+import scala.reflect.internal.Chars.{isLineBreakChar, isWhitespace}
+import ast.parser.Tokens._
trait ScratchPadMaker { self: Global =>
@@ -11,7 +12,7 @@ trait ScratchPadMaker { self: Global =>
private case class Patch(offset: Int, text: String)
- private class Patcher(contents: Array[Char], endOffset: Int) extends Traverser {
+ private class Patcher(contents: Array[Char], lex: LexicalStructure, endOffset: Int) extends Traverser {
var objectName: String = ""
private val patches = new ArrayBuffer[Patch]
@@ -24,13 +25,17 @@ trait ScratchPadMaker { self: Global =>
"res$"+resNum
}
- private def nameType(name: String, tpe: Type): String = name+": "+tpe
+ private def nameType(name: String, tpe: Type): String = {
+ // if name ends in symbol character, add a space to separate it from the following ':'
+ val pad = if (Character.isLetter(name.last) || Character.isDigit(name.last)) "" else " "
+ name+pad+": "+tpe
+ }
- private def nameType(sym: Symbol): String = nameType(sym.name.toString, sym.tpe)
+ private def nameType(sym: Symbol): String = nameType(sym.name.decoded, sym.tpe)
private def literal(str: String) = "\"\"\""+str+"\"\"\""
- private val prologue = "import scala.runtime.WorksheetSupport._; def main(args: Array[String])=$execute{"
+ private val prologue = ";import scala.runtime.WorksheetSupport._; def main(args: Array[String])=$execute{"
private val epilogue = "}"
@@ -42,19 +47,19 @@ trait ScratchPadMaker { self: Global =>
/** The position where to insert an instrumentation statement in front of a given statement.
* This is at the latest `stat.pos.start`. But in order not to mess with column numbers
- * in position we try to insert it at the end of the preceding line instead.
- * To be safe, this can be done only if there's only whitespace between that position and
- * statement's start position.
+ * in position we try to insert it at the end of the previous token instead.
+ * Furthermore, `(' tokens have to be skipped because they do not show up
+ * in statement range positions.
*/
- private def instrumentPos(stat: Tree): Int = {
- var start = stat.pos.start
- while (start > 0 && isWhitespace(contents(start - 1))) start -= 1
- if (start > 0 && isLineBreakChar(contents(start - 1))) start -= 1
- start
+ private def instrumentPos(start: Int): Int = {
+ val (prevToken, prevStart, prevEnd) = lex.locate(start - 1)
+ if (prevStart >= start) start
+ else if (prevToken == LPAREN) instrumentPos(prevStart)
+ else prevEnd
}
private def addSkip(stat: Tree): Unit = {
- val ipos = instrumentPos(stat)
+ val ipos = instrumentPos(stat.pos.start)
if (stat.pos.start > skipped) applyPendingPatches(ipos)
if (stat.pos.start >= endOffset)
patches += Patch(ipos, ";$stop()")
@@ -98,7 +103,8 @@ trait ScratchPadMaker { self: Global =>
} else {
val resName = nextRes()
val dispResName = resName filter ('$' != _)
- patches += Patch(stat.pos.start, "val " + resName + " = ")
+ val offset = instrumentPos(stat.pos.start)
+ patches += Patch(offset, "val " + resName + " = ")
addSandbox(stat)
toPrint += resultString(nameType(dispResName, stat.tpe), resName)
}
@@ -111,11 +117,14 @@ trait ScratchPadMaker { self: Global =>
super.traverse(tree)
case ModuleDef(_, name, Template(_, _, body)) =>
val topLevel = objectName.isEmpty
- if (topLevel) objectName = tree.symbol.fullName
- body foreach traverseStat
- applyPendingPatches(skipped)
- if (topLevel)
- patches += Patch(skipped, epilogue)
+ if (topLevel) {
+ objectName = tree.symbol.fullName
+ body foreach traverseStat
+ if (skipped != 0) { // don't issue prologue and epilogue if there are no instrumented statements
+ applyPendingPatches(skipped)
+ patches += Patch(skipped, epilogue)
+ }
+ }
case _ =>
}
@@ -144,6 +153,33 @@ trait ScratchPadMaker { self: Global =>
}
}
+ class LexicalStructure(source: SourceFile) {
+ val token = new ArrayBuffer[Int]
+ val startOffset = new ArrayBuffer[Int]
+ val endOffset = new ArrayBuffer[Int]
+ private val scanner = new syntaxAnalyzer.UnitScanner(new CompilationUnit(source))
+ scanner.init()
+ while (scanner.token != EOF) {
+ startOffset += scanner.offset
+ token += scanner.token
+ scanner.nextToken
+ endOffset += scanner.lastOffset
+ }
+
+ /** @return token that starts before or at offset, its startOffset, its endOffset
+ */
+ def locate(offset: Int): (Int, Int, Int) = {
+ var lo = 0
+ var hi = token.length - 1
+ while (lo < hi) {
+ val mid = (lo + hi + 1) / 2
+ if (startOffset(mid) <= offset) lo = mid
+ else hi = mid - 1
+ }
+ (token(lo), startOffset(lo), endOffset(lo))
+ }
+ }
+
/** Compute an instrumented version of a sourcefile.
* @param source The given sourcefile.
* @param line The line up to which results should be printed, -1 = whole document.
@@ -156,7 +192,7 @@ trait ScratchPadMaker { self: Global =>
protected def instrument(source: SourceFile, line: Int): (String, Array[Char]) = {
val tree = typedTree(source, true)
val endOffset = if (line < 0) source.length else source.lineToOffset(line + 1)
- val patcher = new Patcher(source.content, endOffset)
+ val patcher = new Patcher(source.content, new LexicalStructure(source), endOffset)
patcher.traverse(tree)
(patcher.objectName, patcher.result)
}
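LexicalStructure.locate above is a binary search over token start offsets for the last token that starts at or before a given offset, which is what lets instrumentPos back up to the end of the previous token. A standalone check of the same search on an invented token table:

    object LocateSketch {
      // (token id, start offset, end offset), sorted by start offset
      val tokens = Vector((1, 0, 3), (2, 4, 5), (3, 6, 11))

      def locate(offset: Int): (Int, Int, Int) = {
        var lo = 0
        var hi = tokens.length - 1
        while (lo < hi) {
          val mid = (lo + hi + 1) / 2            // round up so lo can still advance
          if (tokens(mid)._2 <= offset) lo = mid
          else hi = mid - 1
        }
        tokens(lo)
      }

      def main(args: Array[String]): Unit = {
        println(locate(5))   // (2,4,5): the token starting at 4 is the last one starting <= 5
        println(locate(6))   // (3,6,11)
      }
    }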
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
index deeb398d39..cb46c0fdca 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
@@ -15,7 +15,7 @@ import scala.annotation.migration
import scala.reflect.internal.util.Position
import scala.reflect.internal.util.SourceFile
-import collection.mutable.ListBuffer
+import scala.collection.mutable.ListBuffer
/** A base class for writing interactive compiler tests.
*
@@ -127,4 +127,4 @@ abstract class InteractiveTest
// the presentation compiler
sys.exit(0)
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala b/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala
index 5270b1971a..aadffe2da5 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala
@@ -9,7 +9,7 @@ package tests
import scala.reflect.internal.util._
import reporters._
import io.AbstractFile
-import collection.mutable.ArrayBuffer
+import scala.collection.mutable.ArrayBuffer
class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) {
@@ -168,7 +168,7 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) {
}
case class ErrorTrace(
- sfidx: Int, changes: Seq[Change], infos: collection.Set[reporter.Info], content: Array[Char]) {
+ sfidx: Int, changes: Seq[Change], infos: scala.collection.Set[reporter.Info], content: Array[Char]) {
override def toString =
"Sourcefile: "+inputs(sfidx)+
"\nChanges:\n "+changes.mkString("\n ")+
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
index 18a8eb5fc3..b5ae5f2d75 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
@@ -36,7 +36,7 @@ trait PresentationCompilerRequestsWorkingMode extends TestResources {
/** Return all positions of the given str in the given source file. */
private def positionsOf(source: SourceFile, str: String): Seq[Position] = {
- val buf = new collection.mutable.ListBuffer[Position]
+ val buf = new scala.collection.mutable.ListBuffer[Position]
var pos = source.content.indexOfSlice(str)
while (pos >= 0) {
buf += source.position(pos - 1) // we need the position before the first character of this marker
@@ -44,7 +44,7 @@ trait PresentationCompilerRequestsWorkingMode extends TestResources {
}
buf.toList
}
-
+
private def withResponse[T](pos: Position, response: Response[T])(f: (Position, T) => Unit) {
/** Return the filename:line:col version of this position. */
def showPos(pos: Position): String =
@@ -59,4 +59,4 @@ trait PresentationCompilerRequestsWorkingMode extends TestResources {
println("ERROR: " + r)
}
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala
index b5ea6ab7ce..ba1722382b 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala
@@ -3,7 +3,7 @@ package scala.tools.nsc.interactive.tests.core
case class DuplicateTestMarker(msg: String) extends Exception(msg)
object TestMarker {
- import collection.mutable.Map
+ import scala.collection.mutable.Map
private val markers: Map[String, TestMarker] = Map.empty
private def checkForDuplicate(marker: TestMarker) {
@@ -24,4 +24,4 @@ object CompletionMarker extends TestMarker("/*!*/")
object TypeMarker extends TestMarker("/*?*/")
-object HyperlinkMarker extends TestMarker("/*#*/") \ No newline at end of file
+object HyperlinkMarker extends TestMarker("/*#*/")
diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
index abf326e746..5475410a84 100644
--- a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
@@ -77,7 +77,7 @@ object CompletionAware {
/** Convenience factories.
*/
def apply(terms: () => List[String]): CompletionAware = apply(terms, _ => None)
- def apply(map: collection.Map[String, CompletionAware]): CompletionAware =
+ def apply(map: scala.collection.Map[String, CompletionAware]): CompletionAware =
apply(() => map.keys.toList, map.get _)
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
index f49e8d6b59..0f5777d260 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
@@ -37,7 +37,7 @@ trait ExprTyper {
}
/** Parse a line into a sequence of trees. Returns None if the input is incomplete. */
- def parse(line: String): Option[List[Tree]] = {
+ def parse(line: String): Option[List[Tree]] = debugging(s"""parse("$line")""") {
var isIncomplete = false
reporter.withIncompleteHandler((_, _) => isIncomplete = true) {
val trees = codeParser.stmts(line)
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
index 4cc58d3f25..864f9bd073 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
@@ -23,7 +23,7 @@ import scala.reflect.NameTransformer._
import util.ScalaClassLoader
import ScalaClassLoader._
import scala.tools.util._
-import language.{implicitConversions, existentials}
+import scala.language.{implicitConversions, existentials}
import scala.reflect.{ClassTag, classTag}
import scala.tools.reflect.StdRuntimeTags._
@@ -839,7 +839,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
addThunk({
import scala.tools.nsc.io._
import Properties.userHome
- import compat.Platform.EOL
+ import scala.compat.Platform.EOL
val autorun = replProps.replAutorunCode.option flatMap (f => io.File(f).safeSlurp())
if (autorun.isDefined) intp.quietRun(autorun.get)
})
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala b/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala
index 2c16a754cb..4a4a900e2b 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala
@@ -8,6 +8,7 @@ package interpreter
import scala.reflect.internal.util.Position
import scala.util.control.Exception.ignoring
+import scala.tools.nsc.util.stackTraceString
/**
* Machinery for the asynchronous initialization of the repl.
@@ -94,9 +95,7 @@ trait ILoopInit {
runThunks()
} catch {
case ex: Throwable =>
- val message = new java.io.StringWriter()
- ex.printStackTrace(new java.io.PrintWriter(message))
- initError = message.toString
+ initError = stackTraceString(ex)
throw ex
} finally {
initIsComplete = true
diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
index 6eadc1e63b..e7c56718f7 100644
--- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
@@ -25,7 +25,7 @@ import scala.util.control.Exception.{ ultimately }
import IMain._
import java.util.concurrent.Future
import typechecker.Analyzer
-import language.implicitConversions
+import scala.language.implicitConversions
import scala.reflect.runtime.{ universe => ru }
import scala.reflect.{ ClassTag, classTag }
import scala.tools.reflect.StdRuntimeTags._
@@ -145,7 +145,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
private def _initSources = List(new BatchSourceFile("<init>", "class $repl_$init { }"))
private def _initialize() = {
try {
- // [Eugene] todo. if this crashes, REPL will hang
+ // todo. if this crashes, REPL will hang
new _compiler.Run() compileSources _initSources
_initializeComplete = true
true
@@ -387,8 +387,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
newSym <- req.definedSymbols get name
oldSym <- oldReq.definedSymbols get name.companionName
} {
- replwarn("warning: previously defined %s is not a companion to %s.".format(
- stripString("" + oldSym), stripString("" + newSym)))
+ afterTyper(replwarn(s"warning: previously defined $oldSym is not a companion to $newSym."))
replwarn("Companions must be defined together; you may wish to use :paste mode for this.")
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Imports.scala b/src/compiler/scala/tools/nsc/interpreter/Imports.scala
index 5193166889..14d43bc6d5 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Imports.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Imports.scala
@@ -23,7 +23,7 @@ trait Imports {
val hd :: tl = sym.fullName.split('.').toList map newTermName
val tree = Import(
tl.foldLeft(Ident(hd): Tree)((x, y) => Select(x, y)),
- List(ImportSelector(nme.WILDCARD, -1, null, -1))
+ ImportSelector.wildList
)
tree setSymbol sym
new ImportHandler(tree)
@@ -192,4 +192,4 @@ trait Imports {
private def membersAtPickler(sym: Symbol): List[Symbol] =
beforePickler(sym.info.nonPrivateMembers.toList)
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
index edb95f7526..bab3a1e506 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
@@ -9,7 +9,7 @@ package interpreter
import scala.tools.jline._
import scala.tools.jline.console.completer._
import Completion._
-import collection.mutable.ListBuffer
+import scala.collection.mutable.ListBuffer
// REPL completor - queries supplied interpreter for valid
// completions based on current contents of buffer.
diff --git a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala b/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala
index 2dc394a081..f0e643d572 100644
--- a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala
@@ -6,12 +6,12 @@
package scala.tools.nsc
package interpreter
-import collection.{ mutable, immutable }
+import scala.collection.{ mutable, immutable }
import mutable.ListBuffer
-import language.implicitConversions
+import scala.language.implicitConversions
class ProcessResult(val line: String) {
- import sys.process._
+ import scala.sys.process._
private val buffer = new ListBuffer[String]
val builder = Process(line)
diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
index c041f02859..bf7204c754 100644
--- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
@@ -10,7 +10,7 @@ import scala.collection.{ mutable, immutable }
import scala.PartialFunction.cond
import scala.reflect.internal.Chars
import scala.reflect.internal.Flags._
-import language.implicitConversions
+import scala.language.implicitConversions
trait MemberHandlers {
val intp: IMain
diff --git a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala b/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala
index a2b42aeefc..3203e2ba49 100644
--- a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala
@@ -7,7 +7,7 @@ package scala.tools.nsc
package interpreter
import NamedParam._
-import language.implicitConversions
+import scala.language.implicitConversions
import scala.reflect.runtime.{universe => ru}
import scala.reflect.{ClassTag, classTag}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Phased.scala b/src/compiler/scala/tools/nsc/interpreter/Phased.scala
index 9124eace05..66d748a9f1 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Phased.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Phased.scala
@@ -7,7 +7,7 @@ package scala.tools.nsc
package interpreter
import scala.collection.{ mutable, immutable }
-import language.implicitConversions
+import scala.language.implicitConversions
/** Mix this into an object and use it as a phasing
* swiss army knife.
diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/compiler/scala/tools/nsc/interpreter/Power.scala
index 4ba0c59112..244c04bdf4 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Power.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Power.scala
@@ -13,7 +13,7 @@ import session.{ History }
import scala.io.Codec
import java.net.{ URL, MalformedURLException }
import io.{ Path }
-import language.implicitConversions
+import scala.language.implicitConversions
import scala.reflect.runtime.{universe => ru}
import scala.reflect.{ClassTag, classTag}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
index 5b8e4c3d92..9503c7d970 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
@@ -6,8 +6,8 @@
package scala.tools.nsc
package interpreter
-import language.implicitConversions
-import scala.reflect.base.{Universe => BaseUniverse}
+import scala.language.implicitConversions
+import scala.reflect.api.{Universe => ApiUniverse}
import scala.reflect.runtime.{universe => ru}
/** A class which the repl utilizes to expose predefined objects.
@@ -65,7 +65,7 @@ object ReplVals {
* I have this forwarder which widens the type and then cast the result back
* to the dependent type.
*/
- def compilerTypeFromTag(t: BaseUniverse # AbsTypeTag[_]): Global#Type =
+ def compilerTypeFromTag(t: ApiUniverse # WeakTypeTag[_]): Global#Type =
definitions.compilerTypeFromTag(t)
class AppliedTypeFromTags(sym: Symbol) {
diff --git a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala
index d6604499b4..5642566cf7 100644
--- a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala
@@ -14,7 +14,7 @@ import scala.reflect.runtime.{universe => ru}
import scala.reflect.{ClassTag, classTag}
import typechecker.DestructureTypes
import scala.reflect.internal.util.StringOps.ojoin
-import language.implicitConversions
+import scala.language.implicitConversions
/** A more principled system for turning types into strings.
*/
@@ -213,7 +213,7 @@ trait TypeStrings {
private def tparamString[T: ru.TypeTag] : String = {
def typeArguments: List[ru.Type] = ru.typeOf[T] match { case ru.TypeRef(_, _, args) => args; case _ => Nil }
- // [Eugene++] todo. need to use not the `rootMirror`, but a mirror with the REPL's classloader
+ // [Eugene to Paul] need to use not the `rootMirror`, but a mirror with the REPL's classloader
// how do I get to it? acquiring context classloader seems unreliable because of multithreading
def typeVariables: List[java.lang.Class[_]] = typeArguments map (targ => ru.rootMirror.runtimeClass(targ))
brackets(typeArguments map (jc => tvarString(List(jc))): _*)
@@ -256,4 +256,4 @@ trait TypeStrings {
)
}
-object TypeStrings extends TypeStrings { } \ No newline at end of file
+object TypeStrings extends TypeStrings { }
diff --git a/src/compiler/scala/tools/nsc/interpreter/package.scala b/src/compiler/scala/tools/nsc/interpreter/package.scala
index 98129aded8..6a3a2a38ae 100644
--- a/src/compiler/scala/tools/nsc/interpreter/package.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/package.scala
@@ -5,7 +5,7 @@
package scala.tools.nsc
-import language.implicitConversions
+import scala.language.implicitConversions
/** The main REPL related classes and values are as follows.
* In addition to standard compiler classes Global and Settings, there are:
@@ -35,10 +35,10 @@ package object interpreter extends ReplConfig with ReplStrings {
val IR = Results
- implicit def postfixOps = language.postfixOps // make all postfix ops in this package compile without warning
+ implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
private[interpreter] implicit def javaCharSeqCollectionToScala(xs: JCollection[_ <: CharSequence]): List[String] = {
- import collection.JavaConverters._
+ import scala.collection.JavaConverters._
xs.asScala.toList map ("" + _)
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/package.scala b/src/compiler/scala/tools/nsc/interpreter/session/package.scala
index c1cd599941..58232e6b9a 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/package.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/session/package.scala
@@ -5,7 +5,7 @@
package scala.tools.nsc
package interpreter
-import language.implicitConversions
+import scala.language.implicitConversions
/** Files having to do with the state of a repl session:
* lines of text entered, types and terms defined, etc.
diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala
index 12ba3e4bd7..f66f3daa32 100644
--- a/src/compiler/scala/tools/nsc/io/Jar.scala
+++ b/src/compiler/scala/tools/nsc/io/Jar.scala
@@ -8,10 +8,10 @@ package io
import java.io.{ InputStream, OutputStream, IOException, FileNotFoundException, FileInputStream, DataOutputStream }
import java.util.jar._
-import collection.JavaConverters._
+import scala.collection.JavaConverters._
import Attributes.Name
import util.ClassPath
-import language.implicitConversions
+import scala.language.implicitConversions
// Attributes.Name instances:
//
diff --git a/src/compiler/scala/tools/nsc/io/Pickler.scala b/src/compiler/scala/tools/nsc/io/Pickler.scala
index 48361cd157..b03a921e87 100644
--- a/src/compiler/scala/tools/nsc/io/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/io/Pickler.scala
@@ -1,10 +1,10 @@
package scala.tools.nsc.io
-import annotation.unchecked
+import scala.annotation.unchecked
import Lexer._
import java.io.Writer
-import language.implicitConversions
-import reflect.ClassTag
+import scala.language.implicitConversions
+import scala.reflect.ClassTag
/** An abstract class for writing and reading Scala objects to and
 * from a legible representation. The representation follows the following grammar:
diff --git a/src/compiler/scala/tools/nsc/io/package.scala b/src/compiler/scala/tools/nsc/io/package.scala
index 3c4f004198..ae83a7728b 100644
--- a/src/compiler/scala/tools/nsc/io/package.scala
+++ b/src/compiler/scala/tools/nsc/io/package.scala
@@ -8,10 +8,27 @@ package scala.tools.nsc
import java.util.concurrent.{ Future, Callable }
import java.util.{ Timer, TimerTask }
import java.util.jar.{ Attributes }
-import language.implicitConversions
+import scala.language.implicitConversions
package object io {
- implicit def postfixOps = language.postfixOps // make all postfix ops in this package compile without warning
+ // Forwarders from scala.reflect.io
+ type AbstractFile = scala.reflect.io.AbstractFile
+ val AbstractFile = scala.reflect.io.AbstractFile
+ type Directory = scala.reflect.io.Directory
+ val Directory = scala.reflect.io.Directory
+ type File = scala.reflect.io.File
+ val File = scala.reflect.io.File
+ type Path = scala.reflect.io.Path
+ val Path = scala.reflect.io.Path
+ type PlainFile = scala.reflect.io.PlainFile
+ val PlainFile = scala.reflect.io.PlainFile
+ val Streamable = scala.reflect.io.Streamable
+ type VirtualDirectory = scala.reflect.io.VirtualDirectory
+ type VirtualFile = scala.reflect.io.VirtualFile
+ val ZipArchive = scala.reflect.io.ZipArchive
+ type ZipArchive = scala.reflect.io.ZipArchive
+
+ implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
type JManifest = java.util.jar.Manifest
type JFile = java.io.File
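A hedged usage sketch of the new forwarders (illustrative only, not part of the patch; assumes the compiler and reflect jars are on the classpath): existing call sites keep importing from scala.tools.nsc.io while the implementations now live in scala.reflect.io.

    import scala.tools.nsc.io.{ Directory, File, Path }  // aliases for scala.reflect.io.*

    // `listScalaFiles` is a made-up helper showing the forwarded types in use.
    def listScalaFiles(root: Path): List[File] =
      root.toDirectory.files.filter(_.extension == "scala").toList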
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index d1c404b3e3..a30ae1cb36 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -12,7 +12,7 @@ import scala.reflect.internal.util.OffsetPosition
import scala.collection.mutable.ListBuffer
import symtab.Flags
import JavaTokens._
-import language.implicitConversions
+import scala.language.implicitConversions
trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val global : Global
@@ -551,7 +551,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
if (parentToken == AT && in.token == DEFAULT) {
val annot =
atPos(pos) {
- New(Select(scalaDot(nme.runtime), tpnme.AnnotationDefaultATTR), List(List()))
+ New(Select(scalaDot(nme.runtime), tpnme.AnnotationDefaultATTR), ListOfNil)
}
mods1 = mods1 withAnnotations List(annot)
skipTo(SEMI)
@@ -640,7 +640,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def importCompanionObject(cdef: ClassDef): Tree =
atPos(cdef.pos) {
- Import(Ident(cdef.name.toTermName), List(ImportSelector(nme.WILDCARD, -1, null, -1)))
+ Import(Ident(cdef.name.toTermName), ImportSelector.wildList)
}
// Importing the companion object members cannot be done uncritically: see
@@ -841,7 +841,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val predefs = List(
DefDef(
Modifiers(Flags.JAVA | Flags.STATIC), nme.values, List(),
- List(List()),
+ ListOfNil,
arrayOf(enumType),
blankExpr),
DefDef(
diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
index 0367119547..7aeae485d0 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
@@ -11,7 +11,7 @@ import scala.reflect.internal.util._
import scala.reflect.internal.Chars._
import JavaTokens._
import scala.annotation.switch
-import language.implicitConversions
+import scala.language.implicitConversions
// Todo merge these better with Scanners
trait JavaScanners extends ast.parser.ScannersCommon {
diff --git a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
index 6d8c80d1d0..be8f1e3d9e 100644
--- a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
+++ b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
@@ -9,8 +9,8 @@ package matching
import transform.ExplicitOuter
import ast.{ Printers, Trees }
import java.io.{ StringWriter, PrintWriter }
-import annotation.elidable
-import language.postfixOps
+import scala.annotation.elidable
+import scala.language.postfixOps
/** Ancillary bits of ParallelMatching which are better off
* out of the way.
diff --git a/src/compiler/scala/tools/nsc/matching/Matrix.scala b/src/compiler/scala/tools/nsc/matching/Matrix.scala
index 1cf4bccb40..93e936fe1f 100644
--- a/src/compiler/scala/tools/nsc/matching/Matrix.scala
+++ b/src/compiler/scala/tools/nsc/matching/Matrix.scala
@@ -9,7 +9,7 @@ package matching
import transform.ExplicitOuter
import symtab.Flags
import scala.collection.mutable
-import language.implicitConversions
+import scala.language.implicitConversions
trait Matrix extends MatrixAdditions {
self: ExplicitOuter with ParallelMatching =>
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
index 71deb2d356..1d21e4952f 100644
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
@@ -13,8 +13,8 @@ import scala.reflect.internal.util.Position
import transform.ExplicitOuter
import symtab.Flags
import mutable.ListBuffer
-import annotation.elidable
-import language.postfixOps
+import scala.annotation.elidable
+import scala.language.postfixOps
trait ParallelMatching extends ast.TreeDSL
with MatchSupport
diff --git a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
index 8e043613b8..ee96f15f40 100644
--- a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
+++ b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
@@ -8,7 +8,7 @@ package matching
import transform.ExplicitOuter
import PartialFunction._
-import language.postfixOps
+import scala.language.postfixOps
trait PatternBindings extends ast.TreeDSL
{
diff --git a/src/compiler/scala/tools/nsc/package.scala b/src/compiler/scala/tools/nsc/package.scala
index 9ad0d9ba1f..9d593e5acc 100644
--- a/src/compiler/scala/tools/nsc/package.scala
+++ b/src/compiler/scala/tools/nsc/package.scala
@@ -14,4 +14,6 @@ package object nsc {
type MissingRequirementError = scala.reflect.internal.MissingRequirementError
val MissingRequirementError = scala.reflect.internal.MissingRequirementError
+
+ val ListOfNil = List(Nil)
}
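The new ListOfNil constant names an idiom used repeatedly in this patch (see JavaParsers above and Constructors below): a single empty parameter/argument list, which is not the same as having no parameter lists at all. A tiny standalone illustration (not part of the patch):

    val ListOfNil = List(Nil)

    // `def f(): Int` has one empty parameter list; `val x: Int` has none.
    val oneEmptyParamList: List[List[String]] = ListOfNil   // List(List())
    val noParamLists:      List[List[String]] = Nil

    assert(oneEmptyParamList != noParamLists)
    assert(oneEmptyParamList.flatten == noParamLists.flatten)  // both flatten to Nil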
diff --git a/src/compiler/scala/tools/nsc/scratchpad/CommentOutputStream.scala b/src/compiler/scala/tools/nsc/scratchpad/CommentOutputStream.scala
deleted file mode 100644
index 92ccd79df9..0000000000
--- a/src/compiler/scala/tools/nsc/scratchpad/CommentOutputStream.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-package scala.tools.nsc.scratchpad
-
-import java.io.OutputStream
-
-class CommentOutputStream(out: CommentWriter, encoding: String = "") extends OutputStream {
-
- override def write(bs: Array[Byte]) =
- out.write(if (encoding.isEmpty) new String(bs) else new String(bs, encoding))
-
- override def write(bs: Array[Byte], off: Int, len: Int) =
- out.write(if (encoding.isEmpty) new String(bs, off, len) else new String(bs, off, len, encoding))
-
- override def write(ch: Int) =
- write(Array(ch.toByte))
-
- override def close() = out.close()
- override def flush() = out.flush()
-}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/scratchpad/CommentWriter.scala b/src/compiler/scala/tools/nsc/scratchpad/CommentWriter.scala
deleted file mode 100644
index eb8880e437..0000000000
--- a/src/compiler/scala/tools/nsc/scratchpad/CommentWriter.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-package scala.tools.nsc.scratchpad
-
-import java.io.Writer
-import reflect.internal.Chars._
-
-
-class CommentWriter(underlying: SourceInserter, startCol: Int = 40, endCol: Int = 152) extends Writer {
-
- private def rightCol(marker: String) = {
- while (underlying.column < startCol) underlying.write(' ')
- underlying.write(marker)
- }
-
- private var lastWasNL = false
-
- private def writeChar(ch: Char) = {
- if (underlying.column >= endCol) {
- underlying.write('\n'); rightCol("//| ")
- }
- if (underlying.column < startCol) rightCol("//> ")
- underlying.write(ch)
- lastWasNL = isLineBreakChar(ch)
- }
-
- override def write(chs: Array[Char], off: Int, len: Int) = {
- for (i <- off until off + len) writeChar(chs(i))
- flush()
- }
-
- def skip(len: Int) {
- if (lastWasNL) {
- underlying.backspace()
- lastWasNL = false
- }
- underlying.skip(len)
- if (underlying.column >= startCol) underlying.write('\n')
- }
-
- override def close() = underlying.close()
- override def flush() = underlying.flush()
-}
-
diff --git a/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala b/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala
index 67ff916b11..f7ad39bd95 100644
--- a/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala
+++ b/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala
@@ -5,7 +5,7 @@ import java.io.{FileInputStream, InputStreamReader, IOException}
import scala.runtime.ScalaRunTime.stringOf
import java.lang.reflect.InvocationTargetException
import scala.reflect.runtime.ReflectionUtils._
-import collection.mutable.ArrayBuffer
+import scala.collection.mutable.ArrayBuffer
class Mixer {
diff --git a/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala b/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala
index 42a35dc642..5eeab53fca 100644
--- a/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala
+++ b/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala
@@ -3,8 +3,7 @@ package scratchpad
import java.io.Writer
import scala.reflect.internal.util.SourceFile
-
-import reflect.internal.Chars._
+import scala.reflect.internal.Chars._
object SourceInserter {
def stripRight(cs: Array[Char]): Array[Char] = {
@@ -21,92 +20,3 @@ object SourceInserter {
(prefixes mkString "\n").toArray
}
}
-class SourceInserter(contents: Array[Char], start: Int = 0, tabInc: Int = 8) extends Writer {
-
- private var buf = contents
- private var offset = start
- private var hilen = contents.length
-
- def length = offset + hilen
-
- private def currentColumn: Int = {
- var i = offset
- while (i > 0 && !isLineBreakChar(buf(i - 1))) i -= 1
- var col = 0
- while (i < offset) {
- col = if (buf(i) == '\t') (col + tabInc) / tabInc * tabInc else col + 1
- i += 1
- }
- col
- }
-
- private var col = currentColumn
-
- def column = synchronized { col }
-
- private def addCapacity(n: Int) = {
- val newlength = length + n
- while (newlength > buf.length) {
- val buf1 = Array.ofDim[Char](buf.length * 2)
- Array.copy(buf, 0, buf1, 0, offset)
- Array.copy(buf, buf.length - hilen, buf1, buf1.length - hilen, hilen)
- buf = buf1
- }
- }
-
- private def insertChar(ch: Char) = {
-// Console.err.print("["+ch+"]")
- buf(offset) = ch
- offset += 1
- ch match {
- case LF => col = 0
- case '\t' => col = (col + tabInc) / tabInc * tabInc
- case _ => col += 1
- }
- }
-
- override def write(ch: Int) = synchronized {
- addCapacity(1)
- insertChar(ch.toChar)
- }
-
- override def write(chs: Array[Char], off: Int, len: Int) = synchronized {
- addCapacity(len)
- for (i <- off until off + len) insertChar(chs(i))
- }
-
- override def close() {
- }
-
- override def flush() {
- // signal buffer change
- }
-
- def currentContents = synchronized {
- if (length == buf.length) buf
- else {
- val res = Array.ofDim[Char](length)
- Array.copy(buf, 0, res, 0, offset)
- Array.copy(buf, buf.length - hilen, res, offset, hilen)
- res
- }
- }
-
- def backspace() = synchronized {
- offset -= 1
- if (offset > 0 && buf(offset) == LF && buf(offset - 1) == CR) offset -=1
- }
-
- def currentChar = synchronized {
- buf(buf.length - hilen)
- }
-
- def skip(len: Int) = synchronized {
- for (i <- 0 until len) {
- val ch = currentChar
- hilen -= 1
- insertChar(ch)
- }
- }
-}
-
diff --git a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
index 120ada965a..78b56a8596 100644
--- a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
@@ -15,7 +15,7 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
type Setting <: AbsSetting // Fix to the concrete Setting type
type ResultOfTryToSet // List[String] in mutable, (Settings, List[String]) in immutable
def errorFn: String => Unit
- protected def allSettings: collection.Set[Setting]
+ protected def allSettings: scala.collection.Set[Setting]
// settings minus internal usage settings
def visibleSettings = allSettings filterNot (_.isInternalOnly)
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index 116eed0f31..2ff81ae603 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -536,7 +536,7 @@ class MutableSettings(val errorFn: String => Unit)
}
override def tryToSetColon(args: List[String]) = tryToSet(args)
override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList)
- def unparse: List[String] = name :: value
+ def unparse: List[String] = value map (name + ":" + _)
withHelpSyntax(name + ":<" + arg + ">")
}
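A small sketch of what the unparse change means for a multi-valued setting (the -Xplugin name and values are illustrative only, not taken from the patch):

    val name  = "-Xplugin"
    val value = List("a.jar", "b.jar")

    val oldUnparse = name :: value                // List("-Xplugin", "a.jar", "b.jar")
    val newUnparse = value map (name + ":" + _)   // List("-Xplugin:a.jar", "-Xplugin:b.jar")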
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 19392ec23a..3ff7af791b 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -8,10 +8,10 @@ package scala.tools
package nsc
package settings
-import annotation.elidable
+import scala.annotation.elidable
import scala.tools.util.PathResolver.Defaults
import scala.collection.mutable
-import language.{implicitConversions, existentials}
+import scala.language.{implicitConversions, existentials}
trait ScalaSettings extends AbsScalaSettings
with StandardScalaSettings
diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala
index 16f8685a87..72284cc940 100644
--- a/src/compiler/scala/tools/nsc/settings/Warnings.scala
+++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala
@@ -18,6 +18,7 @@ trait Warnings {
// These warnings are all so noisy as to be useless in their
// present form, but have the potential to offer useful info.
protected def allWarnings = lintWarnings ++ List(
+ warnDeadCode,
warnSelectNullable,
warnValueDiscard,
warnNumericWiden
@@ -25,7 +26,7 @@ trait Warnings {
// These warnings should be pretty quiet unless you're doing
// something inadvisable.
protected def lintWarnings = List(
- warnDeadCode,
+ // warnDeadCode,
warnInaccessible,
warnNullaryOverride,
warnNullaryUnit,
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 213a995e96..369b6aa77d 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -10,9 +10,9 @@ import java.io.IOException
import scala.compat.Platform.currentTime
import scala.tools.nsc.util.{ ClassPath }
import classfile.ClassfileParser
-import reflect.internal.Flags._
-import reflect.internal.MissingRequirementError
-import reflect.internal.util.Statistics
+import scala.reflect.internal.Flags._
+import scala.reflect.internal.MissingRequirementError
+import scala.reflect.internal.util.Statistics
import scala.tools.nsc.io.{ AbstractFile, MsilFile }
/** This class ...
@@ -219,7 +219,7 @@ abstract class SymbolLoaders {
/**
* Load contents of a package
*/
- class PackageLoader(classpath: ClassPath[platform.BinaryRepr]) extends SymbolLoader {
+ class PackageLoader(classpath: ClassPath[platform.BinaryRepr]) extends SymbolLoader with FlagAgnosticCompleter {
protected def description = "package loader "+ classpath.name
protected def doComplete(root: Symbol) {
@@ -242,7 +242,7 @@ abstract class SymbolLoaders {
}
}
- class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader {
+ class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader with FlagAssigningCompleter {
private object classfileParser extends ClassfileParser {
val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
}
@@ -250,7 +250,7 @@ abstract class SymbolLoaders {
protected def description = "class file "+ classfile.toString
protected def doComplete(root: Symbol) {
- val start = Statistics.startTimer(classReadNanos)
+ val start = if (Statistics.canEnable) Statistics.startTimer(classReadNanos) else null
classfileParser.parse(classfile, root)
if (root.associatedFile eq null) {
root match {
@@ -262,12 +262,12 @@ abstract class SymbolLoaders {
debuglog("Not setting associatedFile to %s because %s is a %s".format(classfile, root.name, root.shortSymbolClass))
}
}
- Statistics.stopTimer(classReadNanos, start)
+ if (Statistics.canEnable) Statistics.stopTimer(classReadNanos, start)
}
override def sourcefile: Option[AbstractFile] = classfileParser.srcfile
}
- class MsilFileLoader(msilFile: MsilFile) extends SymbolLoader {
+ class MsilFileLoader(msilFile: MsilFile) extends SymbolLoader with FlagAssigningCompleter {
private def typ = msilFile.msilType
private object typeParser extends clr.TypeParser {
val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
@@ -277,14 +277,14 @@ abstract class SymbolLoaders {
protected def doComplete(root: Symbol) { typeParser.parse(typ, root) }
}
- class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader {
+ class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader with FlagAssigningCompleter {
protected def description = "source file "+ srcfile.toString
override def fromSource = true
override def sourcefile = Some(srcfile)
protected def doComplete(root: Symbol): Unit = global.currentRun.compileLate(srcfile)
}
- object moduleClassLoader extends SymbolLoader {
+ object moduleClassLoader extends SymbolLoader with FlagAssigningCompleter {
protected def description = "module class loader"
protected def doComplete(root: Symbol) { root.sourceModule.initialize }
}
@@ -300,6 +300,6 @@ abstract class SymbolLoaders {
}
object SymbolLoadersStats {
- import reflect.internal.TypesStats.typerNanos
+ import scala.reflect.internal.TypesStats.typerNanos
val classReadNanos = Statistics.newSubTimer ("time classfilereading", typerNanos)
}
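The Statistics.canEnable guards above skip timer bookkeeping entirely when statistics are disabled. A self-contained toy version of the pattern (the Stats object below only mirrors the shape of the compiler's Statistics API; it is not that API):

    object Stats {
      val canEnable = sys.props contains "stats"   // assumed opt-in switch, e.g. -Dstats

      def startTimer(): Long = System.nanoTime
      def stopTimer(label: String, start: Long): Unit =
        println(s"$label took ${(System.nanoTime - start) / 1000} us")
    }

    def timedWork[A](label: String)(body: => A): A = {
      val start = if (Stats.canEnable) Stats.startTimer() else 0L
      try body
      finally if (Stats.canEnable) Stats.stopTimer(label, start)
    }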
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
index c6bd236e8a..7e2741f6bc 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
@@ -6,4 +6,4 @@
package scala.tools.nsc
package symtab
-abstract class SymbolTable extends reflect.internal.SymbolTable
\ No newline at end of file
+abstract class SymbolTable extends scala.reflect.internal.SymbolTable
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
index c596eb014a..d9d25bf95a 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
@@ -7,8 +7,8 @@ package scala.tools.nsc
package symtab
import scala.collection.{ mutable, immutable }
-import language.implicitConversions
-import language.postfixOps
+import scala.language.implicitConversions
+import scala.language.postfixOps
/** Printing the symbol graph (for those symbols attached to an AST node)
* after each phase.
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 32c9bff21b..8fd8dfaf83 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -844,7 +844,7 @@ abstract class ClassfileParser {
GenPolyType(ownTypeParams, tpe)
} // sigToType
- class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType {
+ class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter {
override def complete(sym: Symbol) { throw new AssertionError("cyclic type dereferencing") }
}
@@ -869,7 +869,7 @@ abstract class ClassfileParser {
}
else in.skip(attrLen)
case tpnme.SyntheticATTR =>
- sym.setFlag(SYNTHETIC | HIDDEN)
+ sym.setFlag(SYNTHETIC | ARTIFACT)
in.skip(attrLen)
case tpnme.BridgeATTR =>
sym.setFlag(BRIDGE)
@@ -1164,7 +1164,7 @@ abstract class ClassfileParser {
originalName + " in " + outerName + "(" + externalName +")"
}
- object innerClasses extends collection.mutable.HashMap[Name, InnerClassEntry] {
+ object innerClasses extends scala.collection.mutable.HashMap[Name, InnerClassEntry] {
/** Return the Symbol of the top level class enclosing `name`,
* or 'name's symbol if no entry found for `name`.
*/
@@ -1228,7 +1228,7 @@ abstract class ClassfileParser {
}
}
- class LazyAliasType(alias: Symbol) extends LazyType {
+ class LazyAliasType(alias: Symbol) extends LazyType with FlagAgnosticCompleter {
override def complete(sym: Symbol) {
sym setInfo createFromClonedSymbols(alias.initialize.typeParams, alias.tpe)(typeFun)
}
@@ -1273,7 +1273,7 @@ abstract class ClassfileParser {
sym.privateWithin = sym.enclosingTopLevelClass.owner
}
- @inline private def isPrivate(flags: Int) = (flags & JAVA_ACC_PRIVATE) != 0
- @inline private def isStatic(flags: Int) = (flags & JAVA_ACC_STATIC) != 0
- @inline private def hasAnnotation(flags: Int) = (flags & JAVA_ACC_ANNOTATION) != 0
+ private def isPrivate(flags: Int) = (flags & JAVA_ACC_PRIVATE) != 0
+ private def isStatic(flags: Int) = (flags & JAVA_ACC_STATIC) != 0
+ private def hasAnnotation(flags: Int) = (flags & JAVA_ACC_ANNOTATION) != 0
}
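The now-uninlined helpers above test JVM access-flag bits. A standalone sketch using the standard class-file constants (bit values from the JVM specification; names mirror the JAVA_ACC_* constants used above, illustrative only):

    val JAVA_ACC_PRIVATE    = 0x0002
    val JAVA_ACC_STATIC     = 0x0008
    val JAVA_ACC_ANNOTATION = 0x2000

    def isPrivate(flags: Int)     = (flags & JAVA_ACC_PRIVATE) != 0
    def isStatic(flags: Int)      = (flags & JAVA_ACC_STATIC) != 0
    def hasAnnotation(flags: Int) = (flags & JAVA_ACC_ANNOTATION) != 0

    // isStatic(0x0009)  == true   (ACC_PUBLIC | ACC_STATIC)
    // isPrivate(0x0009) == false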
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index 437a5e1434..175c322786 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -37,7 +37,7 @@ abstract class ICodeReader extends ClassfileParser {
cls.info // ensure accurate type information
isScalaModule = cls.isModule && !cls.isJavaDefined
- log("Reading class: " + cls + " isScalaModule?: " + isScalaModule)
+ log("ICodeReader reading " + cls)
val name = cls.javaClassName
classPath.findSourceFile(name) match {
@@ -99,11 +99,9 @@ abstract class ICodeReader extends ClassfileParser {
if (sym == NoSymbol)
sym = owner.info.findMember(newTermName(name + nme.LOCAL_SUFFIX_STRING), 0, 0, false).suchThat(_.tpe =:= tpe)
if (sym == NoSymbol) {
- log("Could not find symbol for " + name + ": " + tpe)
- log(owner.info.member(name).tpe + " : " + tpe)
sym = if (field) owner.newValue(name, owner.pos, toScalaFieldFlags(jflags)) else dummySym
sym setInfoAndEnter tpe
- log("added " + sym + ": " + sym.tpe)
+ log(s"ICodeReader could not locate ${name.decode} in $owner. Created ${sym.defString}.")
}
(jflags, sym)
}
@@ -172,10 +170,7 @@ abstract class ICodeReader extends ClassfileParser {
}
else if (nme.isModuleName(name)) {
val strippedName = nme.stripModuleSuffix(name)
- val sym = forceMangledName(newTermName(strippedName.decode), true)
-
- if (sym == NoSymbol) rootMirror.getModule(strippedName)
- else sym
+ forceMangledName(newTermName(strippedName.decode), true) orElse rootMirror.getModule(strippedName)
}
else {
forceMangledName(name, false)
@@ -956,7 +951,7 @@ abstract class ICodeReader extends ClassfileParser {
case None =>
checkValidIndex
val l = freshLocal(idx, kind, false)
- log("Added new local for idx " + idx + ": " + kind)
+ debuglog("Added new local for idx " + idx + ": " + kind)
locals += (idx -> List((l, kind)))
l
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index 3c34cf1c80..29b238c4cb 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -69,7 +69,11 @@ abstract class Pickler extends SubComponent {
}
if (!t.isDef && t.hasSymbol && t.symbol.isTermMacro) {
- unit.error(t.pos, "macro has not been expanded")
+ unit.error(t.pos, t.symbol.typeParams.length match {
+ case 0 => "macro has not been expanded"
+ case 1 => "type parameter not specified"
+ case _ => "type parameters not specified"
+ })
return
}
}
@@ -512,7 +516,7 @@ abstract class Pickler extends SubComponent {
private def writeName(name: Name) {
ensureCapacity(name.length * 3)
val utfBytes = Codec toUTF8 name.toString
- compat.Platform.arraycopy(utfBytes, 0, bytes, writeIndex, utfBytes.length)
+ scala.compat.Platform.arraycopy(utfBytes, 0, bytes, writeIndex, utfBytes.length)
writeIndex += utfBytes.length
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/package.scala b/src/compiler/scala/tools/nsc/symtab/classfile/package.scala
index fe66c515de..1f9a823bb4 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/package.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/package.scala
@@ -2,6 +2,6 @@ package scala.tools.nsc.symtab
package object classfile {
- val ClassfileConstants = reflect.internal.ClassfileConstants
+ val ClassfileConstants = scala.reflect.internal.ClassfileConstants
}
diff --git a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
index 5e52415ab2..1d2ffd2a73 100644
--- a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
@@ -12,7 +12,7 @@ import ch.epfl.lamp.compiler.msil.{Type => MSILType, Attribute => MSILAttribute,
import scala.collection.{ mutable, immutable }
import scala.reflect.internal.pickling.UnPickler
import ch.epfl.lamp.compiler.msil.Type.TMVarUsage
-import language.implicitConversions
+import scala.language.implicitConversions
/**
* @author Nikolay Mihaylov
@@ -64,7 +64,7 @@ abstract class TypeParser {
busy = false
}
- class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType {
+ class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter {
override def complete(sym: Symbol) { throw new AssertionError("cyclic type dereferencing") }
}
diff --git a/src/compiler/scala/tools/nsc/symtab/package.scala b/src/compiler/scala/tools/nsc/symtab/package.scala
index 1cf0d2c2ae..0e6719f225 100644
--- a/src/compiler/scala/tools/nsc/symtab/package.scala
+++ b/src/compiler/scala/tools/nsc/symtab/package.scala
@@ -2,6 +2,6 @@ package scala.tools.nsc
package object symtab {
- val Flags = reflect.internal.Flags
+ val Flags = scala.reflect.internal.Flags
}
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index a8cdee7154..18db1e6ab4 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -9,7 +9,7 @@ package transform
import symtab._
import Flags._
import scala.collection.{ mutable, immutable }
-import collection.mutable.ListBuffer
+import scala.collection.mutable.ListBuffer
abstract class AddInterfaces extends InfoTransform { self: Erasure =>
import global._ // the global environment
@@ -79,12 +79,11 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
// -optimise and not otherwise, but the classpath can use arbitrary
// logic so the classpath must be queried.
if (classPath.context.isValidName(implName + ".class")) {
- log("unlinking impl class " + implSym)
iface.owner.info.decls unlink implSym
NoSymbol
}
else {
- log("not unlinking existing " + implSym + " as the impl class is not visible on the classpath.")
+ log(s"not unlinking $iface's existing implClass ${implSym.name} because it is not on the classpath.")
implSym
}
}
@@ -113,9 +112,10 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
iface.info
implClassMap.getOrElse(iface, atPhase(implClassPhase) {
- log("Creating implClass for " + iface)
- if (iface.implClass ne NoSymbol)
- log("%s.implClass already exists: %s".format(iface, iface.implClass))
+ if (iface.implClass eq NoSymbol)
+ debuglog(s"${iface.fullLocationString} has no implClass yet, creating it now.")
+ else
+ log(s"${iface.fullLocationString} impl class is ${iface.implClass.nameString}")
newImplClass(iface)
})
@@ -132,12 +132,12 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
* - for every interface member of iface: its implementation method, if one is needed
* - every former member of iface that is implementation only
*/
- private class LazyImplClassType(iface: Symbol) extends LazyType {
+ private class LazyImplClassType(iface: Symbol) extends LazyType with FlagAgnosticCompleter {
/** Compute the decls of implementation class implClass,
* given the decls ifaceDecls of its interface.
*/
private def implDecls(implClass: Symbol, ifaceDecls: Scope): Scope = {
- log("LazyImplClassType calculating decls for " + implClass)
+ debuglog("LazyImplClassType calculating decls for " + implClass)
val decls = newScope
if ((ifaceDecls lookup nme.MIXIN_CONSTRUCTOR) == NoSymbol) {
@@ -152,16 +152,16 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
for (sym <- ifaceDecls) {
if (isInterfaceMember(sym)) {
if (needsImplMethod(sym)) {
- log("Cloning " + sym + " for implementation method in " + implClass)
val clone = sym.cloneSymbol(implClass).resetFlag(lateDEFERRED)
if (currentRun.compiles(implClass)) implMethodMap(sym) = clone
decls enter clone
sym setFlag lateDEFERRED
+ if (!sym.isSpecialized)
+ log(s"Cloned ${sym.name} from ${sym.owner} into implClass ${implClass.fullName}")
}
- else log(sym + " needs no implementation method in " + implClass)
}
else {
- log("Destructively modifying owner of %s from %s to %s".format(sym, sym.owner, implClass))
+ log(s"Destructively modifying owner of $sym from ${sym.owner} to $implClass")
sym.owner = implClass
// note: OK to destructively modify the owner here,
// because symbol will not be accessible from outside the sourcefile.
@@ -174,7 +174,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
}
override def complete(implSym: Symbol) {
- log("LazyImplClassType completing " + implSym)
+ debuglog("LazyImplClassType completing " + implSym)
/** If `tp` refers to a non-interface trait, return a
* reference to its implementation class. Otherwise return `tp`.
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index 108c5ced6f..fa7a53f888 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyrights 2005-2011 LAMP/EPFL
+ * Copyright 2005-2012 LAMP/EPFL
* @author Martin Odersky
*/
@@ -9,7 +9,7 @@ package transform
import symtab._
import Flags._
import scala.collection._
-import language.postfixOps
+import scala.language.postfixOps
abstract class CleanUp extends Transform with ast.TreeDSL {
import global._
@@ -23,12 +23,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
new CleanUpTransformer(unit)
class CleanUpTransformer(unit: CompilationUnit) extends Transformer {
- private val newStaticMembers = mutable.Buffer.empty[Tree]
- private val newStaticInits = mutable.Buffer.empty[Tree]
- private val symbolsStoredAsStatic = mutable.Map.empty[String, Symbol]
- private val staticBodies = mutable.Map.empty[(Symbol, Symbol), Tree]
- private val syntheticClasses = mutable.Map.empty[Symbol, mutable.Set[Tree]] // package and trees
- private val classNames = mutable.Map.empty[Symbol, Set[Name]]
+ private val newStaticMembers = mutable.Buffer.empty[Tree]
+ private val newStaticInits = mutable.Buffer.empty[Tree]
+ private val symbolsStoredAsStatic = mutable.Map.empty[String, Symbol]
private def clearStatics() {
newStaticMembers.clear()
newStaticInits.clear()
@@ -48,16 +45,15 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
result
}
private def transformTemplate(tree: Tree) = {
- val t @ Template(parents, self, body) = tree
+ val Template(parents, self, body) = tree
clearStatics()
-
val newBody = transformTrees(body)
val templ = deriveTemplate(tree)(_ => transformTrees(newStaticMembers.toList) ::: newBody)
try addStaticInits(templ) // postprocess to include static ctors
finally clearStatics()
}
private def mkTerm(prefix: String): TermName = unit.freshTermName(prefix)
-
+
//private val classConstantMeth = new HashMap[String, Symbol]
//private val symbolStaticFields = new HashMap[String, (Symbol, Tree, Tree)]
@@ -91,40 +87,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
*/
def toBoxedType(tp: Type) = if (isJavaValueType(tp)) boxedClass(tp.typeSymbol).tpe else tp
- override def transform(tree: Tree): Tree = tree match {
-
- /* Transforms dynamic calls (i.e. calls to methods that are undefined
- * in the erased type space) to -- dynamically -- unsafe calls using
- * reflection. This is used for structural sub-typing of refinement
- * types, but may be used for other dynamic calls in the future.
- * For 'a.f(b)' it will generate something like:
- * 'a.getClass().
- * ' getMethod("f", Array(classOf[b.type])).
- * ' invoke(a, Array(b))
- * plus all the necessary casting/boxing/etc. machinery required
- * for type-compatibility (see fixResult).
- *
- * USAGE CONTRACT:
- * There are a number of assumptions made on the way a dynamic apply
- * is used. Assumptions relative to type are handled by the erasure
- * phase.
- * - The applied arguments are compatible with AnyRef, which means
- * that an argument tree typed as AnyVal has already been extended
- * with the necessary boxing calls. This implies that passed
- * arguments might not be strictly compatible with the method's
- * parameter types (a boxed integer while int is expected).
- * - The expected return type is an AnyRef, even when the method's
- * return type is an AnyVal. This means that the tree containing the
- * call has already been extended with the necessary unboxing calls
- * (or is happy with the boxed type).
- * - The type-checker has prevented dynamic applies on methods which
- * parameter's erased types are not statically known at the call site.
- * This is necessary to allow dispatching the call to the correct
- * method (dispatching on parameters is static in Scala). In practice,
- * this limitation only arises when the called method is defined as a
- * refinement, where the refinement defines a parameter based on a
- * type variable. */
- case ad@ApplyDynamic(qual0, params) =>
+ def transformApplyDynamic(ad: ApplyDynamic) = {
+ val qual0 = ad.qual
+ val params = ad.args
if (settings.logReflectiveCalls.value)
unit.echo(ad.pos, "method invocation uses reflection")
@@ -382,8 +347,8 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/** Normal non-Array call */
def genDefaultCall = {
// reflective method call machinery
- val invokeName = MethodClass.tpe member nme.invoke_ // reflect.Method.invoke(...)
- def cache = REF(reflectiveMethodCache(ad.symbol.name.toString, paramTypes)) // cache Symbol
+ val invokeName = MethodClass.tpe member nme.invoke_ // scala.reflect.Method.invoke(...)
+ def cache = REF(reflectiveMethodCache(ad.symbol.name.toString, paramTypes)) // cache Symbol
def lookup = Apply(cache, List(qual1() GETCLASS)) // get Method object from cache
def invokeArgs = ArrayValue(TypeTree(ObjectClass.tpe), params) // args for invocation
def invocation = (lookup DOT invokeName)(qual1(), invokeArgs) // .invoke(qual1, ...)
@@ -516,6 +481,44 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
transform(t)
}
/* ### END OF DYNAMIC APPLY TRANSFORM ### */
+ }
+
+ override def transform(tree: Tree): Tree = tree match {
+
+ /* Transforms dynamic calls (i.e. calls to methods that are undefined
+ * in the erased type space) to -- dynamically -- unsafe calls using
+ * reflection. This is used for structural sub-typing of refinement
+ * types, but may be used for other dynamic calls in the future.
+ * For 'a.f(b)' it will generate something like:
+ * 'a.getClass().
+ * ' getMethod("f", Array(classOf[b.type])).
+ * ' invoke(a, Array(b))
+ * plus all the necessary casting/boxing/etc. machinery required
+ * for type-compatibility (see fixResult).
+ *
+ * USAGE CONTRACT:
+ * There are a number of assumptions made on the way a dynamic apply
+ * is used. Assumptions relative to type are handled by the erasure
+ * phase.
+ * - The applied arguments are compatible with AnyRef, which means
+ * that an argument tree typed as AnyVal has already been extended
+ * with the necessary boxing calls. This implies that passed
+ * arguments might not be strictly compatible with the method's
+ * parameter types (a boxed integer while int is expected).
+ * - The expected return type is an AnyRef, even when the method's
+ * return type is an AnyVal. This means that the tree containing the
+ * call has already been extended with the necessary unboxing calls
+ * (or is happy with the boxed type).
+ * - The type-checker has prevented dynamic applies on methods which
+ * parameter's erased types are not statically known at the call site.
+ * This is necessary to allow dispatching the call to the correct
+ * method (dispatching on parameters is static in Scala). In practice,
+ * this limitation only arises when the called method is defined as a
+ * refinement, where the refinement defines a parameter based on a
+ * type variable. */
+
+ case tree: ApplyDynamic =>
+ transformApplyDynamic(tree)
/* Some cleanup transformations add members to templates (classes, traits, etc).
* When inside a template (i.e. the body of one of its members), two maps
@@ -542,78 +545,13 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
else tree
}
-
- case ValDef(mods, name, tpt, rhs) if tree.symbol.hasStaticAnnotation =>
- log("moving @static valdef field: " + name + ", in: " + tree.symbol.owner)
- val sym = tree.symbol
- val owner = sym.owner
-
- val staticBeforeLifting = atPhase(currentRun.erasurePhase) { owner.isStatic }
- val isPrivate = atPhase(currentRun.typerPhase) { sym.getter(owner).hasFlag(PRIVATE) }
- val isProtected = atPhase(currentRun.typerPhase) { sym.getter(owner).hasFlag(PROTECTED) }
- val isLazy = atPhase(currentRun.typerPhase) { sym.getter(owner).hasFlag(LAZY) }
- if (!owner.isModuleClass || !staticBeforeLifting) {
- if (!sym.isSynthetic) {
- reporter.error(tree.pos, "Only members of top-level objects and their nested objects can be annotated with @static.")
- tree.symbol.removeAnnotation(StaticClass)
- }
- super.transform(tree)
- } else if (isPrivate || isProtected) {
- reporter.error(tree.pos, "The @static annotation is only allowed on public members.")
- tree.symbol.removeAnnotation(StaticClass)
- super.transform(tree)
- } else if (isLazy) {
- reporter.error(tree.pos, "The @static annotation is not allowed on lazy members.")
- tree.symbol.removeAnnotation(StaticClass)
- super.transform(tree)
- } else if (owner.isModuleClass) {
- val linkedClass = owner.companionClass match {
- case NoSymbol =>
- // create the companion class if it does not exist
- val enclosing = owner.owner
- val compclass = enclosing.newClass(newTypeName(owner.name.toString))
- compclass setInfo ClassInfoType(List(ObjectClass.tpe), newScope, compclass)
- enclosing.info.decls enter compclass
-
- val compclstree = ClassDef(compclass, NoMods, List(List()), List(List()), List(), tree.pos)
-
- syntheticClasses.getOrElseUpdate(enclosing, mutable.Set()) += compclstree
-
- compclass
- case comp => comp
- }
-
- // create a static field in the companion class for this @static field
- val stfieldSym = linkedClass.newVariable(newTermName(name), tree.pos, STATIC | SYNTHETIC | FINAL) setInfo sym.tpe
- stfieldSym.addAnnotation(StaticClass)
-
- val names = classNames.getOrElseUpdate(linkedClass, linkedClass.info.decls.collect {
- case sym if sym.name.isTermName => sym.name
- } toSet)
- if (names(stfieldSym.name)) {
- reporter.error(
- tree.pos,
- "@static annotated field " + tree.symbol.name + " has the same name as a member of class " + linkedClass.name
- )
- } else {
- linkedClass.info.decls enter stfieldSym
-
- val initializerBody = rhs
-
- // static field was previously initialized in the companion object itself, like this:
- // staticBodies((linkedClass, stfieldSym)) = Select(This(owner), sym.getter(owner))
- // instead, we move the initializer to the static ctor of the companion class
- // we save the entire ValDef/DefDef to extract the rhs later
- staticBodies((linkedClass, stfieldSym)) = tree
- }
- }
- super.transform(tree)
-
+
/* MSIL requires that the stack is empty at the end of a try-block.
* Hence, we here rewrite all try blocks with a result != {Unit, All} such that they
* store their result in a local variable. The catch blocks are adjusted as well.
 * The try tree is substituted by a block whose result expression is read from that variable. */
case theTry @ Try(block, catches, finalizer) if shouldRewriteTry(theTry) =>
+ def transformTry = {
val tpe = theTry.tpe.widen
val tempVar = currentOwner.newVariable(mkTerm(nme.EXCEPTION_RESULT_PREFIX), theTry.pos).setInfo(tpe)
def assignBlock(rhs: Tree) = super.transform(BLOCK(Ident(tempVar) === transform(rhs)))
@@ -624,7 +562,8 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
val newTry = Try(newBlock, newCatches, super.transform(finalizer))
typedWithPos(theTry.pos)(BLOCK(VAL(tempVar) === EmptyTree, newTry, Ident(tempVar)))
-
+ }
+ transformTry
/*
* This transformation should identify Scala symbol invocations in the tree and replace them
* with references to a static member. Also, whenever a class has at least a single symbol invocation
@@ -657,12 +596,15 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* have little in common.
*/
case Apply(fn, (arg @ Literal(Constant(symname: String))) :: Nil) if fn.symbol == Symbol_apply =>
+ def transformApply = {
// add the symbol name to a map if it's not there already
val rhs = gen.mkMethodCall(Symbol_apply, arg :: Nil)
val staticFieldSym = getSymbolStaticField(tree.pos, symname, rhs, tree)
// create a reference to a static field
val ntree = typedWithPos(tree.pos)(REF(staticFieldSym))
super.transform(ntree)
+ }
+ transformApply
// This transform replaces Array(Predef.wrapArray(Array(...)), <tag>)
// with just Array(...)
@@ -718,11 +660,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
if (newStaticInits.isEmpty)
template
else {
- val ctorBody = newStaticInits.toList flatMap {
- case Block(stats, expr) => stats :+ expr
- case t => List(t)
- }
-
val newCtor = findStaticCtor(template) match {
// in case there already were static ctors - augment existing ones
// currently, however, static ctors aren't being generated anywhere else
@@ -731,76 +668,22 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
deriveDefDef(ctor) {
case block @ Block(stats, expr) =>
// need to add inits to existing block
- treeCopy.Block(block, ctorBody ::: stats, expr)
+ treeCopy.Block(block, newStaticInits.toList ::: stats, expr)
case term: TermTree =>
// need to create a new block with inits and the old term
- treeCopy.Block(term, ctorBody, term)
+ treeCopy.Block(term, newStaticInits.toList, term)
}
case _ =>
// create new static ctor
val staticCtorSym = currentClass.newStaticConstructor(template.pos)
- val rhs = Block(ctorBody, Literal(Constant(())))
+ val rhs = Block(newStaticInits.toList, Literal(Constant(())))
localTyper.typedPos(template.pos)(DefDef(staticCtorSym, rhs))
}
deriveTemplate(template)(newCtor :: _)
}
}
-
- private def addStaticDeclarations(tree: Template, clazz: Symbol) {
- // add static field initializer statements for each static field in clazz
- if (!clazz.isModuleClass) for {
- staticSym <- clazz.info.decls
- if staticSym.hasStaticAnnotation
- } staticSym match {
- case stfieldSym if stfieldSym.isVariable =>
- val valdef = staticBodies((clazz, stfieldSym))
- val ValDef(_, _, _, rhs) = valdef
- val fixedrhs = rhs.changeOwner((valdef.symbol, clazz.info.decl(nme.CONSTRUCTOR)))
-
- val stfieldDef = localTyper.typedPos(tree.pos)(VAL(stfieldSym) === EmptyTree)
- val flattenedInit = fixedrhs match {
- case Block(stats, expr) => Block(stats, REF(stfieldSym) === expr)
- case rhs => REF(stfieldSym) === rhs
- }
- val stfieldInit = localTyper.typedPos(tree.pos)(flattenedInit)
-
- // add field definition to new defs
- newStaticMembers append stfieldDef
- newStaticInits append stfieldInit
- }
- }
-
-
-
- override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
- super.transformStats(stats, exprOwner) ++ {
- // flush pending synthetic classes created in this owner
- val synthclassdefs = syntheticClasses.get(exprOwner).toList.flatten
- syntheticClasses -= exprOwner
- synthclassdefs map {
- cdef => localTyper.typedPos(cdef.pos)(cdef)
- }
- } map {
- case clsdef @ ClassDef(mods, name, tparams, t @ Template(parent, self, body)) =>
- // process all classes in the package again to add static initializers
- clearStatics()
-
- addStaticDeclarations(t, clsdef.symbol)
-
- val templ = deriveTemplate(t)(_ => transformTrees(newStaticMembers.toList) ::: body)
- val ntempl =
- try addStaticInits(templ)
- finally clearStatics()
-
- val derived = deriveClassDef(clsdef)(_ => ntempl)
- classNames.remove(clsdef.symbol)
- derived
-
- case stat => stat
- }
- }
-
+
} // CleanUpTransformer
}
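The USAGE CONTRACT comment kept above describes what an ApplyDynamic call reduces to. A hedged, plain-reflection illustration of that shape for `a.f(b)` where `f` statically takes a String (ordinary java.lang.reflect usage, not the transformer's generated tree, which additionally caches the Method object):

    def reflectiveF(a: AnyRef, b: String): AnyRef = {
      // look the method up by its static, erased parameter type ...
      val m = a.getClass.getMethod("f", classOf[String])
      // ... and invoke it; arguments and result travel boxed, per the contract above
      m.invoke(a, b)
    }

    class Greeter { def f(s: String): String = s"hello, $s" }
    // reflectiveF(new Greeter, "world")  ~> "hello, world"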
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index f2f4a44b02..23b15a9033 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -186,15 +186,12 @@ abstract class Constructors extends Transform with ast.TreeDSL {
// before the superclass constructor call, otherwise it goes after.
// Lazy vals don't get the assignment in the constructor.
if (!stat.symbol.tpe.isInstanceOf[ConstantType]) {
- if (stat.symbol.hasStaticAnnotation) {
- debuglog("@static annotated field initialization skipped.")
- defBuf += deriveValDef(stat)(tree => tree)
- } else if (rhs != EmptyTree && !stat.symbol.isLazy) {
+ if (rhs != EmptyTree && !stat.symbol.isLazy) {
val rhs1 = intoConstructor(stat.symbol, rhs);
(if (canBeMoved(stat)) constrPrefixBuf else constrStatBuf) += mkAssign(
stat.symbol, rhs1)
- defBuf += deriveValDef(stat)(_ => EmptyTree)
}
+ defBuf += deriveValDef(stat)(_ => EmptyTree)
}
case ClassDef(_, _, _, _) =>
// classes are treated recursively, and left in the template
@@ -506,14 +503,14 @@ abstract class Constructors extends Transform with ast.TreeDSL {
val applyMethodDef = DefDef(
sym = applyMethod,
- vparamss = List(List()),
+ vparamss = ListOfNil,
rhs = Block(applyMethodStats, gen.mkAttributedRef(BoxedUnit_UNIT)))
ClassDef(
sym = closureClass,
constrMods = Modifiers(0),
vparamss = List(List(outerFieldDef)),
- argss = List(List()),
+ argss = ListOfNil,
body = List(applyMethodDef),
superPos = impl.pos)
}
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index eb3c965d7f..3ac7dd2a8f 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -12,7 +12,7 @@ import symtab._
import Flags._
abstract class Erasure extends AddInterfaces
- with reflect.internal.transform.Erasure
+ with scala.reflect.internal.transform.Erasure
with typechecker.Analyzer
with TypingTransformers
with ast.TreeDSL
@@ -167,6 +167,8 @@ abstract class Erasure extends AddInterfaces
case tp => tp :: Nil
}
+ private def isErasedValueType(tpe: Type) = tpe.isInstanceOf[ErasedValueType]
+
/** The Java signature of type 'info', for symbol sym. The symbol is used to give the right return
* type for constructors.
*/
@@ -223,6 +225,24 @@ abstract class Erasure extends AddInterfaces
} else {
boxedSig(tp)
}
+ def classSig = {
+ val preRebound = pre.baseType(sym.owner) // #2585
+ dotCleanup(
+ (
+ if (needsJavaSig(preRebound)) {
+ val s = jsig(preRebound, existentiallyBound)
+ if (s.charAt(0) == 'L') s.substring(0, s.length - 1) + "." + sym.javaSimpleName
+ else fullNameInSig(sym)
+ }
+ else fullNameInSig(sym)
+ ) + (
+ if (args.isEmpty) "" else
+ "<"+(args map argSig).mkString+">"
+ ) + (
+ ";"
+ )
+ )
+ }
// If args isEmpty, Array is being used as a type constructor
if (sym == ArrayClass && args.nonEmpty) {
@@ -246,33 +266,32 @@ abstract class Erasure extends AddInterfaces
else if (sym == UnitClass) jsig(BoxedUnitClass.tpe)
else abbrvTag(sym).toString
}
- else if (sym.isClass) {
- val preRebound = pre.baseType(sym.owner) // #2585
- dotCleanup(
- (
- if (needsJavaSig(preRebound)) {
- val s = jsig(preRebound, existentiallyBound)
- if (s.charAt(0) == 'L') s.substring(0, s.length - 1) + "." + sym.javaSimpleName
- else fullNameInSig(sym)
- }
- else fullNameInSig(sym)
- ) + (
- if (args.isEmpty) "" else
- "<"+(args map argSig).mkString+">"
- ) + (
- ";"
- )
- )
+ else if (sym.isDerivedValueClass) {
+ val unboxed = sym.derivedValueClassUnbox.info.finalResultType
+ val unboxedSeen = (tp memberType sym.derivedValueClassUnbox).finalResultType
+ def unboxedMsg = if (unboxed == unboxedSeen) "" else s", seen within ${sym.simpleName} as $unboxedSeen"
+ logResult(s"Erasure of value class $sym (underlying type $unboxed$unboxedMsg) is") {
+ if (isPrimitiveValueType(unboxedSeen) && !primitiveOK)
+ classSig
+ else
+ jsig(unboxedSeen, existentiallyBound, toplevel, primitiveOK)
+ }
}
- else jsig(erasure(sym0)(tp), existentiallyBound, toplevel, primitiveOK)
+ else if (sym.isClass)
+ classSig
+ else
+ jsig(erasure(sym0)(tp), existentiallyBound, toplevel, primitiveOK)
case PolyType(tparams, restpe) =>
assert(tparams.nonEmpty)
val poly = if (toplevel) polyParamSig(tparams) else ""
poly + jsig(restpe)
case MethodType(params, restpe) =>
- "("+(params map (_.tpe) map (jsig(_))).mkString+")"+
- (if (restpe.typeSymbol == UnitClass || sym0.isConstructor) VOID_TAG.toString else jsig(restpe))
+ val buf = new StringBuffer("(")
+ params foreach (p => buf append jsig(p.tpe))
+ buf append ")"
+ buf append (if (restpe.typeSymbol == UnitClass || sym0.isConstructor) VOID_TAG.toString else jsig(restpe))
+ buf.toString
case RefinedType(parent :: _, decls) =>
boxedSig(parent)
@@ -323,7 +342,7 @@ abstract class Erasure extends AddInterfaces
}
// Methods on Any/Object which we rewrite here while we still know what
// is a primitive and what arrived boxed.
- private lazy val interceptedMethods = Set[Symbol](Any_##, Object_##, Any_getClass) ++ (
+ private lazy val interceptedMethods = Set[Symbol](Any_##, Object_##, Any_getClass, AnyVal_getClass) ++ (
// Each value class has its own getClass for ultra-precise class object typing.
ScalaValueClasses map (_.tpe member nme.getClass_)
)
@@ -370,18 +389,18 @@ abstract class Erasure extends AddInterfaces
}
}
- class ComputeBridges(owner: Symbol) {
+ class ComputeBridges(unit: CompilationUnit, root: Symbol) {
assert(phase == currentRun.erasurePhase, phase)
var toBeRemoved = immutable.Set[Symbol]()
- val site = owner.thisType
+ val site = root.thisType
val bridgesScope = newScope
val bridgeTarget = mutable.HashMap[Symbol, Symbol]()
var bridges = List[Tree]()
val opc = beforeExplicitOuter {
- new overridingPairs.Cursor(owner) {
- override def parents = List(owner.info.firstParent)
+ new overridingPairs.Cursor(root) {
+ override def parents = List(root.info.firstParent)
override def exclude(sym: Symbol) = !sym.isMethod || sym.isPrivate || super.exclude(sym)
}
}
@@ -399,8 +418,58 @@ abstract class Erasure extends AddInterfaces
(bridges, toBeRemoved)
}
+ /** Check that a bridge only overrides members that are also overridden by the original member.
+ * This test is necessary only for members that have a value class in their type.
+   * Such members are special because their types after erasure and after post-erasure differ.
+ * This means we generate them after erasure, but the post-erasure transform might introduce
+ * a name clash. The present method guards against these name clashes.
+ *
+ * @param member The original member
+   * @param other The overridden symbol for which the bridge was generated
+ * @param bridge The bridge
+ */
+ def checkBridgeOverrides(member: Symbol, other: Symbol, bridge: Symbol): Boolean = {
+ def fulldef(sym: Symbol) =
+ if (sym == NoSymbol) sym.toString
+ else s"$sym: ${sym.tpe} in ${sym.owner}"
+ var noclash = true
+ def clashError(what: String) = {
+ noclash = false
+ unit.error(
+ if (member.owner == root) member.pos else root.pos,
+ s"""bridge generated for member ${fulldef(member)}
+ |which overrides ${fulldef(other)}
+ |clashes with definition of $what;
+ |both have erased type ${afterPostErasure(bridge.tpe)}""".stripMargin)
+ }
+ for (bc <- root.baseClasses) {
+ if (settings.debug.value)
+ afterPostErasure(println(
+ s"""check bridge overrides in $bc
+ ${bc.info.nonPrivateDecl(bridge.name)}
+ ${site.memberType(bridge)}
+ ${site.memberType(bc.info.nonPrivateDecl(bridge.name) orElse IntClass)}
+ ${(bridge.matchingSymbol(bc, site))}""".stripMargin))
+
+ def overriddenBy(sym: Symbol) =
+ sym.matchingSymbol(bc, site).alternatives filter (sym => !sym.isBridge)
+ for (overBridge <- afterPostErasure(overriddenBy(bridge))) {
+ if (overBridge == member) {
+ clashError("the member itself")
+ } else {
+ val overMembers = overriddenBy(member)
+ if (!overMembers.exists(overMember =>
+ afterPostErasure(overMember.tpe =:= overBridge.tpe))) {
+ clashError(fulldef(overBridge))
+ }
+ }
+ }
+ }
+ noclash
+ }
+
def checkPair(member: Symbol, other: Symbol) {
- val otpe = erasure(owner)(other.tpe)
+ val otpe = erasure(root)(other.tpe)
val bridgeNeeded = afterErasure (
!(other.tpe =:= member.tpe) &&
!(deconstMap(other.tpe) =:= deconstMap(member.tpe)) &&
@@ -414,24 +483,29 @@ abstract class Erasure extends AddInterfaces
return
val newFlags = (member.flags | BRIDGE) & ~(ACCESSOR | DEFERRED | LAZY | lateDEFERRED)
- val bridge = other.cloneSymbolImpl(owner, newFlags) setPos owner.pos
+ val bridge = other.cloneSymbolImpl(root, newFlags) setPos root.pos
debuglog("generating bridge from %s (%s): %s to %s: %s".format(
other, flagsToString(newFlags),
otpe + other.locationString, member,
- erasure(owner)(member.tpe) + member.locationString)
+ erasure(root)(member.tpe) + member.locationString)
)
// the parameter symbols need to have the new owner
bridge setInfo (otpe cloneInfo bridge)
bridgeTarget(bridge) = member
- afterErasure(owner.info.decls enter bridge)
- if (other.owner == owner) {
- afterErasure(owner.info.decls.unlink(other))
- toBeRemoved += other
+
+ if (!(member.tpe exists (_.typeSymbol.isDerivedValueClass)) ||
+ checkBridgeOverrides(member, other, bridge)) {
+ afterErasure(root.info.decls enter bridge)
+ if (other.owner == root) {
+ afterErasure(root.info.decls.unlink(other))
+ toBeRemoved += other
+ }
+
+ bridgesScope enter bridge
+ bridges ::= makeBridgeDefDef(bridge, member, other)
}
- bridgesScope enter bridge
- bridges ::= makeBridgeDefDef(bridge, member, other)
}
def makeBridgeDefDef(bridge: Symbol, member: Symbol, other: Symbol) = afterErasure {
@@ -463,7 +537,7 @@ abstract class Erasure extends AddInterfaces
val rhs = member.tpe match {
case MethodType(Nil, ConstantType(c)) => Literal(c)
case _ =>
- val sel: Tree = Select(This(owner), member)
+ val sel: Tree = Select(This(root), member)
val bridgingCall = (sel /: bridge.paramss)((fun, vparams) => Apply(fun, vparams map Ident))
maybeWrap(bridgingCall)
@@ -477,17 +551,15 @@ abstract class Erasure extends AddInterfaces
private def isPrimitiveValueType(tpe: Type) = isPrimitiveValueClass(tpe.typeSymbol)
- private def isErasedValueType(tpe: Type) = tpe.isInstanceOf[ErasedValueType]
-
private def isDifferentErasedValueType(tpe: Type, other: Type) =
isErasedValueType(tpe) && (tpe ne other)
private def isPrimitiveValueMember(sym: Symbol) =
sym != NoSymbol && isPrimitiveValueClass(sym.owner)
- private def box(tree: Tree, target: => String): Tree = {
+ @inline private def box(tree: Tree, target: => String): Tree = {
val result = box1(tree)
- log("boxing "+tree+":"+tree.tpe+" to "+target+" = "+result+":"+result.tpe)
+ log(s"boxing ${tree.summaryString}: ${tree.tpe} into $target: ${result.tpe}")
result
}
@@ -522,7 +594,7 @@ abstract class Erasure extends AddInterfaces
* fields (see TupleX). (ID)
*/
case Apply(boxFun, List(arg)) if isUnbox(tree.symbol) && safeToRemoveUnbox(arg.tpe.typeSymbol) =>
- log("boxing an unbox: " + tree + "/" + tree.symbol + " and replying with " + arg + " of type " + arg.tpe)
+ log(s"boxing an unbox: ${tree.symbol} -> ${arg.tpe}")
arg
case _ =>
(REF(boxMethod(x)) APPLY tree) setPos (tree.pos) setType ObjectClass.tpe
@@ -534,7 +606,7 @@ abstract class Erasure extends AddInterfaces
private def unbox(tree: Tree, pt: Type): Tree = {
val result = unbox1(tree, pt)
- log("unboxing "+tree+":"+tree.tpe+" to "+pt+" = "+result+":"+result.tpe)
+ log(s"unboxing ${tree.summaryString}: ${tree.tpe} with pt=$pt as type ${result.tpe}")
result
}
@@ -591,7 +663,7 @@ abstract class Erasure extends AddInterfaces
/** Generate a synthetic cast operation from tree.tpe to pt.
* @pre pt eq pt.normalize
*/
- private def cast(tree: Tree, pt: Type): Tree = {
+ private def cast(tree: Tree, pt: Type): Tree = logResult(s"cast($tree, $pt)") {
if (pt.typeSymbol == UnitClass) {
// See SI-4731 for one example of how this occurs.
log("Attempted to cast to Unit: " + tree)
@@ -611,7 +683,7 @@ abstract class Erasure extends AddInterfaces
* @return the adapted tree
*/
private def adaptToType(tree: Tree, pt: Type): Tree = {
- //if (settings.debug.value && pt != WildcardType)
+ if (settings.debug.value && pt != WildcardType)
log("adapting " + tree + ":" + tree.tpe + " : " + tree.tpe.parents + " to " + pt)//debug
if (tree.tpe <:< pt)
tree
@@ -811,7 +883,6 @@ abstract class Erasure extends AddInterfaces
* but their erased types are the same.
*/
private def checkNoDoubleDefs(root: Symbol) {
- def afterErasure[T](op: => T): T = atPhase(phase.next.next)(op)
def doubleDefError(sym1: Symbol, sym2: Symbol) {
// the .toString must also be computed at the earlier phase
val tpe1 = afterRefchecks(root.thisType.memberType(sym1))
@@ -827,7 +898,7 @@ abstract class Erasure extends AddInterfaces
sym2 + ":" + afterRefchecks(tpe2.toString) +
(if (sym2.owner == root) " at line " + (sym2.pos).line else sym2.locationString) +
"\nhave same type" +
- (if (afterRefchecks(tpe1 =:= tpe2)) "" else " after erasure: " + afterErasure(sym1.tpe)))
+ (if (afterRefchecks(tpe1 =:= tpe2)) "" else " after erasure: " + afterPostErasure(sym1.tpe)))
sym1.setInfo(ErrorType)
}
@@ -837,7 +908,7 @@ abstract class Erasure extends AddInterfaces
if (e.sym.isTerm) {
var e1 = decls.lookupNextEntry(e)
while (e1 ne null) {
- if (afterErasure(e1.sym.info =:= e.sym.info)) doubleDefError(e.sym, e1.sym)
+ if (afterPostErasure(e1.sym.info =:= e.sym.info)) doubleDefError(e.sym, e1.sym)
e1 = decls.lookupNextEntry(e1)
}
}
@@ -851,7 +922,7 @@ abstract class Erasure extends AddInterfaces
|| !sym.hasTypeAt(currentRun.refchecksPhase.id))
override def matches(sym1: Symbol, sym2: Symbol): Boolean =
- afterErasure(sym1.tpe =:= sym2.tpe)
+ afterPostErasure(sym1.tpe =:= sym2.tpe)
}
while (opc.hasNext) {
if (!afterRefchecks(
@@ -899,7 +970,7 @@ abstract class Erasure extends AddInterfaces
private def bridgeDefs(owner: Symbol): (List[Tree], immutable.Set[Symbol]) = {
assert(phase == currentRun.erasurePhase, phase)
debuglog("computing bridges for " + owner)
- new ComputeBridges(owner) compute()
+ new ComputeBridges(unit, owner) compute()
}
def addBridges(stats: List[Tree], base: Symbol): List[Tree] =
@@ -928,152 +999,177 @@ abstract class Erasure extends AddInterfaces
* - Reset all other type attributes to null, thus enforcing a retyping.
*/
private val preTransformer = new TypingTransformer(unit) {
- def preErase(tree: Tree): Tree = tree match {
- case ClassDef(_,_,_,_) =>
- debuglog("defs of " + tree.symbol + " = " + tree.symbol.info.decls)
- copyClassDef(tree)(tparams = Nil)
- case DefDef(_,_,_,_,_,_) =>
- copyDefDef(tree)(tparams = Nil)
- case TypeDef(_, _, _, _) =>
- EmptyTree
- case Apply(instanceOf @ TypeApply(fun @ Select(qual, name), args @ List(arg)), List()) // !!! todo: simplify by having GenericArray also extract trees
- if ((fun.symbol == Any_isInstanceOf || fun.symbol == Object_isInstanceOf) &&
- unboundedGenericArrayLevel(arg.tpe) > 0) =>
- val level = unboundedGenericArrayLevel(arg.tpe)
- def isArrayTest(arg: Tree) =
- gen.mkRuntimeCall(nme.isArray, List(arg, Literal(Constant(level))))
-
- global.typer.typedPos(tree.pos) {
- if (level == 1) isArrayTest(qual)
- else gen.evalOnce(qual, currentOwner, unit) { qual1 =>
- gen.mkAnd(
- gen.mkMethodCall(
- qual1(),
- fun.symbol,
- List(specialErasure(fun.symbol)(arg.tpe)),
- Nil
- ),
- isArrayTest(qual1())
- )
- }
+
+ private def preEraseNormalApply(tree: Apply) = {
+ val fn = tree.fun
+ val args = tree.args
+
+ def qualifier = fn match {
+ case Select(qual, _) => qual
+ case TypeApply(Select(qual, _), _) => qual
+ }
+
+ def preEraseAsInstanceOf = {
+ (fn: @unchecked) match {
+ case TypeApply(Select(qual, _), List(targ)) =>
+ if (qual.tpe <:< targ.tpe)
+ atPos(tree.pos) { Typed(qual, TypeTree(targ.tpe)) }
+ else if (isNumericValueClass(qual.tpe.typeSymbol) && isNumericValueClass(targ.tpe.typeSymbol))
+ atPos(tree.pos)(numericConversion(qual, targ.tpe.typeSymbol))
+ else
+ tree
}
- case TypeApply(fun, args) if (fun.symbol.owner != AnyClass &&
- fun.symbol != Object_asInstanceOf &&
- fun.symbol != Object_isInstanceOf) =>
- // leave all other type tests/type casts, remove all other type applications
- preErase(fun)
- case Apply(fn @ Select(qual, name), args) if fn.symbol.owner == ArrayClass =>
- // Have to also catch calls to abstract types which are bounded by Array.
- if (unboundedGenericArrayLevel(qual.tpe.widen) == 1 || qual.tpe.typeSymbol.isAbstractType) {
- // convert calls to apply/update/length on generic arrays to
- // calls of ScalaRunTime.array_xxx method calls
- global.typer.typedPos(tree.pos)({
- val arrayMethodName = name match {
- case nme.apply => nme.array_apply
- case nme.length => nme.array_length
- case nme.update => nme.array_update
- case nme.clone_ => nme.array_clone
- case _ => unit.error(tree.pos, "Unexpected array member, no translation exists.") ; nme.NO_NAME
+ // todo: also handle the case where the singleton type is buried in a compound
+ }
+
+ def preEraseIsInstanceOf = {
+ fn match {
+ case TypeApply(sel @ Select(qual, name), List(targ)) =>
+ if (qual.tpe != null && isPrimitiveValueClass(qual.tpe.typeSymbol) && targ.tpe != null && targ.tpe <:< AnyRefClass.tpe)
+ unit.error(sel.pos, "isInstanceOf cannot test if value types are references.")
+
+ def mkIsInstanceOf(q: () => Tree)(tp: Type): Tree =
+ Apply(
+ TypeApply(
+ Select(q(), Object_isInstanceOf) setPos sel.pos,
+ List(TypeTree(tp) setPos targ.pos)) setPos fn.pos,
+ List()) setPos tree.pos
+ targ.tpe match {
+ case SingleType(_, _) | ThisType(_) | SuperType(_, _) =>
+ val cmpOp = if (targ.tpe <:< AnyValClass.tpe) Any_equals else Object_eq
+ atPos(tree.pos) {
+ Apply(Select(qual, cmpOp), List(gen.mkAttributedQualifier(targ.tpe)))
+ }
+ case RefinedType(parents, decls) if (parents.length >= 2) =>
+ // Optimization: don't generate isInstanceOf tests if the static type
+ // conforms, because it always succeeds. (Or at least it had better.)
+ // At this writing the pattern matcher generates some instance tests
+ // involving intersections where at least one parent is statically known true.
+ // That needs fixing, but filtering the parents here adds an additional
+ // level of robustness (in addition to the short term fix.)
+ val parentTests = parents filterNot (qual.tpe <:< _)
+
+ if (parentTests.isEmpty) Literal(Constant(true))
+ else gen.evalOnce(qual, currentOwner, unit) { q =>
+ atPos(tree.pos) {
+ parentTests map mkIsInstanceOf(q) reduceRight gen.mkAnd
+ }
+ }
+ case _ =>
+ tree
}
- gen.mkRuntimeCall(arrayMethodName, qual :: args)
- })
- }
- else {
- // store exact array erasure in map to be retrieved later when we might
- // need to do the cast in adaptMember
- treeCopy.Apply(
- tree,
- SelectFromArray(qual, name, erasure(tree.symbol)(qual.tpe)).copyAttrs(fn),
- args)
+ case _ => tree
}
- case Apply(fn @ Select(qual, _), Nil) if interceptedMethods(fn.symbol) =>
- if (fn.symbol == Any_## || fn.symbol == Object_##) {
- // This is unattractive, but without it we crash here on ().## because after
- // erasure the ScalaRunTime.hash overload goes from Unit => Int to BoxedUnit => Int.
- // This must be because some earlier transformation is being skipped on ##, but so
- // far I don't know what. For null we now define null.## == 0.
- qual.tpe.typeSymbol match {
- case UnitClass | NullClass => LIT(0)
- case IntClass => qual
- case s @ (ShortClass | ByteClass | CharClass) => numericConversion(qual, s)
- case BooleanClass => If(qual, LIT(true.##), LIT(false.##))
- case _ =>
- global.typer.typed(gen.mkRuntimeCall(nme.hash_, List(qual)))
- }
- }
- // Rewrite 5.getClass to ScalaRunTime.anyValClass(5)
- else if (isPrimitiveValueClass(qual.tpe.typeSymbol))
- global.typer.typed(gen.mkRuntimeCall(nme.anyValClass, List(qual, typer.resolveClassTag(tree.pos, qual.tpe.widen))))
- else
- tree
+ }
+ if (fn.symbol == Any_asInstanceOf) {
+ preEraseAsInstanceOf
+ } else if (fn.symbol == Any_isInstanceOf) {
+ preEraseIsInstanceOf
+ } else if (fn.symbol.owner.isRefinementClass && !fn.symbol.isOverridingSymbol) {
+ ApplyDynamic(qualifier, args) setSymbol fn.symbol setPos tree.pos
+ } else if (fn.symbol.isMethodWithExtension && !fn.symbol.tpe.isErroneous) {
+ Apply(gen.mkAttributedRef(extensionMethods.extensionMethod(fn.symbol)), qualifier :: args)
+ } else {
+ tree
+ }
+ }
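// Illustrative sketch of the two helpers above; the object and method names
// are made up, only the rewrites mirror the code.
object PreEraseSketch {
  object Singleton

  // isInstanceOf against a singleton type becomes a reference comparison:
  def isSingleton(x: AnyRef): Boolean = x.isInstanceOf[Singleton.type]
  def isSingletonRewritten(x: AnyRef): Boolean = x eq Singleton

  // asInstanceOf between numeric value classes becomes a numeric conversion:
  def toIntCast(d: Double): Int = d.asInstanceOf[Int]
  def toIntRewritten(d: Double): Int = d.toInt
}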
- case Apply(Select(New(tpt), nme.CONSTRUCTOR), List(arg)) if (tpt.tpe.typeSymbol.isDerivedValueClass) =>
-// println("inject derived: "+arg+" "+tpt.tpe)
- InjectDerivedValue(arg) addAttachment //@@@ setSymbol tpt.tpe.typeSymbol
- new TypeRefAttachment(tree.tpe.asInstanceOf[TypeRef])
- case Apply(fn, args) =>
- def qualifier = fn match {
- case Select(qual, _) => qual
- case TypeApply(Select(qual, _), _) => qual
- }
- if (fn.symbol == Any_asInstanceOf)
- (fn: @unchecked) match {
- case TypeApply(Select(qual, _), List(targ)) =>
- if (qual.tpe <:< targ.tpe)
- atPos(tree.pos) { Typed(qual, TypeTree(targ.tpe)) }
- else if (isNumericValueClass(qual.tpe.typeSymbol) && isNumericValueClass(targ.tpe.typeSymbol))
- atPos(tree.pos)(numericConversion(qual, targ.tpe.typeSymbol))
- else
- tree
+ private def preEraseApply(tree: Apply) = {
+ tree.fun match {
+ case TypeApply(fun @ Select(qual, name), args @ List(arg))
+ if ((fun.symbol == Any_isInstanceOf || fun.symbol == Object_isInstanceOf) &&
+ unboundedGenericArrayLevel(arg.tpe) > 0) => // !!! todo: simplify by having GenericArray also extract trees
+ val level = unboundedGenericArrayLevel(arg.tpe)
+ def isArrayTest(arg: Tree) =
+ gen.mkRuntimeCall(nme.isArray, List(arg, Literal(Constant(level))))
+
+ global.typer.typedPos(tree.pos) {
+ if (level == 1) isArrayTest(qual)
+ else gen.evalOnce(qual, currentOwner, unit) { qual1 =>
+ gen.mkAnd(
+ gen.mkMethodCall(
+ qual1(),
+ fun.symbol,
+ List(specialErasure(fun.symbol)(arg.tpe)),
+ Nil
+ ),
+ isArrayTest(qual1())
+ )
+ }
}
- // todo: also handle the case where the singleton type is buried in a compound
- else if (fn.symbol == Any_isInstanceOf) {
- fn match {
- case TypeApply(sel @ Select(qual, name), List(targ)) =>
- if (qual.tpe != null && isPrimitiveValueClass(qual.tpe.typeSymbol) && targ.tpe != null && targ.tpe <:< AnyRefClass.tpe)
- unit.error(sel.pos, "isInstanceOf cannot test if value types are references.")
-
- def mkIsInstanceOf(q: () => Tree)(tp: Type): Tree =
- Apply(
- TypeApply(
- Select(q(), Object_isInstanceOf) setPos sel.pos,
- List(TypeTree(tp) setPos targ.pos)) setPos fn.pos,
- List()) setPos tree.pos
- targ.tpe match {
- case SingleType(_, _) | ThisType(_) | SuperType(_, _) =>
- val cmpOp = if (targ.tpe <:< AnyValClass.tpe) Any_equals else Object_eq
- atPos(tree.pos) {
- Apply(Select(qual, cmpOp), List(gen.mkAttributedQualifier(targ.tpe)))
- }
- case RefinedType(parents, decls) if (parents.length >= 2) =>
- // Optimization: don't generate isInstanceOf tests if the static type
- // conforms, because it always succeeds. (Or at least it had better.)
- // At this writing the pattern matcher generates some instance tests
- // involving intersections where at least one parent is statically known true.
- // That needs fixing, but filtering the parents here adds an additional
- // level of robustness (in addition to the short term fix.)
- val parentTests = parents filterNot (qual.tpe <:< _)
-
- if (parentTests.isEmpty) Literal(Constant(true))
- else gen.evalOnce(qual, currentOwner, unit) { q =>
- atPos(tree.pos) {
- parentTests map mkIsInstanceOf(q) reduceRight gen.mkAnd
- }
- }
- case _ =>
- tree
+ case fn @ Select(qual, name) =>
+ val args = tree.args
+ if (fn.symbol.owner == ArrayClass) {
+ // Have to also catch calls to abstract types which are bounded by Array.
+ if (unboundedGenericArrayLevel(qual.tpe.widen) == 1 || qual.tpe.typeSymbol.isAbstractType) {
+ // convert calls to apply/update/length on generic arrays to
+ // calls of ScalaRunTime.array_xxx method calls
+ global.typer.typedPos(tree.pos) {
+ val arrayMethodName = name match {
+ case nme.apply => nme.array_apply
+ case nme.length => nme.array_length
+ case nme.update => nme.array_update
+ case nme.clone_ => nme.array_clone
+ case _ => unit.error(tree.pos, "Unexpected array member, no translation exists.") ; nme.NO_NAME
+ }
+ gen.mkRuntimeCall(arrayMethodName, qual :: args)
}
- case _ => tree
- }
- } else if (fn.symbol.owner.isRefinementClass && !fn.symbol.isOverridingSymbol) {
- ApplyDynamic(qualifier, args) setSymbol fn.symbol setPos tree.pos
- } else if (fn.symbol.isMethodWithExtension) {
- Apply(gen.mkAttributedRef(extensionMethods.extensionMethod(fn.symbol)), qualifier :: args)
- } else {
+ } else {
+ // store exact array erasure in map to be retrieved later when we might
+ // need to do the cast in adaptMember
+ treeCopy.Apply(
+ tree,
+ SelectFromArray(qual, name, erasure(tree.symbol)(qual.tpe)).copyAttrs(fn),
+ args)
+ }
+ } else if (args.isEmpty && interceptedMethods(fn.symbol)) {
+ if (fn.symbol == Any_## || fn.symbol == Object_##) {
+ // This is unattractive, but without it we crash here on ().## because after
+ // erasure the ScalaRunTime.hash overload goes from Unit => Int to BoxedUnit => Int.
+ // This must be because some earlier transformation is being skipped on ##, but so
+ // far I don't know what. For null we now define null.## == 0.
+ qual.tpe.typeSymbol match {
+ case UnitClass | NullClass => LIT(0)
+ case IntClass => qual
+ case s @ (ShortClass | ByteClass | CharClass) => numericConversion(qual, s)
+ case BooleanClass => If(qual, LIT(true.##), LIT(false.##))
+ case _ =>
+ global.typer.typed(gen.mkRuntimeCall(nme.hash_, List(qual)))
+ }
+ } else if (isPrimitiveValueClass(qual.tpe.typeSymbol)) {
+ // Rewrite 5.getClass to ScalaRunTime.anyValClass(5)
+ global.typer.typed(gen.mkRuntimeCall(nme.anyValClass, List(qual, typer.resolveClassTag(tree.pos, qual.tpe.widen))))
+ } else if (fn.symbol == AnyVal_getClass) {
+ tree setSymbol Object_getClass
+ } else {
tree
+ }
+ } else qual match {
+ case New(tpt) if name == nme.CONSTRUCTOR && tpt.tpe.typeSymbol.isDerivedValueClass =>
+ // println("inject derived: "+arg+" "+tpt.tpe)
+ val List(arg) = args
+ val attachment = new TypeRefAttachment(tree.tpe.asInstanceOf[TypeRef])
+ InjectDerivedValue(arg) updateAttachment attachment
+ case _ =>
+ preEraseNormalApply(tree)
}
+ case _ =>
+ preEraseNormalApply(tree)
+ }
+ }
+
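// Illustrative sketch of the generic-array rerouting handled above; the
// method names are made up, ScalaRunTime.array_apply is the real runtime hook.
object GenericArraySketch {
  import scala.runtime.ScalaRunTime

  def head[T](xs: Array[T]): T = xs(0)

  // after preEraseApply, the access on an unbounded generic array is
  // conceptually a runtime dispatch:
  def headRewritten(xs: AnyRef): Any = ScalaRunTime.array_apply(xs, 0)
}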
+ def preErase(tree: Tree): Tree = tree match {
+ case tree: Apply =>
+ preEraseApply(tree)
+
+ case TypeApply(fun, args) if (fun.symbol.owner != AnyClass &&
+ fun.symbol != Object_asInstanceOf &&
+ fun.symbol != Object_isInstanceOf) =>
+ // leave all other type tests/type casts, remove all other type applications
+ preErase(fun)
+
case Select(qual, name) =>
val owner = tree.symbol.owner
// println("preXform: "+ (tree, tree.symbol, tree.symbol.owner, tree.symbol.owner.isRefinementClass))
@@ -1120,6 +1216,14 @@ abstract class Erasure extends AddInterfaces
}
treeCopy.Literal(tree, Constant(erased))
+ case ClassDef(_,_,_,_) =>
+ debuglog("defs of " + tree.symbol + " = " + tree.symbol.info.decls)
+ copyClassDef(tree)(tparams = Nil)
+ case DefDef(_,_,_,_,_,_) =>
+ copyDefDef(tree)(tparams = Nil)
+ case TypeDef(_, _, _, _) =>
+ EmptyTree
+
case _ =>
tree
}
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 6fba6dcc39..77ad65957d 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -95,7 +95,7 @@ abstract class ExplicitOuter extends InfoTransform
else findOrElse(clazz.info.decls)(_.outerSource == clazz)(NoSymbol)
}
def newOuterAccessor(clazz: Symbol) = {
- val accFlags = SYNTHETIC | HIDDEN | METHOD | STABLE | ( if (clazz.isTrait) DEFERRED else 0 )
+ val accFlags = SYNTHETIC | ARTIFACT | METHOD | STABLE | ( if (clazz.isTrait) DEFERRED else 0 )
val sym = clazz.newMethod(nme.OUTER, clazz.pos, accFlags)
val restpe = if (clazz.isTrait) clazz.outerClass.tpe else clazz.outerClass.thisType
@@ -104,7 +104,7 @@ abstract class ExplicitOuter extends InfoTransform
sym setInfo MethodType(Nil, restpe)
}
def newOuterField(clazz: Symbol) = {
- val accFlags = SYNTHETIC | HIDDEN | PARAMACCESSOR | ( if (clazz.isEffectivelyFinal) PrivateLocal else PROTECTED )
+ val accFlags = SYNTHETIC | ARTIFACT | PARAMACCESSOR | ( if (clazz.isEffectivelyFinal) PrivateLocal else PROTECTED )
val sym = clazz.newValue(nme.OUTER_LOCAL, clazz.pos, accFlags)
sym setInfo clazz.outerClass.thisType
@@ -497,16 +497,10 @@ abstract class ExplicitOuter extends InfoTransform
else atPos(tree.pos)(outerPath(outerValue, currentClass.outerClass, sym)) // (5)
case Select(qual, name) =>
- /** return closest enclosing method, unless shadowed by an enclosing class;
- * no use of closures here in the interest of speed.
- */
- def closestEnclMethod(from: Symbol): Symbol =
- if (from.isSourceMethod) from
- else if (from.isClass) NoSymbol
- else closestEnclMethod(from.owner)
-
+ // make symbols accessed from inner classes non-private, as well as
+ // symbols accessed from @inline methods
if (currentClass != sym.owner ||
- (closestEnclMethod(currentOwner) hasAnnotation ScalaInlineClass))
+ (sym.owner.enclMethod hasAnnotation ScalaInlineClass))
sym.makeNotPrivate(sym.owner)
val qsym = qual.tpe.widen.typeSymbol
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 574c2c7049..8a9d0e58ec 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -28,9 +28,6 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
/** the following two members override abstract members in Transform */
val phaseName: String = "extmethods"
- /** The following flags may be set by this phase: */
- override def phaseNewFlags: Long = notPRIVATE
-
def newTransformer(unit: CompilationUnit): Transformer =
new Extender(unit)
@@ -73,7 +70,8 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
val companionInfo = imeth.owner.companionModule.info
val candidates = extensionNames(imeth) map (companionInfo.decl(_))
val matching = candidates filter (alt => normalize(alt.tpe, imeth.owner) matches imeth.tpe)
- assert(matching.nonEmpty, "no extension method found for "+imeth+" among "+candidates+"/"+extensionNames(imeth))
+ assert(matching.nonEmpty,
+ s"no extension method found for $imeth:${imeth.tpe} among ${candidates map (c => c.name+":"+c.tpe)} / ${extensionNames(imeth)}")
matching.head
}
@@ -114,7 +112,8 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
}
def extensionMethInfo(extensionMeth: Symbol, origInfo: Type, clazz: Symbol): Type = {
- var newTypeParams = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth)
+ // No variance for method type parameters
+ var newTypeParams = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT)
val thisParamType = appliedType(clazz.typeConstructor, newTypeParams map (_.tpeHK))
val thisParam = extensionMeth.newValueParameter(nme.SELF, extensionMeth.pos) setInfo thisParamType
def transform(clonedType: Type): Type = clonedType match {
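// Illustrative sketch of the variance stripping above; the class and the
// peek$extension shape are made-up examples, not code from this patch.
class Box[+A](val a: A) extends AnyVal {
  def peek: A = a
}
// conceptually generated in the companion:
//   def peek$extension[A](self: Box[A]): A = self.a
// where A is cloned from Box's +A but left invariant, since variance has no
// meaning on a method type parameter.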
@@ -137,7 +136,9 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
tree match {
case Template(_, _, _) =>
if (currentOwner.isDerivedValueClass) {
- checkNonCyclic(currentOwner.pos, Set(), currentOwner)
+ /* This is currently redundant since value classes may not
+ wrap over other value classes anyway.
+ checkNonCyclic(currentOwner.pos, Set(), currentOwner) */
extensionDefs(currentOwner.companionModule) = new mutable.ListBuffer[Tree]
currentOwner.primaryConstructor.makeNotPrivate(NoSymbol)
super.transform(tree)
diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala
index 94eaba67d7..3bbf429fc2 100644
--- a/src/compiler/scala/tools/nsc/transform/Flatten.scala
+++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala
@@ -22,12 +22,14 @@ abstract class Flatten extends InfoTransform {
*/
private def replaceSymbolInCurrentScope(sym: Symbol): Symbol = afterFlatten {
val scope = sym.owner.info.decls
- val old = scope lookup sym.name
- if (old ne NoSymbol)
- scope unlink old
-
+ val old = scope lookup sym.name andAlso scope.unlink
scope enter sym
- log("lifted " + sym.fullLocationString)
+
+ if (old eq NoSymbol)
+ log(s"lifted ${sym.fullLocationString}")
+ else
+ log(s"lifted ${sym.fullLocationString} after unlinking existing $old from scope.")
+
old
}
@@ -35,9 +37,7 @@ abstract class Flatten extends InfoTransform {
if (!sym.isLifted) {
sym setFlag LIFTED
debuglog("re-enter " + sym.fullLocationString)
- val old = replaceSymbolInCurrentScope(sym)
- if (old ne NoSymbol)
- log("unlinked " + old.fullLocationString + " after lifting " + sym)
+ replaceSymbolInCurrentScope(sym)
}
}
private def liftSymbol(sym: Symbol) {
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index b6d54f114e..c41ff20229 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -154,7 +154,7 @@ abstract class LambdaLift extends InfoTransform {
private def markCalled(sym: Symbol, owner: Symbol) {
debuglog("mark called: " + sym + " of " + sym.owner + " is called by " + owner)
symSet(called, owner) addEntry sym
- if (sym.enclClass != owner.enclClass) calledFromInner addEntry sym
+ if (sym.enclClass != owner.enclClass) calledFromInner += sym
}
/** The traverse function */
diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
index e8387c80f5..12e2433e0d 100644
--- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala
+++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
@@ -111,7 +111,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
var added = false
val stats =
for (stat <- body1) yield stat match {
- case Block(_, _) | Apply(_, _) | If(_, _, _) if !added =>
+ case Block(_, _) | Apply(_, _) | If(_, _, _) | Try(_, _, _) if !added =>
// Avoid adding bitmaps when they are fully overshadowed by those
// that are added inside loops
if (LocalLazyValFinder.find(stat)) {
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 3c828db7f3..2b0520592b 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -430,7 +430,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
*
* Such fields will be nulled after the initializer has memoized the lazy value.
*/
- def singleUseFields(templ: Template): collection.Map[Symbol, List[Symbol]] = {
+ def singleUseFields(templ: Template): scala.collection.Map[Symbol, List[Symbol]] = {
val usedIn = mutable.HashMap[Symbol, List[Symbol]]() withDefaultValue Nil
object SingleUseTraverser extends Traverser {
@@ -492,19 +492,19 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* fields count as fields defined by the class itself.
*/
private val fieldOffset = perRunCaches.newMap[Symbol, Int]()
-
+
private val bitmapKindForCategory = perRunCaches.newMap[Name, ClassSymbol]()
-
+
// ByteClass, IntClass, LongClass
private def bitmapKind(field: Symbol): ClassSymbol = bitmapKindForCategory(bitmapCategory(field))
-
+
private def flagsPerBitmap(field: Symbol): Int = bitmapKind(field) match {
case BooleanClass => 1
case ByteClass => 8
case IntClass => 32
case LongClass => 64
}
-
+
/** The first transform; called in a pre-order traversal at phase mixin
* (that is, every node is processed before its children).
@@ -718,7 +718,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val sym = clazz0.info.decl(bitmapName)
assert(!sym.isOverloaded, sym)
-
+
def createBitmap: Symbol = {
val bitmapKind = bitmapKindForCategory(category)
val sym = clazz0.newVariable(bitmapName, clazz0.pos) setInfo bitmapKind.tpe
@@ -732,7 +732,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
case BooleanClass => VAL(sym) === FALSE
case _ => VAL(sym) === ZERO
}
-
+
sym setFlag PrivateLocal
clazz0.info.decls.enter(sym)
addDef(clazz0.pos, init)
@@ -744,7 +744,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
else
createBitmap
}
-
+
def maskForOffset(offset: Int, sym: Symbol, kind: ClassSymbol): Tree = {
def realOffset = offset % flagsPerBitmap(sym)
if (kind == LongClass ) LIT(1L << realOffset) else LIT(1 << realOffset)
@@ -755,9 +755,9 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val bmp = bitmapFor(clazz, offset, valSym)
def mask = maskForOffset(offset, valSym, kind)
def x = This(clazz) DOT bmp
- def newValue = if (kind == BooleanClass) TRUE else (x GEN_| (mask, kind))
+ def newValue = if (kind == BooleanClass) TRUE else (x GEN_| (mask, kind))
- x === newValue
+ x === newValue
}
/** Return an (untyped) tree of the form 'clazz.this.bitmapSym & mask (==|!=) 0', the
@@ -775,7 +775,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
else lhs GEN_!= (ZERO, kind)
}
}
-
+
def mkSlowPathDef(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
stats: List[Tree], retVal: Tree, attrThis: Tree, args: List[Tree]): Symbol = {
val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name), lzyVal.pos, PRIVATE)
@@ -791,14 +791,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
stats: List[Tree], retVal: Tree): Tree = {
mkFastPathBody(clazz, lzyVal, cond, syncBody, stats, retVal, gen.mkAttributedThis(clazz), List())
}
-
+
def mkFastPathBody(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
stats: List[Tree], retVal: Tree, attrThis: Tree, args: List[Tree]): Tree = {
val slowPathSym: Symbol = mkSlowPathDef(clazz, lzyVal, cond, syncBody, stats, retVal, attrThis, args)
If(cond, fn (This(clazz), slowPathSym, args.map(arg => Ident(arg.symbol)): _*), retVal)
}
-
-
+
+
/** Always copy the tree if we are going to perform sym substitution,
* otherwise we will side-effect on the tree that is used in the fast path
*/
@@ -807,7 +807,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
if (tree.hasSymbol && from.contains(tree.symbol))
super.transform(tree.duplicate)
else super.transform(tree.duplicate)
-
+
override def apply[T <: Tree](tree: T): T = if (from.isEmpty) tree else super.apply(tree)
}
@@ -827,8 +827,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* The result will be a tree of the form
* { if ((bitmap&n & MASK) == 0) this.l$compute()
* else l$
- *
- * ...
+ *
+ * ...
* def l$compute() = { synchronized(this) {
* if ((bitmap$n & MASK) == 0) {
* init // l$ = <rhs>
@@ -836,7 +836,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* }}
* l$
* }
- *
+ *
* ...
* this.f1 = null
* ... this.fn = null
@@ -846,7 +846,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* For Int bitmap it is 32 and then 'n' in the above code is: (offset / 32),
* the MASK is (1 << (offset % 32)).
* If the class contains only a single lazy val then the bitmap is represented
- * as a Boolean and the condition checking is a simple bool test.
+ * as a Boolean and the condition checking is a simple bool test.
*/
def mkLazyDef(clazz: Symbol, lzyVal: Symbol, init: List[Tree], retVal: Tree, offset: Int): Tree = {
def nullify(sym: Symbol) = Select(This(clazz), sym.accessedOrSelf) === LIT(null)
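// Illustrative sketch of the offset arithmetic described in the comment
// above for Int bitmaps; the helper names are made up.
object BitmapSketch {
  def bitmapNumber(offset: Int): Int = offset / 32 // which bitmap$n guards the field
  def mask(offset: Int): Int = 1 << (offset % 32)  // the bit tested and set for it

  // e.g. the lazy val at offset 37 is guarded by bitmap$1 under mask 1 << 5
}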
@@ -878,7 +878,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def mkCheckedAccessor(clazz: Symbol, retVal: Tree, offset: Int, pos: Position, fieldSym: Symbol): Tree = {
val sym = fieldSym.getter(fieldSym.owner)
val bitmapSym = bitmapFor(clazz, offset, sym)
- val kind = bitmapKind(sym)
+ val kind = bitmapKind(sym)
val mask = maskForOffset(offset, sym, kind)
val msg = "Uninitialized field: " + unit.source + ": " + pos.line
val result =
@@ -966,7 +966,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
stats flatMap {
case stat @ Assign(lhs @ Select(This(_), _), rhs) => stat :: checkedGetter(lhs)
// remove initialization for default values
- case Apply(lhs @ Select(Ident(self), _), List(EmptyTree)) if lhs.symbol.isSetter => Nil
+ case Apply(lhs @ Select(Ident(self), _), EmptyTree.asList) if lhs.symbol.isSetter => Nil
case stat => List(stat)
},
exprOwner
diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
index 4401e3bd3e..0b58292f28 100644
--- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
+++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
@@ -9,7 +9,7 @@ package transform
import scala.collection.mutable
import symtab.Flags._
import util.HashSet
-import annotation.tailrec
+import scala.annotation.tailrec
/** A class that yields a kind of iterator (`Cursor`),
* which yields all pairs of overriding/overridden symbols
@@ -104,8 +104,11 @@ abstract class OverridingPairs {
/** A map from baseclasses of <base> to ints, with smaller ints meaning lower in
* linearization order.
+ * Symbols that are not baseclasses map to -1.
*/
- private val index = new mutable.HashMap[Symbol, Int]
+ private val index = new mutable.HashMap[Symbol, Int] {
+ override def default(key: Symbol) = -1
+ }
// Note: overridingPairs can be called at odd instances by the Eclipse plugin
// Sometimes symbols are not yet defined and we get missing keys.
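// Illustrative sketch of the map-with-default idiom introduced above, on a
// plain String key; lookups fall back to -1 instead of throwing.
object DefaultMapSketch {
  import scala.collection.mutable

  val index = new mutable.HashMap[String, Int] {
    override def default(key: String) = -1
  }
  // index("not a base class") == -1, no NoSuchElementException
}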
@@ -133,28 +136,30 @@ abstract class OverridingPairs {
{ for (i <- List.range(0, size))
subParents(i) = new BitSet(size);
for (p <- parents) {
- index get p.typeSymbol match {
- case Some(pIndex) =>
- for (bc <- p.baseClasses)
- if (p.baseType(bc) =:= self.baseType(bc))
- index get bc match {
- case Some(bcIndex) =>
- include(subParents(bcIndex), pIndex)
- case None =>
- }
- else debuglog("SKIPPING "+p+" -> "+p.baseType(bc)+" / "+self.baseType(bc)+" from "+base)
- case None =>
- }
+ val pIndex = index(p.typeSymbol)
+ if (pIndex >= 0)
+ for (bc <- p.baseClasses)
+ if (p.baseType(bc) =:= self.baseType(bc)) {
+ val bcIndex = index(bc)
+ if (bcIndex >= 0)
+ include(subParents(bcIndex), pIndex)
+ }
}
}
/** Do `sym1` and `sym2` have a common subclass in `parents`?
* In that case we do not follow their overriding pairs
*/
- private def hasCommonParentAsSubclass(sym1: Symbol, sym2: Symbol) = (
- for (index1 <- index get sym1.owner ; index2 <- index get sym2.owner) yield
- intersectionContainsElementLeq(subParents(index1), subParents(index2), index1 min index2)
- ).exists(_ == true)
+ private def hasCommonParentAsSubclass(sym1: Symbol, sym2: Symbol) = {
+ val index1 = index(sym1.owner)
+ (index1 >= 0) && {
+ val index2 = index(sym2.owner)
+ (index2 >= 0) && {
+ intersectionContainsElementLeq(
+ subParents(index1), subParents(index2), index1 min index2)
+ }
+ }
+ }
/** The scope entries that have already been visited as overridden
* (maybe excluded because of hasCommonParentAsSubclass).
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 10a946c318..0fa50a255b 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -8,8 +8,8 @@ package transform
import scala.tools.nsc.symtab.Flags
import scala.collection.{ mutable, immutable }
-import language.postfixOps
-import language.existentials
+import scala.language.postfixOps
+import scala.language.existentials
/** Specialize code on types.
*
@@ -111,7 +111,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case TypeRef(_, GroupOfSpecializable, arg :: Nil) =>
arg.typeArgs map (_.typeSymbol)
case _ =>
- List(tp.typeSymbol)
+ tp.typeSymbol :: Nil
}
}
}
@@ -362,7 +362,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// creating each permutation of concrete types
def loop(ctypes: List[List[Type]]): List[List[Type]] = ctypes match {
case Nil => Nil
- case set :: Nil => set map (x => List(x))
+ case set :: Nil => set map (_ :: Nil)
case set :: sets => for (x <- set ; xs <- loop(sets)) yield x :: xs
}
// zip the keys with each permutation to create a TypeEnv.
@@ -424,7 +424,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case MethodType(argSyms, resTpe) => specializedTypeVars(resTpe :: argSyms.map(_.tpe))
case ExistentialType(_, res) => specializedTypeVars(res)
case AnnotatedType(_, tp, _) => specializedTypeVars(tp)
- case TypeBounds(lo, hi) => specializedTypeVars(List(lo, hi))
+ case TypeBounds(lo, hi) => specializedTypeVars(lo :: hi :: Nil)
case RefinedType(parents, _) => parents flatMap specializedTypeVars toSet
case _ => Set()
}
@@ -436,7 +436,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val sClassMap = anyrefSpecCache.getOrElseUpdate(sClass, mutable.Map[Symbol, Symbol]())
sClassMap.getOrElseUpdate(tparam,
- tparam.cloneSymbol(sClass, tparam.flags, (tparam.name append tpnme.SPECIALIZED_SUFFIX).asInstanceOf[Name]) // [Eugene++] why do we need this cast?
+ tparam.cloneSymbol(sClass, tparam.flags, tparam.name append tpnme.SPECIALIZED_SUFFIX)
modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefClass.tpe))
).tpe
}
@@ -452,7 +452,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def survivingParams(params: List[Symbol], env: TypeEnv) =
params filter {
p =>
- !p.isSpecialized ||
+ !p.isSpecialized ||
!env.contains(p) ||
!isPrimitiveValueType(env(p))
}
@@ -506,16 +506,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* was both already used for a map and mucho long. So "sClass" is the
* specialized subclass of "clazz" throughout this file.
*/
-
+
// SI-5545: Eliminate classes with the same name loaded from the bytecode already present - all we need to do is
// to force .info on them, as their lazy type will be evaluated and the symbols will be eliminated. Unfortunately
// evaluating the info after creating the specialized class will mess the specialized class signature, so we'd
- // better evaluate it before creating the new class symbol
+ // better evaluate it before creating the new class symbol
val clazzName = specializedName(clazz, env0).toTypeName
- val bytecodeClazz = clazz.owner.info.decl(clazzName)
+ val bytecodeClazz = clazz.owner.info.decl(clazzName)
// debuglog("Specializing " + clazz + ", but found " + bytecodeClazz + " already there")
bytecodeClazz.info
-
+
val sClass = clazz.owner.newClass(clazzName, clazz.pos, (clazz.flags | SPECIALIZED) & ~CASE)
def cloneInSpecializedClass(member: Symbol, flagFn: Long => Long, newName: Name = null) =
@@ -652,11 +652,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
info(specMember) = Implementation(original)
typeEnv(specMember) = env ++ typeEnv(m)
- }
- else debuglog({
+ } else {
val om = forwardToOverload(m)
- "normalizedMember " + m + " om: " + om + " " + pp(typeEnv(om))
- })
+ debuglog("normalizedMember " + m + " om: " + om + " " + pp(typeEnv(om)))
+ }
}
else
debuglog("conflicting env for " + m + " env: " + env)
@@ -762,7 +761,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
}
-
+
val subclasses = specializations(clazz.info.typeParams) filter satisfiable
subclasses foreach {
env =>
@@ -798,7 +797,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
var specializingOn = specializedParams(sym)
val unusedStvars = specializingOn filterNot specializedTypeVars(sym.info)
- // I think the last condition should be !sym.isHidden, but that made the
+ // I think the last condition should be !sym.isArtifact, but that made the
// compiler start warning about Tuple1.scala and Tuple2.scala claiming
// their type parameters are used in non-specializable positions. Why is
// unusedStvars.nonEmpty for these classes???
@@ -812,12 +811,17 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
specializingOn = specializingOn filterNot (unusedStvars contains)
}
for (env0 <- specializations(specializingOn) if needsSpecialization(env0, sym)) yield {
+ // !!! Can't this logic be structured so that the new symbol's name is
+ // known when the symbol is cloned? It is much cleaner not to be mutating
+ // names after the fact. And it adds about a billion lines of
+ // "Renaming value _1 in class Tuple2 to _1$mcZ$sp" to obscure the small
+ // number of other (important) actual symbol renamings.
val tps = survivingParams(sym.info.typeParams, env0)
- val specMember = sym.cloneSymbol(owner, (sym.flags | SPECIALIZED) & ~DEFERRED)
+ val specMember = sym.cloneSymbol(owner, (sym.flags | SPECIALIZED) & ~DEFERRED) // <-- this needs newName = ...
val env = mapAnyRefsInSpecSym(env0, sym, specMember)
val (keys, vals) = env.toList.unzip
- specMember setName specializedName(sym, env)
+ specMember setName specializedName(sym, env) // <-- but the name is calculated based on the cloned symbol
// debuglog("%s normalizes to %s%s".format(sym, specMember,
// if (tps.isEmpty) "" else " with params " + tps.mkString(", ")))
@@ -898,7 +902,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private def specializedOverload(owner: Symbol, sym: Symbol, env: TypeEnv): Symbol = {
val newFlags = (sym.flags | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR)
// this method properly duplicates the symbol's info
- ( sym.cloneSymbol(owner, newFlags, specializedName(sym, env))
+ ( sym.cloneSymbol(owner, newFlags, newName = specializedName(sym, env))
modifyInfo (info => subst(env, info.asSeenFrom(owner.thisType, sym.owner)))
)
}
@@ -913,7 +917,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*
* this method will return List('apply$mcII$sp')
*/
- private def specialOverrides(clazz: Symbol) = logResultIf[List[Symbol]]("specialOverrides(" + clazz + ")", _.nonEmpty) {
+ private def specialOverrides(clazz: Symbol) = logResultIf[List[Symbol]]("specialized overrides in " + clazz, _.nonEmpty) {
/** Return the overridden symbol in syms that needs a specialized overriding symbol,
* together with its specialization environment. The overridden symbol may not be
* the closest to 'overriding', in a given hierarchy.
@@ -1006,7 +1010,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* Fails if such an environment cannot be found.
*
* If `strict` is true, a UnifyError is thrown if unification is impossible.
- *
+ *
* If `tparams` is true, then the methods tries to unify over type params in polytypes as well.
*/
private def unify(tp1: Type, tp2: Type, env: TypeEnv, strict: Boolean, tparams: Boolean = false): TypeEnv = (tp1, tp2) match {
@@ -1185,7 +1189,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
|| specializedTypeVars(t1).nonEmpty
|| specializedTypeVars(t2).nonEmpty)
}
-
+
env forall { case (tvar, tpe) =>
matches(tvar.info.bounds.lo, tpe) && matches(tpe, tvar.info.bounds.hi) || {
if (warnings)
@@ -1201,7 +1205,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
}
-
+
def satisfiabilityConstraints(env: TypeEnv): Option[TypeEnv] = {
val noconstraints = Some(emptyEnv)
def matches(tpe1: Type, tpe2: Type): Option[TypeEnv] = {
@@ -1232,7 +1236,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
} with typechecker.Duplicators {
private val (castfrom, castto) = casts.unzip
private object CastMap extends SubstTypeMap(castfrom.toList, castto.toList)
-
+
class BodyDuplicator(_context: Context) extends super.BodyDuplicator(_context) {
override def castType(tree: Tree, pt: Type): Tree = {
// log(" expected type: " + pt)
@@ -1249,9 +1253,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
ntree
}
}
-
+
protected override def newBodyDuplicator(context: Context) = new BodyDuplicator(context)
-
+
}
/** A tree symbol substituter that substitutes on type skolems.
@@ -1359,7 +1363,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
}
-
+
def reportError[T](body: =>T)(handler: TypeError => T): T =
try body
catch {
@@ -1396,10 +1400,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
else None
} else None
}
-
+
curTree = tree
tree match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
+ def transformNew = {
debuglog("Attempting to specialize new %s(%s)".format(tpt, args.mkString(", ")))
val found = findSpec(tpt.tpe)
if (found.typeSymbol ne tpt.tpe.typeSymbol) {
@@ -1411,9 +1416,26 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
_ => super.transform(tree)
}
} else super.transform(tree)
+ }
+ transformNew
+
+ case Apply(sel @ Select(sup @ Super(qual, name), name1), args)
+ if (sup.symbol.info.parents != beforePrevPhase(sup.symbol.info.parents)) =>
+ def transformSuperApply = {
+
+ def parents = sup.symbol.info.parents
+ debuglog(tree + " parents changed from: " + beforePrevPhase(parents) + " to: " + parents)
+
+ val res = localTyper.typed(
+ Apply(Select(Super(qual, name) setPos sup.pos, name1) setPos sel.pos, transformTrees(args)) setPos tree.pos)
+ debuglog("retyping call to super, from: " + symbol + " to " + res.symbol)
+ res
+ }
+ transformSuperApply
case TypeApply(sel @ Select(qual, name), targs)
if (!specializedTypeVars(symbol.info).isEmpty && name != nme.CONSTRUCTOR) =>
+ def transformTypeApply = {
debuglog("checking typeapp for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe)
val qual1 = transform(qual)
// log(">>> TypeApply: " + tree + ", qual1: " + qual1)
@@ -1446,14 +1468,19 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// See pos/exponential-spec.scala - can't call transform on the whole tree again.
// super.transform(tree)
}
-
- case Select(Super(_, _), name) if illegalSpecializedInheritance(currentClass) =>
- val pos = tree.pos
- debuglog(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.")
- debuglog(pos.lineContent)
- tree
+ }
+ transformTypeApply
case Select(qual, name) =>
+ def transformSelect = {
+ qual match {
+ case _: Super if illegalSpecializedInheritance(currentClass) =>
+ val pos = tree.pos
+ debuglog(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.")
+ debuglog(pos.lineContent)
+ tree
+ case _ =>
+
debuglog("specializing Select %s [tree.tpe: %s]".format(symbol.defString, tree.tpe))
//log("!!! select " + tree + " -> " + symbol.info + " specTypeVars: " + specializedTypeVars(symbol.info))
@@ -1489,6 +1516,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case None =>
super.transform(tree)
}
+ }
+ }
+ transformSelect
case PackageDef(pid, stats) =>
tree.symbol.info // make sure specializations have been performed
@@ -1498,6 +1528,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
case Template(parents, self, body) =>
+ def transformTemplate = {
val specMembers = makeSpecializedMembers(tree.symbol.enclClass) ::: (implSpecClasses(body) map localTyper.typed)
if (!symbol.isPackageClass)
(new CollectMethodBodies)(tree)
@@ -1508,8 +1539,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
parents1 /*currentOwner.info.parents.map(tpe => TypeTree(tpe) setPos parents.head.pos)*/ ,
self,
atOwner(currentOwner)(transformTrees(body ::: specMembers)))
+ }
+ transformTemplate
case ddef @ DefDef(_, _, _, vparamss, _, _) if info.isDefinedAt(symbol) =>
+ def transformDefDef = {
// log("--> method: " + ddef + " in " + ddef.symbol.owner + ", " + info(symbol))
def reportTypeError(body: =>Tree) = reportError(body)(_ => ddef)
@@ -1570,7 +1604,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
})
debuglog("created special overload tree " + t)
debuglog("created " + t)
- reportError {
+ reportError {
localTyper.typed(t)
} {
_ => super.transform(tree)
@@ -1598,8 +1632,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog("abstract: " + targ)
localTyper.typed(deriveDefDef(tree)(rhs => rhs))
}
+ }
+ transformDefDef
case ValDef(_, _, _, _) if symbol.hasFlag(SPECIALIZED) && !symbol.isParamAccessor =>
+ def transformValDef = {
assert(body.isDefinedAt(symbol.alias), body)
val tree1 = deriveValDef(tree)(_ => body(symbol.alias).duplicate)
debuglog("now typing: " + tree1 + " in " + tree.symbol.owner.fullName)
@@ -1613,25 +1650,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
typeEnv(symbol.alias) ++ typeEnv(tree.symbol)
)
deriveValDef(newValDef)(transform)
-
- case Apply(sel @ Select(sup @ Super(qual, name), name1), args)
- if (sup.symbol.info.parents != beforePrevPhase(sup.symbol.info.parents)) =>
-
- def parents = sup.symbol.info.parents
- debuglog(tree + " parents changed from: " + beforePrevPhase(parents) + " to: " + parents)
-
- val res = localTyper.typed(
- Apply(Select(Super(qual, name) setPos sup.pos, name1) setPos sel.pos, transformTrees(args)) setPos tree.pos)
- debuglog("retyping call to super, from: " + symbol + " to " + res.symbol)
- res
+ }
+ transformValDef
case _ =>
super.transform(tree)
}
}
-
+
/** Duplicate the body of the given method `tree` to the new symbol `source`.
- *
+ *
* Knowing that the method can be invoked only in the `castmap` type environment,
* this method will insert casts for all the expressions of types mapped in the
* `castmap`.
@@ -1773,10 +1801,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private def forwardCall(pos: scala.reflect.internal.util.Position, receiver: Tree, paramss: List[List[ValDef]]): Tree = {
val argss = mmap(paramss)(x => Ident(x.symbol))
- def mkApply(fun: Tree, args: List[Tree]) = Apply(fun, args)
- atPos(pos) { (receiver /: argss) (mkApply) }
- // [Eugene++] no longer compiles after I moved the `Apply` case class into scala.reflect.internal
- // atPos(pos) { (receiver /: argss) (Apply) }
+ atPos(pos) { (receiver /: argss) (Apply.apply) }
}
/** Forward to the generic class constructor. If the current class initializes
@@ -1818,10 +1843,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
else
Ident(x.symbol)
)
- def mkApply(fun: Tree, args: List[Tree]) = Apply(fun, args)
- atPos(pos) { (receiver /: argss) (mkApply) }
- // [Eugene++] no longer compiles after I moved the `Apply` case class into scala.reflect.internal
- // atPos(pos) { (receiver /: argss) (Apply) }
+ atPos(pos) { (receiver /: argss) (Apply.apply) }
}
/** Add method m to the set of symbols for which we need an implementation tree
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index b1b930ca2d..0ad6d6c677 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -90,7 +90,7 @@ abstract class TailCalls extends Transform {
private val defaultReason = "it contains a recursive call not in tail position"
/** Has the label been accessed? Then its symbol is in this set. */
- private val accessed = new collection.mutable.HashSet[Symbol]()
+ private val accessed = new scala.collection.mutable.HashSet[Symbol]()
// `accessed` was stored as boolean in the current context -- this is no longer tenable
// with jumps to labels in tailpositions now considered in tailposition,
// a downstream context may access the label, and the upstream one will be none the wiser
@@ -373,7 +373,7 @@ abstract class TailCalls extends Transform {
// the labels all look like: matchEnd(x) {x}
// then, in a forward jump `matchEnd(expr)`, `expr` is considered in tail position (and the matchEnd jump is replaced by the jump generated by expr)
class TailPosLabelsTraverser extends Traverser {
- val tailLabels = new collection.mutable.HashSet[Symbol]()
+ val tailLabels = new scala.collection.mutable.HashSet[Symbol]()
private var maybeTail: Boolean = true // since we start in the rhs of a DefDef
diff --git a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
index 9e681b321c..82e95523d9 100644
--- a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
+++ b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
@@ -25,19 +25,14 @@ trait TypingTransformers {
protected var curTree: Tree = _
protected def typedPos(pos: Position)(tree: Tree) = localTyper typed { atPos(pos)(tree) }
- /** a typer for each enclosing class */
- val typers: mutable.Map[Symbol, analyzer.Typer] = new mutable.HashMap
-
- override def atOwner[A](owner: Symbol)(trans: => A): A = atOwner(curTree, owner)(trans)
+ override final def atOwner[A](owner: Symbol)(trans: => A): A = atOwner(curTree, owner)(trans)
def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A = {
val savedLocalTyper = localTyper
// println("transformer atOwner: " + owner + " isPackage? " + owner.isPackage)
localTyper = localTyper.atOwner(tree, if (owner.isModule) owner.moduleClass else owner)
- typers += Pair(owner, localTyper)
val result = super.atOwner(owner)(trans)
localTyper = savedLocalTyper
- typers -= owner
result
}
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index e98bf519fe..5a3db26e30 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -8,7 +8,7 @@ package transform
import symtab.Flags._
import scala.collection.{ mutable, immutable }
-import language.postfixOps
+import scala.language.postfixOps
/*<export> */
/** - uncurry all symbol and tree types (@see UnCurryPhase) -- this includes normalizing all proper types.
@@ -35,8 +35,8 @@ import language.postfixOps
* - convert non-local returns to throws with enclosing try statements.
* - convert try-catch expressions in contexts where there might be values on the stack to
* a local method and a call to it (since an exception empties the evaluation stack):
- *
- * meth(x_1,..., try { x_i } catch { ..}, .. x_b0) ==>
+ *
+ * meth(x_1,..., try { x_i } catch { ..}, .. x_b0) ==>
* {
* def liftedTry$1 = try { x_i } catch { .. }
* meth(x_1, .., liftedTry$1(), .. )
@@ -44,7 +44,7 @@ import language.postfixOps
*/
/*</export> */
abstract class UnCurry extends InfoTransform
- with reflect.internal.transform.UnCurry
+ with scala.reflect.internal.transform.UnCurry
with TypingTransformers with ast.TreeDSL {
val global: Global // need to repeat here because otherwise last mixin defines global as
// SymbolTable. If we had DOT this would not be an issue
@@ -212,11 +212,6 @@ abstract class UnCurry extends InfoTransform
/** Undo eta expansion for parameterless and nullary methods */
def deEta(fun: Function): Tree = fun match {
- case Function(List(), Apply(expr, List())) if treeInfo.isExprSafeToInline(expr) =>
- if (expr hasSymbolWhich (_.isLazy))
- fun
- else
- expr
case Function(List(), expr) if isByNameRef(expr) =>
noApply += expr
expr
@@ -271,7 +266,7 @@ abstract class UnCurry extends InfoTransform
localTyper.typedPos(fun.pos) {
Block(
- List(ClassDef(anonClass, NoMods, List(List()), List(List()), List(applyMethodDef), fun.pos)),
+ List(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, List(applyMethodDef), fun.pos)),
Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
}
@@ -396,7 +391,7 @@ abstract class UnCurry extends InfoTransform
localTyper.typedPos(fun.pos) {
Block(
- List(ClassDef(anonClass, NoMods, List(List()), List(List()), List(applyOrElseMethodDef, isDefinedAtMethodDef), fun.pos)),
+ List(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, List(applyOrElseMethodDef, isDefinedAtMethodDef), fun.pos)),
Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
}
}
@@ -484,11 +479,7 @@ abstract class UnCurry extends InfoTransform
arg setType functionType(Nil, arg.tpe)
}
else {
- log("byname | %s | %s | %s".format(
- arg.pos.source.path + ":" + arg.pos.line, fun.fullName,
- if (fun.isPrivate) "private" else "")
- )
-
+ log(s"Argument '$arg' at line ${arg.pos.safeLine} is $formal from ${fun.fullName}")
arg match {
// don't add a thunk for by-name argument if argument already is an application of
// a Function0. We can then remove the application and use the existing Function0.
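// Illustrative sketch of the by-name thunking this branch performs; the
// names are made up and the () => shape is conceptual.
object ByNameSketch {
  def twice(x: => Int): Int = x + x
  def compute(): Int = 21
  // a call `twice(compute())` is conceptually rewritten to pass a thunk,
  //   twice(() => compute())
  // while an argument that is already a Function0 application keeps the
  // existing Function0, as the comment above notes.
}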
@@ -558,7 +549,7 @@ abstract class UnCurry extends InfoTransform
sym.setInfo(MethodType(List(), tree.tpe))
tree.changeOwner(currentOwner -> sym)
localTyper.typedPos(tree.pos)(Block(
- List(DefDef(sym, List(Nil), tree)),
+ List(DefDef(sym, ListOfNil, tree)),
Apply(Ident(sym), Nil)
))
}
@@ -641,7 +632,7 @@ abstract class UnCurry extends InfoTransform
case ret @ Return(_) if (isNonLocalReturn(ret)) =>
withNeedLift(true) { super.transform(ret) }
- case Try(_, Nil, _) =>
+ case Try(_, Nil, _) =>
// try-finally does not need lifting: lifting is needed only for try-catch
// expressions that are evaluated in a context where the stack might not be empty.
// `finally` does not attempt to continue evaluation after an exception, so the fact
@@ -694,6 +685,46 @@ abstract class UnCurry extends InfoTransform
tree
}
+ def isThrowable(pat: Tree): Boolean = pat match {
+ case Typed(Ident(nme.WILDCARD), tpt) =>
+ tpt.tpe =:= ThrowableClass.tpe
+ case Bind(_, pat) =>
+ isThrowable(pat)
+ case _ =>
+ false
+ }
+
+ def isDefaultCatch(cdef: CaseDef) = isThrowable(cdef.pat) && cdef.guard.isEmpty
+
+ def postTransformTry(tree: Try) = {
+ val body = tree.block
+ val catches = tree.catches
+ val finalizer = tree.finalizer
+ if (opt.virtPatmat) {
+ if (catches exists (cd => !treeInfo.isCatchCase(cd)))
+ debugwarn("VPM BUG! illegal try/catch " + catches)
+ tree
+ } else if (catches forall treeInfo.isCatchCase) {
+ tree
+ } else {
+ val exname = unit.freshTermName("ex$")
+ val cases =
+ if ((catches exists treeInfo.isDefaultCase) || isDefaultCatch(catches.last)) catches
+ else catches :+ CaseDef(Ident(nme.WILDCARD), EmptyTree, Throw(Ident(exname)))
+ val catchall =
+ atPos(tree.pos) {
+ CaseDef(
+ Bind(exname, Ident(nme.WILDCARD)),
+ EmptyTree,
+ Match(Ident(exname), cases))
+ }
+ debuglog("rewrote try: " + catches + " ==> " + catchall);
+ val catches1 = localTyper.typedCases(
+ List(catchall), ThrowableClass.tpe, WildcardType)
+ treeCopy.Try(tree, body, catches1, finalizer)
+ }
+ }
+
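// Illustrative sketch of the shape postTransformTry produces when a catch
// clause is not a plain catch case; the alternative pattern below is assumed
// to be such a case, and `ex` stands for the compiler-fresh ex$ name.
object TryRewriteSketch {
  def parse(s: String): Int =
    try s.toInt
    catch { case _: NumberFormatException | _: NullPointerException => -1 }

  // conceptually rewritten into a single catch-all whose body re-matches:
  def parseRewritten(s: String): Int =
    try s.toInt
    catch {
      case ex: Throwable =>
        ex match {
          case _: NumberFormatException | _: NullPointerException => -1
          case _ => throw ex
        }
    }
}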
tree match {
/* Some uncurry post transformations add members to templates.
*
@@ -711,8 +742,12 @@ abstract class UnCurry extends InfoTransform
}
case dd @ DefDef(_, _, _, vparamss0, _, rhs0) =>
+ val vparamss1 = vparamss0 match {
+ case _ :: Nil => vparamss0
+ case _ => vparamss0.flatten :: Nil
+ }
val flatdd = copyDefDef(dd)(
- vparamss = List(vparamss0.flatten),
+ vparamss = vparamss1,
rhs = nonLocalReturnKeys get dd.symbol match {
case Some(k) => atPos(rhs0.pos)(nonLocalReturnTry(rhs0, k, dd.symbol))
case None => rhs0
@@ -720,35 +755,12 @@ abstract class UnCurry extends InfoTransform
)
addJavaVarargsForwarders(dd, flatdd)
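The flattening in the DefDef case collapses multiple parameter sections into one, while the new `_ :: Nil` guard leaves a method that already has a single section untouched; a hypothetical source-level view:

    object UncurryDefDefSketch {
      // before uncurry:
      def plus(x: Int)(y: Int): Int = x + y
      // after uncurry, `plus` ends up with one flat parameter section, roughly:
      //   def plus(x: Int, y: Int): Int = x + y
    }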
- case Try(body, catches, finalizer) =>
- if (opt.virtPatmat) { if(catches exists (cd => !treeInfo.isCatchCase(cd))) debugwarn("VPM BUG! illegal try/catch "+ catches); tree }
- else if (catches forall treeInfo.isCatchCase) tree
- else {
- val exname = unit.freshTermName("ex$")
- val cases =
- if ((catches exists treeInfo.isDefaultCase) || (catches.last match { // bq: handle try { } catch { ... case ex:Throwable => ...}
- case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) =>
- true
- case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) =>
- true
- case _ =>
- false
- })) catches
- else catches :+ CaseDef(Ident(nme.WILDCARD), EmptyTree, Throw(Ident(exname)))
- val catchall =
- atPos(tree.pos) {
- CaseDef(
- Bind(exname, Ident(nme.WILDCARD)),
- EmptyTree,
- Match(Ident(exname), cases))
- }
- debuglog("rewrote try: " + catches + " ==> " + catchall);
- val catches1 = localTyper.typedCases(
- List(catchall), ThrowableClass.tpe, WildcardType)
- treeCopy.Try(tree, body, catches1, finalizer)
- }
+ case tree: Try =>
+ postTransformTry(tree)
+
case Apply(Apply(fn, args), args1) =>
treeCopy.Apply(tree, fn, args ::: args1)
+
case Ident(name) =>
assert(name != tpnme.WILDCARD_STAR, tree)
applyUnary()
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index 7f4f61bf80..399f9a1eac 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc
package typechecker
-import reflect.internal.util.Statistics
+import scala.reflect.internal.util.Statistics
/** The main attribution phase.
*/
@@ -72,7 +72,7 @@ trait Analyzer extends AnyRef
}
object typerFactory extends SubComponent {
- import reflect.internal.TypesStats.typerNanos
+ import scala.reflect.internal.TypesStats.typerNanos
val global: Analyzer.this.global.type = Analyzer.this.global
val phaseName = "typer"
val runsAfter = List[String]()
@@ -85,13 +85,13 @@ trait Analyzer extends AnyRef
// compiler run). This is good enough for the resident compiler, which was the most affected.
undoLog.clear()
override def run() {
- val start = Statistics.startTimer(typerNanos)
+ val start = if (Statistics.canEnable) Statistics.startTimer(typerNanos) else null
global.echoPhaseSummary(this)
currentRun.units foreach applyPhase
undoLog.clear()
// need to clear it after as well or 10K+ accumulated entries are
// uncollectable the rest of the way.
- Statistics.stopTimer(typerNanos, start)
+ if (Statistics.canEnable) Statistics.stopTimer(typerNanos, start)
}
def apply(unit: CompilationUnit) {
try {
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index c7728ce389..d78efd8280 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -8,12 +8,19 @@ package typechecker
import scala.collection.{ mutable, immutable }
import scala.reflect.internal.util.StringOps.{ countElementsAsString, countAsString }
-import symtab.Flags.{ PRIVATE, PROTECTED }
+import symtab.Flags.{ PRIVATE, PROTECTED, IS_ERROR }
+import scala.compat.Platform.EOL
+import scala.reflect.runtime.ReflectionUtils
+import scala.reflect.macros.runtime.AbortMacroException
+import scala.util.control.NonFatal
+import scala.tools.nsc.util.stackTraceString
trait ContextErrors {
self: Analyzer =>
import global._
+ import definitions._
+ import treeInfo._
object ErrorKinds extends Enumeration {
type ErrorKind = Value
@@ -78,10 +85,33 @@ trait ContextErrors {
def typeErrorMsg(found: Type, req: Type, possiblyMissingArgs: Boolean) = {
def missingArgsMsg = if (possiblyMissingArgs) "\n possible cause: missing arguments for method or constructor" else ""
+
"type mismatch" + foundReqMsg(found, req) + missingArgsMsg
}
}
+ def notAnyRefMessage(found: Type): String = {
+ val tp = found.widen
+ def name = tp.typeSymbol.nameString
+ def parents = tp.parents filterNot isTrivialTopType
+ def onlyAny = tp.parents forall (_.typeSymbol == AnyClass)
+ def parents_s = ( if (parents.isEmpty) tp.parents else parents ) mkString ", "
+ def what = (
+ if (tp.typeSymbol.isAbstractType) {
+ val descr = if (onlyAny) "unbounded" else "bounded only by " + parents_s
+ s"$name is $descr, which means AnyRef is not a known parent"
+ }
+ else if (tp.typeSymbol.isAnonOrRefinementClass)
+ s"the parents of this type ($parents_s) extend Any, not AnyRef"
+ else
+ s"$name extends Any, not AnyRef"
+ )
+ if (isPrimitiveValueType(found) || isTrivialTopType(tp)) "" else "\n" +
+ s"""|Note that $what.
+ |Such types can participate in value classes, but instances
+ |cannot appear in singleton types or in reference comparisons.""".stripMargin
+ }
+
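A hypothetical example of code that would pick up the new addendum: the selected member exists on AnyRef/Object, but the receiver's type only extends Any (error text shown roughly, in comments):

    object NotAnyRefSketch {
      trait Show extends Any   // a universal trait: extends Any, not AnyRef

      // def f(s: Show) = s eq null
      // error: value eq is not a member of Show
      // Note that Show extends Any, not AnyRef.
      // Such types can participate in value classes, but instances
      // cannot appear in singleton types or in reference comparisons.
    }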
import ErrorUtils._
trait TyperContextErrors {
@@ -289,12 +319,17 @@ trait ContextErrors {
else
""
)
- companion + semicolon
+ val notAnyRef = (
+ if (ObjectClass.info.member(name).exists) notAnyRefMessage(target)
+ else ""
+ )
+ companion + notAnyRef + semicolon
}
+ def targetStr = targetKindString + target.directObjectString
withAddendum(qual.pos)(
- if (name == nme.CONSTRUCTOR) target + " does not have a constructor"
- else nameString + " is not a member of " + targetKindString + target.directObjectString + addendum
- )
+ if (name == nme.CONSTRUCTOR) s"$target does not have a constructor"
+ else s"$nameString is not a member of $targetStr$addendum"
+ )
}
issueNormalTypeError(sel, errMsg)
// the error has to be set for the copied tree, otherwise
@@ -320,16 +355,6 @@ trait ContextErrors {
setError(tree)
}
- def MacroEtaError(tree: Tree) = {
- issueNormalTypeError(tree, "macros cannot be eta-expanded")
- setError(tree)
- }
-
- def MacroPartialApplicationError(tree: Tree) = {
- issueNormalTypeError(tree, "macros cannot be partially applied")
- setError(tree)
- }
-
//typedReturn
def ReturnOutsideOfDefError(tree: Tree) = {
issueNormalTypeError(tree, "return outside method definition")
@@ -427,8 +452,11 @@ trait ContextErrors {
def AbstractionFromVolatileTypeError(vd: ValDef) =
issueNormalTypeError(vd, "illegal abstraction from value with volatile type "+vd.symbol.tpe)
+ private[ContextErrors] def TypedApplyWrongNumberOfTpeParametersErrorMessage(fun: Tree) =
+ "wrong number of type parameters for "+treeSymTypeMsg(fun)
+
def TypedApplyWrongNumberOfTpeParametersError(tree: Tree, fun: Tree) = {
- issueNormalTypeError(tree, "wrong number of type parameters for "+treeSymTypeMsg(fun))
+ issueNormalTypeError(tree, TypedApplyWrongNumberOfTpeParametersErrorMessage(fun))
setError(tree)
}
@@ -442,9 +470,6 @@ trait ContextErrors {
def NamedAndDefaultArgumentsNotSupportedForMacros(tree: Tree, fun: Tree) =
NormalTypeError(tree, "macros application do not support named and/or default arguments")
- def WrongNumberOfArgsError(tree: Tree, fun: Tree) =
- NormalTypeError(tree, "wrong number of arguments for "+ treeSymTypeMsg(fun))
-
def TooManyArgsNamesDefaultsError(tree: Tree, fun: Tree) =
NormalTypeError(tree, "too many arguments for "+treeSymTypeMsg(fun))
@@ -484,15 +509,22 @@ trait ContextErrors {
def TooManyArgsPatternError(fun: Tree) =
NormalTypeError(fun, "too many arguments for unapply pattern, maximum = "+definitions.MaxTupleArity)
- def WrongNumberArgsPatternError(tree: Tree, fun: Tree) =
- NormalTypeError(tree, "wrong number of arguments for "+treeSymTypeMsg(fun))
+ def WrongNumberOfArgsError(tree: Tree, fun: Tree) =
+ NormalTypeError(tree, "wrong number of arguments for "+ treeSymTypeMsg(fun))
def ApplyWithoutArgsError(tree: Tree, fun: Tree) =
NormalTypeError(tree, fun.tpe+" does not take parameters")
+ // Dynamic
def DynamicVarArgUnsupported(tree: Tree, name: String) =
issueNormalTypeError(tree, name+ " does not support passing a vararg parameter")
+ def DynamicRewriteError(tree: Tree, err: AbsTypeError) = {
+ issueTypeError(PosAndMsgTypeError(err.errPos, err.errMsg +
+ s"\nerror after rewriting to $tree\npossible cause: maybe a wrong Dynamic method signature?"))
+ setError(tree)
+ }
+
//checkClassType
def TypeNotAStablePrefixError(tpt: Tree, pre: Type) = {
issueNormalTypeError(tpt, "type "+pre+" is not a stable prefix")
@@ -625,11 +657,112 @@ trait ContextErrors {
}
// cyclic errors
- def CyclicAliasingOrSubtypingError(errPos: Position, sym0: Symbol) =
- issueTypeError(PosAndMsgTypeError(errPos, "cyclic aliasing or subtyping involving "+sym0))
+ def CyclicAliasingOrSubtypingError(errPos: Position, sym0: Symbol) =
+ issueTypeError(PosAndMsgTypeError(errPos, "cyclic aliasing or subtyping involving "+sym0))
+
+ def CyclicReferenceError(errPos: Position, lockedSym: Symbol) =
+ issueTypeError(PosAndMsgTypeError(errPos, "illegal cyclic reference involving " + lockedSym))
+
+ // macro-related errors (also see MacroErrors below)
+
+ def MacroEtaError(tree: Tree) = {
+ issueNormalTypeError(tree, "macros cannot be eta-expanded")
+ setError(tree)
+ }
+
+ // same reason as for MacroBodyTypecheckException
+ case object MacroExpansionException extends Exception with scala.util.control.ControlThrowable
+
+ private def macroExpansionError(expandee: Tree, msg: String = null, pos: Position = NoPosition) = {
+ def msgForLog = if (msg != null && (msg contains "exception during macro expansion")) msg.split(EOL).drop(1).headOption.getOrElse("?") else msg
+ macroLogLite("macro expansion has failed: %s".format(msgForLog))
+ val errorPos = if (pos != NoPosition) pos else (if (expandee.pos != NoPosition) expandee.pos else enclosingMacroPosition)
+ if (msg != null) context.error(pos, msg) // issueTypeError(PosAndMsgTypeError(..)) won't work => swallows positions
+ setError(expandee)
+ throw MacroExpansionException
+ }
+
+ def MacroPartialApplicationError(expandee: Tree) = {
+ // macroExpansionError won't work => swallows positions, hence needed to do issueTypeError
+ // kinda contradictory to the comment in `macroExpansionError`, but this is how it works
+ issueNormalTypeError(expandee, "macros cannot be partially applied")
+ setError(expandee)
+ throw MacroExpansionException
+ }
- def CyclicReferenceError(errPos: Position, lockedSym: Symbol) =
- issueTypeError(PosAndMsgTypeError(errPos, "illegal cyclic reference involving " + lockedSym))
+ def MacroGeneratedAbort(expandee: Tree, ex: AbortMacroException) = {
+ // errors have been reported by the macro itself, so we do nothing here
+ macroLogVerbose("macro expansion has been aborted")
+ macroExpansionError(expandee, ex.msg, ex.pos)
+ }
+
+ def MacroGeneratedTypeError(expandee: Tree, err: TypeError = null) =
+ if (err == null) {
+ // errors have been reported by the macro itself, so we do nothing here
+ macroExpansionError(expandee, null)
+ } else {
+ macroLogLite("macro expansion has failed: %s at %s".format(err.msg, err.pos))
+ throw err // this error must be propagated, don't report
+ }
+
+ def MacroGeneratedException(expandee: Tree, ex: Throwable) = {
+ val realex = ReflectionUtils.unwrapThrowable(ex)
+ val message = {
+ try {
+ // [Eugene] is there a better way?
+ // [Paul] See Exceptional.scala and Origins.scala.
+ val relevancyThreshold = realex.getStackTrace().indexWhere(_.getMethodName endsWith "macroExpand1")
+ if (relevancyThreshold == -1) None
+ else {
+ var relevantElements = realex.getStackTrace().take(relevancyThreshold + 1)
+ def isMacroInvoker(este: StackTraceElement) = este.isNativeMethod || (este.getClassName != null && (este.getClassName contains "fastTrack"))
+ var threshold = relevantElements.reverse.indexWhere(isMacroInvoker) + 1
+ while (threshold != relevantElements.length && isMacroInvoker(relevantElements(relevantElements.length - threshold - 1))) threshold += 1
+ relevantElements = relevantElements dropRight threshold
+
+ realex.setStackTrace(relevantElements)
+ Some(EOL + stackTraceString(realex))
+ }
+ } catch {
+ // the code above tries various tricks to detect the relevant portion of the stack trace
+ // if these tricks fail, just fall back to uninformative, but better than nothing, getMessage
+ case NonFatal(ex) =>
+ macroLogVerbose("got an exception when processing a macro generated exception\n" +
+ "offender = " + stackTraceString(realex) + "\n" +
+ "error = " + stackTraceString(ex))
+ None
+ }
+ } getOrElse {
+ val msg = realex.getMessage
+ if (msg != null) msg else realex.getClass.getName
+ }
+ macroExpansionError(expandee, "exception during macro expansion: " + message)
+ }
+
+ def MacroFreeSymbolError(expandee: Tree, sym: FreeSymbol) = {
+ def template(kind: String) = (
+ s"Macro expansion contains free $kind variable %s. Have you forgotten to use %s? "
+ + s"If you have troubles tracking free $kind variables, consider using -Xlog-free-${kind}s"
+ )
+ val forgotten = (
+ if (sym.isTerm) "splice when splicing this variable into a reifee"
+ else "c.WeakTypeTag annotation for this type parameter"
+ )
+ macroExpansionError(expandee, template(sym.name.nameKind).format(sym.name + " " + sym.origin, forgotten))
+ }
+
+ def MacroExpansionIsNotExprError(expandee: Tree, expanded: Any) =
+ macroExpansionError(expandee,
+ "macro must return a compiler-specific expr; returned value is " + (
+ if (expanded == null) "null"
+ else if (expanded.isInstanceOf[Expr[_]]) " Expr, but it doesn't belong to this compiler's universe"
+ else " of " + expanded.getClass
+ ))
+
+ def MacroImplementationNotFoundError(expandee: Tree) =
+ macroExpansionError(expandee,
+ "macro implementation not found: " + expandee.symbol.name + " " +
+ "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)")
}
}
@@ -704,9 +837,8 @@ trait ContextErrors {
}
// side-effect on the tree, break the overloaded type cycle in infer
- @inline
private def setErrorOnLastTry(lastTry: Boolean, tree: Tree) = if (lastTry) setError(tree)
-
+
def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type, lastTry: Boolean) = {
issueNormalTypeError(tree,
applyErrorMsg(tree, " cannot be applied to ", argtpes, pt))
@@ -719,7 +851,7 @@ trait ContextErrors {
def AmbiguousMethodAlternativeError(tree: Tree, pre: Type, best: Symbol,
firstCompeting: Symbol, argtpes: List[Type], pt: Type, lastTry: Boolean) = {
-
+
if (!(argtpes exists (_.isErroneous)) && !pt.isErroneous) {
val msg0 =
"argument types " + argtpes.mkString("(", ",", ")") +
@@ -729,7 +861,7 @@ trait ContextErrors {
setErrorOnLastTry(lastTry, tree)
} else setError(tree) // do not even try further attempts because they should all fail
// even if this is not the last attempt (because of the SO's possibility on the horizon)
-
+
}
def NoBestExprAlternativeError(tree: Tree, pt: Type, lastTry: Boolean) = {
@@ -753,21 +885,24 @@ trait ContextErrors {
kindErrors.toList.mkString("\n", ", ", ""))
}
- def NotWithinBounds(tree: Tree, prefix: String, targs: List[Type],
- tparams: List[Symbol], kindErrors: List[String]) = {
- if (settings.explaintypes.value) {
+ private[ContextErrors] def NotWithinBoundsErrorMessage(prefix: String, targs: List[Type], tparams: List[Symbol], explaintypes: Boolean) = {
+ if (explaintypes) {
val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, targs).bounds)
(targs, bounds).zipped foreach ((targ, bound) => explainTypes(bound.lo, targ))
(targs, bounds).zipped foreach ((targ, bound) => explainTypes(targ, bound.hi))
()
}
- issueNormalTypeError(tree,
- prefix + "type arguments " + targs.mkString("[", ",", "]") +
- " do not conform to " + tparams.head.owner + "'s type parameter bounds " +
- (tparams map (_.defString)).mkString("[", ",", "]"))
+ prefix + "type arguments " + targs.mkString("[", ",", "]") +
+ " do not conform to " + tparams.head.owner + "'s type parameter bounds " +
+ (tparams map (_.defString)).mkString("[", ",", "]")
}
+ def NotWithinBounds(tree: Tree, prefix: String, targs: List[Type],
+ tparams: List[Symbol], kindErrors: List[String]) =
+ issueNormalTypeError(tree,
+ NotWithinBoundsErrorMessage(prefix, targs, tparams, settings.explaintypes.value))
+
//substExpr
def PolymorphicExpressionInstantiationError(tree: Tree, undetparams: List[Symbol], pt: Type) =
issueNormalTypeError(tree,
@@ -992,44 +1127,42 @@ trait ContextErrors {
pre1: String, pre2: String, trailer: String)
(isView: Boolean, pt: Type, tree: Tree)(implicit context0: Context) = {
if (!info1.tpe.isErroneous && !info2.tpe.isErroneous) {
- val coreMsg =
- pre1+" "+info1.sym.fullLocationString+" of type "+info1.tpe+"\n "+
- pre2+" "+info2.sym.fullLocationString+" of type "+info2.tpe+"\n "+
- trailer
- val errMsg =
- if (isView) {
- val found = pt.typeArgs(0)
- val req = pt.typeArgs(1)
- def defaultExplanation =
- "Note that implicit conversions are not applicable because they are ambiguous:\n "+
- coreMsg+"are possible conversion functions from "+ found+" to "+req
-
- def explanation = {
- val sym = found.typeSymbol
- // Explain some common situations a bit more clearly.
- if (AnyRefClass.tpe <:< req) {
- if (sym == AnyClass || sym == UnitClass) {
- "Note: " + sym.name + " is not implicitly converted to AnyRef. You can safely\n" +
- "pattern match `x: AnyRef` or cast `x.asInstanceOf[AnyRef]` to do so."
- }
- else boxedClass get sym match {
- case Some(boxed) =>
- "Note: an implicit exists from " + sym.fullName + " => " + boxed.fullName + ", but\n" +
- "methods inherited from Object are rendered ambiguous. This is to avoid\n" +
- "a blanket implicit which would convert any " + sym.fullName + " to any AnyRef.\n" +
- "You may wish to use a type ascription: `x: " + boxed.fullName + "`."
- case _ =>
- defaultExplanation
- }
- }
- else defaultExplanation
- }
-
- typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req)) + "\n" + explanation
- } else {
- "ambiguous implicit values:\n "+coreMsg + "match expected type "+pt
+ def coreMsg =
+ s"""| $pre1 ${info1.sym.fullLocationString} of type ${info1.tpe}
+ | $pre2 ${info2.sym.fullLocationString} of type ${info2.tpe}
+ | $trailer""".stripMargin
+ def viewMsg = {
+ val found :: req :: _ = pt.typeArgs
+ def explanation = {
+ val sym = found.typeSymbol
+ // Explain some common situations a bit more clearly. Some other
+ // failures which have nothing to do with implicit conversions
+ // per se, but which manifest as implicit conversion conflicts
+ // involving Any, are further explained from foundReqMsg.
+ if (AnyRefClass.tpe <:< req) (
+ if (sym == AnyClass || sym == UnitClass) (
+ s"""|Note: ${sym.name} is not implicitly converted to AnyRef. You can safely
+ |pattern match `x: AnyRef` or cast `x.asInstanceOf[AnyRef]` to do so.""".stripMargin
+ )
+ else boxedClass get sym map (boxed =>
+ s"""|Note: an implicit exists from ${sym.fullName} => ${boxed.fullName}, but
+ |methods inherited from Object are rendered ambiguous. This is to avoid
+ |a blanket implicit which would convert any ${sym.fullName} to any AnyRef.
+ |You may wish to use a type ascription: `x: ${boxed.fullName}`.""".stripMargin
+ ) getOrElse ""
+ )
+ else
+ s"""|Note that implicit conversions are not applicable because they are ambiguous:
+ |${coreMsg}are possible conversion functions from $found to $req""".stripMargin
}
- context.issueAmbiguousError(AmbiguousTypeError(tree, tree.pos, errMsg))
+ typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req)) + (
+ if (explanation == "") "" else "\n" + explanation
+ )
+ }
+ context.issueAmbiguousError(AmbiguousTypeError(tree, tree.pos,
+ if (isView) viewMsg
+ else s"ambiguous implicit values:\n${coreMsg}match expected type $pt")
+ )
}
}
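A hypothetical snippet producing the non-view form of the message assembled above (error text shown roughly, in comments):

    object AmbiguitySketch {
      trait Codec[A]
      implicit val codecA: Codec[Int] = new Codec[Int] {}
      implicit val codecB: Codec[Int] = new Codec[Int] {}
      def encode[A](a: A)(implicit c: Codec[A]): Unit = ()

      // encode(1)
      // error: ambiguous implicit values:
      //  both value codecA in object AmbiguitySketch of type AmbiguitySketch.Codec[Int]
      //  and value codecB in object AmbiguitySketch of type AmbiguitySketch.Codec[Int]
      //  match expected type AmbiguitySketch.Codec[Int]
    }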
@@ -1061,7 +1194,7 @@ trait ContextErrors {
setError(arg)
} else arg
}
-
+
def WarnAfterNonSilentRecursiveInference(param: Symbol, arg: Tree)(implicit context: Context) = {
val note = "type-checking the invocation of "+ param.owner +" checks if the named argument expression '"+ param.name + " = ...' is a valid assignment\n"+
"in the current scope. The resulting type inference error (see above) can be fixed by providing an explicit type in the local definition for "+ param.name +"."
@@ -1087,4 +1220,133 @@ trait ContextErrors {
setError(arg)
}
}
+
+ // using an exception here is actually a good idea
+ // because the lifespan of this exception is extremely small and controlled
+ // moreover exceptions let us avoid an avalanche of "if (!hasError) do stuff" checks
+ case object MacroBodyTypecheckException extends Exception with scala.util.control.ControlThrowable
+
+ trait MacroErrors {
+ self: MacroTyper =>
+
+ private implicit val context0 = typer.context
+ val context = typer.context
+
+ // helpers
+
+ private def lengthMsg(flavor: String, violation: String, extra: Symbol) = {
+ val noun = if (flavor == "value") "parameter" else "type parameter"
+ val message = noun + " lists have different length, " + violation + " extra " + noun
+ val suffix = if (extra ne NoSymbol) " " + extra.defString else ""
+ message + suffix
+ }
+
+ private def abbreviateCoreAliases(s: String): String = List("WeakTypeTag", "Expr").foldLeft(s)((res, x) => res.replace("c.universe." + x, "c." + x))
+
+ private def showMeth(pss: List[List[Symbol]], restpe: Type, abbreviate: Boolean) = {
+ var argsPart = (pss map (ps => ps map (_.defString) mkString ("(", ", ", ")"))).mkString
+ if (abbreviate) argsPart = abbreviateCoreAliases(argsPart)
+ var retPart = restpe.toString
+ if (abbreviate || macroDdef.tpt.tpe == null) retPart = abbreviateCoreAliases(retPart)
+ argsPart + ": " + retPart
+ }
+
+ // not exactly an error generator, but very related
+ // and I dearly wanted to push it away from Macros.scala
+ private def checkSubType(slot: String, rtpe: Type, atpe: Type) = {
+ val ok = if (macroDebugVerbose || settings.explaintypes.value) {
+ if (rtpe eq atpe) println(rtpe + " <: " + atpe + "?" + EOL + "true")
+ withTypesExplained(rtpe <:< atpe)
+ } else rtpe <:< atpe
+ if (!ok) {
+ compatibilityError("type mismatch for %s: %s does not conform to %s".format(slot, abbreviateCoreAliases(rtpe.toString), abbreviateCoreAliases(atpe.toString)))
+ }
+ }
+
+ // errors
+
+ private def fail() = {
+ // need to set the IS_ERROR flag to prohibit spurious expansions
+ if (macroDef != null) macroDef setFlag IS_ERROR
+ // not setting ErrorSymbol as in `infer.setError`, because we still need to know that it's a macro
+ // otherwise assignTypeToTree in Namers might fail if macroDdef.tpt == EmptyTree
+ macroDdef setType ErrorType
+ throw MacroBodyTypecheckException
+ }
+
+ private def genericError(tree: Tree, message: String) = {
+ issueNormalTypeError(tree, message)
+ fail()
+ }
+
+ private def implRefError(message: String) = genericError(methPart(macroDdef.rhs), message)
+
+ private def compatibilityError(message: String) =
+ implRefError(
+ "macro implementation has wrong shape:"+
+ "\n required: " + showMeth(rparamss, rret, abbreviate = true) +
+ "\n found : " + showMeth(aparamss, aret, abbreviate = false) +
+ "\n" + message)
+
+ // Phase I: sanity checks
+
+ def MacroDefIsFastTrack() = {
+ macroLogVerbose("typecheck terminated unexpectedly: macro is fast track")
+ assert(!macroDdef.tpt.isEmpty, "fast track macros must provide result type")
+ throw MacroBodyTypecheckException // don't call fail, because we don't need IS_ERROR
+ }
+
+ def MacroFeatureNotEnabled() = {
+ macroLogVerbose("typecheck terminated unexpectedly: language.experimental.macros feature is not enabled")
+ fail()
+ }
+
+ // Phase II: typecheck the right-hand side of the macro def
+
+ // do nothing, just fail. relevant typecheck errors have already been reported
+ def MacroDefUntypeableBodyError() = fail()
+
+ def MacroDefInvalidBodyError() = genericError(macroDdef, "macro body has wrong shape:\n required: macro [<implementation object>].<method name>[[<type args>]]")
+
+ def MacroImplNotPublicError() = implRefError("macro implementation must be public")
+
+ def MacroImplOverloadedError() = implRefError("macro implementation cannot be overloaded")
+
+ def MacroImplWrongNumberOfTypeArgumentsError(macroImplRef: Tree) = implRefError(typer.TyperErrorGen.TypedApplyWrongNumberOfTpeParametersErrorMessage(macroImplRef))
+
+ def MacroImplNotStaticError() = implRefError("macro implementation must be in statically accessible object")
+
+ // Phase III: check compatibility between the macro def and its macro impl
+ // aXXX (e.g. aparams) => characteristics of the macro impl ("a" stands for "actual")
+ // rXXX (e.g. rparams) => characteristics of a reference macro impl signature synthesized from the macro def ("r" stands for "reference")
+
+ def MacroImplNonTagImplicitParameters(params: List[Symbol]) = compatibilityError("macro implementations cannot have implicit parameters other than WeakTypeTag evidences")
+
+ def MacroImplParamssMismatchError() = compatibilityError("number of parameter sections differ")
+
+ def MacroImplExtraParamsError(aparams: List[Symbol], rparams: List[Symbol]) = compatibilityError(lengthMsg("value", "found", aparams(rparams.length)))
+
+ def MacroImplMissingParamsError(aparams: List[Symbol], rparams: List[Symbol]) = compatibilityError(abbreviateCoreAliases(lengthMsg("value", "required", rparams(aparams.length))))
+
+ def checkMacroImplParamTypeMismatch(atpe: Type, rparam: Symbol) = checkSubType("parameter " + rparam.name, rparam.tpe, atpe)
+
+ def checkMacroImplResultTypeMismatch(atpe: Type, rret: Type) = checkSubType("return type", atpe, rret)
+
+ def MacroImplParamNameMismatchError(aparam: Symbol, rparam: Symbol) = compatibilityError("parameter names differ: " + rparam.name + " != " + aparam.name)
+
+ def MacroImplVarargMismatchError(aparam: Symbol, rparam: Symbol) = {
+ if (isRepeated(rparam) && !isRepeated(aparam))
+ compatibilityError("types incompatible for parameter " + rparam.name + ": corresponding is not a vararg parameter")
+ if (!isRepeated(rparam) && isRepeated(aparam))
+ compatibilityError("types incompatible for parameter " + aparam.name + ": corresponding is not a vararg parameter")
+ }
+
+ def MacroImplTargMismatchError(atargs: List[Type], atparams: List[Symbol]) =
+ compatibilityError(typer.infer.InferErrorGen.NotWithinBoundsErrorMessage("", atargs, atparams, macroDebugVerbose || settings.explaintypes.value))
+
+ def MacroImplTparamInstantiationError(atparams: List[Symbol], ex: NoInstance) =
+ compatibilityError(
+ "type parameters "+(atparams map (_.defString) mkString ", ")+" cannot be instantiated\n"+
+ ex.getMessage)
+ }
}
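For reference, a hypothetical 2.10-style macro def/impl pair of the kind the Phase III checks above compare (names invented):

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object FooMacro {
      // the "reference" signature (rparamss, rret) is synthesized from this macro def:
      def foo(x: Int): Int = macro fooImpl

      // the "actual" signature (aparamss, aret) is read off this impl; dropping the
      // `x` parameter here would trigger MacroImplMissingParamsError, renaming it
      // would trigger MacroImplParamNameMismatchError, and so on.
      def fooImpl(c: Context)(x: c.Expr[Int]): c.Expr[Int] = x
    }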
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index dd5588e9a6..211da044e6 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -8,7 +8,7 @@ package typechecker
import symtab.Flags._
import scala.collection.mutable.{LinkedHashSet, Set}
-import annotation.tailrec
+import scala.annotation.tailrec
/**
* @author Martin Odersky
@@ -27,6 +27,13 @@ trait Contexts { self: Analyzer =>
override def implicitss: List[List[ImplicitInfo]] = Nil
override def toString = "NoContext"
}
+ private object RootImports {
+ import definitions._
+ // Possible lists of root imports
+ val javaList = JavaLangPackage :: Nil
+ val javaAndScalaList = JavaLangPackage :: ScalaPackage :: Nil
+ val completeList = JavaLangPackage :: ScalaPackage :: PredefModule :: Nil
+ }
private val startContext = {
NoContext.make(
@@ -46,13 +53,12 @@ trait Contexts { self: Analyzer =>
* among its leading imports, or if the tree is [[scala.Predef]], `Predef` is not imported.
*/
protected def rootImports(unit: CompilationUnit): List[Symbol] = {
- import definitions._
- assert(isDefinitionsInitialized, "definitions uninitialized")
+ assert(definitions.isDefinitionsInitialized, "definitions uninitialized")
if (settings.noimports.value) Nil
- else if (unit.isJava) List(JavaLangPackage)
- else if (settings.nopredef.value || treeInfo.noPredefImportForUnit(unit.body)) List(JavaLangPackage, ScalaPackage)
- else List(JavaLangPackage, ScalaPackage, PredefModule)
+ else if (unit.isJava) RootImports.javaList
+ else if (settings.nopredef.value || treeInfo.noPredefImportForUnit(unit.body)) RootImports.javaAndScalaList
+ else RootImports.completeList
}
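A hypothetical unit whose leading Predef import makes treeInfo.noPredefImportForUnit true, so rootImports answers RootImports.javaAndScalaList:

    // the explicit leading import replaces the automatic `import Predef._`
    import scala.Predef.{println => _, _}

    object NoAutoPredef {
      def main(args: Array[String]): Unit = Console.println("hello")
    }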
def rootContext(unit: CompilationUnit): Context = rootContext(unit, EmptyTree, false)
@@ -481,7 +487,7 @@ trait Contexts { self: Analyzer =>
lastAccessCheckDetails = ""
// Console.println("isAccessible(%s, %s, %s)".format(sym, pre, superAccess))
- @inline def accessWithinLinked(ab: Symbol) = {
+ def accessWithinLinked(ab: Symbol) = {
val linked = ab.linkedClassOfClass
// don't have access if there is no linked class
// (before adding the `ne NoSymbol` check, this was a no-op when linked eq NoSymbol,
diff --git a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
index 5802d36878..e8865964b0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc
package typechecker
-import language.implicitConversions
+import scala.language.implicitConversions
/** A generic means of breaking down types into their subcomponents.
* Types are decomposed top down, and recognizable substructure is
@@ -39,7 +39,7 @@ trait DestructureTypes {
private implicit def liftToTerm(name: String): TermName = newTermName(name)
- private val openSymbols = collection.mutable.Set[Symbol]()
+ private val openSymbols = scala.collection.mutable.Set[Symbol]()
private def nodeList[T](elems: List[T], mkNode: T => Node): Node =
if (elems.isEmpty) wrapEmpty else list(elems map mkNode)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index e590e7aa30..97e86d183e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -29,7 +29,7 @@ abstract class Duplicators extends Analyzer {
* the old class with the new class, and map symbols through the given 'env'. The
* environment is a map from type skolems to concrete types (see SpecializedTypes).
*/
- def retyped(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol, env: collection.Map[Symbol, Type]): Tree = {
+ def retyped(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol, env: scala.collection.Map[Symbol, Type]): Tree = {
if (oldThis ne newThis) {
oldClassOwner = oldThis
newClassOwner = newThis
@@ -283,14 +283,15 @@ abstract class Duplicators extends Analyzer {
// the typer does not create the symbols for a LabelDef's params, so unless they were created before we need
// to do it manually here -- but for the tailcalls-generated labels, ValDefs are created before the LabelDef,
- // so we just need to plug in the name
+ // so we just need to change the tree to point to the updated symbols
def newParam(p: Tree): Ident =
if (isTailLabel)
- Ident(p.symbol.name) // let the typer pick up the right symbol
+ Ident(updateSym(p.symbol))
else {
val newsym = p.symbol.cloneSymbol //(context.owner) // TODO owner?
Ident(newsym.setInfo(fixType(p.symbol.info)))
}
+
val params1 = params map newParam
val rhs1 = (new TreeSubstituter(params map (_.symbol), params1) transform rhs) // TODO: duplicate?
rhs1.tpe = null
diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
index 9e175fa516..b04a736fd3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
@@ -47,7 +47,7 @@ trait EtaExpansion { self: Analyzer =>
* tree is already attributed
* </p>
*/
- def etaExpand(unit : CompilationUnit, tree: Tree): Tree = {
+ def etaExpand(unit : CompilationUnit, tree: Tree, typer: Typer): Tree = {
val tpe = tree.tpe
var cnt = 0 // for NoPosition
def freshName() = {
@@ -69,7 +69,11 @@ trait EtaExpansion { self: Analyzer =>
val vname: Name = freshName()
// Problem with ticket #2351 here
defs += atPos(tree.pos) {
- val rhs = if (byName) Function(List(), tree) else tree
+ val rhs = if (byName) {
+ val res = typer.typed(Function(List(), tree))
+ new ChangeOwnerTraverser(typer.context.owner, res.symbol) traverse tree // SI-6274
+ res
+ } else tree
ValDef(Modifiers(SYNTHETIC), vname.toTermName, TypeTree(), rhs)
}
atPos(tree.pos.focus) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index f9035f26b9..7852ff49e1 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -11,13 +11,13 @@
package scala.tools.nsc
package typechecker
-import annotation.tailrec
+import scala.annotation.tailrec
import scala.collection.{ mutable, immutable }
import mutable.{ LinkedHashMap, ListBuffer }
import scala.util.matching.Regex
import symtab.Flags._
import scala.reflect.internal.util.Statistics
-import language.implicitConversions
+import scala.language.implicitConversions
/** This trait provides methods to find various kinds of implicits.
*
@@ -72,10 +72,10 @@ trait Implicits {
)
indentTyping()
- val rawTypeStart = Statistics.startCounter(rawTypeImpl)
- val findMemberStart = Statistics.startCounter(findMemberImpl)
- val subtypeStart = Statistics.startCounter(subtypeImpl)
- val start = Statistics.startTimer(implicitNanos)
+ val rawTypeStart = if (Statistics.canEnable) Statistics.startCounter(rawTypeImpl) else null
+ val findMemberStart = if (Statistics.canEnable) Statistics.startCounter(findMemberImpl) else null
+ val subtypeStart = if (Statistics.canEnable) Statistics.startCounter(subtypeImpl) else null
+ val start = if (Statistics.canEnable) Statistics.startTimer(implicitNanos) else null
if (printInfers && !tree.isEmpty && !context.undetparams.isEmpty)
printTyping("typing implicit: %s %s".format(tree, context.undetparamsString))
val implicitSearchContext = context.makeImplicit(reportAmbiguous)
@@ -87,10 +87,10 @@ trait Implicits {
printInference("[infer implicit] inferred " + result)
context.undetparams = context.undetparams filterNot result.subst.from.contains
- Statistics.stopTimer(implicitNanos, start)
- Statistics.stopCounter(rawTypeImpl, rawTypeStart)
- Statistics.stopCounter(findMemberImpl, findMemberStart)
- Statistics.stopCounter(subtypeImpl, subtypeStart)
+ if (Statistics.canEnable) Statistics.stopTimer(implicitNanos, start)
+ if (Statistics.canEnable) Statistics.stopCounter(rawTypeImpl, rawTypeStart)
+ if (Statistics.canEnable) Statistics.stopCounter(findMemberImpl, findMemberStart)
+ if (Statistics.canEnable) Statistics.stopCounter(subtypeImpl, subtypeStart)
deindentTyping()
printTyping("Implicit search yielded: "+ result)
result
@@ -181,8 +181,8 @@ trait Implicits {
containsError(restpe)
case NullaryMethodType(restpe) =>
containsError(restpe)
- case MethodType(params, restpe) =>
- params.exists(_.tpe.isError) || containsError(restpe)
+ case mt @ MethodType(_, restpe) =>
+ (mt.paramTypes exists typeIsError) || containsError(restpe)
case _ =>
tp.isError
}
@@ -308,12 +308,12 @@ trait Implicits {
/** Is implicit info `info1` better than implicit info `info2`?
*/
def improves(info1: ImplicitInfo, info2: ImplicitInfo) = {
- Statistics.incCounter(improvesCount)
+ if (Statistics.canEnable) Statistics.incCounter(improvesCount)
(info2 == NoImplicitInfo) ||
(info1 != NoImplicitInfo) && {
if (info1.sym.isStatic && info2.sym.isStatic) {
improvesCache get (info1, info2) match {
- case Some(b) => Statistics.incCounter(improvesCachedCount); b
+ case Some(b) => if (Statistics.canEnable) Statistics.incCounter(improvesCachedCount); b
case None =>
val result = isStrictlyMoreSpecific(info1.tpe, info2.tpe, info1.sym, info2.sym)
improvesCache((info1, info2)) = result
@@ -377,7 +377,7 @@ trait Implicits {
overlaps(dtor1, dted1) && (dtor1 =:= dted1 || complexity(dtor1) > complexity(dted1))
}
- Statistics.incCounter(implicitSearchCount)
+ if (Statistics.canEnable) Statistics.incCounter(implicitSearchCount)
/** The type parameters to instantiate */
val undetParams = if (isView) List() else context.outer.undetparams
@@ -429,7 +429,7 @@ trait Implicits {
* This method is performance critical: 5-8% of typechecking time.
*/
private def matchesPt(tp: Type, pt: Type, undet: List[Symbol]): Boolean = {
- val start = Statistics.startTimer(matchesPtNanos)
+ val start = if (Statistics.canEnable) Statistics.startTimer(matchesPtNanos) else null
val result = normSubType(tp, pt) || isView && {
pt match {
case TypeRef(_, Function1.Sym, args) =>
@@ -438,7 +438,7 @@ trait Implicits {
false
}
}
- Statistics.stopTimer(matchesPtNanos, start)
+ if (Statistics.canEnable) Statistics.stopTimer(matchesPtNanos, start)
result
}
private def matchesPt(info: ImplicitInfo): Boolean = (
@@ -537,7 +537,7 @@ trait Implicits {
}
private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean, isLocal: Boolean): SearchResult = {
- Statistics.incCounter(plausiblyCompatibleImplicits)
+ if (Statistics.canEnable) Statistics.incCounter(plausiblyCompatibleImplicits)
printTyping (
ptBlock("typedImplicit0",
"info.name" -> info.name,
@@ -557,7 +557,7 @@ trait Implicits {
}
private def typedImplicit1(info: ImplicitInfo, isLocal: Boolean): SearchResult = {
- Statistics.incCounter(matchingImplicits)
+ if (Statistics.canEnable) Statistics.incCounter(matchingImplicits)
val itree = atPos(pos.focus) {
// workaround for deficient context provided by ModelFactoryImplicitSupport#makeImplicitConstraints
@@ -593,9 +593,9 @@ trait Implicits {
typed1(itree, EXPRmode, wildPt)
if (context.hasErrors)
- return fail("typed implicit %s has errors".format(info.sym.fullLocationString))
+ return fail(context.errBuffer.head.errMsg)
- Statistics.incCounter(typedImplicits)
+ if (Statistics.canEnable) Statistics.incCounter(typedImplicits)
printTyping("typed implicit %s:%s, pt=%s".format(itree1, itree1.tpe, wildPt))
val itree2 = if (isView) (itree1: @unchecked) match { case Apply(fun, _) => fun }
@@ -615,7 +615,7 @@ trait Implicits {
}
if (context.hasErrors)
- fail("hasMatchingSymbol reported threw error(s)")
+ fail("hasMatchingSymbol reported error: " + context.errBuffer.head.errMsg)
else if (isLocal && !hasMatchingSymbol(itree1))
fail("candidate implicit %s is shadowed by %s".format(
info.sym.fullLocationString, itree1.symbol.fullLocationString))
@@ -639,7 +639,7 @@ trait Implicits {
// #2421: check that we correctly instantiated type parameters outside of the implicit tree:
checkBounds(itree2, NoPrefix, NoSymbol, undetParams, targs, "inferred ")
if (context.hasErrors)
- return fail("type parameters weren't correctly instantiated outside of the implicit tree")
+ return fail("type parameters weren't correctly instantiated outside of the implicit tree: " + context.errBuffer.head.errMsg)
// filter out failures from type inference, don't want to remove them from undetParams!
// we must be conservative in leaving type params in undetparams
@@ -675,10 +675,10 @@ trait Implicits {
}
if (context.hasErrors)
- fail("typing TypeApply reported errors for the implicit tree")
+ fail("typing TypeApply reported errors for the implicit tree: " + context.errBuffer.head.errMsg)
else {
val result = new SearchResult(itree2, subst)
- Statistics.incCounter(foundImplicits)
+ if (Statistics.canEnable) Statistics.incCounter(foundImplicits)
printInference("[success] found %s for pt %s".format(result, ptInstantiated))
result
}
@@ -905,11 +905,11 @@ trait Implicits {
* @return map from infos to search results
*/
def applicableInfos(iss: Infoss, isLocal: Boolean): Map[ImplicitInfo, SearchResult] = {
- val start = Statistics.startCounter(subtypeAppInfos)
+ val start = if (Statistics.canEnable) Statistics.startCounter(subtypeAppInfos) else null
val computation = new ImplicitComputation(iss, isLocal) { }
val applicable = computation.findAll()
- Statistics.stopCounter(subtypeAppInfos, start)
+ if (Statistics.canEnable) Statistics.stopCounter(subtypeAppInfos, start)
applicable
}
@@ -1125,13 +1125,13 @@ trait Implicits {
* such that some part of `tp` has C as one of its superclasses.
*/
private def implicitsOfExpectedType: Infoss = {
- Statistics.incCounter(implicitCacheAccs)
+ if (Statistics.canEnable) Statistics.incCounter(implicitCacheAccs)
implicitsCache get pt match {
case Some(implicitInfoss) =>
- Statistics.incCounter(implicitCacheHits)
+ if (Statistics.canEnable) Statistics.incCounter(implicitCacheHits)
implicitInfoss
case None =>
- val start = Statistics.startTimer(subtypeETNanos)
+ val start = if (Statistics.canEnable) Statistics.startTimer(subtypeETNanos) else null
// val implicitInfoss = companionImplicits(pt)
val implicitInfoss1 = companionImplicitMap(pt).valuesIterator.toList
// val is1 = implicitInfoss.flatten.toSet
@@ -1140,7 +1140,7 @@ trait Implicits {
// if (!(is2 contains i)) println("!!! implicit infos of "+pt+" differ, new does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1)
// for (i <- is2)
// if (!(is1 contains i)) println("!!! implicit infos of "+pt+" differ, old does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1)
- Statistics.stopTimer(subtypeETNanos, start)
+ if (Statistics.canEnable) Statistics.stopTimer(subtypeETNanos, start)
implicitsCache(pt) = implicitInfoss1
if (implicitsCache.size >= sizeLimit)
implicitsCache -= implicitsCache.keysIterator.next
@@ -1150,9 +1150,9 @@ trait Implicits {
private def TagSymbols = TagMaterializers.keySet
private val TagMaterializers = Map[Symbol, Symbol](
- ClassTagClass -> MacroInternal_materializeClassTag,
- AbsTypeTagClass -> MacroInternal_materializeAbsTypeTag,
- TypeTagClass -> MacroInternal_materializeTypeTag
+ ClassTagClass -> materializeClassTag,
+ WeakTypeTagClass -> materializeWeakTypeTag,
+ TypeTagClass -> materializeTypeTag
)
/** Creates a tree will produce a tag of the requested flavor.
@@ -1183,12 +1183,8 @@ trait Implicits {
val prefix = (
// ClassTags are not path-dependent, so their materializer doesn't care about prefixes
- if (tagClass eq ClassTagClass) gen.mkBasisUniverseRef
+ if (tagClass eq ClassTagClass) EmptyTree
else pre match {
- // [Eugene to Martin] this is the crux of the interaction between
- // implicits and reifiers here we need to turn a (supposedly
- // path-dependent) type into a tree that will be used as a prefix I'm
- // not sure if I've done this right - please, review
case SingleType(prePre, preSym) =>
gen.mkAttributedRef(prePre, preSym) setType pre
// necessary only to compile typetags used inside the Universe cake
@@ -1209,7 +1205,7 @@ trait Implicits {
}
)
// todo. migrate hardcoded materialization in Implicits to corresponding implicit macros
- var materializer = atPos(pos.focus)(gen.mkMethodCall(TagMaterializers(tagClass), List(tp), List(prefix)))
+ var materializer = atPos(pos.focus)(gen.mkMethodCall(TagMaterializers(tagClass), List(tp), if (prefix != EmptyTree) List(prefix) else List()))
if (settings.XlogImplicits.value) println("materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer))
if (context.macrosEnabled) success(materializer)
// don't call `failure` here. if macros are disabled, we just fail silently
@@ -1221,14 +1217,14 @@ trait Implicits {
private val ManifestSymbols = Set[Symbol](PartialManifestClass, FullManifestClass, OptManifestClass)
/** Creates a tree that calls the relevant factory method in object
- * reflect.Manifest for type 'tp'. An EmptyTree is returned if
+ * scala.reflect.Manifest for type 'tp'. An EmptyTree is returned if
* no manifest is found. todo: make this instantiate take type params as well?
*/
private def manifestOfType(tp: Type, flavor: Symbol): SearchResult = {
val full = flavor == FullManifestClass
val opt = flavor == OptManifestClass
- /** Creates a tree that calls the factory method called constructor in object reflect.Manifest */
+ /** Creates a tree that calls the factory method called constructor in object scala.reflect.Manifest */
def manifestFactoryCall(constructor: String, tparg: Type, args: Tree*): Tree =
if (args contains EmptyTree) EmptyTree
else typedPos(tree.pos.focus) {
@@ -1338,16 +1334,10 @@ trait Implicits {
def wrapResult(tree: Tree): SearchResult =
if (tree == EmptyTree) SearchFailure else new SearchResult(tree, EmptyTreeTypeSubstituter)
- /** Materializes implicits of magic types (currently, manifests and tags).
+ /** Materializes implicits of predefined types (currently, manifests and tags).
* Will be replaced by implicit macros once we fix them.
*/
- private def materializeImplicit(pt: Type): SearchResult = {
- def fallback = {
- searchImplicit(implicitsOfExpectedType, false)
- // shouldn't we pass `pt` to `implicitsOfExpectedType`, or is the recursive case
- // for an abstract type really only meant for tags?
- }
-
+ private def materializeImplicit(pt: Type): SearchResult =
pt match {
case TypeRef(_, sym, _) if sym.isAbstractType =>
materializeImplicit(pt.dealias.bounds.lo) // #3977: use pt.dealias, not pt (if pt is a type alias, pt.bounds.lo == pt)
@@ -1363,17 +1353,17 @@ trait Implicits {
// unlike `dealias`, `betaReduce` performs at most one step of dealiasing
// while dealias pops all aliases in a single invocation
case sym if sym.isAliasType => materializeImplicit(pt.betaReduce)
- case _ => fallback
+ case _ => SearchFailure
}
case _ =>
- fallback
+ SearchFailure
}
- }
/** The result of the implicit search:
* First search implicits visible in current context.
* If that fails, search implicits in expected type `pt`.
- * // [Eugene] the following lines should be deleted after we migrate delegate tag materialization to implicit macros
+ *
+ * todo. the following lines should be deleted after we migrate delegate tag materialization to implicit macros
* If that fails, and `pt` is an instance of a ClassTag, try to construct a class tag.
* If that fails, and `pt` is an instance of a TypeTag, try to construct a type tag.
* If that fails, and `pt` is an instance of a ClassManifest, try to construct a class manifest.
@@ -1382,30 +1372,34 @@ trait Implicits {
* If all fails return SearchFailure
*/
def bestImplicit: SearchResult = {
- val failstart = Statistics.startTimer(inscopeFailNanos)
- val succstart = Statistics.startTimer(inscopeSucceedNanos)
+ val failstart = if (Statistics.canEnable) Statistics.startTimer(inscopeFailNanos) else null
+ val succstart = if (Statistics.canEnable) Statistics.startTimer(inscopeSucceedNanos) else null
var result = searchImplicit(context.implicitss, true)
if (result == SearchFailure) {
- Statistics.stopTimer(inscopeFailNanos, failstart)
+ if (Statistics.canEnable) Statistics.stopTimer(inscopeFailNanos, failstart)
} else {
- Statistics.stopTimer(inscopeSucceedNanos, succstart)
- Statistics.incCounter(inscopeImplicitHits)
+ if (Statistics.canEnable) Statistics.stopTimer(inscopeSucceedNanos, succstart)
+ if (Statistics.canEnable) Statistics.incCounter(inscopeImplicitHits)
}
if (result == SearchFailure) {
val previousErrs = context.flushAndReturnBuffer()
- val failstart = Statistics.startTimer(oftypeFailNanos)
- val succstart = Statistics.startTimer(oftypeSucceedNanos)
+ val failstart = if (Statistics.canEnable) Statistics.startTimer(oftypeFailNanos) else null
+ val succstart = if (Statistics.canEnable) Statistics.startTimer(oftypeSucceedNanos) else null
result = materializeImplicit(pt)
+ // `materializeImplicit` does some preprocessing for `pt`
+ // is it only meant for manifests/tags, or do we need to do the same for `implicitsOfExpectedType`?
+ if (result == SearchFailure) result = searchImplicit(implicitsOfExpectedType, false)
+
if (result == SearchFailure) {
context.updateBuffer(previousErrs)
- Statistics.stopTimer(oftypeFailNanos, failstart)
+ if (Statistics.canEnable) Statistics.stopTimer(oftypeFailNanos, failstart)
} else {
- Statistics.stopTimer(oftypeSucceedNanos, succstart)
- Statistics.incCounter(oftypeImplicitHits)
+ if (Statistics.canEnable) Statistics.stopTimer(oftypeSucceedNanos, succstart)
+ if (Statistics.canEnable) Statistics.incCounter(oftypeImplicitHits)
}
}
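Schematically, the search order that bestImplicit implements after this change; a self-contained model with placeholder names, not the real compiler API:

    object ImplicitSearchOrderSketch {
      sealed trait Result
      case object Failure extends Result
      final case class Found(description: String) extends Result

      def search(inScope: () => Result,
                 materialize: () => Result,
                 ofExpectedType: () => Result): Result = {
        var result = inScope()          // 1. implicits visible in the current context
        if (result == Failure) {
          result = materialize()        // 2. hardwired materialization (tags, manifests)
          if (result == Failure)
            result = ofExpectedType()   // 3. implicits in the expected type's implicit scope
        }
        result
      }
    }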
@@ -1478,7 +1472,7 @@ trait Implicits {
interpolate(msg, Map((typeParamNames zip typeArgs): _*)) // TODO: give access to the name and type of the implicit argument, etc?
def validate: Option[String] = {
- import scala.util.matching.Regex; import collection.breakOut
+ import scala.util.matching.Regex; import scala.collection.breakOut
// is there a shorter way to avoid the intermediate toList?
val refs = """\$\{([^}]+)\}""".r.findAllIn(msg).matchData.map(_ group 1).toSet
val decls = typeParamNames.toSet
@@ -1498,7 +1492,7 @@ trait Implicits {
object ImplicitsStats {
- import reflect.internal.TypesStats._
+ import scala.reflect.internal.TypesStats._
val rawTypeImpl = Statistics.newSubCounter (" of which in implicits", rawTypeCount)
val subtypeImpl = Statistics.newSubCounter(" of which in implicit", subtypeCount)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index d724164715..22077303a4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -77,7 +77,7 @@ trait Infer {
val isUnapplySeq = unappSym.name == nme.unapplySeq
val booleanExtractor = resTp.typeSymbolDirect == BooleanClass
- @inline def seqToRepeatedChecked(tp: Type) = {
+ def seqToRepeatedChecked(tp: Type) = {
val toRepeated = seqToRepeated(tp)
if (tp eq toRepeated) throw new TypeError("(the last tuple-component of) the result type of an unapplySeq must be a Seq[_]")
else toRepeated
@@ -240,8 +240,8 @@ trait Infer {
def normalize(tp: Type): Type = tp match {
case mt @ MethodType(params, restpe) if mt.isImplicit =>
normalize(restpe)
- case mt @ MethodType(params, restpe) if !restpe.isDependent =>
- functionType(params map (_.tpe), normalize(restpe))
+ case mt @ MethodType(_, restpe) if !mt.isDependentMethodType =>
+ functionType(mt.paramTypes, normalize(restpe))
case NullaryMethodType(restpe) =>
normalize(restpe)
case ExistentialType(tparams, qtpe) =>
@@ -280,7 +280,16 @@ trait Infer {
def issue(err: AbsTypeError): Unit = context.issue(err)
- def isPossiblyMissingArgs(found: Type, req: Type) = (found.resultApprox ne found) && isWeaklyCompatible(found.resultApprox, req)
+ def isPossiblyMissingArgs(found: Type, req: Type) = (
+ false
+ /** However it is that this condition is expected to imply
+ * "is possibly missing args", it is too weak. It is
+ * better to say nothing than to offer misleading guesses.
+
+ (found.resultApprox ne found)
+ && isWeaklyCompatible(found.resultApprox, req)
+ */
+ )
def explainTypes(tp1: Type, tp2: Type) =
withDisambiguation(List(), tp1, tp2)(global.explainTypes(tp1, tp2))
@@ -517,8 +526,8 @@ trait Infer {
* and the code is not exactly readable.
*/
object AdjustedTypeArgs {
- val Result = collection.mutable.LinkedHashMap
- type Result = collection.mutable.LinkedHashMap[Symbol, Option[Type]]
+ val Result = scala.collection.mutable.LinkedHashMap
+ type Result = scala.collection.mutable.LinkedHashMap[Symbol, Option[Type]]
def unapply(m: Result): Some[(List[Symbol], List[Type])] = Some(toLists(
(m collect {case (p, Some(a)) => (p, a)}).unzip ))
@@ -539,9 +548,9 @@ trait Infer {
})
}
- @inline private def toLists[A1, A2](pxs: (Iterable[A1], Iterable[A2])) = (pxs._1.toList, pxs._2.toList)
- @inline private def toLists[A1, A2, A3](pxs: (Iterable[A1], Iterable[A2], Iterable[A3])) = (pxs._1.toList, pxs._2.toList, pxs._3.toList)
- @inline private def toLists[A1, A2, A3, A4](pxs: (Iterable[A1], Iterable[A2], Iterable[A3], Iterable[A4])) = (pxs._1.toList, pxs._2.toList, pxs._3.toList, pxs._4.toList)
+ private def toLists[A1, A2](pxs: (Iterable[A1], Iterable[A2])) = (pxs._1.toList, pxs._2.toList)
+ private def toLists[A1, A2, A3](pxs: (Iterable[A1], Iterable[A2], Iterable[A3])) = (pxs._1.toList, pxs._2.toList, pxs._3.toList)
+ private def toLists[A1, A2, A3, A4](pxs: (Iterable[A1], Iterable[A2], Iterable[A3], Iterable[A4])) = (pxs._1.toList, pxs._2.toList, pxs._3.toList, pxs._4.toList)
}
/** Retract arguments that were inferred to Nothing because inference failed. Correct types for repeated params.
@@ -661,7 +670,13 @@ trait Infer {
val restp1 = followApply(restp)
if (restp1 eq restp) tp else restp1
case _ =>
- val appmeth = tp.nonPrivateMember(nme.apply) filter (_.isPublic)
+ val appmeth = {
+ //OPT cut down on #closures by special casing non-overloaded case
+ // was: tp.nonPrivateMember(nme.apply) filter (_.isPublic)
+ val result = tp.nonPrivateMember(nme.apply)
+ if ((result eq NoSymbol) || !result.isOverloaded && result.isPublic) result
+ else result filter (_.isPublic)
+ }
if (appmeth == NoSymbol) tp
else OverloadedType(tp, appmeth.alternatives)
}
@@ -747,8 +762,8 @@ trait Infer {
alts exists (alt => isApplicable(undetparams, pre.memberType(alt), argtpes0, pt))
case ExistentialType(tparams, qtpe) =>
isApplicable(undetparams, qtpe, argtpes0, pt)
- case MethodType(params, _) =>
- val formals = formalTypes(params map { _.tpe }, argtpes0.length, removeByName = false)
+ case mt @ MethodType(params, _) =>
+ val formals = formalTypes(mt.paramTypes, argtpes0.length, removeByName = false)
def tryTupleApply: Boolean = {
// if 1 formal, 1 argtpe (a tuple), otherwise unmodified argtpes0
@@ -854,8 +869,8 @@ trait Infer {
isAsSpecific(res, ftpe2)
case mt: MethodType if mt.isImplicit =>
isAsSpecific(ftpe1.resultType, ftpe2)
- case MethodType(params, _) if params.nonEmpty =>
- var argtpes = params map (_.tpe)
+ case mt @ MethodType(params, _) if params.nonEmpty =>
+ var argtpes = mt.paramTypes
if (isVarArgsList(params) && isVarArgsList(ftpe2.params))
argtpes = argtpes map (argtpe =>
if (isRepeatedParamType(argtpe)) argtpe.typeArgs.head else argtpe)
@@ -864,8 +879,8 @@ trait Infer {
isAsSpecific(PolyType(tparams, res), ftpe2)
case PolyType(tparams, mt: MethodType) if mt.isImplicit =>
isAsSpecific(PolyType(tparams, mt.resultType), ftpe2)
- case PolyType(_, MethodType(params, _)) if params.nonEmpty =>
- isApplicable(List(), ftpe2, params map (_.tpe), WildcardType)
+ case PolyType(_, (mt @ MethodType(params, _))) if params.nonEmpty =>
+ isApplicable(List(), ftpe2, mt.paramTypes, WildcardType)
// case NullaryMethodType(res) =>
// isAsSpecific(res, ftpe2)
case ErrorType =>
@@ -908,10 +923,13 @@ trait Infer {
/** Is sym1 (or its companion class in case it is a module) a subclass of
* sym2 (or its companion class in case it is a module)?
*/
- def isProperSubClassOrObject(sym1: Symbol, sym2: Symbol): Boolean =
- sym1 != sym2 && sym1 != NoSymbol && (sym1 isSubClass sym2) ||
- sym1.isModuleClass && isProperSubClassOrObject(sym1.linkedClassOfClass, sym2) ||
- sym2.isModuleClass && isProperSubClassOrObject(sym1, sym2.linkedClassOfClass)
+ def isProperSubClassOrObject(sym1: Symbol, sym2: Symbol): Boolean = (
+ (sym1 != sym2) && (sym1 != NoSymbol) && (
+ (sym1 isSubClass sym2)
+ || (sym1.isModuleClass && isProperSubClassOrObject(sym1.linkedClassOfClass, sym2))
+ || (sym2.isModuleClass && isProperSubClassOrObject(sym1, sym2.linkedClassOfClass))
+ )
+ )
/** is symbol `sym1` defined in a proper subclass of symbol `sym2`?
*/
@@ -1111,10 +1129,10 @@ trait Infer {
*/
def inferMethodInstance(fn: Tree, undetparams: List[Symbol],
args: List[Tree], pt0: Type): List[Symbol] = fn.tpe match {
- case MethodType(params0, _) =>
+ case mt @ MethodType(params0, _) =>
try {
val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
- val formals = formalTypes(params0 map (_.tpe), args.length)
+ val formals = formalTypes(mt.paramTypes, args.length)
val argtpes = actualTypes(args map (x => elimAnonymousClass(x.tpe.deconst)), formals.length)
val restpe = fn.tpe.resultType(argtpes)
@@ -1367,14 +1385,17 @@ trait Infer {
else =:=
)
(arg hasAnnotation UncheckedClass) || {
- val TypeRef(_, sym, args) = arg.withoutAnnotations
-
- ( isLocalBinding(sym)
- || arg.typeSymbol.isTypeParameterOrSkolem
- || (sym.name == tpnme.WILDCARD) // avoid spurious warnings on HK types
- || check(arg, param.tpe, conforms)
- || warn("non-variable type argument " + arg)
- )
+ arg.withoutAnnotations match {
+ case TypeRef(_, sym, args) =>
+ ( isLocalBinding(sym)
+ || arg.typeSymbol.isTypeParameterOrSkolem
+ || (sym.name == tpnme.WILDCARD) // avoid spurious warnings on HK types
+ || check(arg, param.tpeHK, conforms)
+ || warn("non-variable type argument " + arg)
+ )
+ case _ =>
+ warn("non-variable type argument " + arg)
+ }
}
}
@@ -1641,7 +1662,7 @@ trait Infer {
// for functional values, the `apply` method might be overloaded
val mtypes = followApply(alt.tpe) match {
case OverloadedType(_, alts) => alts map (_.tpe)
- case t => List(t)
+ case t => t :: Nil
}
// Drop those that use a default; keep those that use vararg/tupling conversion.
mtypes exists (t =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 7c5d458fee..bcc37e8b37 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -7,10 +7,13 @@ import scala.tools.nsc.util.ClassPath._
import scala.reflect.runtime.ReflectionUtils
import scala.collection.mutable.ListBuffer
import scala.compat.Platform.EOL
-import reflect.internal.util.Statistics
+import scala.reflect.internal.util.Statistics
import scala.reflect.macros.util._
import java.lang.{Class => jClass}
import java.lang.reflect.{Array => jArray, Method => jMethod}
+import scala.reflect.internal.util.Collections._
+import scala.util.control.ControlThrowable
+import scala.reflect.macros.runtime.AbortMacroException
/**
* Code to deal with macros, namely with:
@@ -23,7 +26,7 @@ import java.lang.reflect.{Array => jArray, Method => jMethod}
*
* Then fooBar needs to point to a static method of the following form:
*
- * def fooBar[T: c.AbsTypeTag]
+ * def fooBar[T: c.WeakTypeTag] // type tag annotation is optional
* (c: scala.reflect.macros.Context)
* (xs: c.Expr[List[T]])
* : c.Expr[T] = {
@@ -31,7 +34,7 @@ import java.lang.reflect.{Array => jArray, Method => jMethod}
* }
*
* Then, if foo is called in qual.foo[Int](elems), where qual: D,
- * the macro application is expanded to a reflective invocation of fooBar with parameters
+ * the macro application is expanded to a reflective invocation of fooBar with parameters:
*
* (simpleMacroContext{ type PrefixType = D; val prefix = qual })
* (Expr(elems))
@@ -42,52 +45,277 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
import global._
import definitions._
+ import treeInfo.{isRepeatedParamType => _, _}
import MacrosStats._
def globalSettings = global.settings
- val globalMacroCache = collection.mutable.Map[Any, Any]()
- val perRunMacroCache = perRunCaches.newMap[Symbol, collection.mutable.Map[Any, Any]]
+ /** `MacroImplBinding` and its companion module are responsible for
+ * serialization/deserialization of macro def -> impl bindings.
+ *
+ * The first officially released version of macros persisted these bindings across compilation runs
+ * using a neat trick. The right-hand side of a macro definition (which contains a reference to a macro impl)
+ * was typechecked and then put verbatim into an annotation on the macro definition.
+ *
+ * This solution is very simple, but unfortunately it's also lacking. If we use it, then
+ * signatures of macro defs become transitively dependent on scala-reflect.jar
+ * (because they refer to macro impls, and macro impls refer to scala.reflect.macros.Context defined in scala-reflect.jar).
+ * More details can be found in comments to https://issues.scala-lang.org/browse/SI-5940.
+ *
+ * Therefore we have to avoid putting macro impls into binding pickles and come up with our own serialization format.
+ * The situation is further complicated by the fact that it's not enough to just pickle the macro impl's class name and method name,
+ * because macro expansion needs some knowledge about the shape of the macro impl's signature (which we can't pickle).
+ * Hence we precompute necessary stuff (e.g. the layout of type parameters) when compiling macro defs.
+ */
+
+ /** Represents all the information that a macro definition needs to know about its implementation.
+ * Includes a path to load the implementation via Java reflection,
+ * and various accounting information necessary when composing an argument list for the reflective invocation.
+ */
+ private case class MacroImplBinding(
+ // Java class name of the class that contains the macro implementation
+ // is used to load the corresponding object with Java reflection
+ val className: String,
+ // method name of the macro implementation
+ // `className` and `methName` are all we need to reflectively invoke a macro implementation
+ // because macro implementations cannot be overloaded
+ val methName: String,
+ // flattens the macro impl's parameter lists, replacing symbols with metadata
+ // currently the metadata is the index of the type parameter corresponding to that type tag (if applicable)
+ // f.ex. for: def impl[T: WeakTypeTag, U: WeakTypeTag, V](c: Context)(x: c.Expr[T]): (U, V) = ???
+ // `signature` will be equal to List(-1, -1, 0, 1)
+ val signature: List[Int],
+ // type arguments part of a macro impl ref (the right-hand side of a macro definition)
+ // these trees don't refer to a macro impl, so we can pickle them as is
+ val targs: List[Tree])
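A minimal standalone sketch of the `signature` encoding described above (a toy parameter model with illustrative names, not compiler Symbols): term parameters map to -1, tag evidences map to the index of the type parameter they witness.

    object SignatureSketch extends App {
      sealed trait Param
      case class TermParam(name: String) extends Param
      case class TagParam(tparamIndex: Int) extends Param // WeakTypeTag evidence for tparam #i

      def signature(paramss: List[List[Param]]): List[Int] =
        paramss.flatten map {
          case TermParam(_) => -1
          case TagParam(i)  => i
        }

      // corresponds to: def impl[T: WeakTypeTag, U: WeakTypeTag, V](c: Context)(x: c.Expr[T])
      val flattened = List(List(TermParam("c")), List(TermParam("x")), List(TagParam(0), TagParam(1)))
      println(signature(flattened)) // List(-1, -1, 0, 1)
    }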
+
+ /** Macro def -> macro impl bindings are serialized into a `macroImpl` annotation
+ * with synthetic content that carries the payload described in `MacroImplBinding`.
+ *
+ * For example, for a pair of macro definition and macro implementation:
+ * def impl(c: scala.reflect.macros.Context): c.Expr[Unit] = c.literalUnit;
+ * def foo: Unit = macro impl
+ *
+ * We will have the following annotation added on the macro definition `foo`:
+ *
+ * @scala.reflect.macros.internal.macroImpl(
+ * `macro`(
+ * "signature" = List(-1),
+ * "methodName" = "impl",
+ * "versionFormat" = 1,
+ * "className" = "Macros$"))
+ */
+ private object MacroImplBinding {
+ val versionFormat = 1
+
+ def pickleAtom(obj: Any): Tree =
+ obj match {
+ case list: List[_] => Apply(Ident(ListModule), list map pickleAtom)
+ case s: String => Literal(Constant(s))
+ case i: Int => Literal(Constant(i))
+ }
+
+ def unpickleAtom(tree: Tree): Any =
+ tree match {
+ case Apply(list @ Ident(_), args) if list.symbol == ListModule => args map unpickleAtom
+ case Literal(Constant(s: String)) => s
+ case Literal(Constant(i: Int)) => i
+ }
+
+ def pickle(macroImplRef: Tree): Tree = {
+ val macroImpl = macroImplRef.symbol
+ val paramss = macroImpl.paramss
+
+ // this logic relies on the assumptions that were valid for the old macro prototype
+ // namely that macro implementations can only be defined in top-level classes and modules
+ // with the new prototype that materialized in a SIP, macros need to be statically accessible, which is different
+ // for example, a macro def could be defined in a trait that is implemented by an object
+ // there are some more clever cases when a seemingly non-static method ends up being statically accessible
+ // however, the code below doesn't account for these guys, because it'd take a lot of time to get it right
+ // for now I leave it as a todo and move along to the more important stuff
+ // todo. refactor when fixing SI-5498
+ def className: String = {
+ def loop(sym: Symbol): String = sym match {
+ case sym if sym.owner.isPackageClass =>
+ val suffix = if (sym.isModuleClass) "$" else ""
+ sym.fullName + suffix
+ case sym =>
+ val separator = if (sym.owner.isModuleClass) "" else "$"
+ loop(sym.owner) + separator + sym.javaSimpleName.toString
+ }
+
+ loop(macroImpl.owner.enclClass)
+ }
+
+ def signature: List[Int] = {
+ val transformed = transformTypeTagEvidenceParams(paramss, (param, tparam) => tparam)
+ transformed.flatten map (p => if (p.isTerm) -1 else p.paramPos)
+ }
+
+ val payload = List[(String, Any)](
+ "versionFormat" -> versionFormat,
+ "className" -> className,
+ "methodName" -> macroImpl.name.toString,
+ "signature" -> signature
+ )
+
+ // the shape of the nucleus is chosen arbitrarily. it doesn't carry any payload.
+ // it's only necessary as a stub `fun` for an Apply node that carries metadata in its `args`
+ // so don't try to find a program element named "macro" that corresponds to the nucleus
+ // I just named it "macro", because it's macro-related, but I could as well name it "foobar"
+ val nucleus = Ident(newTermName("macro"))
+ val wrapped = Apply(nucleus, payload map { case (k, v) => Assign(pickleAtom(k), pickleAtom(v)) })
+ val pickle = gen.mkTypeApply(wrapped, treeInfo.typeArguments(macroImplRef.duplicate))
+
+ // assign NoType to all freshly created AST nodes
+ // otherwise pickler will choke on tree.tpe being null
+ // there's another gotcha
+ // if you don't assign a ConstantType to a constant
+ // then pickling will crash
+ new Transformer {
+ override def transform(tree: Tree) = {
+ tree match {
+ case Literal(const @ Constant(x)) if tree.tpe == null => tree setType ConstantType(const)
+ case _ if tree.tpe == null => tree setType NoType
+ case _ => ;
+ }
+ super.transform(tree)
+ }
+ }.transform(pickle)
+ }
+
+ def unpickle(pickle: Tree): MacroImplBinding = {
+ val (wrapped, targs) =
+ pickle match {
+ case TypeApply(wrapped, targs) => (wrapped, targs)
+ case wrapped => (wrapped, Nil)
+ }
+ val Apply(_, pickledPayload) = wrapped
+ val payload = pickledPayload.map{ case Assign(k, v) => (unpickleAtom(k), unpickleAtom(v)) }.toMap
+
+ val pickleVersionFormat = payload("versionFormat").asInstanceOf[Int]
+ if (versionFormat != pickleVersionFormat) throw new Error(s"macro impl binding format mismatch: expected $versionFormat, actual $pickleVersionFormat")
- /** A list of compatible macro implementation signatures.
+ val className = payload("className").asInstanceOf[String]
+ val methodName = payload("methodName").asInstanceOf[String]
+ val signature = payload("signature").asInstanceOf[List[Int]]
+ MacroImplBinding(className, methodName, signature, targs)
+ }
+ }
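The atom encoding used by `pickleAtom`/`unpickleAtom` can be modelled in isolation; a standalone round-trip sketch with a toy ADT standing in for compiler Trees (all names illustrative):

    object AtomPickleSketch extends App {
      sealed trait Atom
      case class AList(items: List[Atom]) extends Atom
      case class AString(s: String) extends Atom
      case class AInt(i: Int) extends Atom

      // Lists, Strings and Ints are the only atoms, mirroring the cases handled above
      def pickle(obj: Any): Atom = obj match {
        case list: List[_] => AList(list map pickle)
        case s: String     => AString(s)
        case i: Int        => AInt(i)
      }
      def unpickle(atom: Atom): Any = atom match {
        case AList(items) => items map unpickle
        case AString(s)   => s
        case AInt(i)      => i
      }

      val payload = List("impl", 1, List(-1, 0))
      assert(unpickle(pickle(payload)) == payload) // the two functions are inverses
      println(pickle(payload))
    }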
+
+ private def bindMacroImpl(macroDef: Symbol, macroImplRef: Tree): Unit = {
+ val pickle = MacroImplBinding.pickle(macroImplRef)
+ macroDef withAnnotation AnnotationInfo(MacroImplAnnotation.tpe, List(pickle), Nil)
+ }
+
+ private def loadMacroImplBinding(macroDef: Symbol): MacroImplBinding = {
+ val Some(AnnotationInfo(_, List(pickle), _)) = macroDef.getAnnotation(MacroImplAnnotation)
+ MacroImplBinding.unpickle(pickle)
+ }
+
+ /** Transforms parameters lists of a macro impl.
+ * The `transform` function is invoked only for WeakTypeTag evidence parameters.
+ *
+ * The transformer takes two arguments: a value parameter from the parameter list
+ * and a type parameter that is witnessed by the value parameter.
+ *
+ * If the transformer returns NoSymbol, the value parameter is not included in the result.
+ * If the transformer returns some other symbol, that symbol is included in the result instead of the value parameter.
+ *
+ * Despite being highly esoteric, this function significantly simplifies signature analysis.
+ * For example, it can be used to strip the tag evidences from macroImpl.paramss (necessary when checking def <-> impl correspondence)
+ * or to streamline creation of the list of macro arguments.
+ */
+ private def transformTypeTagEvidenceParams(paramss: List[List[Symbol]], transform: (Symbol, Symbol) => Symbol): List[List[Symbol]] = {
+ if (paramss.isEmpty || paramss.last.isEmpty) return paramss // no implicit parameters in the signature => nothing to do
+ if (paramss.head.isEmpty || !(paramss.head.head.tpe <:< MacroContextClass.tpe)) return paramss // no context parameter in the signature => nothing to do
+ def transformTag(param: Symbol): Symbol = param.tpe.dealias match {
+ case TypeRef(SingleType(SingleType(NoPrefix, c), universe), WeakTypeTagClass, targ :: Nil)
+ if c == paramss.head.head && universe == MacroContextUniverse =>
+ transform(param, targ.typeSymbol)
+ case _ =>
+ param
+ }
+ val transformed = paramss.last map transformTag filter (_ ne NoSymbol)
+ if (transformed.isEmpty) paramss.init else paramss.init :+ transformed
+ }
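A simplified standalone sketch of the stripping behaviour documented above (toy parameter types rather than compiler Symbols), for the common case where the transform discards every tag evidence:

    object EvidenceStripSketch extends App {
      sealed trait P
      case class Term(name: String) extends P
      case class TagEvidence(forTparam: String) extends P

      // keep non-evidence params of the last list; drop the list entirely if nothing is left
      def stripTags(paramss: List[List[P]]): List[List[P]] = {
        if (paramss.isEmpty || paramss.last.isEmpty) return paramss
        val kept = paramss.last filter { case TagEvidence(_) => false; case _ => true }
        if (kept.isEmpty) paramss.init else paramss.init :+ kept
      }

      // models: def impl[T: WeakTypeTag](c: Context)(x: c.Expr[T]) -- the implicit list holds only the evidence
      println(stripTags(List(List(Term("c")), List(Term("x")), List(TagEvidence("T")))))
      // List(List(Term(c)), List(Term(x)))
    }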
+
+ def computeMacroDefTypeFromMacroImpl(macroDdef: DefDef, macroImpl: Symbol): Type = {
+ // Step I. Transform c.Expr[T] to T
+ var runtimeType = macroImpl.tpe.finalResultType.dealias match {
+ case TypeRef(_, ExprClass, runtimeType :: Nil) => runtimeType
+ case _ => AnyTpe // so that macro impls with rhs = ??? don't screw up our inference
+ }
+
+ // Step II. Transform type parameters of a macro implementation into type arguments in a macro definition's body
+ runtimeType = runtimeType.substituteTypes(macroImpl.typeParams, loadMacroImplBinding(macroDdef.symbol).targs.map(_.tpe))
+
+ // Step III. Transform c.prefix.value.XXX to this.XXX and implParam.value.YYY to defParam.YYY
+ def unsigma(tpe: Type): Type =
+ transformTypeTagEvidenceParams(macroImpl.paramss, (param, tparam) => NoSymbol) match {
+ case (implCtxParam :: Nil) :: implParamss =>
+ val implToDef = flatMap2(implParamss, macroDdef.vparamss)(map2(_, _)((_, _))).toMap
+ object UnsigmaTypeMap extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(pre, sym, args) =>
+ val pre1 = pre match {
+ case SingleType(SingleType(SingleType(NoPrefix, c), prefix), value) if c == implCtxParam && prefix == MacroContextPrefix && value == ExprValue =>
+ ThisType(macroDdef.symbol.owner)
+ case SingleType(SingleType(NoPrefix, implParam), value) if value == ExprValue =>
+ implToDef get implParam map (defParam => SingleType(NoPrefix, defParam.symbol)) getOrElse pre
+ case _ =>
+ pre
+ }
+ val args1 = args map mapOver
+ TypeRef(pre1, sym, args1)
+ case _ =>
+ mapOver(tp)
+ }
+ }
+
+ UnsigmaTypeMap(tpe)
+ case _ =>
+ tpe
+ }
+
+ unsigma(runtimeType)
+ }
+
+ /** A reference macro implementation signature compatible with a given macro definition.
+ *
+ * In the example above for the following macro def:
+ * def foo[T](xs: List[T]): T = macro fooBar
*
- * In the example above:
+ * This function will return:
* (c: scala.reflect.macros.Context)(xs: c.Expr[List[T]]): c.Expr[T]
*
+ * Note that type tag evidence parameters are not included in the result.
+ * Type tag context bounds for macro impl tparams are optional.
+ * Therefore compatibility checks ignore such parameters, and we don't need to worry about them here.
+ *
* @param macroDef The macro definition symbol
* @param tparams The type parameters of the macro definition
* @param vparamss The value parameters of the macro definition
* @param retTpe The return type of the macro definition
*/
- private def macroImplSigs(macroDef: Symbol, tparams: List[TypeDef], vparamss: List[List[ValDef]], retTpe: Type): (List[List[List[Symbol]]], Type) = {
+ private def macroImplSig(macroDef: Symbol, tparams: List[TypeDef], vparamss: List[List[ValDef]], retTpe: Type): (List[List[Symbol]], Type) = {
// had to move method's body to an object because of the recursive dependencies between sigma and param
object SigGenerator {
- val hasThis = macroDef.owner.isClass
- val ownerTpe = macroDef.owner match {
- case owner if owner.isModuleClass => new UniqueThisType(macroDef.owner)
- case owner if owner.isClass => macroDef.owner.tpe
- case _ => NoType
- }
- val hasTparams = !tparams.isEmpty
-
def sigma(tpe: Type): Type = {
class SigmaTypeMap extends TypeMap {
def apply(tp: Type): Type = tp match {
case TypeRef(pre, sym, args) =>
val pre1 = pre match {
case ThisType(sym) if sym == macroDef.owner =>
- SingleType(SingleType(SingleType(NoPrefix, paramsCtx(0)), MacroContextPrefix), ExprValue)
+ SingleType(SingleType(SingleType(NoPrefix, ctxParam), MacroContextPrefix), ExprValue)
case SingleType(NoPrefix, sym) =>
mfind(vparamss)(_.symbol == sym) match {
- case Some(macroDefParam) =>
- SingleType(SingleType(NoPrefix, param(macroDefParam)), ExprValue)
- case _ =>
- pre
+ case Some(macroDefParam) => SingleType(SingleType(NoPrefix, param(macroDefParam)), ExprValue)
+ case _ => pre
}
case _ =>
pre
}
- val args1 = args map mapOver
- TypeRef(pre1, sym, args1)
+ TypeRef(pre1, sym, args map mapOver)
case _ =>
mapOver(tp)
}
@@ -105,35 +333,20 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
RepeatedParamClass.typeConstructor,
List(implType(isType, sigma(origTpe.typeArgs.head))))
else {
- val tsym = getMember(MacroContextClass, if (isType) tpnme.AbsTypeTag else tpnme.Expr)
+ val tsym = getMember(MacroContextClass, if (isType) tpnme.WeakTypeTag else tpnme.Expr)
typeRef(singleType(NoPrefix, ctxParam), tsym, List(sigma(origTpe)))
}
- val paramCache = collection.mutable.Map[Symbol, Symbol]()
+ val paramCache = scala.collection.mutable.Map[Symbol, Symbol]()
def param(tree: Tree): Symbol =
paramCache.getOrElseUpdate(tree.symbol, {
- // [Eugene] deskolemization became necessary once I implemented inference of macro def return type
- // please, verify this solution, but for now I'll leave it here - cargo cult for the win
- val sym = tree.symbol.deSkolemize
+ val sym = tree.symbol
val sigParam = makeParam(sym.name, sym.pos, implType(sym.isType, sym.tpe))
if (sym.isSynthetic) sigParam.flags |= SYNTHETIC
sigParam
})
- val paramsCtx = List(ctxParam)
- val paramsThis = List(makeParam(nme.macroThis, macroDef.pos, implType(false, ownerTpe), SYNTHETIC))
- val paramsTparams = tparams map param
- val paramssParams = mmap(vparamss)(param)
-
- var paramsss = List[List[List[Symbol]]]()
- // tparams are no longer part of a signature, they get into macro implementations via context bounds
-// if (hasTparams && hasThis) paramsss :+= paramsCtx :: paramsThis :: paramsTparams :: paramssParams
-// if (hasTparams) paramsss :+= paramsCtx :: paramsTparams :: paramssParams
- // _this params are no longer part of a signature, its gets into macro implementations via Context.prefix
-// if (hasThis) paramsss :+= paramsCtx :: paramsThis :: paramssParams
- paramsss :+= paramsCtx :: paramssParams
-
- val tsym = getMember(MacroContextClass, tpnme.Expr)
- val implRetTpe = typeRef(singleType(NoPrefix, ctxParam), tsym, List(sigma(retTpe)))
+ val paramss = List(ctxParam) :: mmap(vparamss)(param)
+ val implRetTpe = typeRef(singleType(NoPrefix, ctxParam), getMember(MacroContextClass, tpnme.Expr), List(sigma(retTpe)))
}
import SigGenerator._
@@ -141,141 +354,26 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
macroTraceVerbose("tparams are: ")(tparams)
macroTraceVerbose("vparamss are: ")(vparamss)
macroTraceVerbose("retTpe is: ")(retTpe)
- macroTraceVerbose("macroImplSigs are: ")(paramsss, implRetTpe)
+ macroTraceVerbose("macroImplSig is: ")((paramss, implRetTpe))
}
- private def transformTypeTagEvidenceParams(paramss: List[List[Symbol]], transform: (Symbol, Symbol) => Option[Symbol]): List[List[Symbol]] = {
- if (paramss.length == 0)
- return paramss
-
- val wannabe = if (paramss.head.length == 1) paramss.head.head else NoSymbol
- val contextParam = if (wannabe != NoSymbol && wannabe.tpe <:< definitions.MacroContextClass.tpe) wannabe else NoSymbol
-
- val lastParamList0 = paramss.lastOption getOrElse Nil
- val lastParamList = lastParamList0 flatMap (param => param.tpe match {
- case TypeRef(SingleType(NoPrefix, contextParam), sym, List(tparam)) =>
- var wannabe = sym
- while (wannabe.isAliasType) wannabe = wannabe.info.typeSymbol
- if (wannabe != definitions.AbsTypeTagClass)
- List(param)
- else
- transform(param, tparam.typeSymbol) map (_ :: Nil) getOrElse Nil
- case _ =>
- List(param)
- })
-
- var result = paramss.dropRight(1) :+ lastParamList
- if (lastParamList0.isEmpty ^ lastParamList.isEmpty) {
- result = result dropRight 1
- }
-
- result
- }
-
- /** As specified above, body of a macro definition must reference its implementation.
- * This function verifies that the body indeed refers to a method, and that
- * the referenced macro implementation is compatible with the given macro definition.
- *
- * This means that macro implementation (fooBar in example above) must:
- * 1) Refer to a statically accessible, non-overloaded method.
- * 2) Have the right parameter lists as outlined in the SIP / in the doc comment of this class.
+ /** Verifies that the body of a macro def typechecks to a reference to a static public non-overloaded method,
+ * and that that method is signature-wise compatible with the given macro definition.
*
- * @return typechecked rhs of the given macro definition
+ * @return Typechecked rhs of the given macro definition if everything is okay.
+ * EmptyTree if an error occurs.
*/
- def typedMacroBody(typer: Typer, ddef: DefDef): Tree = {
- import typer.context
- macroLogVerbose("typechecking macro def %s at %s".format(ddef.symbol, ddef.pos))
-
- if (fastTrack contains ddef.symbol) {
- macroLogVerbose("typecheck terminated unexpectedly: macro is hardwired")
- assert(!ddef.tpt.isEmpty, "hardwired macros must provide result type")
- return EmptyTree
- }
-
- if (!typer.checkFeature(ddef.pos, MacrosFeature, immediate = true)) {
- macroLogVerbose("typecheck terminated unexpectedly: language.experimental.macros feature is not enabled")
- ddef.symbol setFlag IS_ERROR
- return EmptyTree
- }
-
- implicit class AugmentedString(s: String) {
- def abbreviateCoreAliases: String = { // hack!
- var result = s
- result = result.replace("c.universe.AbsTypeTag", "c.AbsTypeTag")
- result = result.replace("c.universe.Expr", "c.Expr")
- result
- }
- }
-
- var hasErrors = false
- def reportError(pos: Position, msg: String) = {
- hasErrors = true
- context.error(pos, msg)
- }
-
- val macroDef = ddef.symbol
- val defpos = macroDef.pos
- val implpos = ddef.rhs.pos
- assert(macroDef.isTermMacro, ddef)
-
- def invalidBodyError() =
- reportError(defpos,
- "macro body has wrong shape:" +
- "\n required: macro <reference to implementation object>.<implementation method name>" +
- "\n or : macro <implementation method name>")
- def validatePreTyper(rhs: Tree): Unit = rhs match {
- // we do allow macro invocations inside macro bodies
- // personally I don't mind if pre-typer tree is a macro invocation
- // that later resolves to a valid reference to a macro implementation
- // however, I don't think that invalidBodyError() should hint at that
- // let this be an Easter Egg :)
- case Apply(_, _) => ;
- case TypeApply(_, _) => ;
- case Super(_, _) => ;
- case This(_) => ;
- case Ident(_) => ;
- case Select(_, _) => ;
- case _ => invalidBodyError()
- }
- def validatePostTyper(rhs1: Tree): Unit = {
- def loop(tree: Tree): Unit = {
- def errorNotStatic() =
- reportError(implpos, "macro implementation must be in statically accessible object")
-
- def ensureRoot(sym: Symbol) =
- if (!sym.isModule && !sym.isModuleClass) errorNotStatic()
-
- def ensureModule(sym: Symbol) =
- if (!sym.isModule) errorNotStatic()
-
- tree match {
- case TypeApply(fun, _) =>
- loop(fun)
- case Super(qual, _) =>
- ensureRoot(macroDef.owner)
- loop(qual)
- case This(_) =>
- ensureRoot(tree.symbol)
- case Select(qual, name) if name.isTypeName =>
- loop(qual)
- case Select(qual, name) if name.isTermName =>
- if (tree.symbol != rhs1.symbol) ensureModule(tree.symbol)
- loop(qual)
- case Ident(name) if name.isTypeName =>
- ;
- case Ident(name) if name.isTermName =>
- if (tree.symbol != rhs1.symbol) ensureModule(tree.symbol)
- case _ =>
- invalidBodyError()
- }
- }
-
- loop(rhs1)
- }
-
- val rhs = ddef.rhs
- validatePreTyper(rhs)
- if (hasErrors) macroTraceVerbose("macro def failed to satisfy trivial preconditions: ")(macroDef)
+ def typedMacroBody(typer: Typer, macroDdef: DefDef): Tree =
+ try new MacroTyper(typer, macroDdef).typed
+ catch { case MacroBodyTypecheckException => EmptyTree }
+
+ class MacroTyper(val typer: Typer, val macroDdef: DefDef) extends MacroErrors {
+ // Phase I: sanity checks
+ val macroDef = macroDdef.symbol
+ macroLogVerbose("typechecking macro def %s at %s".format(macroDef, macroDdef.pos))
+ assert(macroDef.isTermMacro, macroDdef)
+ if (fastTrack contains macroDef) MacroDefIsFastTrack()
+ if (!typer.checkFeature(macroDdef.pos, MacrosFeature, immediate = true)) MacroFeatureNotEnabled()
// we use typed1 instead of typed, because otherwise adapt is going to mess us up
// if adapt sees <qualifier>.<method>, it will want to perform eta-expansion and will fail
@@ -283,11 +381,13 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
// because it's adapt which is responsible for automatic expansion during typechecking
def typecheckRhs(rhs: Tree): Tree = {
try {
- val prevNumErrors = reporter.ERROR.count // [Eugene] funnily enough, the isErroneous check is not enough
- var rhs1 = if (hasErrors) EmptyTree else typer.typed1(rhs, EXPRmode, WildcardType)
- def typecheckedWithErrors = (rhs1 exists (_.isErroneous)) || reporter.ERROR.count != prevNumErrors
+ // interestingly enough, just checking isErroneous doesn't cut it
+ // e.g. a "type arguments [U] do not conform to method foo's type parameter bounds" error
+ // doesn't manifest itself as an error in the resulting tree
+ val prevNumErrors = reporter.ERROR.count
+ var rhs1 = typer.typed1(rhs, EXPRmode, WildcardType)
def rhsNeedsMacroExpansion = rhs1.symbol != null && rhs1.symbol.isTermMacro && !rhs1.symbol.isErroneous
- while (!typecheckedWithErrors && rhsNeedsMacroExpansion) {
+ while (rhsNeedsMacroExpansion) {
rhs1 = macroExpand1(typer, rhs1) match {
case Success(expanded) =>
try {
@@ -295,7 +395,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
macroLogVerbose("typechecked1:%n%s%n%s".format(typechecked, showRaw(typechecked)))
typechecked
} finally {
- openMacros = openMacros.tail
+ popMacroContext()
}
case Fallback(fallback) =>
typer.typed1(fallback, EXPRmode, WildcardType)
@@ -303,298 +403,87 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
result
}
}
+ val typecheckedWithErrors = (rhs1 exists (_.isErroneous)) || reporter.ERROR.count != prevNumErrors
+ if (typecheckedWithErrors) MacroDefUntypeableBodyError()
rhs1
} catch {
case ex: TypeError =>
typer.reportTypeError(context, rhs.pos, ex)
- typer.infer.setError(rhs)
- }
- }
-
- val prevNumErrors = reporter.ERROR.count // funnily enough, the isErroneous check is not enough
- var rhs1 = typecheckRhs(rhs)
- def typecheckedWithErrors = (rhs1 exists (_.isErroneous)) || reporter.ERROR.count != prevNumErrors
- hasErrors = hasErrors || typecheckedWithErrors
- if (typecheckedWithErrors) macroTraceVerbose("body of a macro def failed to typecheck: ")(ddef)
-
- val macroImpl = rhs1.symbol
- macroDef withAnnotation AnnotationInfo(MacroImplAnnotation.tpe, List(rhs1), Nil)
- if (!hasErrors) {
- if (macroImpl == null) {
- invalidBodyError()
- } else {
- if (!macroImpl.isMethod)
- invalidBodyError()
- if (macroImpl.isOverloaded)
- reportError(implpos, "macro implementation cannot be overloaded")
- if (!macroImpl.typeParams.isEmpty && (!rhs1.isInstanceOf[TypeApply]))
- reportError(implpos, "macro implementation reference needs type arguments")
- if (!hasErrors)
- validatePostTyper(rhs1)
+ MacroDefUntypeableBodyError()
}
- if (hasErrors)
- macroTraceVerbose("macro def failed to satisfy trivial preconditions: ")(macroDef)
}
- if (!hasErrors) {
- def checkCompatibility(reqparamss: List[List[Symbol]], actparamss: List[List[Symbol]], reqres: Type, actres: Type): List[String] = {
- var hasErrors = false
- var errors = List[String]()
- def compatibilityError(msg: String) {
- hasErrors = true
- errors :+= msg
- }
-
- val flatreqparams = reqparamss.flatten
- val flatactparams = actparamss.flatten
- val tparams = macroImpl.typeParams
- val tvars = tparams map freshVar
- def lengthMsg(which: String, extra: Symbol) =
- "parameter lists have different length, "+which+" extra parameter "+extra.defString
- if (actparamss.length != reqparamss.length)
- compatibilityError("number of parameter sections differ")
-
- def checkSubType(slot: String, reqtpe: Type, acttpe: Type): Unit = {
- val ok = if (macroDebugVerbose) {
- if (reqtpe eq acttpe) println(reqtpe + " <: " + acttpe + "?" + EOL + "true")
- withTypesExplained(reqtpe <:< acttpe)
- } else reqtpe <:< acttpe
- if (!ok) {
- compatibilityError("type mismatch for %s: %s does not conform to %s".format(slot, reqtpe.toString.abbreviateCoreAliases, acttpe.toString.abbreviateCoreAliases))
- }
- }
-
- if (!hasErrors) {
- try {
- for ((rparams, aparams) <- reqparamss zip actparamss) {
- if (rparams.length < aparams.length)
- compatibilityError(lengthMsg("found", aparams(rparams.length)))
- if (aparams.length < rparams.length)
- compatibilityError(lengthMsg("required", rparams(aparams.length)).abbreviateCoreAliases)
- }
- // if the implementation signature is already deemed to be incompatible, we bail out
- // otherwise, high-order type magic employed below might crash in weird ways
- if (!hasErrors) {
- for ((rparams, aparams) <- reqparamss zip actparamss) {
- for ((rparam, aparam) <- rparams zip aparams) {
- def isRepeated(param: Symbol) = param.tpe.typeSymbol == RepeatedParamClass
- if (rparam.name != aparam.name && !rparam.isSynthetic) {
- val rparam1 = rparam
- val aparam1 = aparam
- compatibilityError("parameter names differ: "+rparam.name+" != "+aparam.name)
- }
- if (isRepeated(rparam) && !isRepeated(aparam))
- compatibilityError("types incompatible for parameter "+rparam.name+": corresponding is not a vararg parameter")
- if (!isRepeated(rparam) && isRepeated(aparam))
- compatibilityError("types incompatible for parameter "+aparam.name+": corresponding is not a vararg parameter")
- if (!hasErrors) {
- var atpe = aparam.tpe.substSym(flatactparams, flatreqparams).instantiateTypeParams(tparams, tvars)
- atpe = atpe.dealias // SI-5706
- // strip the { type PrefixType = ... } refinement off the Context or otherwise we get compatibility errors
- atpe = atpe match {
- case RefinedType(List(tpe), Scope(sym)) if tpe == MacroContextClass.tpe && sym.allOverriddenSymbols.contains(MacroContextPrefixType) => tpe
- case _ => atpe
- }
- checkSubType("parameter " + rparam.name, rparam.tpe, atpe)
- }
- }
- }
- }
- if (!hasErrors) {
- val atpe = actres.substSym(flatactparams, flatreqparams).instantiateTypeParams(tparams, tvars)
- checkSubType("return type", atpe, reqres)
- }
- if (!hasErrors) {
- val targs = solvedTypes(tvars, tparams, tparams map varianceInType(actres), false,
- lubDepth(flatactparams map (_.tpe)) max lubDepth(flatreqparams map (_.tpe)))
- val boundsOk = typer.silent(_.infer.checkBounds(ddef, NoPrefix, NoSymbol, tparams, targs, ""))
- boundsOk match {
- case SilentResultValue(true) => ;
- case SilentResultValue(false) | SilentTypeError(_) =>
- val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, targs).bounds)
- compatibilityError("type arguments " + targs.mkString("[", ",", "]") +
- " do not conform to " + tparams.head.owner + "'s type parameter bounds " +
- (tparams map (_.defString)).mkString("[", ",", "]"))
- }
- }
- } catch {
- case ex: NoInstance =>
- compatibilityError(
- "type parameters "+(tparams map (_.defString) mkString ", ")+" cannot be instantiated\n"+
- ex.getMessage)
- }
- }
-
- errors.toList
- }
-
- var actparamss = macroImpl.paramss
- actparamss = transformTypeTagEvidenceParams(actparamss, (param, tparam) => None)
-
- val rettpe = if (!ddef.tpt.isEmpty) typer.typedType(ddef.tpt).tpe else computeMacroDefTypeFromMacroImpl(ddef, macroDef, macroImpl)
- val (reqparamsss0, reqres0) = macroImplSigs(macroDef, ddef.tparams, ddef.vparamss, rettpe)
- var reqparamsss = reqparamsss0
-
- // prohibit implicit params on macro implementations
- // we don't have to do this, but it appears to be more clear than allowing them
- val implicitParams = actparamss.flatten filter (_.isImplicit)
- if (implicitParams.length > 0) {
- reportError(implicitParams.head.pos, "macro implementations cannot have implicit parameters other than AbsTypeTag evidences")
- macroTraceVerbose("macro def failed to satisfy trivial preconditions: ")(macroDef)
- }
-
- if (!hasErrors) {
- val reqres = reqres0
- val actres = macroImpl.tpe.finalResultType
- def showMeth(pss: List[List[Symbol]], restpe: Type, abbreviate: Boolean) = {
- var argsPart = (pss map (ps => ps map (_.defString) mkString ("(", ", ", ")"))).mkString
- if (abbreviate) argsPart = argsPart.abbreviateCoreAliases
- var retPart = restpe.toString
- if (abbreviate || ddef.tpt.tpe == null) retPart = retPart.abbreviateCoreAliases
- argsPart + ": " + retPart
- }
- def compatibilityError(addendum: String) =
- reportError(implpos,
- "macro implementation has wrong shape:"+
- "\n required: "+showMeth(reqparamsss.head, reqres, true) +
- (reqparamsss.tail map (paramss => "\n or : "+showMeth(paramss, reqres, true)) mkString "")+
- "\n found : "+showMeth(actparamss, actres, false)+
- "\n"+addendum)
-
- macroTraceVerbose("considering " + reqparamsss.length + " possibilities of compatible macro impl signatures for macro def: ")(ddef.name)
- val results = reqparamsss map (checkCompatibility(_, actparamss, reqres, actres))
- if (macroDebugVerbose) (reqparamsss zip results) foreach { case (reqparamss, result) =>
- println("%s %s".format(if (result.isEmpty) "[ OK ]" else "[FAILED]", reqparamss))
- result foreach (errorMsg => println(" " + errorMsg))
- }
-
- if (results forall (!_.isEmpty)) {
- var index = reqparamsss indexWhere (_.length == actparamss.length)
- if (index == -1) index = 0
- val mostRelevantMessage = results(index).head
- compatibilityError(mostRelevantMessage)
- } else {
- assert((results filter (_.isEmpty)).length == 1, results)
- if (macroDebugVerbose) (reqparamsss zip results) filter (_._2.isEmpty) foreach { case (reqparamss, result) =>
- println("typechecked macro impl as: " + reqparamss)
- }
- }
- }
- }
-
- // if this macro definition is erroneous, then there's no sense in expanding its usages
- // in the previous prototype macro implementations were magically generated from macro definitions
- // so macro definitions and its usages couldn't be compiled in the same compilation run
- // however, now definitions and implementations are decoupled, so it's everything is possible
- // hence, we now use IS_ERROR flag to serve as an indicator that given macro definition is broken
- if (hasErrors) {
- macroDef setFlag IS_ERROR
+ // Phase II: typecheck the right-hand side of the macro def
+ val typed = typecheckRhs(macroDdef.rhs)
+ typed match {
+ case MacroImplReference(owner, meth, targs) =>
+ if (!meth.isMethod) MacroDefInvalidBodyError()
+ if (!meth.isPublic) MacroImplNotPublicError()
+ if (meth.isOverloaded) MacroImplOverloadedError()
+ if (!owner.isStaticOwner && !owner.moduleClass.isStaticOwner) MacroImplNotStaticError()
+ if (meth.typeParams.length != targs.length) MacroImplWrongNumberOfTypeArgumentsError(typed)
+ bindMacroImpl(macroDef, typed)
+ case _ =>
+ MacroDefInvalidBodyError()
}
- rhs1
- }
-
- def computeMacroDefTypeFromMacroImpl(macroDdef: DefDef, macroDef: Symbol, macroImpl: Symbol): Type = {
- // get return type from method type
- def unwrapRet(tpe: Type): Type = {
- def loop(tpe: Type) = tpe match {
- case NullaryMethodType(ret) => ret
- case mtpe @ MethodType(_, ret) => unwrapRet(ret)
- case _ => tpe
- }
+ // Phase III: check compatibility between the macro def and its macro impl
+ // this check ignores type tag evidence parameters, because type tag context bounds are optional
+ // aXXX (e.g. aparamss) => characteristics of the macro impl ("a" stands for "actual")
+ // rXXX (e.g. rparamss) => characteristics of a reference macro impl signature synthesized from the macro def ("r" stands for "reference")
+ val macroImpl = typed.symbol
+ val aparamss = transformTypeTagEvidenceParams(macroImpl.paramss, (param, tparam) => NoSymbol)
+ val aret = macroImpl.tpe.finalResultType
+ val macroDefRet =
+ if (!macroDdef.tpt.isEmpty) typer.typedType(macroDdef.tpt).tpe
+ else computeMacroDefTypeFromMacroImpl(macroDdef, macroImpl)
+ val (rparamss, rret) = macroImplSig(macroDef, macroDdef.tparams, macroDdef.vparamss, macroDefRet)
+
+ val implicitParams = aparamss.flatten filter (_.isImplicit)
+ if (implicitParams.nonEmpty) MacroImplNonTagImplicitParameters(implicitParams)
+ if (aparamss.length != rparamss.length) MacroImplParamssMismatchError()
+
+ val atparams = macroImpl.typeParams
+ val atvars = atparams map freshVar
+ def atpeToRtpe(atpe: Type) = atpe.substSym(aparamss.flatten, rparamss.flatten).instantiateTypeParams(atparams, atvars)
- tpe match {
- case PolyType(_, tpe) => loop(tpe)
- case _ => loop(tpe)
- }
- }
- var metaType = unwrapRet(macroImpl.tpe)
+ try {
+ map2(aparamss, rparamss)((aparams, rparams) => {
+ if (aparams.length < rparams.length) MacroImplMissingParamsError(aparams, rparams)
+ if (rparams.length < aparams.length) MacroImplExtraParamsError(aparams, rparams)
+ })
- // downgrade from metalevel-0 to metalevel-1
- def inferRuntimeType(metaType: Type): Type = metaType match {
- case TypeRef(pre, sym, args) if sym.name == tpnme.Expr && args.length == 1 =>
- args.head
- case _ =>
- AnyClass.tpe
- }
- var runtimeType = inferRuntimeType(metaType)
-
- // transform type parameters of a macro implementation into type parameters of a macro definition
- runtimeType = runtimeType map {
- case TypeRef(pre, sym, args) =>
- // [Eugene] not sure which of these deSkolemizes are necessary
- // sym.paramPos is unreliable (see another case below)
- val tparams = macroImpl.typeParams map (_.deSkolemize)
- val paramPos = tparams indexOf sym.deSkolemize
- val sym1 = if (paramPos == -1) sym else {
- val ann = macroDef.getAnnotation(MacroImplAnnotation)
- ann match {
- case Some(ann) =>
- val TypeApply(_, implRefTargs) = ann.args(0)
- val implRefTarg = implRefTargs(paramPos).tpe.typeSymbol
- implRefTarg
- case None =>
- sym
- }
+ // cannot fuse these loops because if aparamss.flatten != rparamss.flatten
+ // then `atpeToRtpe` is going to fail with an unsound substitution
+ map2(aparamss.flatten, rparamss.flatten)((aparam, rparam) => {
+ if (aparam.name != rparam.name && !rparam.isSynthetic) MacroImplParamNameMismatchError(aparam, rparam)
+ if (isRepeated(aparam) ^ isRepeated(rparam)) MacroImplVarargMismatchError(aparam, rparam)
+ val aparamtpe = aparam.tpe.dealias match {
+ case RefinedType(List(tpe), Scope(sym)) if tpe == MacroContextClass.tpe && sym.allOverriddenSymbols.contains(MacroContextPrefixType) => tpe
+ case tpe => tpe
}
- TypeRef(pre, sym1, args)
- case tpe =>
- tpe
- }
+ checkMacroImplParamTypeMismatch(atpeToRtpe(aparamtpe), rparam)
+ })
- // as stated in the spec, before being matched to macroimpl, type and value parameters of macrodef
- // undergo a special transformation, sigma, that adapts them to the different metalevel macroimpl lives in
- // as a result, we need to reverse this transformation when inferring macrodef ret from macroimpl ret
- def unsigma(tpe: Type): Type = {
- // unfortunately, we cannot dereference ``paramss'', because we're in the middle of inferring a type for ``macroDef''
-// val defParamss = macroDef.paramss
- val defParamss = mmap(macroDdef.vparamss)(_.symbol)
- var implParamss = macroImpl.paramss
- implParamss = transformTypeTagEvidenceParams(implParamss, (param, tparam) => None)
-
- val implCtxParam = if (implParamss.length > 0 && implParamss(0).length > 0) implParamss(0)(0) else null
- def implParamToDefParam(implParam: Symbol): Symbol = {
- val indices = (((implParamss drop 1).zipWithIndex) map { case (implParams, index) => (index, implParams indexOf implParam) } filter (_._2 != -1)).headOption
- val defParam = indices flatMap {
- case (plistIndex, pIndex) =>
- if (defParamss.length <= plistIndex) None
- else if (defParamss(plistIndex).length <= pIndex) None
- else Some(defParamss(plistIndex)(pIndex))
- }
- defParam.orNull
- }
+ checkMacroImplResultTypeMismatch(atpeToRtpe(aret), rret)
- class UnsigmaTypeMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) =>
- val pre1 = pre match {
- case SingleType(SingleType(SingleType(NoPrefix, param), prefix), value) if param == implCtxParam && prefix == MacroContextPrefix && value == ExprValue =>
- ThisType(macroDef.owner)
- case SingleType(SingleType(NoPrefix, param), value) if implParamToDefParam(param) != null && value == ExprValue =>
- val macroDefParam = implParamToDefParam(param)
- SingleType(NoPrefix, macroDefParam)
- case _ =>
- pre
- }
- val args1 = args map mapOver
- TypeRef(pre1, sym, args1)
- case _ =>
- mapOver(tp)
- }
+ val maxLubDepth = lubDepth(aparamss.flatten map (_.tpe)) max lubDepth(rparamss.flatten map (_.tpe))
+ val atargs = solvedTypes(atvars, atparams, atparams map varianceInType(aret), upper = false, depth = maxLubDepth)
+ val boundsOk = typer.silent(_.infer.checkBounds(macroDdef, NoPrefix, NoSymbol, atparams, atargs, ""))
+ boundsOk match {
+ case SilentResultValue(true) => // do nothing, success
+ case SilentResultValue(false) | SilentTypeError(_) => MacroImplTargMismatchError(atargs, atparams)
}
-
- new UnsigmaTypeMap() apply tpe
+ } catch {
+ case ex: NoInstance => MacroImplTparamInstantiationError(atparams, ex)
}
- runtimeType = unsigma(runtimeType)
-
- runtimeType
}
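The per-parameter rules applied in Phase III above can be summarized with a toy standalone model (illustrative names and string-valued types, with a crude stand-in predicate instead of <:<): names must agree unless the reference parameter is synthetic, varargness must agree, and the actual type must conform to the reference type.

    object CompatSketch extends App {
      case class Param(name: String, tpe: String, isVararg: Boolean, isSynthetic: Boolean = false)

      def check(aparam: Param, rparam: Param, conforms: (String, String) => Boolean): List[String] =
        List(
          if (aparam.name != rparam.name && !rparam.isSynthetic)
            Some(s"parameter names differ: ${rparam.name} != ${aparam.name}") else None,
          if (aparam.isVararg ^ rparam.isVararg)
            Some(s"vararg mismatch for ${rparam.name}") else None,
          if (!conforms(aparam.tpe, rparam.tpe))
            Some(s"type mismatch for ${rparam.name}") else None
        ).flatten

      val conforms: (String, String) => Boolean = _ == _ // crude stand-in for <:<
      val a = Param("xs", "c.Expr[List[T]]", isVararg = false)
      val r = Param("xs", "c.Expr[List[T]]", isVararg = false)
      println(check(a, r, conforms)) // List() -- compatible
    }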
/** Macro classloader that is used to resolve and run macro implementations.
* Loads classes from from -cp (aka the library classpath).
* Is also capable of detecting REPL and reusing its classloader.
*/
- private lazy val macroClassloader: ClassLoader = {
+ lazy val macroClassloader: ClassLoader = {
if (global.forMSIL)
throw new UnsupportedOperationException("Scala reflection not available on this platform")
@@ -602,7 +491,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath))
val loader = ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader)
- // [Eugene] a heuristic to detect the REPL
+ // a heuristic to detect the REPL
if (global.settings.exposeEmptyPackage.value) {
macroLogVerbose("macro classloader: initializing from a REPL classloader".format(global.classPath.asURLs))
import scala.tools.nsc.interpreter._
@@ -619,118 +508,44 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
* 3) Loads the companion of that enclosing class from the macro classloader.
* 4) Resolves macro implementation within the loaded companion.
*
- * @return Some(runtime) if macro implementation can be loaded successfully from either of the mirrors,
- * None otherwise.
+ * @return The requested runtime if the macro implementation can be loaded successfully from either of the mirrors,
+ * `null` otherwise.
*/
- type MacroRuntime = List[Any] => Any
- private val macroRuntimesCache = perRunCaches.newWeakMap[Symbol, Option[MacroRuntime]]
- private def macroRuntime(macroDef: Symbol): Option[MacroRuntime] = {
+ type MacroRuntime = MacroArgs => Any
+ private val macroRuntimesCache = perRunCaches.newWeakMap[Symbol, MacroRuntime]
+ private def macroRuntime(macroDef: Symbol): MacroRuntime = {
macroTraceVerbose("looking for macro implementation: ")(macroDef)
if (fastTrack contains macroDef) {
macroLogVerbose("macro expansion is serviced by a fast track")
- Some(fastTrack(macroDef))
+ fastTrack(macroDef)
} else {
macroRuntimesCache.getOrElseUpdate(macroDef, {
- val runtime = {
- macroTraceVerbose("macroDef is annotated with: ")(macroDef.annotations)
-
- val ann = macroDef.getAnnotation(MacroImplAnnotation)
- if (ann == None) { macroTraceVerbose("@macroImpl annotation is missing (this means that macro definition failed to typecheck)")(macroDef); return None }
-
- val macroImpl = ann.get.args(0).symbol
- if (macroImpl == NoSymbol) { macroTraceVerbose("@macroImpl annotation is malformed (this means that macro definition failed to typecheck)")(macroDef); return None }
- macroLogVerbose("resolved implementation %s at %s".format(macroImpl, macroImpl.pos))
- if (macroImpl.isErroneous) { macroTraceVerbose("macro implementation is erroneous (this means that either macro body or macro implementation signature failed to typecheck)")(macroDef); return None }
-
- // [Eugene++] I don't use Scala reflection here, because it seems to interfere with JIT magic
- // whenever you instantiate a mirror (and not do anything with in, just instantiate), performance drops by 15-20%
- // I'm not sure what's the reason - for me it's pure voodoo
- def loadMacroImpl(cl: ClassLoader): Option[(Object, jMethod)] = {
- try {
- // this logic relies on the assumptions that were valid for the old macro prototype
- // namely that macro implementations can only be defined in top-level classes and modules
- // with the new prototype that materialized in a SIP, macros need to be statically accessible, which is different
- // for example, a macro def could be defined in a trait that is implemented by an object
- // there are some more clever cases when seemingly non-static method ends up being statically accessible
- // however, the code below doesn't account for these guys, because it'd take a look of time to get it right
- // for now I leave it as a todo and move along to more the important stuff
-
- macroTraceVerbose("loading implementation class: ")(macroImpl.owner.fullName)
- macroTraceVerbose("classloader is: ")(ReflectionUtils.show(cl))
-
- // [Eugene] relies on the fact that macro implementations can only be defined in static classes
- // [Martin to Eugene++] There's similar logic buried in Symbol#flatname. Maybe we can refactor?
- def classfile(sym: Symbol): String = {
- def recur(sym: Symbol): String = sym match {
- case sym if sym.owner.isPackageClass =>
- val suffix = if (sym.isModuleClass) "$" else ""
- sym.fullName + suffix
- case sym =>
- val separator = if (sym.owner.isModuleClass) "" else "$"
- recur(sym.owner) + separator + sym.javaSimpleName.toString
- }
-
- if (sym.isClass || sym.isModule) recur(sym)
- else recur(sym.enclClass)
- }
-
- // [Eugene++] this doesn't work for inner classes
- // neither does macroImpl.owner.javaClassName, so I had to roll my own implementation
- //val receiverName = macroImpl.owner.fullName
- val implClassName = classfile(macroImpl.owner)
- val implObj = try {
- val implObjClass = jClass.forName(implClassName, true, cl)
- implObjClass getField "MODULE$" get null
- } catch {
- case ex: NoSuchFieldException => macroTraceVerbose("exception when loading implObj: ")(ex); null
- case ex: NoClassDefFoundError => macroTraceVerbose("exception when loading implObj: ")(ex); null
- case ex: ClassNotFoundException => macroTraceVerbose("exception when loading implObj: ")(ex); null
- }
-
- if (implObj == null) None
- else {
- // [Eugene++] doh, it seems that I need to copy/paste Scala reflection logic
- // see `JavaMirrors.methodToJava` or whatever it's called now
- val implMeth = {
- def typeToJavaClass(tpe: Type): jClass[_] = tpe match {
- case ExistentialType(_, rtpe) => typeToJavaClass(rtpe)
- case TypeRef(_, ArrayClass, List(elemtpe)) => jArray.newInstance(typeToJavaClass(elemtpe), 0).getClass
- case TypeRef(_, sym: ClassSymbol, _) => jClass.forName(classfile(sym), true, cl)
- case _ => throw new NoClassDefFoundError("no Java class corresponding to "+tpe+" found")
- }
-
- val paramClasses = transformedType(macroImpl).paramTypes map typeToJavaClass
- try implObj.getClass getDeclaredMethod (macroImpl.name.toString, paramClasses: _*)
- catch {
- case ex: NoSuchMethodException =>
- val expandedName =
- if (macroImpl.isPrivate) nme.expandedName(macroImpl.name.toTermName, macroImpl.owner).toString
- else macroImpl.name.toString
- implObj.getClass getDeclaredMethod (expandedName, paramClasses: _*)
- }
- }
- macroLogVerbose("successfully loaded macro impl as (%s, %s)".format(implObj, implMeth))
- Some((implObj, implMeth))
- }
- } catch {
- case ex: ClassNotFoundException =>
- macroTraceVerbose("implementation class failed to load: ")(ex.toString)
- None
- case ex: NoSuchMethodException =>
- macroTraceVerbose("implementation method failed to load: ")(ex.toString)
- None
- }
- }
-
- loadMacroImpl(macroClassloader) map {
- case (implObj, implMeth) =>
- def runtime(args: List[Any]) = implMeth.invoke(implObj, (args map (_.asInstanceOf[AnyRef])): _*).asInstanceOf[Any]
- runtime _
- }
+ val binding = loadMacroImplBinding(macroDef)
+ val className = binding.className
+ val methName = binding.methName
+ macroLogVerbose(s"resolved implementation as $className.$methName")
+
+ // I don't use Scala reflection here, because it seems to interfere with JIT magic
+ // whenever you instantiate a mirror (and don't do anything with it, just instantiate), performance drops by 15-20%
+ // I'm not sure what the reason is - for me it's pure voodoo
+ // upd. my latest experiments show that everything's okay
+ // it seems that in 2.10.1 we can easily switch to Scala reflection
+ try {
+ macroTraceVerbose("loading implementation class: ")(className)
+ macroTraceVerbose("classloader is: ")(ReflectionUtils.show(macroClassloader))
+ val implObj = ReflectionUtils.staticSingletonInstance(macroClassloader, className)
+ // relies on the fact that macro impls cannot be overloaded
+ // so every methName can resolve to at most one method
+ val implMeths = implObj.getClass.getDeclaredMethods.find(_.getName == methName)
+ val implMeth = implMeths getOrElse { throw new NoSuchMethodException(s"$className.$methName") }
+ macroLogVerbose("successfully loaded macro impl as (%s, %s)".format(implObj, implMeth))
+ args => implMeth.invoke(implObj, ((args.c +: args.others) map (_.asInstanceOf[AnyRef])): _*)
+ } catch {
+ case ex: Exception =>
+ macroTraceVerbose(s"macro runtime failed to load: ")(ex.toString)
+ macroDef setFlag IS_ERROR
+ null
}
-
- if (runtime == None) macroDef setFlag IS_ERROR
- runtime
})
}
}
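The new runtime-loading path above boils down to plain Java reflection over a Scala object; a standalone sketch of the idea (hypothetical impl container, looking up MODULE$ directly instead of ReflectionUtils.staticSingletonInstance, and assuming the file is compiled top-level in the default package):

    import java.lang.reflect.Method

    object Impls { def impl(x: Int): Int = x + 1 } // stand-in for a macro impl container

    object MacroRuntimeSketch extends App {
      // resolve a Scala object's singleton instance and its (non-overloaded) method by name,
      // mirroring the className/methName lookup performed above
      def loadRuntime(cl: ClassLoader, className: String, methName: String): Seq[AnyRef] => AnyRef = {
        val implObjClass = Class.forName(className, true, cl)
        val implObj      = implObjClass.getField("MODULE$").get(null) // the object's singleton instance
        val implMeth: Method = implObjClass.getDeclaredMethods
          .find(_.getName == methName)
          .getOrElse(throw new NoSuchMethodException(s"$className.$methName"))
        args => implMeth.invoke(implObj, args: _*)
      }

      val runtime = loadRuntime(getClass.getClassLoader, "Impls$", "impl")
      println(runtime(Seq(Int.box(41)))) // 42
    }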
@@ -741,21 +556,15 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
val callsiteTyper: universe.analyzer.Typer = typer.asInstanceOf[global.analyzer.Typer]
val expandee = expandeeTree
} with UnaffiliatedMacroContext {
- // todo. infer precise typetag for this Expr, namely the PrefixType member of the Context refinement
val prefix = Expr[Nothing](prefixTree)(TypeTag.Nothing)
override def toString = "MacroContext(%s@%s +%d)".format(expandee.symbol.name, expandee.pos, enclosingMacros.length - 1 /* exclude myself */)
}
/** Calculate the arguments to pass to a macro implementation when expanding the provided tree.
- *
- * This includes inferring the exact type and instance of the macro context to pass, and also
- * allowing for missing parameter sections in macro implementation (see ``macroImplParamsss'' for more info).
- *
- * @return list of runtime objects to pass to the implementation obtained by ``macroRuntime''
*/
- private def macroArgs(typer: Typer, expandee: Tree): Option[List[Any]] = {
+ case class MacroArgs(c: MacroContext, others: List[Any])
+ private def macroArgs(typer: Typer, expandee: Tree): MacroArgs = {
val macroDef = expandee.symbol
- val runtime = macroRuntime(macroDef) orElse { return None }
val prefixTree = expandee.collect{ case Select(qual, name) => qual }.headOption.getOrElse(EmptyTree)
val context = expandee.attachments.get[MacroRuntimeAttachment].flatMap(_.macroContext).getOrElse(macroContext(typer, prefixTree, expandee))
var typeArgs = List[Tree]()
@@ -771,50 +580,20 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
case _ =>
}
collectMacroArgs(expandee)
- var argss: List[List[Any]] = List(context) :: exprArgs.toList
+
+ val argcDoesntMatch = macroDef.paramss.length != exprArgs.length
+ val nullaryArgsEmptyParams = exprArgs.isEmpty && macroDef.paramss == ListOfNil
+ if (argcDoesntMatch && !nullaryArgsEmptyParams) { typer.TyperErrorGen.MacroPartialApplicationError(expandee) }
+
+ val argss: List[List[Any]] = exprArgs.toList
+ macroTraceVerbose("context: ")(context)
macroTraceVerbose("argss: ")(argss)
- val rawArgss =
+
+ val preparedArgss: List[List[Any]] =
if (fastTrack contains macroDef) {
- if (fastTrack(macroDef) validate argss) argss
- else {
- // if you're getting here, it's not necessarily partial application that is at fault
- // for example, if a signature of a hardwired macro has been changed without updated FastTrack
- // then the corresponding partial function in FastTrack will refuse to process the expandee
- // validation will return false, and control flow will end up here
- // however, for simplicity sake, I didn't introduce the notion of error handling to FastTrack
- // so all kinds of validation errors produce `MacroPartialApplicationError`
- typer.TyperErrorGen.MacroPartialApplicationError(expandee)
- return None
- }
+ if (fastTrack(macroDef) validate context) argss
+ else typer.TyperErrorGen.MacroPartialApplicationError(expandee)
} else {
- val ann = macroDef.getAnnotation(MacroImplAnnotation).getOrElse(throw new Error("assertion failed. %s: %s".format(macroDef, macroDef.annotations)))
- val macroImpl = ann.args(0).symbol
- var paramss = macroImpl.paramss
- val tparams = macroImpl.typeParams
- macroTraceVerbose("paramss: ")(paramss)
-
- // we need to take care of all possible combos of nullary/empty-paramlist macro defs vs nullary/empty-arglist invocations
- // nullary def + nullary invocation => paramss and argss match, everything is okay
- // nullary def + empty-arglist invocation => illegal Scala code, impossible, everything is okay
- // empty-paramlist def + nullary invocation => uh-oh, we need to append a List() to argss
- // empty-paramlist def + empty-arglist invocation => paramss and argss match, everything is okay
- // that's almost it, but we need to account for the fact that paramss might have context bounds that mask the empty last paramlist
- val paramss_without_evidences = transformTypeTagEvidenceParams(paramss, (param, tparam) => None)
- val isEmptyParamlistDef = paramss_without_evidences.nonEmpty && paramss_without_evidences.last.isEmpty
- val isEmptyArglistInvocation = argss.nonEmpty && argss.last.isEmpty
- if (isEmptyParamlistDef && !isEmptyArglistInvocation) {
- macroLogVerbose("isEmptyParamlistDef && !isEmptyArglistInvocation: appending a List() to argss")
- argss = argss :+ Nil
- }
-
- // nb! check partial application against paramss without evidences
- val numParamLists = paramss_without_evidences.length
- val numArgLists = argss.length
- if (numParamLists != numArgLists) {
- typer.TyperErrorGen.MacroPartialApplicationError(expandee)
- return None
- }
-
// if paramss have typetag context bounds, add an arglist to argss if necessary and instantiate the corresponding evidences
// consider the following example:
//
@@ -828,61 +607,70 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
// val outer2 = new outer1.C[String]
// outer2.foo[Boolean]
//
- // then T and U need to be inferred from the lexical scope of the call using ``asSeenFrom''
- // whereas V won't be resolved by asSeenFrom and need to be loaded directly from ``expandee'' which needs to contain a TypeApply node
+ // then T and U need to be inferred from the lexical scope of the call using `asSeenFrom`
+ // whereas V won't be resolved by asSeenFrom and needs to be loaded directly from `expandee`, which needs to contain a TypeApply node
// also, macro implementation reference may contain a regular type as a type argument, then we pass it verbatim
- val resolved = collection.mutable.Map[Symbol, Type]()
- paramss = transformTypeTagEvidenceParams(paramss, (param, tparam) => {
- val TypeApply(_, implRefTargs) = ann.args(0)
- var implRefTarg = implRefTargs(tparam.paramPos).tpe.typeSymbol
- val tpe = if (implRefTarg.isTypeParameterOrSkolem) {
- if (implRefTarg.owner == macroDef) {
- // [Eugene] doesn't work when macro def is compiled separately from its usages
- // then implRefTarg is not a skolem and isn't equal to any of macroDef.typeParams
- // val paramPos = implRefTarg.deSkolemize.paramPos
- val paramPos = macroDef.typeParams.indexWhere(_.name == implRefTarg.name)
- typeArgs(paramPos).tpe
+ val binding = loadMacroImplBinding(macroDef)
+ macroTraceVerbose("binding: ")(binding)
+ val tags = binding.signature filter (_ != -1) map (paramPos => {
+ val targ = binding.targs(paramPos).tpe.typeSymbol
+ val tpe = if (targ.isTypeParameterOrSkolem) {
+ if (targ.owner == macroDef) {
+ // this doesn't work when the macro def is compiled separately from its usages:
+ // then targ is not a skolem and isn't equal to any of macroDef.typeParams
+ // val argPos = targ.deSkolemize.paramPos
+ val argPos = macroDef.typeParams.indexWhere(_.name == targ.name)
+ typeArgs(argPos).tpe
} else
- implRefTarg.tpe.asSeenFrom(
+ targ.tpe.asSeenFrom(
if (prefixTree == EmptyTree) macroDef.owner.tpe else prefixTree.tpe,
macroDef.owner)
} else
- implRefTarg.tpe
- macroLogVerbose("resolved tparam %s as %s".format(tparam, tpe))
- resolved(tparam) = tpe
- param.tpe.typeSymbol match {
- case definitions.AbsTypeTagClass =>
- // do nothing
- case _ =>
- throw new Error("unsupported tpe: " + tpe)
- }
- Some(tparam)
+ targ.tpe
+ context.WeakTypeTag(tpe)
+ })
+ macroTraceVerbose("tags: ")(tags)
+
+ // transforms argss taking into account varargness of paramss
+ // note that typetag context bounds are only declared on macroImpls
+ // so this optional arglist might not match macroDef's paramlist
+ // nb! varargs can apply to any parameter section, not necessarily to the last one
+ mapWithIndex(argss :+ tags)((as, i) => {
+ val mapsToParamss = macroDef.paramss.indices contains i
+ if (mapsToParamss) {
+ val ps = macroDef.paramss(i)
+ if (isVarArgsList(ps)) {
+ val (normal, varargs) = as splitAt (ps.length - 1)
+ normal :+ varargs // pack all varargs into a single List argument
+ } else as
+ } else as
})
- val tags = paramss.last takeWhile (_.isType) map (resolved(_)) map (tpe => if (tpe.isConcrete) context.TypeTag(tpe) else context.AbsTypeTag(tpe))
- if (paramss.lastOption map (params => !params.isEmpty && params.forall(_.isType)) getOrElse false) argss = argss :+ Nil
- argss = argss.dropRight(1) :+ (tags ++ argss.last) // todo. add support for context bounds in argss
-
- assert(argss.length == paramss.length, "argss: %s, paramss: %s".format(argss, paramss))
- val rawArgss = for ((as, ps) <- argss zip paramss) yield {
- if (isVarArgsList(ps)) as.take(ps.length - 1) :+ as.drop(ps.length - 1)
- else as
- }
- rawArgss
}
- val rawArgs = rawArgss.flatten
- macroTraceVerbose("rawArgs: ")(rawArgs)
- Some(rawArgs)
+ macroTraceVerbose("preparedArgss: ")(preparedArgss)
+ MacroArgs(context, preparedArgss.flatten)
}
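
A minimal, self-contained sketch of the vararg packing performed in the `mapWithIndex` block above (names are illustrative, not compiler API): when a parameter section is a vararg list, all trailing arguments are collapsed into a single List argument.

object VarargPackingSketch extends App {
  // `paramCount` is the number of declared parameters in the vararg section,
  // e.g. 2 for (x: Int, ys: Int*); the trailing arguments become one List.
  def pack(paramCount: Int, isVarArgs: Boolean, args: List[Any]): List[Any] =
    if (isVarArgs) {
      val (normal, varargs) = args splitAt (paramCount - 1)
      normal :+ varargs
    } else args

  println(pack(2, isVarArgs = true, List(1, 2, 3)))  // List(1, List(2, 3))
}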
/** Keeps track of macros in-flight.
- * See more informations in comments to ``openMacros'' in ``scala.reflect.macros.Context''.
+ * See more information in the comments on `openMacros` in `scala.reflect.macros.Context`.
*/
- var openMacros = List[MacroContext]()
+ private var _openMacros = List[MacroContext]()
+ def openMacros = _openMacros
+ private def pushMacroContext(c: MacroContext) = _openMacros ::= c
+ private def popMacroContext() = _openMacros = _openMacros.tail
def enclosingMacroPosition = openMacros map (_.macroApplication.pos) find (_ ne NoPosition) getOrElse NoPosition
+ private sealed abstract class MacroExpansionResult
+ private case class Success(expanded: Tree) extends MacroExpansionResult
+ private case class Fallback(fallback: Tree) extends MacroExpansionResult { currentRun.seenMacroExpansionsFallingBack = true }
+ private case class Other(result: Tree) extends MacroExpansionResult
+ private def Delay(expanded: Tree) = Other(expanded)
+ private def Skip(expanded: Tree) = Other(expanded)
+ private def Cancel(expandee: Tree) = Other(expandee)
+ private def Failure(expandee: Tree) = Other(expandee)
+
/** Performs macro expansion:
- * 1) Checks whether the expansion needs to be delayed (see ``mustDelayMacroExpansion'')
- * 2) Loads macro implementation using ``macroMirror''
+ * 1) Checks whether the expansion needs to be delayed (see `mustDelayMacroExpansion`)
+ * 2) Loads macro implementation using `macroMirror`
* 3) Synthesizes invocation arguments for the macro implementation
* 4) Checks that the result is a tree bound to this universe
* 5) Typechecks the result against the return type of the macro definition
@@ -903,52 +691,28 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
* the expandee with an error marker set if there has been an error
*/
def macroExpand(typer: Typer, expandee: Tree, mode: Int = EXPRmode, pt: Type = WildcardType): Tree = {
- def fail(what: String, tree: Tree): Tree = {
- val err = typer.context.errBuffer.head
- this.fail(typer, tree, err.errPos, "failed to %s: %s".format(what, err.errMsg))
- return expandee
- }
- val start = Statistics.startTimer(macroExpandNanos)
- Statistics.incCounter(macroExpandCount)
+ val start = if (Statistics.canEnable) Statistics.startTimer(macroExpandNanos) else null
+ if (Statistics.canEnable) Statistics.incCounter(macroExpandCount)
try {
macroExpand1(typer, expandee) match {
- case Success(expanded0) =>
+ case Success(expanded) =>
try {
- val expanded = expanded0 // virtpatmat swallows the local for expandee from the match
- // so I added this dummy local for the ease of debugging
- var expectedTpe = expandee.tpe
-
- // [Eugene] weird situation. what's the conventional way to deal with it?
- val isNullaryInvocation = expandee match {
- case TypeApply(Select(_, _), _) => true
- case TypeApply(Ident(_), _) => true
- case Select(_, _) => true
- case Ident(_) => true
- case _ => false
- }
- if (isNullaryInvocation) expectedTpe match {
- case NullaryMethodType(restpe) =>
- macroTraceVerbose("nullary invocation of a nullary method. unwrapping expectedTpe from " + expectedTpe + " to: ")(restpe)
- expectedTpe = restpe
- case MethodType(Nil, restpe) =>
- macroTraceVerbose("nullary invocation of a method with an empty parameter list. unwrapping expectedTpe from " + expectedTpe + " to: ")(restpe)
- expectedTpe = restpe
- case _ => ;
+ def typecheck(phase: String, tree: Tree, pt: Type): Tree = {
+ if (tree.isErroneous) return tree
+ macroLogVerbose(s"typechecking against $phase $pt: $expanded")
+ val numErrors = reporter.ERROR.count
+ def hasNewErrors = reporter.ERROR.count > numErrors
+ val result = typer.context.withImplicitsEnabled(typer.typed(tree, EXPRmode, pt))
+ macroTraceVerbose(s"""${if (hasNewErrors) "failed to typecheck" else "successfully typechecked"} against $phase $pt:\n$result\n""")(result)
}
- macroLogVerbose("typechecking1 against %s: %s".format(expectedTpe, expanded))
- var typechecked = typer.context.withImplicitsEnabled(typer.typed(expanded, EXPRmode, expectedTpe))
- if (typer.context.hasErrors) fail("typecheck against macro def return type", expanded)
- macroLogVerbose("typechecked1:%n%s%n%s".format(typechecked, showRaw(typechecked)))
-
- macroLogVerbose("typechecking2 against %s: %s".format(pt, expanded))
- typechecked = typer.context.withImplicitsEnabled(typer.typed(typechecked, EXPRmode, pt))
- if (typer.context.hasErrors) fail("typecheck against expected type", expanded)
- macroLogVerbose("typechecked2:%n%s%n%s".format(typechecked, showRaw(typechecked)))
-
- typechecked addAttachment MacroExpansionAttachment(expandee)
+ var expectedTpe = expandee.tpe
+ if (isNullaryInvocation(expandee)) expectedTpe = expectedTpe.finalResultType
+ var typechecked = typecheck("macro def return type", expanded, expectedTpe)
+ typechecked = typecheck("expected type", typechecked, pt)
+ typechecked
} finally {
- openMacros = openMacros.tail
+ popMacroContext()
}
case Fallback(fallback) =>
typer.context.withImplicitsEnabled(typer.typed(fallback, EXPRmode, pt))
@@ -956,51 +720,28 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
result
}
} finally {
- Statistics.stopTimer(macroExpandNanos, start)
+ if (Statistics.canEnable) Statistics.stopTimer(macroExpandNanos, start)
}
}
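
A toy illustration of the double check performed in the Success branch above (plain predicates standing in for the typer; not compiler API): the expansion must conform first to the macro def's declared return type, then to the expected type at the call site.

object DoubleTypecheckSketch {
  // `conformsTo*` are hypothetical stand-ins for the typer's conformance checks.
  def checkTwice[A](expanded: A)(conformsToDefReturnType: A => Boolean,
                                 conformsToExpectedType: A => Boolean): Either[String, A] =
    if (!conformsToDefReturnType(expanded)) Left("failed to typecheck against macro def return type")
    else if (!conformsToExpectedType(expanded)) Left("failed to typecheck against expected type")
    else Right(expanded)
}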
- private sealed abstract class MacroExpansionResult extends Product with Serializable
- private case class Success(expanded: Tree) extends MacroExpansionResult
- private case class Fallback(fallback: Tree) extends MacroExpansionResult
- private case class Other(result: Tree) extends MacroExpansionResult
- private def Delay(expanded: Tree) = Other(expanded)
- private def Skip(expanded: Tree) = Other(expanded)
- private def Cancel(expandee: Tree) = Other(expandee)
- private def Failure(expandee: Tree) = Other(expandee)
- private def fail(typer: Typer, expandee: Tree, pos: Position = NoPosition, msg: String = null) = {
- def msgForLog = if (msg != null && (msg contains "exception during macro expansion")) msg.split(EOL).drop(1).headOption.getOrElse("?") else msg
- macroLogLite("macro expansion has failed: %s".format(msgForLog))
- val errorPos = if (pos != NoPosition) pos else (if (expandee.pos != NoPosition) expandee.pos else enclosingMacroPosition)
- if (msg != null) typer.context.error(errorPos, msg)
- typer.infer.setError(expandee)
- Failure(expandee)
- }
-
- /** Does the same as ``macroExpand'', but without typechecking the expansion
+ /** Does the same as `macroExpand`, but without typechecking the expansion
* Meant for internal use within the macro infrastructure, don't use it elsewhere.
*/
private def macroExpand1(typer: Typer, expandee: Tree): MacroExpansionResult =
- // InfoLevel.Verbose examines and prints out infos of symbols
- // by the means of this'es these symbols can climb up the lexical scope
- // when these symbols will be examined by a node printer
- // they will enumerate and analyze their children (ask for infos and tpes)
- // if one of those children involves macro expansion, things might get nasty
- // that's why I'm temporarily turning this behavior off
+ // verbose printing might cause recursive macro expansions, so I'm shutting it down here
withInfoLevel(nodePrinters.InfoLevel.Quiet) {
- // if a macro implementation is incompatible or any of the arguments are erroneous
- // there is no sense to expand the macro itself => it will only make matters worse
if (expandee.symbol.isErroneous || (expandee exists (_.isErroneous))) {
val reason = if (expandee.symbol.isErroneous) "not found or incompatible macro implementation" else "erroneous arguments"
macroTraceVerbose("cancelled macro expansion because of %s: ".format(reason))(expandee)
return Cancel(typer.infer.setError(expandee))
}
- macroRuntime(expandee.symbol) match {
- case Some(runtime) =>
- macroExpandWithRuntime(typer, expandee, runtime)
- case None =>
- macroExpandWithoutRuntime(typer, expandee)
+ try {
+ val runtime = macroRuntime(expandee.symbol)
+ if (runtime != null) macroExpandWithRuntime(typer, expandee, runtime)
+ else macroExpandWithoutRuntime(typer, expandee)
+ } catch {
+ case typer.TyperErrorGen.MacroExpansionException => Failure(expandee)
}
}
@@ -1008,185 +749,69 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
* Meant for internal use within the macro infrastructure, don't use it elsewhere.
*/
private def macroExpandWithRuntime(typer: Typer, expandee: Tree, runtime: MacroRuntime): MacroExpansionResult = {
- def issueFreeError(sym: FreeSymbol) = {
- val template = (
- "Macro expansion contains free @kind@ variable %s. Have you forgotten to use %s? "
- + "If you have troubles tracking free @kind@ variables, consider using -Xlog-free-@kind@s"
- )
- val forgotten = (
- if (sym.isTerm) "splice when splicing this variable into a reifee"
- else "c.AbsTypeTag annotation for this type parameter"
- )
- typer.context.error(expandee.pos,
- template.replaceAllLiterally("@kind@", sym.name.nameKind).format(
- sym.name + " " + sym.origin, forgotten)
- )
- }
- def macroExpandInternal = {
- val wasDelayed = isDelayed(expandee)
- val undetparams = calculateUndetparams(expandee)
- val nowDelayed = !typer.context.macrosEnabled || undetparams.nonEmpty
-
- def failExpansion(msg: String = null) = fail(typer, expandee, msg = msg)
- def performExpansion(args: List[Any]): MacroExpansionResult = {
- val numErrors = reporter.ERROR.count
- def hasNewErrors = reporter.ERROR.count > numErrors
-
- val expanded = runtime(args)
-
- if (hasNewErrors)
- failExpansion() // errors have been reported by the macro itself
- else expanded match {
- case expanded: Expr[_] =>
- macroLogVerbose("original:")
- macroLogLite("" + expanded.tree + "\n" + showRaw(expanded.tree))
-
- expanded.tree.freeTerms foreach issueFreeError
- expanded.tree.freeTypes foreach issueFreeError
- if (hasNewErrors) failExpansion()
-
- // inherit the position from the first position-ful expandee in macro callstack
- // this is essential for sane error messages
- // now macro expansion gets typechecked against the macro definition return type
- // however, this happens in macroExpand, not here in macroExpand1
- else Success(atPos(enclosingMacroPosition.focus)(expanded.tree))
- case _ =>
- failExpansion(
- "macro must return a compiler-specific expr; returned value is " + (
- if (expanded.isInstanceOf[Expr[_]]) " Expr, but it doesn't belong to this compiler's universe"
- else " of " + expanded.getClass
- )
- )
- }
- }
-
- if (wasDelayed) {
- if (nowDelayed) Delay(expandee)
- else Skip(macroExpandAll(typer, expandee))
- }
- else {
- macroLogLite("typechecking macro expansion %s at %s".format(expandee, expandee.pos))
- macroArgs(typer, expandee).fold(failExpansion(): MacroExpansionResult) {
- args => (args: @unchecked) match {
- // [Eugene++] crashes virtpatmat:
- // case args @ ((context: MacroContext) :: _) =>
- case args @ (context0 :: _) =>
- val context = context0.asInstanceOf[MacroContext]
- if (nowDelayed) {
- macroLogLite("macro expansion is delayed: %s".format(expandee))
- delayed += expandee -> undetparams
- // need to save typer context for `macroExpandAll`
- // need to save macro context to preserve enclosures
- expandee addAttachment MacroRuntimeAttachment(delayed = true, typerContext = typer.context, macroContext = Some(context.asInstanceOf[MacroContext]))
- Delay(expandee)
- }
- else {
- // adding stuff to openMacros is easy, but removing it is a nightmare
- // it needs to be sprinkled over several different code locations
- // why? https://github.com/scala/scala/commit/bd3eacbae21f39b1ac7fe8ade4ed71fa98e1a28d#L2R1137
- // todo. will be improved
- openMacros ::= context
- var isSuccess = false
- try performExpansion(args) match {
- case x: Success => isSuccess = true ; x
- case x => x
- }
- finally {
- expandee.removeAttachment[MacroRuntimeAttachment]
- if (!isSuccess) openMacros = openMacros.tail
- }
- }
+ val wasDelayed = isDelayed(expandee)
+ val undetparams = calculateUndetparams(expandee)
+ val nowDelayed = !typer.context.macrosEnabled || undetparams.nonEmpty
+
+ (wasDelayed, nowDelayed) match {
+ case (true, true) => Delay(expandee)
+ case (true, false) => Skip(macroExpandAll(typer, expandee))
+ case (false, true) =>
+ macroLogLite("macro expansion is delayed: %s".format(expandee))
+ delayed += expandee -> undetparams
+ expandee updateAttachment MacroRuntimeAttachment(delayed = true, typerContext = typer.context, macroContext = Some(macroArgs(typer, expandee).c))
+ Delay(expandee)
+ case (false, false) =>
+ import typer.TyperErrorGen._
+ macroLogLite("performing macro expansion %s at %s".format(expandee, expandee.pos))
+ val args = macroArgs(typer, expandee)
+ try {
+ val numErrors = reporter.ERROR.count
+ def hasNewErrors = reporter.ERROR.count > numErrors
+ val expanded = { pushMacroContext(args.c); runtime(args) }
+ if (hasNewErrors) MacroGeneratedTypeError(expandee)
+ expanded match {
+ case expanded: Expr[_] =>
+ macroLogVerbose("original:")
+ macroLogLite("" + expanded.tree + "\n" + showRaw(expanded.tree))
+ val freeSyms = expanded.tree.freeTerms ++ expanded.tree.freeTypes
+ freeSyms foreach (sym => MacroFreeSymbolError(expandee, sym))
+ Success(atPos(enclosingMacroPosition.focus)(expanded.tree updateAttachment MacroExpansionAttachment(expandee)))
+ case _ =>
+ MacroExpansionIsNotExprError(expandee, expanded)
}
+ } catch {
+ case ex: Throwable =>
+ popMacroContext()
+ val realex = ReflectionUtils.unwrapThrowable(ex)
+ realex match {
+ case ex: AbortMacroException => MacroGeneratedAbort(expandee, ex)
+ case ex: ControlThrowable => throw ex
+ case ex: TypeError => MacroGeneratedTypeError(expandee, ex)
+ case _ => MacroGeneratedException(expandee, realex)
+ }
+ } finally {
+ expandee.removeAttachment[MacroRuntimeAttachment]
}
- }
}
-
- try macroExpandInternal
- catch { case ex: Throwable => handleMacroExpansionException(typer, expandee, ex) }
}
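
The four-way match above amounts to a small decision table; here is a hedged, self-contained sketch of it (names are illustrative, not compiler API):

object DelayDecisionSketch {
  sealed trait Step
  case object DelayExpansion extends Step  // still waiting for undetermined type params
  case object ExpandSubtrees extends Step  // was delayed, now ready: run macroExpandAll
  case object RecordAndDelay extends Step  // first time delayed: remember undetparams
  case object ExpandNow      extends Step  // straightforward expansion via the runtime

  def nextStep(wasDelayed: Boolean, nowDelayed: Boolean): Step = (wasDelayed, nowDelayed) match {
    case (true, true)   => DelayExpansion
    case (true, false)  => ExpandSubtrees
    case (false, true)  => RecordAndDelay
    case (false, false) => ExpandNow
  }
}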
+ /** Expands a macro when a runtime (i.e. the macro implementation) cannot be loaded
+ * Meant for internal use within the macro infrastructure, don't use it elsewhere.
+ */
private def macroExpandWithoutRuntime(typer: Typer, expandee: Tree): MacroExpansionResult = {
- val macroDef = expandee.symbol
- def notFound() = {
- typer.context.error(expandee.pos, "macro implementation not found: " + macroDef.name + " " +
- "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)")
- None
- }
- def fallBackToOverridden(tree: Tree): Option[Tree] = {
+ import typer.TyperErrorGen._
+ val fallbackSym = expandee.symbol.nextOverriddenSymbol orElse MacroImplementationNotFoundError(expandee)
+ macroTraceLite("falling back to: ")(fallbackSym)
+
+ def mkFallbackTree(tree: Tree): Tree = {
tree match {
- case Select(qual, name) if (macroDef.isTermMacro) =>
- macroDef.allOverriddenSymbols match {
- case first :: _ =>
- Some(Select(qual, name) setPos tree.pos setSymbol first)
- case _ =>
- macroTraceVerbose("macro is not overridden: ")(tree)
- notFound()
- }
- case Apply(fn, args) =>
- fallBackToOverridden(fn) match {
- case Some(fn1) => Some(Apply(fn1, args) setPos tree.pos)
- case _ => None
- }
- case TypeApply(fn, args) =>
- fallBackToOverridden(fn) match {
- case Some(fn1) => Some(TypeApply(fn1, args) setPos tree.pos)
- case _ => None
- }
- case _ =>
- macroTraceVerbose("unexpected tree in fallback: ")(tree)
- notFound()
+ case Select(qual, name) => Select(qual, name) setPos tree.pos setSymbol fallbackSym
+ case Apply(fn, args) => Apply(mkFallbackTree(fn), args) setPos tree.pos
+ case TypeApply(fn, args) => TypeApply(mkFallbackTree(fn), args) setPos tree.pos
}
}
- fallBackToOverridden(expandee) match {
- case Some(tree1) =>
- macroTraceLite("falling back to: ")(tree1)
- currentRun.macroExpansionFailed = true
- Fallback(tree1)
- case None =>
- fail(typer, expandee)
- }
- }
-
- private def handleMacroExpansionException(typer: Typer, expandee: Tree, ex: Throwable): MacroExpansionResult = {
- // [Eugene] any ideas about how to improve this one?
- val realex = ReflectionUtils.unwrapThrowable(ex)
- realex match {
- case realex: reflect.macros.runtime.AbortMacroException =>
- macroLogVerbose("macro expansion has failed: %s".format(realex.msg))
- fail(typer, expandee) // error has been reported by abort
- case err: TypeError =>
- macroLogLite("macro expansion has failed: %s at %s".format(err.msg, err.pos))
- throw err // error should be propagated, don't report
- case _ =>
- val message = {
- try {
- // [Eugene] is there a better way?
- // [Paul] See Exceptional.scala and Origins.scala.
- val relevancyThreshold = realex.getStackTrace().indexWhere(este => este.getMethodName == "macroExpand1")
- if (relevancyThreshold == -1) None
- else {
- var relevantElements = realex.getStackTrace().take(relevancyThreshold + 1)
- def isMacroInvoker(este: StackTraceElement) = este.isNativeMethod || (este.getClassName != null && (este.getClassName contains "fastTrack"))
- var threshold = relevantElements.reverse.indexWhere(isMacroInvoker) + 1
- while (threshold != relevantElements.length && isMacroInvoker(relevantElements(relevantElements.length - threshold - 1))) threshold += 1
- relevantElements = relevantElements dropRight threshold
-
- realex.setStackTrace(relevantElements)
- val message = new java.io.StringWriter()
- realex.printStackTrace(new java.io.PrintWriter(message))
- Some(EOL + message)
- }
- } catch {
- // if the magic above goes boom, just fall back to uninformative, but better than nothing, getMessage
- case ex: Throwable =>
- None
- }
- } getOrElse {
- val msg = realex.getMessage
- if (msg != null) msg else realex.getClass.getName
- }
- fail(typer, expandee, msg = "exception during macro expansion: " + message)
- }
+ Fallback(mkFallbackTree(expandee))
}
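
A minimal sketch of the rewrite done by `mkFallbackTree` above, using plain case classes instead of compiler Trees: only the callee at the bottom of a (possibly nested) application is re-pointed at the fallback symbol, while argument lists are preserved.

object FallbackRewriteSketch {
  sealed trait ToyTree
  case class ToySelect(qual: String, name: String, sym: String = "<macro>") extends ToyTree
  case class ToyApply(fn: ToyTree, args: List[String]) extends ToyTree
  case class ToyTypeApply(fn: ToyTree, targs: List[String]) extends ToyTree

  def retarget(tree: ToyTree, fallbackSym: String): ToyTree = tree match {
    case ToySelect(qual, name, _) => ToySelect(qual, name, fallbackSym)
    case ToyApply(fn, args)       => ToyApply(retarget(fn, fallbackSym), args)
    case ToyTypeApply(fn, targs)  => ToyTypeApply(retarget(fn, fallbackSym), targs)
  }

  // retarget(ToyApply(ToySelect("obj", "m"), List("x")), "overridden")
  //   == ToyApply(ToySelect("obj", "m", "overridden"), List("x"))
}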
/** Without any restrictions on macro expansion, macro applications will expand at will,
@@ -1202,11 +827,11 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
* 2) undetparams (sym.isTypeParameter && !sym.isSkolem)
*/
var hasPendingMacroExpansions = false
- private val delayed = perRunCaches.newWeakMap[Tree, collection.mutable.Set[Int]]
+ private val delayed = perRunCaches.newWeakMap[Tree, scala.collection.mutable.Set[Int]]
private def isDelayed(expandee: Tree) = delayed contains expandee
- private def calculateUndetparams(expandee: Tree): collection.mutable.Set[Int] =
+ private def calculateUndetparams(expandee: Tree): scala.collection.mutable.Set[Int] =
delayed.get(expandee).getOrElse {
- val calculated = collection.mutable.Set[Symbol]()
+ val calculated = scala.collection.mutable.Set[Symbol]()
expandee foreach (sub => {
def traverse(sym: Symbol) = if (sym != null && (undetparams contains sym.id)) calculated += sym
if (sub.symbol != null) traverse(sub.symbol)
@@ -1238,19 +863,19 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
/** Performs macro expansion on all subtrees of a given tree.
* Innermost macros are expanded first, outermost macros are expanded last.
- * See the documentation for ``macroExpand'' for more information.
+ * See the documentation for `macroExpand` for more information.
*/
def macroExpandAll(typer: Typer, expandee: Tree): Tree =
new Transformer {
override def transform(tree: Tree) = super.transform(tree match {
// todo. expansion should work from the inside out
- case wannabe if (delayed contains wannabe) && calculateUndetparams(wannabe).isEmpty =>
- val context = wannabe.attachments.get[MacroRuntimeAttachment].get.typerContext
- delayed -= wannabe
+ case tree if (delayed contains tree) && calculateUndetparams(tree).isEmpty =>
+ val context = tree.attachments.get[MacroRuntimeAttachment].get.typerContext
+ delayed -= tree
context.implicitsEnabled = typer.context.implicitsEnabled
context.enrichmentEnabled = typer.context.enrichmentEnabled
context.macrosEnabled = typer.context.macrosEnabled
- macroExpand(newTyper(context), wannabe, EXPRmode, WildcardType)
+ macroExpand(newTyper(context), tree, EXPRmode, WildcardType)
case _ =>
tree
})
@@ -1258,7 +883,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
}
object MacrosStats {
- import reflect.internal.TypesStats.typerNanos
+ import scala.reflect.internal.TypesStats.typerNanos
val macroExpandCount = Statistics.newCounter ("#macro expansions", "typer")
val macroExpandNanos = Statistics.newSubTimer("time spent in macroExpand", typerNanos)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index dd180e6b76..91dcd90962 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -10,7 +10,7 @@ import scala.collection.{ mutable, immutable }
import scala.reflect.internal.util.StringOps.{ ojoin }
import scala.reflect.ClassTag
import scala.reflect.runtime.{ universe => ru }
-import language.higherKinds
+import scala.language.higherKinds
/** Logic related to method synthesis which involves cooperation between
* Namer and Typer.
@@ -31,7 +31,6 @@ trait MethodSynthesis {
else DefDef(sym, body)
def applyTypeInternal(tags: List[TT[_]]): Type = {
- // [Eugene++ to Paul] needs review!!
val symbols = tags map compilerSymbolFromTag
val container :: args = symbols
val tparams = container.typeConstructor.typeParams
@@ -53,21 +52,14 @@ trait MethodSynthesis {
applyTypeInternal(List(t1))
def applyType[CC[X1], X1](implicit t1: TT[CC[_]], t2: TT[X1]): Type =
- applyTypeInternal(List[TT[_]](t1, t2))
+ applyTypeInternal(List(t1, t2))
def applyType[CC[X1, X2], X1, X2](implicit t1: TT[CC[_,_]], t2: TT[X1], t3: TT[X2]): Type =
- // [Eugene++] without an explicit type annotation for List, we get this:
- // [scalacfork] C:\Projects\KeplerUnderRefactoring\src\compiler\scala\tools\nsc\typechecker\MethodSynthesis.scala:59: error: no type parameters for method apply: (xs: A*)List[A] in object List exist so that it can be applied to arguments (scala.tools.nsc.typechecker.MethodSynthesis.synthesisUtil.TT[CC[_, _]], scala.tools.nsc.typechecker.MethodSynthesis.synthesisUtil.TT[X1], scala.tools.nsc.typechecker.MethodSynthesis.synthesisUtil.TT[X2])
- // [scalacfork] --- because ---
- // [scalacfork] undetermined type
- // [scalacfork] applyTypeInternal(List(t1, t2, t3))
- applyTypeInternal(List[TT[_]](t1, t2, t3))
+ applyTypeInternal(List(t1, t2, t3))
def applyType[CC[X1, X2, X3], X1, X2, X3](implicit t1: TT[CC[_,_,_]], t2: TT[X1], t3: TT[X2], t4: TT[X3]): Type =
- applyTypeInternal(List[TT[_]](t1, t2, t3, t4))
+ applyTypeInternal(List(t1, t2, t3, t4))
- // [Martin->Eugene] !!! reinstantiate when typeables are in.
- // [Eugene++->Martin] now this compiles, will soon check it out
def newMethodType[F](owner: Symbol)(implicit t: TT[F]): Type = {
val fnSymbol = compilerSymbolFromTag(t)
val formals = compilerTypeFromTag(t).typeArguments
@@ -250,7 +242,7 @@ trait MethodSynthesis {
abort("No synthetics for " + meth + ": synthetics contains " + context.unit.synthetics.keys.mkString(", "))
}
case _ =>
- List(stat)
+ stat :: Nil
}
def standardAccessors(vd: ValDef): List[DerivedFromValDef] = (
@@ -377,7 +369,7 @@ trait MethodSynthesis {
}
/** A synthetic method which performs the implicit conversion implied by
- * the declaration of an implicit class. Yet to be written.
+ * the declaration of an implicit class.
*/
case class ImplicitClassWrapper(tree: ClassDef) extends DerivedFromClassDef {
def completer(sym: Symbol): Type = ??? // not needed
@@ -385,7 +377,7 @@ trait MethodSynthesis {
def derivedSym: Symbol = {
// Only methods will do! Don't want to pick up any stray
// companion objects of the same name.
- val result = enclClass.info decl name suchThat (_.isMethod)
+ val result = enclClass.info decl name suchThat (x => x.isMethod && x.isSynthetic)
assert(result != NoSymbol, "not found: "+name+" in "+enclClass+" "+enclClass.info.decls)
result
}
@@ -499,7 +491,7 @@ trait MethodSynthesis {
// Derives a tree without attempting to use the original tree's symbol.
override def derivedTree = {
atPos(tree.pos.focus) {
- DefDef(derivedMods, name, Nil, List(Nil), tree.tpt.duplicate,
+ DefDef(derivedMods, name, Nil, ListOfNil, tree.tpt.duplicate,
if (isDeferred) EmptyTree else Select(This(owner), tree.name)
)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index efb96b173c..1b2225f5f2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -192,6 +192,10 @@ trait Namers extends MethodSynthesis {
if (!allowsOverload(sym)) {
val prev = scope.lookupEntry(sym.name)
if ((prev ne null) && prev.owner == scope && conflict(sym, prev.sym)) {
+ if (sym.isSynthetic || prev.sym.isSynthetic) {
+ handleSyntheticNameConflict(sym, prev.sym)
+ handleSyntheticNameConflict(prev.sym, sym)
+ }
DoubleDefError(sym, prev.sym)
sym setInfo ErrorType
scope unlink prev.sym // let them co-exist...
@@ -202,6 +206,14 @@ trait Namers extends MethodSynthesis {
scope enter sym
}
+ /** Logic to handle name conflicts of synthetically generated symbols
+ * Right now this handles t6227.
+ */
+ def handleSyntheticNameConflict(sym1: Symbol, sym2: Symbol) = {
+ if (sym1.isImplicit && sym1.isMethod && sym2.isModule && sym2.companionClass.isCaseClass)
+ validate(sym2.companionClass)
+ }
+
def enterSym(tree: Tree): Context = {
def dispatch() = {
var returnContext = this.context
@@ -347,10 +359,39 @@ trait Namers extends MethodSynthesis {
}
}
+ /** Given a ClassDef or ModuleDef, verifies there isn't a companion which
+ * has been defined in a separate file.
+ */
+ private def validateCompanionDefs(tree: ImplDef) {
+ val sym = tree.symbol
+ if (sym eq NoSymbol) return
+
+ val ctx = if (context.owner.isPackageObjectClass) context.outer else context
+ val module = if (sym.isModule) sym else ctx.scope lookup tree.name.toTermName
+ val clazz = if (sym.isClass) sym else ctx.scope lookup tree.name.toTypeName
+ val fails = (
+ module.isModule
+ && clazz.isClass
+ && !module.isSynthetic
+ && !clazz.isSynthetic
+ && (clazz.sourceFile ne null)
+ && (module.sourceFile ne null)
+ && !(module isCoDefinedWith clazz)
+ )
+ if (fails) {
+ context.unit.error(tree.pos, (
+ s"Companions '$clazz' and '$module' must be defined in same file:\n"
+ + s" Found in ${clazz.sourceFile.canonicalPath} and ${module.sourceFile.canonicalPath}")
+ )
+ }
+ }
+
def enterModuleDef(tree: ModuleDef) = {
val sym = enterModuleSymbol(tree)
sym.moduleClass setInfo namerOf(sym).moduleClassTypeCompleter(tree)
sym setInfo completerOf(tree)
+ validateCompanionDefs(tree)
+ sym
}
/** Enter a module symbol. The tree parameter can be either
@@ -455,7 +496,6 @@ trait Namers extends MethodSynthesis {
// for Java code importing Scala objects
if (!nme.isModuleName(from) || isValid(nme.stripModuleSuffix(from))) {
typer.TyperErrorGen.NotAMemberError(tree, expr, from)
- typer.infer.setError(tree)
}
}
// Setting the position at the import means that if there is
@@ -597,7 +637,7 @@ trait Namers extends MethodSynthesis {
MaxParametersCaseClassError(tree)
val m = ensureCompanionObject(tree, caseModuleDef)
- m.moduleClass.addAttachment(new ClassForCaseCompanionAttachment(tree))
+ m.moduleClass.updateAttachment(new ClassForCaseCompanionAttachment(tree))
}
val hasDefault = impl.body exists {
case DefDef(_, nme.CONSTRUCTOR, _, vparamss, _, _) => mexists(vparamss)(_.mods.hasDefault)
@@ -605,7 +645,7 @@ trait Namers extends MethodSynthesis {
}
if (hasDefault) {
val m = ensureCompanionObject(tree)
- m.addAttachment(new ConstructorDefaultsAttachment(tree, null))
+ m.updateAttachment(new ConstructorDefaultsAttachment(tree, null))
}
val owner = tree.symbol.owner
if (settings.lint.value && owner.isPackageObjectClass && !mods.isImplicit) {
@@ -623,6 +663,7 @@ trait Namers extends MethodSynthesis {
}
else context.unit.error(tree.pos, "implicit classes must accept exactly one primary constructor parameter")
}
+ validateCompanionDefs(tree)
}
// this logic is needed in case typer was interrupted half
@@ -687,7 +728,7 @@ trait Namers extends MethodSynthesis {
// }
}
- def moduleClassTypeCompleter(tree: Tree) = {
+ def moduleClassTypeCompleter(tree: ModuleDef) = {
mkTypeCompleter(tree) { sym =>
val moduleSymbol = tree.symbol
assert(moduleSymbol.moduleClass == sym, moduleSymbol.moduleClass)
@@ -973,7 +1014,7 @@ trait Namers extends MethodSynthesis {
// Add a () parameter section if this overrides some method with () parameters.
if (clazz.isClass && vparamss.isEmpty && overriddenSymbol.alternatives.exists(
_.info.isInstanceOf[MethodType])) {
- vparamSymss = List(List())
+ vparamSymss = ListOfNil
}
mforeach(vparamss) { vparam =>
if (vparam.tpt.isEmpty) {
@@ -988,6 +1029,15 @@ trait Namers extends MethodSynthesis {
// (either "macro ???" as they used to or just "???" to maximally simplify their compilation)
if (fastTrack contains ddef.symbol) ddef.symbol setFlag MACRO
+ // macro defs need to be typechecked in advance
+ // because @macroImpl annotation only gets assigned during typechecking
+ // otherwise macro defs wouldn't be able to robustly coexist with their clients
+ // because a client could be typechecked before a macro def that it uses
+ if (ddef.symbol.isTermMacro) {
+ val pt = resultPt.substSym(tparamSyms, tparams map (_.symbol))
+ typer.computeMacroDefType(ddef, pt)
+ }
+
thisMethodType({
val rt = (
if (!tpt.isEmpty) {
@@ -1023,7 +1073,7 @@ trait Namers extends MethodSynthesis {
var baseParamss = (vparamss, overridden.tpe.paramss) match {
// match empty and missing parameter list
case (Nil, List(Nil)) => Nil
- case (List(Nil), Nil) => List(Nil)
+ case (List(Nil), Nil) => ListOfNil
case (_, paramss) => paramss
}
assert(
@@ -1122,7 +1172,7 @@ trait Namers extends MethodSynthesis {
// symbol will be re-entered in the scope but the default parameter will not.
val att = meth.attachments.get[DefaultsOfLocalMethodAttachment] match {
case Some(att) => att.defaultGetters += default
- case None => meth.addAttachment(new DefaultsOfLocalMethodAttachment(default))
+ case None => meth.updateAttachment(new DefaultsOfLocalMethodAttachment(default))
}
}
} else if (baseHasDefault) {
@@ -1381,6 +1431,7 @@ trait Namers extends MethodSynthesis {
fail(ImplicitAtToplevel)
}
if (sym.isClass) {
+ checkNoConflict(IMPLICIT, CASE)
if (sym.isAnyOverride && !sym.hasFlag(TRAIT))
fail(OverrideClass)
} else {
@@ -1455,7 +1506,7 @@ trait Namers extends MethodSynthesis {
/** A class representing a lazy type with known type parameters.
*/
- class PolyTypeCompleter(tparams: List[TypeDef], restp: TypeCompleter, owner: Tree, ctx: Context) extends LockingTypeCompleter {
+ class PolyTypeCompleter(tparams: List[TypeDef], restp: TypeCompleter, owner: Tree, ctx: Context) extends LockingTypeCompleter with FlagAgnosticCompleter {
private val ownerSym = owner.symbol
override val typeParams = tparams map (_.symbol) //@M
override val tree = restp.tree
@@ -1523,18 +1574,11 @@ trait Namers extends MethodSynthesis {
* call this method?
*/
def companionSymbolOf(original: Symbol, ctx: Context): Symbol = {
- try {
- original.companionSymbol orElse {
- ctx.lookup(original.name.companionName, original.owner).suchThat(sym =>
- (original.isTerm || sym.hasModuleFlag) &&
- (sym isCoDefinedWith original)
- )
- }
- }
- catch {
- case e: InvalidCompanions =>
- ctx.unit.error(original.pos, e.getMessage)
- NoSymbol
+ original.companionSymbol orElse {
+ ctx.lookup(original.name.companionName, original.owner).suchThat(sym =>
+ (original.isTerm || sym.hasModuleFlag) &&
+ (sym isCoDefinedWith original)
+ )
}
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
index 4776c3b45f..2282f62152 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -8,13 +8,13 @@ package scala.tools.nsc
package typechecker
import symtab._
-import Flags.{MUTABLE, METHOD, LABEL, SYNTHETIC, HIDDEN}
-import language.postfixOps
+import Flags.{MUTABLE, METHOD, LABEL, SYNTHETIC, ARTIFACT}
+import scala.language.postfixOps
import scala.tools.nsc.transform.TypingTransformers
import scala.tools.nsc.transform.Transform
import scala.collection.mutable.HashSet
import scala.collection.mutable.HashMap
-import reflect.internal.util.Statistics
+import scala.reflect.internal.util.Statistics
import scala.reflect.internal.Types
/** Translate pattern matching.
@@ -67,10 +67,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
object exceeded extends Exception {
val advice = s"(The analysis required more space than allowed. Please try with scalac -Dscalac.patmat.analysisBudget=${AnalysisBudget.max*2} or -Dscalac.patmat.analysisBudget=off.)"
}
-
- object stackOverflow extends Exception {
- val advice = "(There was a stack overflow. Please try increasing the stack available to the compiler using e.g., -Xss2m.)"
- }
}
def newTransformer(unit: CompilationUnit): Transformer =
@@ -198,6 +194,69 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
import typer.{typed, context, silent, reallyExists}
// import typer.infer.containsUnchecked
+ // Why is it so difficult to say "here's a name and a context, give me any
+ // matching symbol in scope" ? I am sure this code is wrong, but attempts to
+ // use the scopes of the contexts in the enclosing context chain discover
+ // nothing. How to associate a name with a symbol would be a wonderful
+ // linkage for which to establish a canonical acquisition mechanism.
+ def matchingSymbolInScope(pat: Tree): Symbol = {
+ def declarationOfName(tpe: Type, name: Name): Symbol = tpe match {
+ case PolyType(tparams, restpe) => tparams find (_.name == name) getOrElse declarationOfName(restpe, name)
+ case MethodType(params, restpe) => params find (_.name == name) getOrElse declarationOfName(restpe, name)
+ case ClassInfoType(_, _, clazz) => clazz.rawInfo member name
+ case _ => NoSymbol
+ }
+ pat match {
+ case Bind(name, _) =>
+ context.enclosingContextChain.foldLeft(NoSymbol: Symbol)((res, ctx) =>
+ res orElse declarationOfName(ctx.owner.rawInfo, name))
+ case _ => NoSymbol
+ }
+ }
+
+ // Issue better warnings than "unreachable code" when people mis-use
+ // variable patterns thinking they bind to existing identifiers.
+ //
+ // Possible TODO: more deeply nested variable patterns, like
+ // case (a, b) => 1 ; case (c, d) => 2
+ // However this is a pain (at least the way I'm going about it)
+ // and I have to think these detailed errors are primarily useful
+ // for beginners, not people writing nested pattern matches.
+ def checkMatchVariablePatterns(m: Match) {
+ // A string describing the first variable pattern
+ var vpat: String = null
+ // Using an iterator so we can recognize the last case
+ val it = m.cases.iterator
+
+ def addendum(pat: Tree) = {
+ matchingSymbolInScope(pat) match {
+ case NoSymbol => ""
+ case sym =>
+ val desc = if (sym.isParameter) s"parameter ${sym.nameString} of" else sym + " in"
+ s"\nIf you intended to match against $desc ${sym.owner}, you must use backticks, like: case `${sym.nameString}` =>"
+ }
+ }
+
+ while (it.hasNext) {
+ val cdef = it.next
+ // If a default case has been seen, then every succeeding case is unreachable.
+ if (vpat != null)
+ context.unit./*error*/warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat))
+ // If this is a default case and more cases follow, warn about this one so
+ // we have a reason to mention its pattern variable name and any corresponding
+ // symbol in scope. Errors will follow from the remaining cases, at least
+ // once we make the above warning an error.
+ else if (it.hasNext && (treeInfo isDefaultCase cdef)) {
+ val vpatName = cdef.pat match {
+ case Bind(name, _) => s" '$name'"
+ case _ => ""
+ }
+ vpat = s"variable pattern$vpatName on line ${cdef.pat.pos.line}"
+ context.unit.warning(cdef.pos, s"patterns after a variable pattern cannot match (SLS 8.1.1)" + addendum(cdef.pat))
+ }
+ }
+ }
+
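
To make the intent of the new warning concrete, here is a small self-contained example of the mis-use it targets (a lowercase pattern binds a fresh variable; backticks are needed to refer to an existing identifier):

object VariablePatternExample extends App {
  val n = 1

  def loose(x: Int): String = x match {
    case n => s"matched anything; pattern n = $n is a new binder, not the val above"
  }

  def strict(x: Int): String = x match {
    case `n`   => "matched the enclosing val n (only the value 1)"
    case other => s"no match for $other"
  }

  println(loose(42))   // matched anything; pattern n = 42 ...
  println(strict(42))  // no match for 42
}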
/** Implement a pattern match by turning its cases (including the implicit failure case)
* into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
*
@@ -210,6 +269,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
*/
def translateMatch(match_ : Match): Tree = {
val Match(selector, cases) = match_
+ checkMatchVariablePatterns(match_)
// we don't transform after uncurry
// (that would require more sophistication when generating trees,
@@ -217,7 +277,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
if(phase.id >= currentRun.uncurryPhase.id) debugwarn("running translateMatch at "+ phase +" on "+ selector +" match "+ cases)
patmatDebug("translating "+ cases.mkString("{", "\n", "}"))
- val start = Statistics.startTimer(patmatNanos)
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatNanos) else null
val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.widen.withoutAnnotations))
@@ -230,22 +290,22 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
removeCPSAdaptAnnotations(origPt)
else origPt
- // we've packed the type for each case in typedMatch so that if all cases have the same existential case, we get a clean lub
- // here, we should open up the existential again
// relevant test cases: pos/existentials-harmful.scala, pos/gadt-gilles.scala, pos/t2683.scala, pos/virtpatmat_exist4.scala
- // TODO: fix skolemizeExistential (it should preserve annotations, right?)
- val pt = repeatedToSeq(ptUnCPS.skolemizeExistential(context.owner, context.tree) withAnnotations ptUnCPS.annotations)
+ // pt is the skolemized version
+ val pt = repeatedToSeq(ptUnCPS)
+
+ // val packedPt = repeatedToSeq(typer.packedType(match_, context.owner))
// the alternative to attaching the default case override would be to simply
// append the default to the list of cases and suppress the unreachable case error that may arise (once we detect that...)
val matchFailGenOverride = match_.attachments.get[DefaultOverrideMatchAttachment].map{case DefaultOverrideMatchAttachment(default) => ((scrut: Tree) => default)}
- val selectorSym = freshSym(selector.pos, pureType(selectorTp)) setFlag treeInfo.SYNTH_CASE_FLAGS
+ val selectorSym = freshSym(selector.pos, pureType(selectorTp)) setFlag treeInfo.SYNTH_CASE_FLAGS
// pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental
val combined = combineCases(selector, selectorSym, cases map translateCase(selectorSym, pt), pt, matchOwner, matchFailGenOverride)
- Statistics.stopTimer(patmatNanos, start)
+ if (Statistics.canEnable) Statistics.stopTimer(patmatNanos, start)
combined
}
@@ -327,8 +387,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def translatePattern(patBinder: Symbol, patTree: Tree): List[TreeMaker] = {
// a list of TreeMakers that encode `patTree`, and a list of arguments for recursive invocations of `translatePattern` to encode its subpatterns
type TranslationStep = (List[TreeMaker], List[(Symbol, Tree)])
- @inline def withSubPats(treeMakers: List[TreeMaker], subpats: (Symbol, Tree)*): TranslationStep = (treeMakers, subpats.toList)
- @inline def noFurtherSubPats(treeMakers: TreeMaker*): TranslationStep = (treeMakers.toList, Nil)
+ def withSubPats(treeMakers: List[TreeMaker], subpats: (Symbol, Tree)*): TranslationStep = (treeMakers, subpats.toList)
+ def noFurtherSubPats(treeMakers: TreeMaker*): TranslationStep = (treeMakers.toList, Nil)
val pos = patTree.pos
@@ -457,7 +517,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// case Star(_) | ArrayValue => error("stone age pattern relics encountered!")
case _ =>
- error("unsupported pattern: "+ patTree +"(a "+ patTree.getClass +")")
+ typer.context.unit.error(patTree.pos, s"unsupported pattern: $patTree (a ${patTree.getClass}).\n This is a scalac bug. Tree diagnostics: ${asCompactDebugString(patTree)}.")
noFurtherSubPats()
}
@@ -663,8 +723,15 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// binder has type paramType
def treeMaker(binder: Symbol, pos: Position): TreeMaker = {
+ val paramAccessors = binder.constrParamAccessors
+ // binders corresponding to mutable fields should be stored (SI-5158, SI-6070)
+ val mutableBinders =
+ if (paramAccessors exists (_.isMutable))
+ subPatBinders.zipWithIndex.collect{ case (binder, idx) if paramAccessors(idx).isMutable => binder }
+ else Nil
+
// checks binder ne null before chaining to the next extractor
- ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder))
+ ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders)
}
// reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
@@ -793,7 +860,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// since about half of the typedSubst's end up being no-ops, the check below shaves off 5% of the time spent in typedSubst
if (!tree.exists { case i@Ident(_) => from contains i.symbol case _ => false}) tree
else (new Transformer {
- @inline private def typedIfOrigTyped(to: Tree, origTp: Type): Tree =
+ private def typedIfOrigTyped(to: Tree, origTp: Type): Tree =
if (origTp == null || origTp == NoType) to
// important: only type when actually substing and when original tree was typed
// (don't need to use origTp as the expected type, though, and can't always do this anyway due to unknown type params stemming from polymorphic extractors)
@@ -926,10 +993,27 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
atPos(pos)(casegen.flatMapCond(cond, res, nextBinder, substitution(next)))
}
- trait PreserveSubPatBinders extends NoNewBinders {
+ // unless we're optimizing, emit local variable bindings for all subpatterns of extractor/case class patterns
+ protected val debugInfoEmitVars = !settings.optimise.value
+
+ trait PreserveSubPatBinders extends TreeMaker {
val subPatBinders: List[Symbol]
val subPatRefs: List[Tree]
+ // unless `debugInfoEmitVars`, this set should contain the bare minimum for correctness
+ // mutable case class fields need to be stored regardless (SI-5158, SI-6070) -- see override in ProductExtractorTreeMaker
+ def storedBinders: Set[Symbol] = if (debugInfoEmitVars) subPatBinders.toSet else Set.empty
+
+ def emitVars = storedBinders.nonEmpty
+
+ private lazy val (stored, substed) = (subPatBinders, subPatRefs).zipped.partition{ case (sym, _) => storedBinders(sym) }
+
+ protected lazy val localSubstitution: Substitution = if (!emitVars) Substitution(subPatBinders, subPatRefs)
+ else {
+ val (subPatBindersSubstituted, subPatRefsSubstituted) = substed.unzip
+ Substitution(subPatBindersSubstituted.toList, subPatRefsSubstituted.toList)
+ }
+
/** The substitution that specifies the trees that compute the values of the subpattern binders.
*
* We pretend to replace the subpattern binders by subpattern refs
@@ -939,7 +1023,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
Substitution(subPatBinders, subPatRefs) >> super.subPatternsAsSubstitution
import CODE._
- def bindSubPats(in: Tree): Tree = Block(map2(subPatBinders, subPatRefs)(VAL(_) === _), in)
+ def bindSubPats(in: Tree): Tree = if (!emitVars) in
+ else {
+ val (subPatBindersStored, subPatRefsStored) = stored.unzip
+ Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(VAL(_) === _), in)
+ }
}
/**
@@ -1000,11 +1088,16 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
*/
case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree])(
val subPatBinders: List[Symbol],
- val subPatRefs: List[Tree]) extends FunTreeMaker with PreserveSubPatBinders {
+ val subPatRefs: List[Tree],
+ val mutableBinders: List[Symbol]) extends FunTreeMaker with PreserveSubPatBinders {
import CODE._
val nextBinder = prevBinder // just passing through
+ // mutable binders must be stored to avoid unsoundness or seeing mutation of fields after matching (SI-5158, SI-6070)
+ // (the implementation could be optimized by duplicating code from `super.storedBinders`, but this seems more elegant)
+ override def storedBinders: Set[Symbol] = super.storedBinders ++ mutableBinders.toSet
+
def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val nullCheck = REF(prevBinder) OBJ_NE NULL
val cond = extraCond map (nullCheck AND _) getOrElse nullCheck
@@ -1043,13 +1136,14 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def outerTest(testedBinder: Symbol, expectedTp: Type): Tree = {
val expectedOuter = expectedTp.prefix match {
- case ThisType(clazz) => THIS(clazz)
- case pre => REF(pre.prefix, pre.termSymbol)
+ case ThisType(clazz) => THIS(clazz)
+ case pre if pre != NoType => REF(pre.prefix, pre.termSymbol)
+ case _ => TRUE_typed // fallback for SI-6183
}
// ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
// if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor?
- val outer = expectedTp.typeSymbol.newMethod(vpmName.outer) setInfo expectedTp.prefix setFlag SYNTHETIC | HIDDEN
+ val outer = expectedTp.typeSymbol.newMethod(vpmName.outer) setInfo expectedTp.prefix setFlag SYNTHETIC | ARTIFACT
(Select(codegen._asInstanceOf(testedBinder, expectedTp), outer)) OBJ_EQ expectedOuter
}
@@ -1114,7 +1208,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
else typeTest(testedBinder, expectedTp)
// propagate expected type
- @inline def expTp(t: Tree): t.type = t setType expectedTp
+ def expTp(t: Tree): t.type = t setType expectedTp
// true when called to type-test the argument to an extractor
// don't do any fancy equality checking, just test the type
@@ -1326,7 +1420,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// local / context-free
def _asInstanceOf(b: Symbol, tp: Type): Tree
- def _asInstanceOf(t: Tree, tp: Type, force: Boolean = false): Tree
+ def _asInstanceOf(t: Tree, tp: Type): Tree
def _equals(checker: Tree, binder: Symbol): Tree
def _isInstanceOf(b: Symbol, tp: Type): Tree
def and(a: Tree, b: Tree): Tree
@@ -1384,7 +1478,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
Typed(gen.mkAsInstanceOf(t, tp.withoutAnnotations, true, false), TypeTree() setType tp)
// the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly)
- def _asInstanceOf(t: Tree, tp: Type, force: Boolean = false): Tree = if (!force && (t.tpe ne NoType) && t.isTyped && typesConform(t.tpe, tp)) t else mkCast(t, tp)
+ def _asInstanceOf(t: Tree, tp: Type): Tree = if (t.tpe != NoType && t.isTyped && typesConform(t.tpe, tp)) t else mkCast(t, tp)
def _asInstanceOf(b: Symbol, tp: Type): Tree = if (typesConform(b.info, tp)) REF(b) else mkCast(REF(b), tp)
def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, true, false)
// if (typesConform(b.info, tpX)) { patmatDebug("warning: emitted spurious isInstanceOf: "+(b, tp)); TRUE }
@@ -1464,7 +1558,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
var currId = 0
}
case class Test(cond: Cond, treeMaker: TreeMaker) {
- // private val reusedBy = new collection.mutable.HashSet[Test]
+ // private val reusedBy = new scala.collection.mutable.HashSet[Test]
var reuses: Option[Test] = None
def registerReuseBy(later: Test): Unit = {
assert(later.reuses.isEmpty, later.reuses)
@@ -1493,16 +1587,16 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case class OrCond(a: Cond, b: Cond) extends Cond {override def toString = "("+a+") \\/ ("+ b +")"}
object EqualityCond {
- private val uniques = new collection.mutable.HashMap[(Tree, Tree), EqualityCond]
+ private val uniques = new scala.collection.mutable.HashMap[(Tree, Tree), EqualityCond]
def apply(testedPath: Tree, rhs: Tree): EqualityCond = uniques getOrElseUpdate((testedPath, rhs), new EqualityCond(testedPath, rhs))
- def unapply(c: EqualityCond) = Some(c.testedPath, c.rhs)
+ def unapply(c: EqualityCond) = Some((c.testedPath, c.rhs))
}
class EqualityCond(val testedPath: Tree, val rhs: Tree) extends Cond {
override def toString = testedPath +" == "+ rhs +"#"+ id
}
object NonNullCond {
- private val uniques = new collection.mutable.HashMap[Tree, NonNullCond]
+ private val uniques = new scala.collection.mutable.HashMap[Tree, NonNullCond]
def apply(testedPath: Tree): NonNullCond = uniques getOrElseUpdate(testedPath, new NonNullCond(testedPath))
def unapply(c: NonNullCond) = Some(c.testedPath)
}
@@ -1511,9 +1605,9 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
object TypeCond {
- private val uniques = new collection.mutable.HashMap[(Tree, Type), TypeCond]
+ private val uniques = new scala.collection.mutable.HashMap[(Tree, Type), TypeCond]
def apply(testedPath: Tree, pt: Type): TypeCond = uniques getOrElseUpdate((testedPath, pt), new TypeCond(testedPath, pt))
- def unapply(c: TypeCond) = Some(c.testedPath, c.pt)
+ def unapply(c: TypeCond) = Some((c.testedPath, c.pt))
}
class TypeCond(val testedPath: Tree, val pt: Type) extends Cond {
override def toString = testedPath +" : "+ pt +"#"+ id
@@ -1551,7 +1645,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def unapply(xtm: ExtractorTreeMaker): Option[(Tree, Symbol)] = xtm match {
case ExtractorTreeMaker(extractor, None, nextBinder) if irrefutableExtractorType(extractor.tpe) =>
- Some(extractor, nextBinder)
+ Some((extractor, nextBinder))
case _ =>
None
}
@@ -1560,8 +1654,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// returns (tree, tests), where `tree` will be used to refer to `root` in `tests`
class TreeMakersToConds(val root: Symbol) {
// a variable in this set should never be replaced by a tree that "does not consist of a selection on a variable in this set" (intuitively)
- private val pointsToBound = collection.mutable.HashSet(root)
- private val trees = collection.mutable.HashSet.empty[Tree]
+ private val pointsToBound = scala.collection.mutable.HashSet(root)
+ private val trees = scala.collection.mutable.HashSet.empty[Tree]
// the substitution that renames variables to variables in pointsToBound
private var normalize: Substitution = EmptySubstitution
@@ -1600,8 +1694,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
final def binderToUniqueTree(b: Symbol) =
unique(accumSubst(normalize(CODE.REF(b))), b.tpe)
- @inline def /\(conds: Iterable[Cond]) = if (conds.isEmpty) TrueCond else conds.reduceLeft(AndCond(_, _))
- @inline def \/(conds: Iterable[Cond]) = if (conds.isEmpty) FalseCond else conds.reduceLeft(OrCond(_, _))
+ def /\(conds: Iterable[Cond]) = if (conds.isEmpty) TrueCond else conds.reduceLeft(AndCond(_, _))
+ def \/(conds: Iterable[Cond]) = if (conds.isEmpty) FalseCond else conds.reduceLeft(OrCond(_, _))
// note that the sequencing of operations is important: must visit in same order as match execution
// binderToUniqueTree uses the type of the first symbol that was encountered as the type for all future binders
@@ -1753,20 +1847,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
class Prop
case class Eq(p: Var, q: Const) extends Prop
- type Const <: AbsConst
- trait AbsConst {
- // when we know V = C, which other equalities must hold
- // in general, equality to some type implies equality to its supertypes
- // (this multi-valued kind of equality is necessary for unreachability)
- // note that we use subtyping as a model for implication between instanceof tests
- // i.e., when S <:< T we assume x.isInstanceOf[S] implies x.isInstanceOf[T]
- // unfortunately this is not true in general (see e.g. SI-6022)
- def implies(other: Const): Boolean
-
- // does V = C preclude V having value `other`? V = null is an exclusive assignment,
- // but V = 1 does not preclude V = Int, or V = Any
- def excludes(other: Const): Boolean
- }
+ type Const
type TypeConst <: Const
def TypeConst: TypeConstExtractor
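
The removed `AbsConst` contract is still useful background for the new `implications` API below; a toy sketch of its implies/excludes reading (plain data, not the compiler's `Const`):

object ConstSemanticsSketch {
  // Equality to a type constant implies equality to its supertypes, while
  // `V = null` excludes any non-null value (but `V = 1` does not exclude V: Int or V: Any).
  sealed trait ToyConst
  case object NullConst extends ToyConst
  case class TypeConstOf(name: String, supers: Set[String]) extends ToyConst

  def implies(a: ToyConst, b: ToyConst): Boolean = (a, b) match {
    case (TypeConstOf(n1, s1), TypeConstOf(n2, _)) => n1 == n2 || (s1 contains n2)
    case (x, y)                                    => x == y
  }

  def excludes(a: ToyConst, b: ToyConst): Boolean = (a, b) match {
    case (NullConst, TypeConstOf(_, _)) | (TypeConstOf(_, _), NullConst) => true
    case _                                                               => false
  }
}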
@@ -1783,7 +1864,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def registerEquality(c: Const): Unit
// call this to indicate null is part of the domain
- def registerNull: Unit
+ def registerNull(): Unit
// can this variable be null?
def mayBeNull: Boolean
@@ -1801,8 +1882,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def propForEqualsTo(c: Const): Prop
// populated by registerEquality
- // once equalitySyms has been called, must not call registerEquality anymore
- def equalitySyms: List[Sym]
+ // once implications has been called, must not call registerEquality anymore
+ def implications: List[(Sym, List[Sym], List[Sym])]
}
// would be nice to statically check whether a prop is equational or pure,
@@ -1822,8 +1903,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
private def nextSymId = {_symId += 1; _symId}; private var _symId = 0
- @inline def /\(props: Iterable[Prop]) = if (props.isEmpty) True else props.reduceLeft(And(_, _))
- @inline def \/(props: Iterable[Prop]) = if (props.isEmpty) False else props.reduceLeft(Or(_, _))
+ def /\(props: Iterable[Prop]) = if (props.isEmpty) True else props.reduceLeft(And(_, _))
+ def \/(props: Iterable[Prop]) = if (props.isEmpty) False else props.reduceLeft(Or(_, _))
trait PropTraverser {
@@ -1873,9 +1954,9 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// TODO: for V1 representing x1 and V2 standing for x1.head, encode that
// V1 = Nil implies -(V2 = Ci) for all Ci in V2's domain (i.e., it is unassignable)
def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Prop, List[Prop]) = {
- val start = Statistics.startTimer(patmatAnaVarEq)
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaVarEq) else null
- val vars = new collection.mutable.HashSet[Var]
+ val vars = new scala.collection.mutable.HashSet[Var]
object gatherEqualities extends PropTraverser {
override def apply(p: Prop) = p match {
@@ -1899,21 +1980,10 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val pure = props map rewriteEqualsToProp.apply
var eqAxioms: Prop = True
- @inline def addAxiom(p: Prop) = eqAxioms = And(eqAxioms, p)
-
- case class ExcludedPair(a: Const, b: Const) {
- override def equals(o: Any) = o match {
- case ExcludedPair(aa, bb) => (a == aa && b == bb) || (a == bb && b == aa)
- case _ => false
- }
- // make ExcludedPair(a, b).hashCode == ExcludedPair(b, a).hashCode
- override def hashCode = a.hashCode ^ b.hashCode
- }
+ def addAxiom(p: Prop) = eqAxioms = And(eqAxioms, p)
patmatDebug("removeVarEq vars: "+ vars)
vars.foreach { v =>
- val excludedPair = new collection.mutable.HashSet[ExcludedPair]
-
// if v.domainSyms.isEmpty, we must consider the domain to be infinite
// otherwise, since the domain fully partitions the type of the value,
// exactly one of the types (and whatever it implies, imposed separately) must be chosen
@@ -1928,34 +1998,18 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
else addAxiom(symForStaticTp)
}
- val syms = v.equalitySyms
- patmatDebug("eqSyms "+(v, syms))
- syms foreach { sym =>
- // if we've already excluded the pair at some point (-A \/ -B), then don't exclude the symmetric one (-B \/ -A)
- // (nor the positive implications -B \/ A, or -A \/ B, which would entail the equality axioms falsifying the whole formula)
- val todo = syms filterNot (b => (b.const == sym.const) || excludedPair(ExcludedPair(b.const, sym.const)))
- val (excluded, notExcluded) = todo partition (b => sym.const.excludes(b.const))
- val implied = notExcluded filter (b => sym.const.implies(b.const))
-
- patmatDebug("eq axioms for: "+ sym.const)
- patmatDebug("excluded: "+ excluded)
- patmatDebug("implied: "+ implied)
-
- // when this symbol is true, what must hold...
- implied foreach (impliedSym => addAxiom(Or(Not(sym), impliedSym)))
-
+ v.implications foreach { case (sym, implied, excluded) =>
+ // when sym is true, what must hold...
+ implied foreach (impliedSym => addAxiom(Or(Not(sym), impliedSym)))
// ... and what must not?
- excluded foreach {excludedSym =>
- excludedPair += ExcludedPair(sym.const, excludedSym.const)
- addAxiom(Or(Not(sym), Not(excludedSym)))
- }
+ excluded foreach (excludedSym => addAxiom(Or(Not(sym), Not(excludedSym))))
}
}
patmatDebug("eqAxioms:\n"+ cnfString(eqFreePropToSolvable(eqAxioms)))
patmatDebug("pure:"+ pure.map(p => cnfString(eqFreePropToSolvable(p))).mkString("\n"))
- Statistics.stopTimer(patmatAnaVarEq, start)
+ if (Statistics.canEnable) Statistics.stopTimer(patmatAnaVarEq, start)
(eqAxioms, pure)
}
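To make the per-variable axioms concrete: for a variable V whose closed domain is {"a", "b"}, the loop above adds the domain disjunction and, via the implications/exclusions, a mutual-exclusion clause. A toy rendering as clauses over Int literals (positive = symbol true, negative = negated; s1/s2 are hypothetical symbols, not compiler objects):

object VarAxiomsSketch extends App {
  type Clause = Set[Int]
  val s1 = 1                              // stands for the symbol V = "a"
  val s2 = 2                              // stands for the symbol V = "b"
  val domain: Clause    = Set(s1, s2)     // closed domain: (V = "a") \/ (V = "b")
  val exclusion: Clause = Set(-s1, -s2)   // mutual exclusion: -(V = "a") \/ -(V = "b")
  println(List(domain, exclusion))        // List(Set(1, 2), Set(-1, -2))
}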
@@ -1986,33 +2040,46 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
trait CNF extends Logic {
// CNF: a formula is a conjunction of clauses
type Formula = Array[Clause]
+ /** Override Array creation for efficiency (to not go through reflection). */
+ private implicit val formulaTag: scala.reflect.ClassTag[Formula] = new scala.reflect.ClassTag[Formula] {
+ def runtimeClass: java.lang.Class[Formula] = classOf[Formula]
+ final override def newArray(len: Int): Array[Formula] = new Array[Formula](len)
+ }
def formula(c: Clause*): Formula = c.toArray
def andFormula(a: Formula, b: Formula): Formula = a ++ b
// a clause is a disjunction of distinct literals
type Clause = Set[Lit]
def clause(l: Lit*): Clause = l.toSet
- @inline private def merge(a: Clause, b: Clause) = a ++ b
+ private def merge(a: Clause, b: Clause) = a ++ b
type Lit
def Lit(sym: Sym, pos: Boolean = true): Lit
// throws an AnalysisBudget.Exception when the prop results in a CNF that's too big
+ // TODO: be smarter/more efficient about this (http://lara.epfl.ch/w/sav09:tseitin_s_encoding)
def eqFreePropToSolvable(p: Prop): Formula = {
- // TODO: for now, reusing the normalization from DPLL
- def negationNormalForm(p: Prop): Prop = p match {
- case And(a, b) => And(negationNormalForm(a), negationNormalForm(b))
- case Or(a, b) => Or(negationNormalForm(a), negationNormalForm(b))
- case Not(And(a, b)) => negationNormalForm(Or(Not(a), Not(b)))
- case Not(Or(a, b)) => negationNormalForm(And(Not(a), Not(b)))
- case Not(Not(p)) => negationNormalForm(p)
- case Not(True) => False
- case Not(False) => True
- case True
- | False
- | (_ : Sym)
- | Not(_ : Sym) => p
- }
+ def negationNormalFormNot(p: Prop, budget: Int = AnalysisBudget.max): Prop =
+ if (budget <= 0) throw AnalysisBudget.exceeded
+ else p match {
+ case And(a, b) => Or(negationNormalFormNot(a, budget - 1), negationNormalFormNot(b, budget - 1))
+ case Or(a, b) => And(negationNormalFormNot(a, budget - 1), negationNormalFormNot(b, budget - 1))
+ case Not(p) => negationNormalForm(p, budget - 1)
+ case True => False
+ case False => True
+ case s: Sym => Not(s)
+ }
+
+ def negationNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Prop =
+ if (budget <= 0) throw AnalysisBudget.exceeded
+ else p match {
+ case And(a, b) => And(negationNormalForm(a, budget - 1), negationNormalForm(b, budget - 1))
+ case Or(a, b) => Or(negationNormalForm(a, budget - 1), negationNormalForm(b, budget - 1))
+ case Not(negated) => negationNormalFormNot(negated, budget - 1)
+ case True
+ | False
+ | (_ : Sym) => p
+ }
val TrueF = formula()
val FalseF = formula(clause())
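A self-contained sketch of the budgeted negation-normal-form step above, over a toy Prop ADT (names are illustrative stand-ins for the compiler's types; the budget plays the role of AnalysisBudget.max, and require stands in for AnalysisBudget.exceeded):

object NnfSketch extends App {
  sealed trait Prop
  case class And(a: Prop, b: Prop) extends Prop
  case class Or(a: Prop, b: Prop)  extends Prop
  case class Not(p: Prop)          extends Prop
  case class Sym(name: String)     extends Prop
  case object True  extends Prop
  case object False extends Prop

  // push negations inward; the budget guards against pathological blow-up
  def nnf(p: Prop, budget: Int = 256): Prop = {
    require(budget > 0, "analysis budget exceeded")
    p match {
      case And(a, b)      => And(nnf(a, budget - 1), nnf(b, budget - 1))
      case Or(a, b)       => Or(nnf(a, budget - 1), nnf(b, budget - 1))
      case Not(Not(q))    => nnf(q, budget - 1)
      case Not(And(a, b)) => Or(nnf(Not(a), budget - 1), nnf(Not(b), budget - 1))
      case Not(Or(a, b))  => And(nnf(Not(a), budget - 1), nnf(Not(b), budget - 1))
      case Not(True)      => False
      case Not(False)     => True
      case _              => p   // True, False, Sym, Not(Sym) are already in NNF
    }
  }

  println(nnf(Not(And(Sym("a"), Or(Sym("b"), Not(Sym("c")))))))
  // Or(Not(Sym(a)),And(Not(Sym(b)),Sym(c)))
}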
@@ -2054,18 +2121,13 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
- val start = Statistics.startTimer(patmatCNF)
- val res =
- try {
- conjunctiveNormalForm(negationNormalForm(p))
- } catch { case ex : StackOverflowError =>
- throw AnalysisBudget.stackOverflow
- }
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatCNF) else null
+ val res = conjunctiveNormalForm(negationNormalForm(p))
- Statistics.stopTimer(patmatCNF, start)
+ if (Statistics.canEnable) Statistics.stopTimer(patmatCNF, start)
//
- if (Statistics.enabled) patmatCNFSizes(res.size).value += 1
+ if (Statistics.canEnable) patmatCNFSizes(res.size).value += 1
// patmatDebug("cnf for\n"+ p +"\nis:\n"+cnfString(res))
res
@@ -2127,8 +2189,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
findAllModels(f, Nil)
}
- @inline private def withLit(res: Model, l: Lit): Model = if (res eq NoModel) NoModel else res + (l.sym -> l.pos)
- @inline private def dropUnit(f: Formula, unitLit: Lit) = {
+ private def withLit(res: Model, l: Lit): Model = if (res eq NoModel) NoModel else res + (l.sym -> l.pos)
+ private def dropUnit(f: Formula, unitLit: Lit) = {
val negated = -unitLit
// drop entire clauses that are trivially true
// (i.e., disjunctions that contain the literal we're making true in the returned model),
@@ -2142,7 +2204,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
patmatDebug("DPLL\n"+ cnfString(f))
- val start = Statistics.startTimer(patmatAnaDPLL)
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaDPLL) else null
val satisfiableWithModel: Model =
if (f isEmpty) EmptyModel
@@ -2180,7 +2242,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
- Statistics.stopTimer(patmatAnaDPLL, start)
+ if (Statistics.canEnable) Statistics.stopTimer(patmatAnaDPLL, start)
satisfiableWithModel
}
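The dropUnit helper above is plain unit propagation; a minimal stand-alone version over Int literals (positive = literal true, negative = negated), not the compiler's Formula/Lit types:

object UnitPropSketch extends App {
  type Clause = Set[Int]

  def dropUnit(f: List[Clause], unit: Int): List[Clause] = {
    val remaining = f filterNot (_ contains unit)  // clauses satisfied by the unit literal are dropped entirely
    remaining map (_ - (-unit))                    // the now-false negated literal disappears from the rest
  }

  val f = List(Set(1, 2), Set(-1, 3), Set(-1, -2))
  println(dropUnit(f, 1))  // List(Set(3), Set(-2))
}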
@@ -2199,25 +2261,25 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def nextId = {_nextId += 1; _nextId}
def resetUniques() = {_nextId = 0; uniques.clear()}
- private val uniques = new collection.mutable.HashMap[Tree, Var]
+ private val uniques = new scala.collection.mutable.HashMap[Tree, Var]
def apply(x: Tree): Var = uniques getOrElseUpdate(x, new Var(x, x.tpe))
}
class Var(val path: Tree, staticTp: Type) extends AbsVar {
private[this] val id: Int = Var.nextId
// private[this] var canModify: Option[Array[StackTraceElement]] = None
- @inline private[this] def ensureCanModify = {} //if (canModify.nonEmpty) patmatDebug("BUG!"+ this +" modified after having been observed: "+ canModify.get.mkString("\n"))
+ private[this] def ensureCanModify = {} //if (canModify.nonEmpty) patmatDebug("BUG!"+ this +" modified after having been observed: "+ canModify.get.mkString("\n"))
- @inline private[this] def observed = {} //canModify = Some(Thread.currentThread.getStackTrace)
+ private[this] def observed = {} //canModify = Some(Thread.currentThread.getStackTrace)
// don't access until all potential equalities have been registered using registerEquality
- private[this] val symForEqualsTo = new collection.mutable.HashMap[Const, Sym]
+ private[this] val symForEqualsTo = new scala.collection.mutable.HashMap[Const, Sym]
// when looking at the domain, we only care about types we can check at run time
val staticTpCheckable: Type = checkableType(staticTp)
private[this] var _mayBeNull = false
- def registerNull: Unit = { ensureCanModify; if (NullTp <:< staticTpCheckable) _mayBeNull = true }
+ def registerNull(): Unit = { ensureCanModify; if (NullTp <:< staticTpCheckable) _mayBeNull = true }
def mayBeNull: Boolean = _mayBeNull
// case None => domain is unknown,
@@ -2244,22 +2306,121 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
observed; allConsts
}
- // accessing after calling registerNull will result in inconsistencies
- lazy val domainSyms: Option[Set[Sym]] = domain map { _ map symForEqualsTo }
-
- lazy val symForStaticTp: Option[Sym] = symForEqualsTo.get(TypeConst(staticTpCheckable))
-
// populate equalitySyms
// don't care about the result, but want only one fresh symbol per distinct constant c
def registerEquality(c: Const): Unit = {ensureCanModify; symForEqualsTo getOrElseUpdate(c, Sym(this, c))}
- // don't access until all potential equalities have been registered using registerEquality
- lazy val equalitySyms = {observed; symForEqualsTo.values.toList}
-
// return the symbol that represents this variable being equal to the constant `c`, if it exists, otherwise False (for robustness)
// (registerEquality(c) must have been called prior, either when constructing the domain or from outside)
def propForEqualsTo(c: Const): Prop = {observed; symForEqualsTo.getOrElse(c, False)}
+ // [implementation NOTE: don't access until all potential equalities have been registered using registerEquality]
+ /** the information needed to construct the boolean proposition that encodes the equality proposition (V = C)
+ *
+ * that models a type test pattern `_: C` or constant pattern `C`, where the type test gives rise to a TypeConst C,
+ * and the constant pattern yields a ValueConst C
+ *
+ * for exhaustivity, we really only need implication (e.g., V = 1 implies that V = 1 /\ V = Int, if both tests occur in the match,
+ * and thus in this variable's equality symbols), but reachability also requires us to model things like V = 1 precluding V = "1"
+ */
+ lazy val implications = {
+ /** when we know V = C, which other equalities must hold
+ *
+ * in general, equality to some type implies equality to its supertypes
+ * (this multi-valued kind of equality is necessary for unreachability)
+ * note that we use subtyping as a model for implication between instanceof tests
+ * i.e., when S <:< T we assume x.isInstanceOf[S] implies x.isInstanceOf[T]
+ * unfortunately this is not true in general (see e.g. SI-6022)
+ */
+ def implies(lower: Const, upper: Const): Boolean =
+ // values and null
+ lower == upper ||
+ // type implication
+ (lower != NullConst && !upper.isValue &&
+ instanceOfTpImplies(if (lower.isValue) lower.wideTp else lower.tp, upper.tp))
+
+ // if(r) patmatDebug("implies : "+(lower, lower.tp, upper, upper.tp))
+ // else patmatDebug("NOT implies: "+(lower, upper))
+
+
+ /** does V = C preclude V having value `other`?
+ (1) V = null is an exclusive assignment,
+ (2) V = A and V = B, for A and B value constants, are mutually exclusive unless A == B
+ we err on the safe side, for example:
+ - assume `val X = 1; val Y = 1`, then
+ (2: Int) match { case X => case Y => <falsely considered reachable> }
+ - V = 1 does not preclude V = Int, or V = Any, it could be said to preclude V = String, but we don't model that
+
+ (3) for types we could try to do something fancy, but be conservative and just say no
+ */
+ def excludes(a: Const, b: Const): Boolean =
+ a != b && ((a == NullConst || b == NullConst) || (a.isValue && b.isValue))
+
+ // if(r) patmatDebug("excludes : "+(a, a.tp, b, b.tp))
+ // else patmatDebug("NOT excludes: "+(a, b))
+
+/*
+[ HALF BAKED FANCINESS: //!equalitySyms.exists(common => implies(common.const, a) && implies(common.const, b)))
+ when type tests are involved, we reason (conservatively) under a closed world assumption,
+ since we are really only trying to counter the effects of the symbols that we introduce to model type tests
+ we don't aim to model the whole subtyping hierarchy, simply to encode enough about subtyping to do unreachability properly
+
+ consider the following hierarchy:
+
+ trait A
+ trait B
+ trait C
+ trait AB extends B with A
+
+ // two types are mutually exclusive if there is no equality symbol whose constant implies both
+ object Test extends App {
+ def foo(x: Any) = x match {
+ case _ : C => println("C")
+ case _ : AB => println("AB")
+ case _ : (A with B) => println("AB'")
+ case _ : B => println("B")
+ case _ : A => println("A")
+ }
+
+ } // close object Test
+
+ of course this kind of reasoning is not true in general,
+ but we can safely pretend types are mutually exclusive as long as there are no counter-examples in the match we're analyzing.
+*/
+
+ val excludedPair = new scala.collection.mutable.HashSet[ExcludedPair]
+
+ case class ExcludedPair(a: Const, b: Const) {
+ override def equals(o: Any) = o match {
+ case ExcludedPair(aa, bb) => (a == aa && b == bb) || (a == bb && b == aa)
+ case _ => false
+ }
+ // make ExcludedPair(a, b).hashCode == ExcludedPair(b, a).hashCode
+ override def hashCode = a.hashCode ^ b.hashCode
+ }
+
+ equalitySyms map { sym =>
+ // if we've already excluded the pair at some point (-A \/ -B), then don't exclude the symmetric one (-B \/ -A)
+ // (nor the positive implications -B \/ A, or -A \/ B, which would entail the equality axioms falsifying the whole formula)
+ val todo = equalitySyms filterNot (b => (b.const == sym.const) || excludedPair(ExcludedPair(b.const, sym.const)))
+ val (excluded, notExcluded) = todo partition (b => excludes(sym.const, b.const))
+ val implied = notExcluded filter (b => implies(sym.const, b.const))
+
+ patmatDebug("eq axioms for: "+ sym.const)
+ patmatDebug("excluded: "+ excluded)
+ patmatDebug("implied: "+ implied)
+
+ excluded foreach { excludedSym => excludedPair += ExcludedPair(sym.const, excludedSym.const)}
+
+ (sym, implied, excluded)
+ }
+ }
+
+ // accessing after calling registerNull will result in inconsistencies
+ lazy val domainSyms: Option[Set[Sym]] = domain map { _ map symForEqualsTo }
+
+ lazy val symForStaticTp: Option[Sym] = symForEqualsTo.get(TypeConst(staticTpCheckable))
+
+ // don't access until all potential equalities have been registered using registerEquality
+ private lazy val equalitySyms = {observed; symForEqualsTo.values.toList}
// don't call until all equalities have been registered and registerNull has been called (if needed)
def describe = toString + ": " + staticTp + domain.map(_.mkString(" ::= ", " | ", "// "+ symForEqualsTo.keys)).getOrElse(symForEqualsTo.keys.mkString(" ::= ", " | ", " | ...")) + " // = " + path
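To make the implies/excludes rules above concrete, here is a stand-alone toy version that substitutes JVM Class assignability for instanceOfTpImplies on compiler Types (all names below are illustrative, not the compiler's):

object ConstRelationsSketch extends App {
  sealed trait Const
  case class ValueConst(value: Any, clazz: Class[_]) extends Const
  case class TypeConst(clazz: Class[_])              extends Const
  case object NullConst                              extends Const

  def isValue(c: Const): Boolean = c match {
    case _: ValueConst => true
    case NullConst     => true
    case _             => false
  }
  def clazzOf(c: Const): Class[_] = c match {
    case ValueConst(_, cl) => cl
    case TypeConst(cl)     => cl
    case NullConst         => classOf[AnyRef] // never consulted below (guarded by lower != NullConst)
  }
  // V = lower implies V = upper (e.g. V = 1 also makes the type test for Int succeed)
  def implies(lower: Const, upper: Const): Boolean =
    lower == upper ||
      (lower != NullConst && !isValue(upper) &&
        clazzOf(upper).isAssignableFrom(clazzOf(lower)))
  // V = a precludes V = b: null is exclusive, and distinct value constants exclude each other
  def excludes(a: Const, b: Const): Boolean =
    a != b && ((a == NullConst || b == NullConst) || (isValue(a) && isValue(b)))

  val one   = ValueConst(1, classOf[Integer])
  val two   = ValueConst(2, classOf[Integer])
  val intTp = TypeConst(classOf[Integer])
  val strTp = TypeConst(classOf[String])
  println(implies(one, intTp))   // true : V = 1 implies the Int type test holds
  println(excludes(one, two))    // true : V = 1 and V = 2 cannot hold together
  println(excludes(one, intTp))  // false: V = 1 does not preclude V: Int
  println(implies(intTp, strTp)) // false: unrelated classes
}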
@@ -2279,7 +2440,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
private var _nextValueId = 0
def nextValueId = {_nextValueId += 1; _nextValueId}
- private val uniques = new collection.mutable.HashMap[Type, Const]
+ private val uniques = new scala.collection.mutable.HashMap[Type, Const]
private[SymbolicMatchAnalysis] def unique(tp: Type, mkFresh: => Const): Const =
uniques.get(tp).getOrElse(
uniques.find {case (oldTp, oldC) => oldTp =:= tp} match {
@@ -2293,7 +2454,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
fresh
})
- private val trees = collection.mutable.HashSet.empty[Tree]
+ private val trees = scala.collection.mutable.HashSet.empty[Tree]
// hashconsing trees (modulo value-equality)
private[SymbolicMatchAnalysis] def uniqueTpForTree(t: Tree): Type =
@@ -2315,42 +2476,12 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
- sealed abstract class Const extends AbsConst {
+ sealed abstract class Const {
def tp: Type
- protected def wideTp: Type
+ def wideTp: Type
def isAny = wideTp.typeSymbol == AnyClass
-
- final def implies(other: Const): Boolean = {
- val r = (this, other) match {
- case (_: ValueConst, _: ValueConst) => this == other // hashconsed
- case (_: ValueConst, _: TypeConst) => instanceOfTpImplies(tp, other.tp)
- case (_: TypeConst, _) => instanceOfTpImplies(tp, other.tp)
- case _ => false
- }
- // if(r) patmatDebug("implies : "+(this, other))
- // else patmatDebug("NOT implies: "+(this, other))
- r
- }
-
- // does V = C preclude V having value `other`? V = null is an exclusive assignment,
- // but V = 1 does not preclude V = Int, or V = Any
- final def excludes(other: Const): Boolean = {
- val r = (this, other) match {
- case (_, NullConst) => true
- case (NullConst, _) => true
- // this causes false negative for unreachability, but that's ok:
- // example: val X = 1; val Y = 1; (2: Int) match { case X => case Y => /* considered reachable */ }
- case (_: ValueConst, _: ValueConst) => this != other
- case (_: ValueConst, _: TypeConst) => !(instanceOfTpImplies(tp, other.tp) || instanceOfTpImplies(other.tp, wideTp))
- case (_: TypeConst, _: ValueConst) => !(instanceOfTpImplies(other.tp, tp) || instanceOfTpImplies(tp, other.wideTp))
- case (_: TypeConst, _: TypeConst) => !(instanceOfTpImplies(tp, other.tp) || instanceOfTpImplies(other.tp, tp))
- case _ => false
- }
- // if(r) patmatDebug("excludes : "+(this, this.tp, other, other.tp))
- // else patmatDebug("NOT excludes: "+(this, other))
- r
- }
+ def isValue: Boolean //= tp.isStable
// note: use reference equality on Const since they're hash-consed (doing type equality all the time is too expensive)
// the equals inherited from AnyRef does just this
@@ -2362,15 +2493,9 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// e.g., when we know some value must be of type T, can it still be of type S? (this is the positive formulation of what `excludes` on Const computes)
// since we're talking values, there must have been a class involved in creating it, so rephrase our types in terms of classes
// (At least conceptually: `true` is an instance of class `Boolean`)
- private def widenToClass(tp: Type) = {
- // getOrElse to err on the safe side -- all BTS should end in Any, right?
- val wideTp = tp.widen
- val clsTp =
- if (wideTp.typeSymbol.isClass) wideTp
- else wideTp.baseTypeSeq.toList.find(_.typeSymbol.isClass).getOrElse(AnyClass.tpe)
- // patmatDebug("Widening to class: "+ (tp, clsTp, tp.widen, tp.widen.baseTypeSeq, tp.widen.baseTypeSeq.toList.find(_.typeSymbol.isClass)))
- clsTp
- }
+ private def widenToClass(tp: Type): Type =
+ if (tp.typeSymbol.isClass) tp
+ else tp.baseType(tp.baseClasses.head)
object TypeConst extends TypeConstExtractor {
def apply(tp: Type) = {
@@ -2387,7 +2512,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
private[this] val id: Int = Const.nextTypeId
val wideTp = widenToClass(tp)
-
+ def isValue = false
override def toString = tp.toString //+"#"+ id
}
@@ -2431,14 +2556,15 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
sealed class ValueConst(val tp: Type, val wideTp: Type, override val toString: String) extends Const {
// patmatDebug("VC"+(tp, wideTp, toString))
- assert(!(tp =:= NullTp))
+ assert(!(tp =:= NullTp)) // TODO: assert(!tp.isStable)
private[this] val id: Int = Const.nextValueId
+ def isValue = true
}
lazy val NullTp = ConstantType(Constant(null))
case object NullConst extends Const {
def tp = NullTp
- protected def wideTp = NullTp
+ def wideTp = NullTp
def isValue = true
override def toString = "null"
@@ -2477,7 +2603,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// thus, the case is unreachable if there is no model for -(-P /\ C),
// or, equivalently, P \/ -C, or C => P
def unreachableCase(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Int] = {
- val start = Statistics.startTimer(patmatAnaReach)
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaReach) else null
// use the same approximator so we share variables,
// but need different conditions depending on whether we're conservatively looking for failure or success
@@ -2509,7 +2635,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
var reachable = true
var caseIndex = 0
- patmatDebug("reachability, vars:\n"+ ((propsCasesFail flatMap gatherVariables) map (_.describe) mkString ("\n")))
+ patmatDebug("reachability, vars:\n"+ ((propsCasesFail flatMap gatherVariables).distinct map (_.describe) mkString ("\n")))
patmatDebug("equality axioms:\n"+ cnfString(eqAxiomsCNF))
// invariant (prefixRest.length == current.length) && (prefix.reverse ++ prefixRest == symbolicCasesFail)
@@ -2524,14 +2650,14 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
current = current.tail
val model = findModelFor(andFormula(eqFreePropToSolvable(current.head), prefix))
- // patmatDebug("trying to reach:\n"+ cnfString(current.head) +"\nunder prefix:\n"+ cnfString(prefix))
- // if (ok) patmatDebug("reached: "+ modelString(model))
+ // patmatDebug("trying to reach:\n"+ cnfString(eqFreePropToSolvable(current.head)) +"\nunder prefix:\n"+ cnfString(prefix))
+ // if (NoModel ne model) patmatDebug("reached: "+ modelString(model))
- reachable = model ne NoModel
+ reachable = NoModel ne model
}
}
- Statistics.stopTimer(patmatAnaReach, start)
+ if (Statistics.canEnable) Statistics.stopTimer(patmatAnaReach, start)
if (reachable) None else Some(caseIndex)
} catch {
@@ -2550,7 +2676,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case UnitClass =>
Some(List(UnitClass.tpe))
case BooleanClass =>
- Some(List(ConstantType(Constant(true)), ConstantType(Constant(false))))
+ Some((List(ConstantType(Constant(true)), ConstantType(Constant(false)))))
// TODO case _ if tp.isTupleType => // recurse into component types
case modSym: ModuleClassSymbol =>
Some(List(tp))
@@ -2589,17 +2715,19 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// TODO: this is subject to the availability of TypeTags (since an abstract type with a type tag is checkable at run time)
def checkableType(tp: Type): Type = {
// TODO: this is extremely rough...
- object toCheckable extends TypeMap {
- def apply(tp: Type) = tp match {
- case TypeRef(pre, sym, a :: as) if sym ne ArrayClass =>
- // replace type args by existentials, since they can't be checked
- // TODO: when type tags are available, we will check -- when this is implemented, can we take that into account here?
- // TODO: don't reuse sym.typeParams, they have bounds (and those must not be considered)
- newExistentialType(sym.typeParams, sym.tpe).asSeenFrom(pre, sym.owner)
- case _ => mapOver(tp)
+ // replace type args by wildcards, since they can't be checked (don't use existentials: overkill)
+ // TODO: when type tags are available, we will check -- when this is implemented, can we take that into account here?
+ // similar to typer.infer.approximateAbstracts
+ object typeArgsToWildcardsExceptArray extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(pre, sym, args) if args.nonEmpty && (sym ne ArrayClass) =>
+ TypeRef(pre, sym, args map (_ => WildcardType))
+ case _ =>
+ mapOver(tp)
}
}
- val res = toCheckable(tp)
+
+ val res = typeArgsToWildcardsExceptArray(tp)
patmatDebug("checkable "+(tp, res))
res
}
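For context on why type arguments are widened to wildcards here, a user-level illustration of the erasure that makes them uncheckable (plain Scala, not part of the patch):

object ErasureSketch {
  def isIntList(x: Any): Boolean = x match {
    case _: List[Int] => true   // unchecked warning: Int is erased, only the List class is tested
    case _            => false
  }
  // isIntList(List("a")) evaluates to true at runtime, which is exactly why the
  // analysis models this pattern as List[_] rather than List[Int]
}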
@@ -2607,7 +2735,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// a type is "uncheckable" (for exhaustivity) if we don't statically know its subtypes (i.e., it's unsealed)
// we consider tuple types with at least one component of a checkable type as a checkable type
def uncheckableType(tp: Type): Boolean = {
- @inline def tupleComponents(tp: Type) = tp.normalize.typeArgs
+ def tupleComponents(tp: Type) = tp.normalize.typeArgs
val checkable = (
(isTupleType(tp) && tupleComponents(tp).exists(tp => !uncheckableType(tp)))
|| enumerateSubtypes(tp).nonEmpty)
@@ -2622,7 +2750,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// - back off (to avoid crying exhaustive too often) when:
// - there are guards -->
// - there are extractor calls (that we can't secretly/soundly) rewrite
- val start = Statistics.startTimer(patmatAnaExhaust)
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaExhaust) else null
var backoff = false
val approx = new TreeMakersToConds(prevBinder)
@@ -2674,7 +2802,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val pruned = CounterExample.prune(counterExamples).map(_.toString).sorted
- Statistics.stopTimer(patmatAnaExhaust, start)
+ if (Statistics.canEnable) Statistics.stopTimer(patmatAnaExhaust, start)
pruned
} catch {
case ex : AnalysisBudget.Exception =>
@@ -2740,7 +2868,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case object WildcardExample extends CounterExample { override def toString = "_" }
case object NoExample extends CounterExample { override def toString = "??" }
- @inline def modelToVarAssignment(model: Model): Map[Var, (Seq[Const], Seq[Const])] =
+ def modelToVarAssignment(model: Model): Map[Var, (Seq[Const], Seq[Const])] =
model.toSeq.groupBy{f => f match {case (sym, value) => sym.variable} }.mapValues{ xs =>
val (trues, falses) = xs.partition(_._2)
(trues map (_._1.const), falses map (_._1.const))
@@ -2787,7 +2915,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case _ => varAssignment.find{case (v, a) => chop(v.path) == path}.map(_._1)
}
- private val uniques = new collection.mutable.HashMap[Var, VariableAssignment]
+ private val uniques = new scala.collection.mutable.HashMap[Var, VariableAssignment]
private def unique(variable: Var): VariableAssignment =
uniques.getOrElseUpdate(variable, {
val (eqTo, neqTo) = varAssignment.getOrElse(variable, (Nil, Nil)) // TODO
@@ -2813,9 +2941,9 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
// node in the tree that describes how to construct a counter-example
- case class VariableAssignment(variable: Var, equalTo: List[Const], notEqualTo: List[Const], fields: collection.mutable.Map[Symbol, VariableAssignment]) {
+ case class VariableAssignment(variable: Var, equalTo: List[Const], notEqualTo: List[Const], fields: scala.collection.mutable.Map[Symbol, VariableAssignment]) {
// need to prune since the model now incorporates all super types of a constant (needed for reachability)
- private lazy val uniqueEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && (better implies subsumed)))
+ private lazy val uniqueEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && instanceOfTpImplies(better.tp, subsumed.tp)))
private lazy val prunedEqualTo = uniqueEqualTo filterNot (subsumed => variable.staticTpCheckable <:< subsumed.tp)
private lazy val ctor = (prunedEqualTo match { case List(TypeConst(tp)) => tp case _ => variable.staticTpCheckable }).typeSymbol.primaryConstructor
private lazy val ctorParams = if (ctor == NoSymbol || ctor.paramss.isEmpty) Nil else ctor.paramss.head
@@ -2846,7 +2974,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
( uniqueEqualTo.nonEmpty
|| (fields.nonEmpty && prunedEqualTo.isEmpty && notEqualTo.isEmpty)) =>
- @inline def args(brevity: Boolean = beBrief) = {
+ def args(brevity: Boolean = beBrief) = {
// figure out the constructor arguments from the field assignment
val argLen = (caseFieldAccs.length min ctorParams.length)
@@ -2906,8 +3034,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val testss = approximateMatchConservative(prevBinder, cases)
// interpret:
- val dependencies = new collection.mutable.LinkedHashMap[Test, Set[Cond]]
- val tested = new collection.mutable.HashSet[Cond]
+ val dependencies = new scala.collection.mutable.LinkedHashMap[Test, Set[Cond]]
+ val tested = new scala.collection.mutable.HashSet[Cond]
def storeDependencies(test: Test) = {
val cond = test.cond
@@ -2955,7 +3083,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// then, collapse these contiguous sequences of reusing tests
// store the result of the final test and the intermediate results in hoisted mutable variables (TODO: optimize: don't store intermediate results that aren't used)
// replace each reference to a variable originally bound by a collapsed test by a reference to the hoisted variable
- val reused = new collection.mutable.HashMap[TreeMaker, ReusedCondTreeMaker]
+ val reused = new scala.collection.mutable.HashMap[TreeMaker, ReusedCondTreeMaker]
var okToCall = false
val reusedOrOrig = (tm: TreeMaker) => {assert(okToCall); reused.getOrElse(tm, tm)}
@@ -3189,7 +3317,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// requires cases.exists(isGuardedCase) (otherwise the rewrite is pointless)
var remainingCases = cases
- val collapsed = collection.mutable.ListBuffer.empty[CaseDef]
+ val collapsed = scala.collection.mutable.ListBuffer.empty[CaseDef]
// when some of collapsed cases (except for the default case itself) did not include an un-guarded case
// we'll need to emit a labeldef for the default case
@@ -3324,16 +3452,12 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case Some(cds) => cds
}
- val allReachable =
- if (unchecked) true
- else {
- val unreachables = unreachableCase(caseDefsWithGuards)
- unreachables foreach {cd => reportUnreachable(cd.body.pos)}
- // a switch with duplicate cases yields a verify error,
- // and a switch with duplicate cases and guards cannot soundly be rewritten to an unguarded switch
- // (even though the verify error would disappear, the behaviour would change)
- unreachables.isEmpty
- }
+ val allReachable = unchecked || {
+ // a switch with duplicate cases yields a verify error,
+ // and a switch with duplicate cases and guards cannot soundly be rewritten to an unguarded switch
+ // (even though the verify error would disappear, the behaviour would change)
+ unreachableCase(caseDefsWithGuards) map (cd => reportUnreachable(cd.body.pos)) isEmpty
+ }
if (!allReachable) Nil
else if (noGuards(caseDefsWithGuards)) {
@@ -3481,7 +3605,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
*/
def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree = {
val matchEnd = newSynthCaseLabel("matchEnd")
- val matchRes = NoSymbol.newValueParameter(newTermName("x"), NoPosition, SYNTHETIC) setInfo restpe.withoutAnnotations //
+ val matchRes = NoSymbol.newValueParameter(newTermName("x"), NoPosition, SYNTHETIC) setInfo restpe.withoutAnnotations
matchEnd setInfo MethodType(List(matchRes), restpe)
def newCaseSym = newSynthCaseLabel("case") setInfo MethodType(Nil, restpe)
@@ -3492,7 +3616,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val nextCase = newCaseSym
_currCase = nextCase
- LabelDef(currCase, Nil, mkCase(new OptimizedCasegen(matchEnd, nextCase, restpe)))
+ LabelDef(currCase, Nil, mkCase(new OptimizedCasegen(matchEnd, nextCase)))
}
// must compute catchAll after caseLabels (side-effects nextCase)
@@ -3517,14 +3641,14 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
)
}
- class OptimizedCasegen(matchEnd: Symbol, nextCase: Symbol, restpe: Type) extends CommonCodegen with Casegen {
+ class OptimizedCasegen(matchEnd: Symbol, nextCase: Symbol) extends CommonCodegen with Casegen {
def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree =
optimizedCodegen.matcher(scrut, scrutSym, restpe)(cases, matchFailGen)
// only used to wrap the RHS of a body
// res: T
// returns MatchMonad[T]
- def one(res: Tree): Tree = matchEnd APPLY (_asInstanceOf(res, restpe)) // need cast for GADT magic
+ def one(res: Tree): Tree = matchEnd APPLY (res) // a jump to a case label is special-cased in typedApply
protected def zero: Tree = nextCase APPLY ()
// prev: MatchMonad[T]
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 41387b7507..e3f5214581 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -7,10 +7,10 @@ package scala.tools.nsc
package typechecker
import symtab.Flags._
-import collection.{ mutable, immutable }
+import scala.collection.{ mutable, immutable }
import transform.InfoTransform
import scala.collection.mutable.ListBuffer
-import language.postfixOps
+import scala.language.postfixOps
/** <p>
* Post-attribution checking and transformation.
@@ -38,7 +38,7 @@ import language.postfixOps
*
* @todo Check whether we always check type parameter bounds.
*/
-abstract class RefChecks extends InfoTransform with reflect.internal.transform.RefChecks {
+abstract class RefChecks extends InfoTransform with scala.reflect.internal.transform.RefChecks {
val global: Global // need to repeat here because otherwise last mixin defines global as
// SymbolTable. If we had DOT this would not be an issue
@@ -124,7 +124,11 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
defaultMethodNames.toList.distinct foreach { name =>
val methods = clazz.info.findMember(name, 0L, METHOD, false).alternatives
- val haveDefaults = methods filter (sym => sym.hasParamWhich(_.hasDefault) && !nme.isProtectedAccessorName(sym.name))
+ def hasDefaultParam(tpe: Type): Boolean = tpe match {
+ case MethodType(params, restpe) => (params exists (_.hasDefault)) || hasDefaultParam(restpe)
+ case _ => false
+ }
+ val haveDefaults = methods filter (sym => hasDefaultParam(sym.info) && !nme.isProtectedAccessorName(sym.name))
if (haveDefaults.lengthCompare(1) > 0) {
val owners = haveDefaults map (_.owner)
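The recursion added here walks every parameter section of a (possibly curried) method type; a stand-alone sketch of the same logic over toy types (not the compiler's MethodType):

object DefaultParamSketch extends App {
  case class Param(hasDefault: Boolean)
  sealed trait Tpe
  case class MethodTpe(params: List[Param], restpe: Tpe) extends Tpe
  case object ResultTpe extends Tpe

  // mirrors hasDefaultParam above: a default in any parameter section counts
  def hasDefaultParam(tpe: Tpe): Boolean = tpe match {
    case MethodTpe(params, restpe) => params.exists(_.hasDefault) || hasDefaultParam(restpe)
    case _                         => false
  }

  // models f(x: Int)(y: Int = 1): the default only shows up in the second section
  val curried = MethodTpe(List(Param(hasDefault = false)), MethodTpe(List(Param(hasDefault = true)), ResultTpe))
  println(hasDefaultParam(curried)) // true
}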
@@ -426,6 +430,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
overrideError("cannot override a macro")
} else {
checkOverrideTypes()
+ checkOverrideDeprecated()
if (settings.warnNullaryOverride.value) {
if (other.paramss.isEmpty && !member.paramss.isEmpty) {
unit.warning(member.pos, "non-nullary method overrides nullary method")
@@ -504,6 +509,14 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
}
}
}
+
+ def checkOverrideDeprecated() {
+ if (other.hasDeprecatedOverridingAnnotation) {
+ val suffix = other.deprecatedOverridingMessage map (": " + _) getOrElse ""
+ val msg = s"overriding ${other.fullLocationString} is deprecated$suffix"
+ unit.deprecationWarning(member.pos, msg)
+ }
+ }
}
val opc = new overridingPairs.Cursor(clazz)
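The new hook reports on members carrying the @deprecatedOverriding annotation (the one hasDeprecatedOverridingAnnotation looks for); illustrative user code, assuming the standard scala.deprecatedOverriding annotation:

class Handler {
  @deprecatedOverriding("prefer composing handlers instead of overriding", "2.10.0")
  def handle(): Unit = ()
}
class LoudHandler extends Handler {
  override def handle(): Unit = println("handled")  // warning: overriding method handle in class Handler is deprecated
}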
@@ -937,9 +950,9 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
case TypeBounds(lo, hi) =>
validateVariance(lo, -variance)
validateVariance(hi, variance)
- case MethodType(formals, result) =>
+ case mt @ MethodType(formals, result) =>
if (inRefinement)
- validateVariances(formals map (_.tpe), -variance)
+ validateVariances(mt.paramTypes, -variance)
validateVariance(result, variance)
case NullaryMethodType(result) =>
validateVariance(result, variance)
@@ -1193,6 +1206,23 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
case _ =>
}
+ // SI-6276 warn for `def foo = foo` or `val bar: X = bar`, which come up more frequently than you might think.
+ def checkInfiniteLoop(valOrDef: ValOrDefDef) {
+ def callsSelf = valOrDef.rhs match {
+ case t @ (Ident(_) | Select(This(_), _)) =>
+ t hasSymbolWhich (_.accessedOrSelf == valOrDef.symbol)
+ case _ => false
+ }
+ val trivialInfiniteLoop = (
+ !valOrDef.isErroneous
+ && !valOrDef.symbol.isValueParameter
+ && valOrDef.symbol.paramss.isEmpty
+ && callsSelf
+ )
+ if (trivialInfiniteLoop)
+ unit.warning(valOrDef.rhs.pos, s"${valOrDef.symbol.fullLocationString} does nothing other than call itself recursively")
+ }
+
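For reference, the shapes of definitions the new SI-6276 check flags (illustrative user code, not part of the patch):

object SelfRecursionExamples {
  def twelve: Int = twelve            // warned: does nothing other than call itself recursively
  val name: String = name             // warned: the symbol on the right is the val itself
  def countdown(n: Int): Int =        // not flagged: takes parameters, so the recursion may terminate
    if (n <= 0) 0 else countdown(n - 1)
}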
// Transformation ------------------------------------------------------------
/* Convert a reference to a case factory of type `tpe` to a new of the class it produces. */
@@ -1298,13 +1328,12 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
def transformStat(tree: Tree, index: Int): List[Tree] = tree match {
case t if treeInfo.isSelfConstrCall(t) =>
assert(index == 0, index)
- val t = transform(tree)
- if (currentLevel.maxindex > 0) {
+ try transform(tree) :: Nil
+ finally if (currentLevel.maxindex > 0) {
// An implementation restriction to avoid VerifyErrors and lazyvals mishaps; see SI-4717
debuglog("refsym = " + currentLevel.refsym)
unit.error(currentLevel.refpos, "forward reference not allowed from self constructor invocation")
}
- List(t)
case ModuleDef(_, _, _) => eliminateModuleDefs(tree)
case ValDef(_, _, _, _) =>
val tree1 @ ValDef(_, _, _, rhs) = transform(tree) // important to do before forward reference check
@@ -1316,11 +1345,11 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
debuglog("refsym = " + currentLevel.refsym)
unit.error(currentLevel.refpos, "forward reference extends over definition of " + lazySym)
}
- List(tree1)
+ tree1 :: Nil
}
case Import(_, _) => Nil
case DefDef(mods, _, _, _, _, _) if (mods hasFlag MACRO) || (tree.symbol hasFlag MACRO) => Nil
- case _ => List(transform(tree))
+ case _ => transform(tree) :: Nil
}
/* Check whether argument types conform to bounds of type parameters */
@@ -1602,12 +1631,14 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
case NullaryMethodType(restpe) if restpe.typeSymbol == UnitClass =>
// this may be the implementation of e.g. a generic method being parameterized
// on Unit, in which case we had better let it slide.
- if (sym.isGetter || sym.allOverriddenSymbols.exists(over => !(over.tpe.resultType =:= sym.tpe.resultType))) ()
- else unit.warning(sym.pos,
- "side-effecting nullary methods are discouraged: suggest defining as `def %s()` instead".format(
- sym.name.decode)
+ val isOk = (
+ sym.isGetter
+ || sym.allOverriddenSymbols.exists(over => !(over.tpe.resultType =:= sym.tpe.resultType))
+ || (sym.name containsName nme.DEFAULT_GETTER_STRING)
)
- case _ => ()
+ if (!isOk)
+ unit.warning(sym.pos, s"side-effecting nullary methods are discouraged: suggest defining as `def ${sym.name.decode}()` instead")
+ case _ => ()
}
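In user terms, the style nudged by the warnNullaryUnit setting checked above (illustrative):

class Cache {
  def clear: Unit = println("clearing")      // warned: side-effecting nullary method
  def clearAll(): Unit = println("clearing") // preferred: declare side-effecting methods with ()
  def size: Int = 0                          // fine: non-Unit nullary accessors are idiomatic
}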
// Verify classes extending AnyVal meet the requirements
@@ -1615,6 +1646,8 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
if ((clazz isSubClass AnyValClass) && !isPrimitiveValueClass(clazz)) {
if (clazz.isTrait)
unit.error(clazz.pos, "Only classes (not traits) are allowed to extend AnyVal")
+ else if ((clazz != AnyValClass) && clazz.hasFlag(ABSTRACT))
+ unit.error(clazz.pos, "`abstract' modifier cannot be used with value classes")
}
}
@@ -1635,6 +1668,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
case ValDef(_, _, _, _) | DefDef(_, _, _, _, _, _) =>
checkDeprecatedOvers(tree)
+ checkInfiniteLoop(tree.asInstanceOf[ValOrDefDef])
if (settings.warnNullaryUnit.value)
checkNullaryMethodReturnType(sym)
if (settings.warnInaccessible.value) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index 92c50a05c2..981ba10183 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -34,6 +34,9 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
/** the following two members override abstract members in Transform */
val phaseName: String = "superaccessors"
+ /** The following flags may be set by this phase: */
+ override def phaseNewFlags: Long = notPRIVATE
+
protected def newTransformer(unit: CompilationUnit): Transformer =
new SuperAccTransformer(unit)
@@ -67,7 +70,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
storeAccessorDefinition(clazz, DefDef(acc, EmptyTree))
acc
}
-
+
atPos(sel.pos)(Select(gen.mkAttributedThis(clazz), superAcc) setType sel.tpe)
}
@@ -167,6 +170,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
treeCopy.CaseDef(tree, pat, transform(guard), transform(body))
case ClassDef(_, _, _, _) =>
+ def transformClassDef = {
checkCompanionNameClashes(sym)
val decls = sym.info.decls
for (s <- decls) {
@@ -192,10 +196,15 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
}
super.transform(tree)
+ }
+ transformClassDef
+
case ModuleDef(_, _, _) =>
checkCompanionNameClashes(sym)
super.transform(tree)
+
case Template(_, _, body) =>
+ def transformTemplate = {
val ownAccDefs = new ListBuffer[Tree]
accDefs(currentOwner) = ownAccDefs
@@ -208,6 +217,8 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
accDefs -= currentOwner
ownAccDefs ++= body1
deriveTemplate(tree)(_ => ownAccDefs.toList)
+ }
+ transformTemplate
case TypeApply(sel @ Select(This(_), name), args) =>
mayNeedProtectedAccessor(sel, args, false)
@@ -221,72 +232,90 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
typeDef.symbol.deSkolemize.setFlag(SPECIALIZED)
typeDef
- case sel @ Select(qual @ This(_), name) =>
- // warn if they are selecting a private[this] member which
- // also exists in a superclass, because they may be surprised
- // to find out that a constructor parameter will shadow a
- // field. See SI-4762.
- if (settings.lint.value) {
- if (sym.isPrivateLocal && sym.paramss.isEmpty) {
- qual.symbol.ancestors foreach { parent =>
- parent.info.decls filterNot (x => x.isPrivate || x.hasLocalFlag) foreach { m2 =>
- if (sym.name == m2.name && m2.isGetter && m2.accessed.isMutable) {
- unit.warning(sel.pos,
- sym.accessString + " " + sym.fullLocationString + " shadows mutable " + m2.name
- + " inherited from " + m2.owner + ". Changes to " + m2.name + " will not be visible within "
- + sym.owner + " - you may want to give them distinct names."
- )
+ case sel @ Select(qual, name) =>
+ def transformSelect = {
+ /** return closest enclosing method, unless shadowed by an enclosing class;
+ * no use of closures here in the interest of speed.
+ */
+ def closestEnclMethod(from: Symbol): Symbol =
+ if (from.isSourceMethod) from
+ else if (from.isClass) NoSymbol
+ else closestEnclMethod(from.owner)
+
+ if (closestEnclMethod(currentOwner) hasAnnotation definitions.ScalaInlineClass)
+ sym.makeNotPrivate(sym.owner)
+
+ qual match {
+ case This(_) =>
+ // warn if they are selecting a private[this] member which
+ // also exists in a superclass, because they may be surprised
+ // to find out that a constructor parameter will shadow a
+ // field. See SI-4762.
+ if (settings.lint.value) {
+ if (sym.isPrivateLocal && sym.paramss.isEmpty) {
+ qual.symbol.ancestors foreach { parent =>
+ parent.info.decls filterNot (x => x.isPrivate || x.hasLocalFlag) foreach { m2 =>
+ if (sym.name == m2.name && m2.isGetter && m2.accessed.isMutable) {
+ unit.warning(sel.pos,
+ sym.accessString + " " + sym.fullLocationString + " shadows mutable " + m2.name
+ + " inherited from " + m2.owner + ". Changes to " + m2.name + " will not be visible within "
+ + sym.owner + " - you may want to give them distinct names.")
+ }
+ }
}
}
}
- }
- }
- // direct calls to aliases of param accessors to the superclass in order to avoid
- // duplicating fields.
- if (sym.isParamAccessor && sym.alias != NoSymbol) {
- val result = (localTyper.typedPos(tree.pos) {
- Select(Super(qual, tpnme.EMPTY) setPos qual.pos, sym.alias)
- }).asInstanceOf[Select]
- debuglog("alias replacement: " + tree + " ==> " + result);//debug
- localTyper.typed(gen.maybeMkAsInstanceOf(transformSuperSelect(result), sym.tpe, sym.alias.tpe, true))
- }
- else {
- /** A trait which extends a class and accesses a protected member
- * of that class cannot implement the necessary accessor method
- * because its implementation is in an implementation class (e.g.
- * Foo$class) which inherits nothing, and jvm access restrictions
- * require the call site to be in an actual subclass. So non-trait
- * classes inspect their ancestors for any such situations and
- * generate the accessors. See SI-2296.
- */
- // FIXME - this should be unified with needsProtectedAccessor, but some
- // subtlety which presently eludes me is foiling my attempts.
- val shouldEnsureAccessor = (
- currentClass.isTrait
- && sym.isProtected
- && sym.enclClass != currentClass
- && !sym.owner.isTrait
- && (sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass)
- && (qual.symbol.info.member(sym.name) ne NoSymbol)
- )
- if (shouldEnsureAccessor) {
- log("Ensuring accessor for call to protected " + sym.fullLocationString + " from " + currentClass)
- ensureAccessor(sel)
- }
- else
- mayNeedProtectedAccessor(sel, List(EmptyTree), false)
- }
+ // direct calls to aliases of param accessors to the superclass in order to avoid
+ // duplicating fields.
+ if (sym.isParamAccessor && sym.alias != NoSymbol) {
+ val result = (localTyper.typedPos(tree.pos) {
+ Select(Super(qual, tpnme.EMPTY) setPos qual.pos, sym.alias)
+ }).asInstanceOf[Select]
+ debuglog("alias replacement: " + tree + " ==> " + result); //debug
+ localTyper.typed(gen.maybeMkAsInstanceOf(transformSuperSelect(result), sym.tpe, sym.alias.tpe, true))
+ } else {
+ /**
+ * A trait which extends a class and accesses a protected member
+ * of that class cannot implement the necessary accessor method
+ * because its implementation is in an implementation class (e.g.
+ * Foo$class) which inherits nothing, and jvm access restrictions
+ * require the call site to be in an actual subclass. So non-trait
+ * classes inspect their ancestors for any such situations and
+ * generate the accessors. See SI-2296.
+ */
+ // FIXME - this should be unified with needsProtectedAccessor, but some
+ // subtlety which presently eludes me is foiling my attempts.
+ val shouldEnsureAccessor = (
+ currentClass.isTrait
+ && sym.isProtected
+ && sym.enclClass != currentClass
+ && !sym.owner.isTrait
+ && (sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass)
+ && (qual.symbol.info.member(sym.name) ne NoSymbol)
+ && !needsProtectedAccessor(sym, tree.pos))
+ if (shouldEnsureAccessor) {
+ log("Ensuring accessor for call to protected " + sym.fullLocationString + " from " + currentClass)
+ ensureAccessor(sel)
+ }
+ else
+ mayNeedProtectedAccessor(sel, EmptyTree.asList, false)
+ }
+
+ case Super(_, mix) =>
+ if (sym.isValue && !sym.isMethod || sym.hasAccessorFlag) {
+ if (!settings.overrideVars.value)
+ unit.error(tree.pos, "super may not be used on " + sym.accessedOrSelf)
+ } else if (isDisallowed(sym)) {
+ unit.error(tree.pos, "super not allowed here: use this." + name.decode + " instead")
+ }
+ transformSuperSelect(sel)
- case sel @ Select(Super(_, mix), name) =>
- if (sym.isValue && !sym.isMethod || sym.hasAccessorFlag) {
- if (!settings.overrideVars.value)
- unit.error(tree.pos, "super may be not be used on "+ sym.accessedOrSelf)
+ case _ =>
+ mayNeedProtectedAccessor(sel, EmptyTree.asList, true)
}
- else if (isDisallowed(sym)) {
- unit.error(tree.pos, "super not allowed here: use this." + name.decode + " instead")
}
- transformSuperSelect(sel)
+ transformSelect
case DefDef(mods, name, tparams, vparamss, tpt, rhs) if tree.symbol.isMethodWithExtension =>
treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, withInvalidOwner(transform(rhs)))
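For the SI-4762 lint retained in the This(_) branch above, a minimal example of the shadowing it reports (illustrative user code; names are invented for the example):

class Account { var owner: String = "unset" }
class Savings(private[this] val owner: String) extends Account {
  def label = owner   // reads the constructor parameter; later assignments to Account.owner stay invisible here
}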
@@ -294,10 +323,8 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
case TypeApply(sel @ Select(qual, name), args) =>
mayNeedProtectedAccessor(sel, args, true)
- case sel @ Select(qual, name) =>
- mayNeedProtectedAccessor(sel, List(EmptyTree), true)
-
case Assign(lhs @ Select(qual, name), rhs) =>
+ def transformAssign = {
if (lhs.symbol.isVariable &&
lhs.symbol.isJavaDefined &&
needsProtectedAccessor(lhs.symbol, tree.pos)) {
@@ -307,14 +334,18 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
transform(localTyper.typed(Apply(setter, List(qual, rhs))))
} else
super.transform(tree)
+ }
+ transformAssign
case Apply(fn, args) =>
assert(fn.tpe != null, tree)
treeCopy.Apply(tree, transform(fn), transformArgs(fn.tpe.params, args))
+
case Function(vparams, body) =>
withInvalidOwner {
treeCopy.Function(tree, vparams, transform(body))
}
+
case _ =>
super.transform(tree)
}
@@ -328,9 +359,22 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
}
- override def atOwner[A](owner: Symbol)(trans: => A): A = {
+ /** a typer for each enclosing class */
+ private var typers = immutable.Map[Symbol, analyzer.Typer]()
+
+ /** Specialized here for performance; the previous blanked
+ * introduction of typers in TypingTransformer caused a >5%
+ * performance hit for the compiler as a whole.
+ */
+ override def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A = {
if (owner.isClass) validCurrentOwner = true
- super.atOwner(owner)(trans)
+ val savedLocalTyper = localTyper
+ localTyper = localTyper.atOwner(tree, if (owner.isModule) owner.moduleClass else owner)
+ typers = typers updated (owner, localTyper)
+ val result = super.atOwner(tree, owner)(trans)
+ localTyper = savedLocalTyper
+ typers -= owner
+ result
}
private def withInvalidOwner[A](trans: => A): A = {
@@ -483,7 +527,14 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
)
true
}
- isCandidate && !host.isPackageClass && !isSelfType
+ def isJavaProtected = host.isTrait && sym.isJavaDefined && {
+ restrictionError(pos, unit,
+ s"""|$clazz accesses protected $sym inside a concrete trait method.
+ |Add an accessor in a class extending ${sym.enclClass} as a workaround.""".stripMargin
+ )
+ true
+ }
+ isCandidate && !host.isPackageClass && !isSelfType && !isJavaProtected
}
/** Return the innermost enclosing class C of referencingClass for which either
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index c7c9d2f4aa..67afb0c118 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -63,7 +63,7 @@ trait SyntheticMethods extends ast.TreeDSL {
// in the original order.
def accessors = clazz.caseFieldAccessors sortBy { acc =>
originalAccessors indexWhere { orig =>
- (acc.name == orig.name) || (acc.name startsWith (orig.name append "$").asInstanceOf[Name]) // [Eugene++] why do we need this cast?
+ (acc.name == orig.name) || (acc.name startsWith (orig.name append "$"))
}
}
val arity = accessors.size
@@ -87,7 +87,7 @@ trait SyntheticMethods extends ast.TreeDSL {
)
def forwardToRuntime(method: Symbol): Tree =
- forwardMethod(method, getMember(ScalaRunTimeModule, (method.name prepend "_").asInstanceOf[Name]))(mkThis :: _) // [Eugene++] why do we need this cast?
+ forwardMethod(method, getMember(ScalaRunTimeModule, (method.name prepend "_")))(mkThis :: _)
def callStaticsMethod(name: String)(args: Tree*): Tree = {
val method = termMember(RuntimeStaticsModule, name)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Tags.scala b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
index f82e009be8..d82fbd7c77 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Tags.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
@@ -26,7 +26,7 @@ trait Tags {
/** Finds in scope or materializes a ClassTag.
* Should be used instead of ClassManifest every time compiler needs to persist an erasure.
*
- * Once upon a time, we had an `ErasureTag` which was to `ClassTag` the same that `AbsTypeTag` is for `TypeTag`.
+ * Once upon a time, we had an `ErasureTag`, which was to `ClassTag` what `WeakTypeTag` is to `TypeTag`.
* However we found out that we don't really need this concept, so it got removed.
*
* @param pos Position for error reporting. Please, provide meaningful value.
@@ -43,13 +43,13 @@ trait Tags {
resolveTag(pos, taggedTp, allowMaterialization)
}
- /** Finds in scope or materializes an AbsTypeTag (if `concrete` is false) or a TypeTag (if `concrete` is true).
+ /** Finds in scope or materializes an WeakTypeTag (if `concrete` is false) or a TypeTag (if `concrete` is true).
*
* @param pos Position for error reporting. Please, provide meaningful value.
* @param pre Prefix that represents a universe this type tag will be bound to.
* If `pre` is set to `NoType`, then any type tag in scope will do, regardless of its affiliation.
- * If `pre` is set to `NoType`, and tag resolution involves materialization, then `mkBasisPrefix` will be used.
- * @param tp Type we're looking a TypeTag for, e.g. resolveTypeTag(pos, reflectBasisPrefix, IntClass.tpe, false) will look for scala.reflect.basis.TypeTag[Int].
+ * If `pre` is set to `NoType`, and tag resolution involves materialization, then `mkRuntimeUniverseRef` will be used.
+ * @param tp Type we're looking a TypeTag for, e.g. resolveTypeTag(pos, mkRuntimeUniverseRef, IntClass.tpe, false) will look for scala.reflect.runtime.universe.TypeTag[Int].
* @param concrete If true then the result must not contain unresolved (i.e. not spliced) type parameters and abstract type members.
* If false then the function will always succeed (abstract types will be reified as free types).
* @param allowMaterialization If true (default) then the resolver is allowed to launch materialization macros when there's no type tag in scope.
@@ -59,11 +59,14 @@ trait Tags {
* EmptyTree if `concrete` is true and the result contains unresolved (i.e. not spliced) type parameters and abstract type members.
* EmptyTree if `allowMaterialization` is false, and there is no type tag in scope.
*/
- def resolveTypeTag(pos: Position, pre: Type, tp: Type, concrete: Boolean, allowMaterialization: Boolean = true): Tree = {
- val tagSym = if (concrete) TypeTagClass else AbsTypeTagClass
- val tagTp = if (pre == NoType) TypeRef(BaseUniverseClass.toTypeConstructor, tagSym, List(tp)) else singleType(pre, pre member tagSym.name)
- val taggedTp = appliedType(tagTp, List(tp))
- resolveTag(pos, taggedTp, allowMaterialization)
- }
+ def resolveTypeTag(pos: Position, pre: Type, tp: Type, concrete: Boolean, allowMaterialization: Boolean = true): Tree =
+ // if someone requests a type tag, but scala-reflect.jar isn't on the library classpath, then bail
+ if (pre == NoType && ApiUniverseClass == NoSymbol) EmptyTree
+ else {
+ val tagSym = if (concrete) TypeTagClass else WeakTypeTagClass
+ val tagTp = if (pre == NoType) TypeRef(ApiUniverseClass.toTypeConstructor, tagSym, List(tp)) else singleType(pre, pre member tagSym.name)
+ val taggedTp = appliedType(tagTp, List(tp))
+ resolveTag(pos, taggedTp, allowMaterialization)
+ }
}
}
\ No newline at end of file
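At the use site, the tags being resolved here are the standard scala-reflect ones (requires scala-reflect.jar on the classpath, as the new guard notes); for example:

import scala.reflect.runtime.universe._

object TagDemo extends App {
  def concreteTpe[T: TypeTag](x: T)  = typeTag[T].tpe       // needs a fully known T
  def weakTpe[T: WeakTypeTag](x: T)  = weakTypeTag[T].tpe   // tolerates abstract parts of T
  println(concreteTpe(Vector(1, 2))) // scala.collection.immutable.Vector[Int]
  println(weakTpe("hello"))          // String (here the weak tag happens to be concrete too)
}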
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index 07d457b17b..9bb88f152a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -242,7 +242,7 @@ abstract class TreeCheckers extends Analyzer {
}
}
case ValDef(_, _, _, _) =>
- if (sym.hasGetter && !sym.isOuterField) {
+ if (sym.hasGetter && !sym.isOuterField && !sym.isOuterAccessor) {
assertFn(sym.getter(sym.owner) != NoSymbol, ownerstr(sym) + " has getter but cannot be found. " + sym.ownerChain)
}
case Apply(fn, args) =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 4a0977eb90..e5c0f5767c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -59,6 +59,19 @@ trait TypeDiagnostics {
* the map, the addendum should also be printed.
*/
private var addendums = perRunCaches.newMap[Position, () => String]()
+ private var isTyperInPattern = false
+
+ /** Devising new ways of communicating error info out of
+ * desperation to work on error messages. This is used
+ * by typedPattern to wrap its business so we can generate
+ * a sensible error message when things go south.
+ */
+ def typingInPattern[T](body: => T): T = {
+ val saved = isTyperInPattern
+ isTyperInPattern = true
+ try body
+ finally isTyperInPattern = saved
+ }
def setAddendum(pos: Position, msg: () => String) =
if (pos != NoPosition)
@@ -138,13 +151,17 @@ trait TypeDiagnostics {
def hasParams = tree.tpe.paramSectionCount > 0
def preResultString = if (hasParams) ": " else " of type "
- def nullMessage = "expression of type " + tree.tpe
- def overloadedMessage = "overloaded method " + sym + " with alternatives:\n" + alternativesString(tree)
+ def patternMessage = "pattern " + tree.tpe.finalResultType + valueParamsString(tree.tpe)
+ def exprMessage = "expression of type " + tree.tpe
+ def overloadedMessage = s"overloaded method $sym with alternatives:\n" + alternativesString(tree)
def moduleMessage = "" + sym
def defaultMessage = moduleMessage + preResultString + tree.tpe
def applyMessage = defaultMessage + tree.symbol.locationString
- if (sym == null) nullMessage
+ if ((sym eq null) || (sym eq NoSymbol)) {
+ if (isTyperInPattern) patternMessage
+ else exprMessage
+ }
else if (sym.isOverloaded) overloadedMessage
else if (sym.isModule) moduleMessage
else if (sym.name == nme.apply) applyMessage
@@ -252,6 +269,13 @@ trait TypeDiagnostics {
}
"" // no elaborable variance situation found
}
+
+ // For found/required errors where AnyRef would have sufficed:
+ // explain in greater detail.
+ def explainAnyVsAnyRef(found: Type, req: Type): String = {
+ if (AnyRefClass.tpe <:< req) notAnyRefMessage(found) else ""
+ }
+
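The mismatch the new explainAnyVsAnyRef addendum elaborates on, at the use site (illustrative; the offending call is left commented out because it does not compile):

object AnyVsAnyRefDemo {
  def register(listener: AnyRef): Unit = ()
  // register(42)
  //   error: type mismatch; found: Int(42), required: AnyRef
  //   the added note explains why a value type such as Int does not conform to AnyRef
}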
// TODO - figure out how to avoid doing any work at all
// when the message will never be seen. I thought context.reportErrors
// being false would do that, but if I return "<suppressed>" under
@@ -261,7 +285,10 @@ trait TypeDiagnostics {
";\n found : " + found.toLongString + existentialContext(found) + explainAlias(found) +
"\n required: " + req + existentialContext(req) + explainAlias(req)
)
- withDisambiguation(Nil, found, req)(baseMessage) + explainVariance(found, req)
+ ( withDisambiguation(Nil, found, req)(baseMessage)
+ + explainVariance(found, req)
+ + explainAnyVsAnyRef(found, req)
+ )
}
case class TypeDiag(tp: Type, sym: Symbol) extends Ordered[TypeDiag] {
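For context, explainAnyVsAnyRef fires when the required type would accept an AnyRef but the found type is a value type, where the bare found/required pair reads as a puzzle. A tiny illustrative fragment of the situation (hypothetical user code, not part of the change):

    object AnyVsAnyRefDemo {
      def useRef(x: AnyRef): String = x.toString
      // useRef(42)   // type mismatch: found Int, required AnyRef;
      //              // the extra addendum explains that value types are not AnyRefs
      def main(args: Array[String]): Unit = println(useRef("ok"))
    }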
@@ -462,7 +489,6 @@ trait TypeDiagnostics {
case CyclicReference(sym, info: TypeCompleter) =>
if (context0.owner.isTermMacro) {
// see comments to TypeSigError for an explanation of this special case
- // [Eugene] is there a better way?
throw ex
} else {
val pos = info.tree match {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 8ba2d9e0fd..fa7e756e36 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -13,10 +13,9 @@ package scala.tools.nsc
package typechecker
import scala.collection.mutable
-import scala.reflect.internal.util.BatchSourceFile
+import scala.reflect.internal.util.{ BatchSourceFile, Statistics }
import mutable.ListBuffer
import symtab.Flags._
-import reflect.internal.util.Statistics
// Suggestion check whether we can do without priming scopes with symbols of outer scopes,
// like the IDE does.
@@ -51,7 +50,6 @@ trait Typers extends Modes with Adaptations with Tags {
transformed.clear()
}
- // [Eugene] shouldn't this be converted to resetAllAttrs?
object UnTyper extends Traverser {
override def traverse(tree: Tree) = {
if (tree != EmptyTree) tree.tpe = null
@@ -93,7 +91,7 @@ trait Typers extends Modes with Adaptations with Tags {
// - we may virtualize matches (if -Xexperimental and there's a suitable __match in scope)
// - we synthesize PartialFunction implementations for `x => x match {...}` and `match {...}` when the expected type is PartialFunction
// this is disabled by: -Xoldpatmat or interactive compilation (we run it for scaladoc due to SI-5933)
- @inline private def newPatternMatching = opt.virtPatmat && !forInteractive //&& !forScaladoc && (phase.id < currentRun.uncurryPhase.id)
+ private def newPatternMatching = opt.virtPatmat && !forInteractive //&& !forScaladoc && (phase.id < currentRun.uncurryPhase.id)
abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with TyperContextErrors {
import context0.unit
@@ -105,7 +103,8 @@ trait Typers extends Modes with Adaptations with Tags {
tp.isError || pt.isError ||
context0.implicitsEnabled && // this condition prevents chains of views
inferView(EmptyTree, tp, pt, false) != EmptyTree
- }}
+ }
+ }
/** Find implicit arguments and pass them to given tree.
*/
@@ -195,16 +194,14 @@ trait Typers extends Modes with Adaptations with Tags {
case PolyType(_, _) => EmptyTree
case _ =>
def wrapImplicit(from: Type): Tree = {
- val result = inferImplicit(tree, functionType(List(from), to), reportAmbiguous, true, context, saveErrors)
+ val result = inferImplicit(tree, functionType(from :: Nil, to), reportAmbiguous, true, context, saveErrors)
if (result.subst != EmptyTreeTypeSubstituter) {
result.subst traverse tree
notifyUndetparamsInferred(result.subst.from, result.subst.to)
}
result.tree
}
- val result = wrapImplicit(from)
- if (result != EmptyTree) result
- else wrapImplicit(byNameType(from))
+ wrapImplicit(from) orElse wrapImplicit(byNameType(from))
}
}
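The wrapImplicit(from) orElse wrapImplicit(byNameType(from)) rewrite folds the explicit try-then-fallback into one expression: the by-name search only runs when the plain search comes back empty. The same shape, sketched with Option standing in for trees (all names below are hypothetical):

    object FallbackSketch {
      // None plays the role of EmptyTree: "no view found"
      def findView(from: String, to: String): Option[String] =
        if (from == to) Some(s"identity[$from]") else None

      def byName(tp: String): String = s"=> $tp"

      // first try the plain type, then the by-name variant
      def inferView(from: String, to: String): Option[String] =
        findView(from, to) orElse findView(byName(from), to)

      def main(args: Array[String]): Unit = {
        println(inferView("Int", "Int"))        // Some(identity[Int])
        println(inferView("Int", "=> Int"))     // Some(identity[=> Int]) via the fallback
      }
    }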
@@ -235,10 +232,11 @@ trait Typers extends Modes with Adaptations with Tags {
* @param tree ...
* @return ...
*/
- def checkStable(tree: Tree): Tree =
+ def checkStable(tree: Tree): Tree = (
if (treeInfo.isExprSafeToInline(tree)) tree
else if (tree.isErrorTyped) tree
else UnstableTreeError(tree)
+ )
/** Would tree be a stable (i.e. a pure expression) if the type
* of its symbol was not volatile?
@@ -355,7 +353,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (formals exists (isRepeatedParamType(_)))
error(pos, "methods with `*`-parameters cannot be converted to function values");
*/
- if (restpe.isDependent)
+ if (tpe.isDependentMethodType)
DependentMethodTpeConversionToFunctionError(tree, tpe)
checkParamsConvertible(tree, restpe)
case _ =>
@@ -480,7 +478,6 @@ trait Typers extends Modes with Adaptations with Tags {
/** The typer for an expression, depending on where we are. If we are before a superclass
* call, this is a typer over a constructor context; otherwise it is the current typer.
*/
- @inline
final def constrTyperIf(inConstr: Boolean): Typer =
if (inConstr) {
assert(context.undetparams.isEmpty, context.undetparams)
@@ -580,7 +577,7 @@ trait Typers extends Modes with Adaptations with Tags {
// to notice exhaustiveness and to generate good code when
// List extractors are mixed with :: patterns. See Test5 in lists.scala.
def dealias(sym: Symbol) =
- (atPos(tree.pos) {gen.mkAttributedRef(sym)}, sym.owner.thisType)
+ (atPos(tree.pos.makeTransparent) {gen.mkAttributedRef(sym)} setPos tree.pos, sym.owner.thisType)
sym.name match {
case nme.List => return dealias(ListModule)
case nme.Seq => return dealias(SeqModule)
@@ -609,21 +606,23 @@ trait Typers extends Modes with Adaptations with Tags {
/** Is `sym` defined in package object of package `pkg`?
*/
- private def isInPackageObject(sym: Symbol, pkg: Symbol) =
- pkg.isPackageClass && {
- sym.alternatives forall { sym =>
- !sym.owner.isPackage && {
- sym.owner.isPackageObjectClass &&
+ private def isInPackageObject(sym: Symbol, pkg: Symbol) = {
+ def isInPkgObj(sym: Symbol) =
+ !sym.owner.isPackage && {
+ sym.owner.isPackageObjectClass &&
sym.owner.owner == pkg ||
pkg.isInitialized && {
// need to be careful here to not get a cyclic reference during bootstrap
val pkgobj = pkg.info.member(nme.PACKAGEkw)
pkgobj.isInitialized &&
- (pkgobj.info.member(sym.name).alternatives contains sym)
+ (pkgobj.info.member(sym.name).alternatives contains sym)
}
- }
}
+ pkg.isPackageClass && {
+ if (sym.isOverloaded) sym.alternatives forall isInPkgObj
+ else isInPkgObj(sym)
}
+ }
/** Post-process an identifier or selection node, performing the following:
* 1. Check that non-function pattern expressions are stable
@@ -706,15 +705,15 @@ trait Typers extends Modes with Adaptations with Tags {
def silent[T](op: Typer => T,
reportAmbiguousErrors: Boolean = context.ambiguousErrors,
newtree: Tree = context.tree): SilentResult[T] = {
- val rawTypeStart = Statistics.startCounter(rawTypeFailed)
- val findMemberStart = Statistics.startCounter(findMemberFailed)
- val subtypeStart = Statistics.startCounter(subtypeFailed)
- val failedSilentStart = Statistics.startTimer(failedSilentNanos)
+ val rawTypeStart = if (Statistics.canEnable) Statistics.startCounter(rawTypeFailed) else null
+ val findMemberStart = if (Statistics.canEnable) Statistics.startCounter(findMemberFailed) else null
+ val subtypeStart = if (Statistics.canEnable) Statistics.startCounter(subtypeFailed) else null
+ val failedSilentStart = if (Statistics.canEnable) Statistics.startTimer(failedSilentNanos) else null
def stopStats() = {
- Statistics.stopCounter(rawTypeFailed, rawTypeStart)
- Statistics.stopCounter(findMemberFailed, findMemberStart)
- Statistics.stopCounter(subtypeFailed, subtypeStart)
- Statistics.stopTimer(failedSilentNanos, failedSilentStart)
+ if (Statistics.canEnable) Statistics.stopCounter(rawTypeFailed, rawTypeStart)
+ if (Statistics.canEnable) Statistics.stopCounter(findMemberFailed, findMemberStart)
+ if (Statistics.canEnable) Statistics.stopCounter(subtypeFailed, subtypeStart)
+ if (Statistics.canEnable) Statistics.stopTimer(failedSilentNanos, failedSilentStart)
}
try {
if (context.reportErrors ||
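The if (Statistics.canEnable) guards reduce the disabled-statistics case to a single cheap boolean test per site, so no timer or counter bookkeeping runs on the hot path. A stand-alone sketch of the guard pattern (the facade below is illustrative, not the compiler's Statistics object):

    object StatsGuardSketch {
      // cheap, effectively constant check; the expensive bookkeeping is skipped entirely when false
      final val canEnable: Boolean = sys.props contains "sketch.stats"

      def startTimer(): Long = System.nanoTime()
      def stopTimer(label: String, start: Long): Unit =
        println(s"$label: ${(System.nanoTime() - start) / 1000} us")

      def work(): Int = {
        val start = if (canEnable) startTimer() else 0L
        try (1 to 100000).sum
        finally if (canEnable) stopTimer("work", start)
      }

      def main(args: Array[String]): Unit = println(work())
    }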
@@ -778,7 +777,7 @@ trait Typers extends Modes with Adaptations with Tags {
var raw = featureDesc + " " + req + " be enabled\n" +
"by making the implicit value language." + featureName + " visible."
if (!(currentRun.reportedFeature contains featureTrait))
- raw += "\nThis can be achieved by adding the import clause 'import language." + featureName + "'\n" +
+ raw += "\nThis can be achieved by adding the import clause 'import scala.language." + featureName + "'\n" +
"or by setting the compiler option -language:" + featureName + ".\n" +
"See the Scala docs for value scala.language." + featureName + " for a discussion\n" +
"why the feature " + req + " be explicitly enabled."
@@ -885,7 +884,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (!meth.isConstructor && !meth.isTermMacro && isFunctionType(pt)) { // (4.2)
debuglog("eta-expanding " + tree + ":" + tree.tpe + " to " + pt)
checkParamsConvertible(tree, tree.tpe)
- val tree0 = etaExpand(context.unit, tree)
+ val tree0 = etaExpand(context.unit, tree, this)
// println("eta "+tree+" ---> "+tree0+":"+tree0.tpe+" undet: "+context.undetparams+ " mode: "+Integer.toHexString(mode))
if (context.undetparams.nonEmpty) {
@@ -909,7 +908,7 @@ trait Typers extends Modes with Adaptations with Tags {
def adaptType(): Tree = {
if (inFunMode(mode)) {
- // [Eugene++] the commented line below makes sense for typechecking, say, TypeApply(Ident(`some abstract type symbol`), List(...))
+ // todo. the commented line below makes sense for typechecking, say, TypeApply(Ident(`some abstract type symbol`), List(...))
// because otherwise Ident will have its tpe set to a TypeRef, not to a PolyType, and `typedTypeApply` will fail
// but this needs additional investigation, because it crashes t5228, gadts1 and maybe something else
// tree setType tree.tpe.normalize
@@ -1056,7 +1055,7 @@ trait Typers extends Modes with Adaptations with Tags {
case other =>
other
}
- typed(atPos(tree.pos)(Select(qual, nme.apply)), mode, pt)
+ typed(atPos(tree.pos)(Select(qual setPos tree.pos.makeTransparent, nme.apply)), mode, pt)
}
// begin adapt
@@ -1107,10 +1106,12 @@ trait Typers extends Modes with Adaptations with Tags {
case _ =>
def applyPossible = {
def applyMeth = member(adaptToName(tree, nme.apply), nme.apply)
- if ((mode & TAPPmode) != 0)
- tree.tpe.typeParams.isEmpty && applyMeth.filter(!_.tpe.typeParams.isEmpty) != NoSymbol
- else
- applyMeth.filter(_.tpe.paramSectionCount > 0) != NoSymbol
+ dyna.acceptsApplyDynamic(tree.tpe) || (
+ if ((mode & TAPPmode) != 0)
+ tree.tpe.typeParams.isEmpty && applyMeth.filter(!_.tpe.typeParams.isEmpty) != NoSymbol
+ else
+ applyMeth.filter(_.tpe.paramSectionCount > 0) != NoSymbol
+ )
}
if (tree.isType)
adaptType()
@@ -1134,109 +1135,123 @@ trait Typers extends Modes with Adaptations with Tags {
} else if (tree.tpe <:< pt) {
tree
} else {
- if (inPatternMode(mode)) {
- if ((tree.symbol ne null) && tree.symbol.isModule)
- inferModulePattern(tree, pt)
- if (isPopulated(tree.tpe, approximateAbstracts(pt)))
- return tree
- }
- val tree1 = constfold(tree, pt) // (10) (11)
- if (tree1.tpe <:< pt) adapt(tree1, mode, pt, original)
- else {
- if (inExprModeButNot(mode, FUNmode)) {
- pt.normalize match {
- case TypeRef(_, sym, _) =>
- // note: was if (pt.typeSymbol == UnitClass) but this leads to a potentially
- // infinite expansion if pt is constant type ()
- if (sym == UnitClass && tree.tpe <:< AnyClass.tpe) { // (12)
- if (settings.warnValueDiscard.value)
- context.unit.warning(tree.pos, "discarded non-Unit value")
- return typed(atPos(tree.pos)(Block(List(tree), Literal(Constant()))), mode, pt)
- } else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt)) {
- if (settings.warnNumericWiden.value)
- context.unit.warning(tree.pos, "implicit numeric widening")
- return typed(atPos(tree.pos)(Select(tree, "to" + sym.name)), mode, pt)
- }
- case AnnotatedType(_, _, _) if canAdaptAnnotations(tree, mode, pt) => // (13)
- return typed(adaptAnnotations(tree, mode, pt), mode, pt)
- case _ =>
- }
- if (!context.undetparams.isEmpty) {
- return instantiate(tree, mode, pt)
- }
- if (context.implicitsEnabled && !pt.isError && !tree.isErrorTyped) {
- // (14); the condition prevents chains of views
- debuglog("inferring view from " + tree.tpe + " to " + pt)
- val coercion = inferView(tree, tree.tpe, pt, true)
- // convert forward views of delegate types into closures wrapped around
- // the delegate's apply method (the "Invoke" method, which was translated into apply)
- if (forMSIL && coercion != null && isCorrespondingDelegate(tree.tpe, pt)) {
- val meth: Symbol = tree.tpe.member(nme.apply)
- debuglog("replacing forward delegate view with: " + meth + ":" + meth.tpe)
- return typed(Select(tree, meth), mode, pt)
+ def fallBack: Tree = {
+ if (inPatternMode(mode)) {
+ if ((tree.symbol ne null) && tree.symbol.isModule)
+ inferModulePattern(tree, pt)
+ if (isPopulated(tree.tpe, approximateAbstracts(pt)))
+ return tree
+ }
+ val tree1 = constfold(tree, pt) // (10) (11)
+ if (tree1.tpe <:< pt) adapt(tree1, mode, pt, original)
+ else {
+ if (inExprModeButNot(mode, FUNmode)) {
+ pt.normalize match {
+ case TypeRef(_, sym, _) =>
+ // note: was if (pt.typeSymbol == UnitClass) but this leads to a potentially
+ // infinite expansion if pt is constant type ()
+ if (sym == UnitClass && tree.tpe <:< AnyClass.tpe) { // (12)
+ if (settings.warnValueDiscard.value)
+ context.unit.warning(tree.pos, "discarded non-Unit value")
+ return typed(atPos(tree.pos)(Block(List(tree), Literal(Constant()))), mode, pt)
+ } else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt)) {
+ if (settings.warnNumericWiden.value)
+ context.unit.warning(tree.pos, "implicit numeric widening")
+ return typed(atPos(tree.pos)(Select(tree, "to" + sym.name)), mode, pt)
+ }
+ case AnnotatedType(_, _, _) if canAdaptAnnotations(tree, mode, pt) => // (13)
+ return typed(adaptAnnotations(tree, mode, pt), mode, pt)
+ case _ =>
+ }
+ if (!context.undetparams.isEmpty) {
+ return instantiate(tree, mode, pt)
}
- if (coercion != EmptyTree) {
- def msg = "inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe
- if (settings.logImplicitConv.value)
- unit.echo(tree.pos, msg)
-
- debuglog(msg)
- val silentContext = context.makeImplicit(context.ambiguousErrors)
- val res = newTyper(silentContext).typed(
- new ApplyImplicitView(coercion, List(tree)) setPos tree.pos, mode, pt)
- if (silentContext.hasErrors) context.issue(silentContext.errBuffer.head) else return res
+ if (context.implicitsEnabled && !pt.isError && !tree.isErrorTyped) {
+ // (14); the condition prevents chains of views
+ debuglog("inferring view from " + tree.tpe + " to " + pt)
+ val coercion = inferView(tree, tree.tpe, pt, true)
+ // convert forward views of delegate types into closures wrapped around
+ // the delegate's apply method (the "Invoke" method, which was translated into apply)
+ if (forMSIL && coercion != null && isCorrespondingDelegate(tree.tpe, pt)) {
+ val meth: Symbol = tree.tpe.member(nme.apply)
+ debuglog("replacing forward delegate view with: " + meth + ":" + meth.tpe)
+ return typed(Select(tree, meth), mode, pt)
+ }
+ if (coercion != EmptyTree) {
+ def msg = "inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe
+ if (settings.logImplicitConv.value)
+ unit.echo(tree.pos, msg)
+
+ debuglog(msg)
+ val silentContext = context.makeImplicit(context.ambiguousErrors)
+ val res = newTyper(silentContext).typed(
+ new ApplyImplicitView(coercion, List(tree)) setPos tree.pos, mode, pt)
+ if (silentContext.hasErrors) context.issue(silentContext.errBuffer.head) else return res
+ }
}
}
- }
- if (settings.debug.value) {
- log("error tree = " + tree)
- if (settings.explaintypes.value) explainTypes(tree.tpe, pt)
- }
+ if (settings.debug.value) {
+ log("error tree = " + tree)
+ if (settings.explaintypes.value) explainTypes(tree.tpe, pt)
+ }
- val found = tree.tpe
- if (!found.isErroneous && !pt.isErroneous) {
- if (!context.reportErrors && isPastTyper) {
- val (bound, req) = pt match {
- case ExistentialType(qs, tpe) => (qs, tpe)
- case _ => (Nil, pt)
- }
- val boundOrSkolems = bound ++ pt.skolemsExceptMethodTypeParams
- if (boundOrSkolems.nonEmpty) {
- // Ignore type errors raised in later phases that are due to mismatching types with existential skolems
- // We have lift crashing in 2.9 with an adapt failure in the pattern matcher.
- // Here's my hypothsis why this happens. The pattern matcher defines a variable of type
- //
- // val x: T = expr
- //
- // where T is the type of expr, but T contains existential skolems ts.
- // In that case, this value definition does not typecheck.
- // The value definition
- //
- // val x: T forSome { ts } = expr
- //
- // would typecheck. Or one can simply leave out the type of the `val`:
- //
- // val x = expr
- //
- // SI-6029 shows another case where we also fail (in uncurry), but this time the expected
- // type is an existential type.
- //
- // The reason for both failures have to do with the way we (don't) transform
- // skolem types along with the trees that contain them. We'd need a
- // radically different approach to do it. But before investing a lot of time to
- // to do this (I have already sunk 3 full days with in the end futile attempts
- // to consistently transform skolems and fix 6029), I'd like to
- // investigate ways to avoid skolems completely.
- //
- log("recovering from existential or skolem type error in tree \n" + tree + "\nwith type " + tree.tpe + "\n expected type = " + pt + "\n context = " + context.tree)
- return adapt(tree, mode, deriveTypeWithWildcards(boundOrSkolems)(pt))
+ val found = tree.tpe
+ if (!found.isErroneous && !pt.isErroneous) {
+ if ((!context.reportErrors && isPastTyper) || tree.attachments.get[MacroExpansionAttachment].isDefined) {
+ val (bound, req) = pt match {
+ case ExistentialType(qs, tpe) => (qs, tpe)
+ case _ => (Nil, pt)
+ }
+ val boundOrSkolems = bound ++ pt.skolemsExceptMethodTypeParams
+ if (boundOrSkolems.nonEmpty) {
+ // Ignore type errors raised in later phases that are due to mismatching types with existential skolems
+ // We have lift crashing in 2.9 with an adapt failure in the pattern matcher.
+          // Here's my hypothesis for why this happens. The pattern matcher defines a variable of type
+ //
+ // val x: T = expr
+ //
+ // where T is the type of expr, but T contains existential skolems ts.
+ // In that case, this value definition does not typecheck.
+ // The value definition
+ //
+ // val x: T forSome { ts } = expr
+ //
+ // would typecheck. Or one can simply leave out the type of the `val`:
+ //
+ // val x = expr
+ //
+ // SI-6029 shows another case where we also fail (in uncurry), but this time the expected
+ // type is an existential type.
+ //
+          // The reason for both failures has to do with the way we (don't) transform
+          // skolem types along with the trees that contain them. We'd need a
+          // radically different approach to do it. But before investing a lot of time
+          // to do this (I have already sunk 3 full days into ultimately futile attempts
+ // to consistently transform skolems and fix 6029), I'd like to
+ // investigate ways to avoid skolems completely.
+ //
+ // upd. The same problem happens when we try to typecheck the result of macro expansion against its expected type
+ // (which is the return type of the macro definition instantiated in the context of expandee):
+ //
+ // Test.scala:2: error: type mismatch;
+ // found : $u.Expr[Class[_ <: Object]]
+ // required: reflect.runtime.universe.Expr[Class[?0(in value <local Test>)]] where type ?0(in value <local Test>) <: Object
+ // scala.reflect.runtime.universe.reify(new Object().getClass)
+ // ^
+ // Therefore following Martin's advice I use this logic to recover from skolem errors after macro expansions
+ // (by adding the ` || tree.attachments.get[MacroExpansionAttachment].isDefined` clause to the conditional above).
+ //
+ log("recovering from existential or skolem type error in tree \n" + tree + "\nwith type " + tree.tpe + "\n expected type = " + pt + "\n context = " + context.tree)
+ return adapt(tree, mode, deriveTypeWithWildcards(boundOrSkolems)(pt))
+ }
}
+ // create an actual error
+ AdaptTypeError(tree, found, pt)
}
- // create an actual error
- AdaptTypeError(tree, found, pt)
+ setError(tree)
}
- setError(tree)
}
+ fallBack
}
}
}
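Among the adaptations re-wrapped into fallBack above, step (12) discards a non-Unit value against an expected Unit, and the numeric branch widens e.g. Int to Double; both can be surfaced with the existing warning flags, as in this small illustrative program:

    // compile with: scalac -Ywarn-value-discard -Ywarn-numeric-widen AdaptDemo.scala
    object AdaptDemo {
      def compute(): Int = 42
      def run(): Unit = compute()   // (12): "discarded non-Unit value"
      val d: Double = compute()     // implicit numeric widening Int => Double is reported
      def main(args: Array[String]): Unit = { run(); println(d) }
    }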
@@ -1390,8 +1405,10 @@ trait Typers extends Modes with Adaptations with Tags {
case List(acc) =>
def isUnderlyingAcc(sym: Symbol) =
sym == acc || acc.hasAccessorFlag && sym == acc.accessed
- if (acc.accessBoundary(clazz) != rootMirror.RootClass)
+ if (acc.accessBoundary(clazz) != rootMirror.RootClass)
unit.error(acc.pos, "value class needs to have a publicly accessible val parameter")
+ else if (acc.tpe.typeSymbol.isDerivedValueClass)
+ unit.error(acc.pos, "value class may not wrap another user-defined value class")
for (stat <- body)
if (!treeInfo.isAllowedInUniversalTrait(stat) && !isUnderlyingAcc(stat.symbol))
unit.error(stat.pos,
@@ -1401,14 +1418,15 @@ trait Typers extends Modes with Adaptations with Tags {
unit.error(clazz.pos, "value class needs to have exactly one public val parameter")
}
}
- body foreach {
- case md: ModuleDef =>
- unit.error(md.pos, "value class may not have nested module definitions")
- case cd: ClassDef =>
- unit.error(cd.pos, "value class may not have nested class definitions")
- case md: DefDef if md.symbol.isConstructor && !md.symbol.isPrimaryConstructor =>
- unit.error(md.pos, "value class may not have secondary constructors")
- case _ =>
+
+ def valueClassMayNotHave(at: Tree, what: String) = unit.error(at.pos, s"value class may not have $what")
+ body.foreach {
+ case dd: DefDef if dd.symbol.isAuxiliaryConstructor => valueClassMayNotHave(dd, "secondary constructors")
+ case t => t.foreach {
+ case md: ModuleDef => valueClassMayNotHave(md, "nested module definitions")
+ case cd: ClassDef => valueClassMayNotHave(cd, "nested class definitions")
+ case _ =>
+ }
}
for (tparam <- clazz.typeParams)
if (tparam hasAnnotation definitions.SpecializedClass)
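The reshaped checks reject, in particular, a value class whose single parameter is itself a user-defined value class, as well as nested module or class definitions and secondary constructors in a value class body. Illustrative programs that would trip each message (left commented out, since they do not compile):

    class Meters(val underlying: Int) extends AnyVal

    object ValueClassLimits {
      // class Boxed(val m: Meters) extends AnyVal                          // wraps another user-defined value class
      // class WithModule(val n: Int) extends AnyVal { object Helper }      // nested module definition
      // class WithCtor(val n: Int) extends AnyVal { def this() = this(0) } // secondary constructor
      def main(args: Array[String]): Unit = println(new Meters(3).underlying)
    }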
@@ -1551,7 +1569,7 @@ trait Typers extends Modes with Adaptations with Tags {
*/
def validateParentClasses(parents: List[Tree], selfType: Type) {
val pending = ListBuffer[AbsTypeError]()
- @inline def validateDynamicParent(parent: Symbol) =
+ def validateDynamicParent(parent: Symbol) =
if (parent == DynamicClass) checkFeature(parent.pos, DynamicsFeature)
def validateParentClass(parent: Tree, superclazz: Symbol) =
@@ -1573,6 +1591,12 @@ trait Typers extends Modes with Adaptations with Tags {
if (psym.isFinal)
pending += ParentFinalInheritanceError(parent, psym)
+ if (psym.hasDeprecatedInheritanceAnnotation) {
+ val suffix = psym.deprecatedInheritanceMessage map (": " + _) getOrElse ""
+ val msg = s"inheritance from ${psym.fullLocationString} is deprecated$suffix"
+ unit.deprecationWarning(parent.pos, msg)
+ }
+
if (psym.isSealed && !phase.erasedTypes)
if (context.unit.source.file == psym.sourceFile)
psym addChild context.owner
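The new parent check turns a deprecated-inheritance annotation on a superclass into a deprecation warning at every subclass, appending the annotation's message when one is given. Roughly, assuming the matching scala.deprecatedInheritance annotation:

    @deprecatedInheritance("extend Widget via composition instead", "2.10.0")
    class Widget

    class FancyWidget extends Widget   // warning: inheritance from class Widget is deprecated: extend Widget via composition instead

    object InheritanceDemo {
      def main(args: Array[String]): Unit = println(new FancyWidget)
    }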
@@ -1867,7 +1891,7 @@ trait Typers extends Modes with Adaptations with Tags {
* @param rhs ...
*/
def computeParamAliases(clazz: Symbol, vparamss: List[List[ValDef]], rhs: Tree) {
- log("computing param aliases for "+clazz+":"+clazz.primaryConstructor.tpe+":"+rhs)//debug
+ debuglog(s"computing param aliases for $clazz:${clazz.primaryConstructor.tpe}:$rhs")
def decompose(call: Tree): (Tree, List[Tree]) = call match {
case Apply(fn, args) =>
val (superConstr, args1) = decompose(fn)
@@ -1965,29 +1989,44 @@ trait Typers extends Modes with Adaptations with Tags {
* - the self-type of the refinement
* - a type member of the refinement
* - an abstract type declared outside of the refinement.
+ * - an instance of a value class
+ * Furthermore, the result type may not be a value class either
*/
- def checkMethodStructuralCompatible(meth: Symbol): Unit = {
- def fail(msg: String) = unit.error(meth.pos, msg)
+ def checkMethodStructuralCompatible(ddef: DefDef): Unit = {
+ val meth = ddef.symbol
+ def fail(pos: Position, msg: String) = unit.error(pos, msg)
val tp: Type = meth.tpe match {
case mt @ MethodType(_, _) => mt
case NullaryMethodType(restpe) => restpe // TODO_NMT: drop NullaryMethodType from resultType?
case PolyType(_, restpe) => restpe
case _ => NoType
}
+ def nthParamPos(n: Int) = ddef.vparamss match {
+ case xs :: _ if xs.length > n => xs(n).pos
+ case _ => meth.pos
+ }
+ def failStruct(pos: Position, what: String, where: String = "Parameter") =
+ fail(pos, s"$where type in structural refinement may not refer to $what")
- for (paramType <- tp.paramTypes) {
+ foreachWithIndex(tp.paramTypes) { (paramType, idx) =>
val sym = paramType.typeSymbol
+ def paramPos = nthParamPos(idx)
if (sym.isAbstractType) {
if (!sym.hasTransOwner(meth.owner))
- fail("Parameter type in structural refinement may not refer to an abstract type defined outside that refinement")
+ failStruct(paramPos, "an abstract type defined outside that refinement")
else if (!sym.hasTransOwner(meth))
- fail("Parameter type in structural refinement may not refer to a type member of that refinement")
+ failStruct(paramPos, "a type member of that refinement")
}
+ if (sym.isDerivedValueClass)
+ failStruct(paramPos, "a user-defined value class")
if (paramType.isInstanceOf[ThisType] && sym == meth.owner)
- fail("Parameter type in structural refinement may not refer to the type of that refinement (self type)")
+ failStruct(paramPos, "the type of that refinement (self type)")
}
+ if (tp.resultType.typeSymbol.isDerivedValueClass)
+ failStruct(ddef.tpt.pos, "a user-defined value class", where = "Result")
}
+
def typedUseCase(useCase: UseCase) {
def stringParser(str: String): syntaxAnalyzer.Parser = {
val file = new BatchSourceFile(context.unit.source.file, str) {
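With failStruct now pointing at the offending parameter or result type, a refinement like the commented-out one below is reported exactly where the user-defined value class appears. An illustrative fragment (hypothetical user code):

    import scala.language.reflectiveCalls

    class Meters(val underlying: Int) extends AnyVal

    object StructuralLimits {
      // rejected: "Parameter type in structural refinement may not refer to a user-defined value class"
      // def scaleWith(obj: { def scale(m: Meters): Meters }) = obj.scale(new Meters(2))

      // fine: only ordinary types appear in the refinement
      def nameOf(obj: { def name: String }): String = obj.name
      def main(args: Array[String]): Unit =
        println(nameOf(new { def name = "structural" }))
    }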
@@ -2104,7 +2143,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
if (meth.isStructuralRefinementMember)
- checkMethodStructuralCompatible(meth)
+ checkMethodStructuralCompatible(ddef)
if (meth.isImplicit && !meth.isSynthetic) meth.info.paramss match {
case List(param) :: _ if !param.isImplicit =>
@@ -2391,7 +2430,7 @@ trait Typers extends Modes with Adaptations with Tags {
else targs.init
def mkParams(methodSym: Symbol, formals: List[Type] = deriveFormals) =
- if (formals.isEmpty) { MissingParameterTypeAnonMatchError(tree, pt); Nil }
+ if (formals.isEmpty || !formals.forall(isFullyDefined)) { MissingParameterTypeAnonMatchError(tree, pt); Nil }
else methodSym newSyntheticValueParams formals
def mkSel(params: List[Symbol]) =
@@ -2472,7 +2511,7 @@ trait Typers extends Modes with Adaptations with Tags {
match_ setType B1.tpe
// the default uses applyOrElse's first parameter since the scrut's type has been widened
- val body = methodBodyTyper.virtualizedMatch(match_ addAttachment DefaultOverrideMatchAttachment(REF(default) APPLY (REF(x))), mode, B1.tpe)
+ val body = methodBodyTyper.virtualizedMatch(match_ updateAttachment DefaultOverrideMatchAttachment(REF(default) APPLY (REF(x))), mode, B1.tpe)
DefDef(methodSym, body)
}
@@ -2490,7 +2529,7 @@ trait Typers extends Modes with Adaptations with Tags {
methodSym setInfoAndEnter MethodType(paramSyms, BooleanClass.tpe)
val match_ = methodBodyTyper.typedMatch(gen.mkUnchecked(selector), casesTrue, mode, BooleanClass.tpe)
- val body = methodBodyTyper.virtualizedMatch(match_ addAttachment DefaultOverrideMatchAttachment(FALSE_typed), mode, BooleanClass.tpe)
+ val body = methodBodyTyper.virtualizedMatch(match_ updateAttachment DefaultOverrideMatchAttachment(FALSE_typed), mode, BooleanClass.tpe)
DefDef(methodSym, body)
}
@@ -2505,7 +2544,7 @@ trait Typers extends Modes with Adaptations with Tags {
def translated =
if (members.head eq EmptyTree) setError(tree)
- else typed(atPos(tree.pos)(Block(List(ClassDef(anonClass, NoMods, List(List()), List(List()), members, tree.pos.focus)), atPos(tree.pos.focus)(New(anonClass.tpe)))), mode, pt)
+ else typed(atPos(tree.pos)(Block(List(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, members, tree.pos.focus)), atPos(tree.pos.focus)(New(anonClass.tpe)))), mode, pt)
}
// Function(params, Match(sel, cases)) ==> new <Partial>Function { def apply<OrElse>(params) = `translateMatch('sel match { cases }')` }
@@ -2528,7 +2567,7 @@ trait Typers extends Modes with Adaptations with Tags {
* @param pt ...
* @return ...
*/
- def typedFunction(fun: Function, mode: Int, pt: Type): Tree = {
+ private def typedFunction(fun: Function, mode: Int, pt: Type): Tree = {
val numVparams = fun.vparams.length
if (numVparams > definitions.MaxFunctionArity)
return MaxFunctionArityError(fun)
@@ -2609,10 +2648,10 @@ trait Typers extends Modes with Adaptations with Tags {
val stats1 = typedStats(stats, NoSymbol)
// this code kicks in only after typer, so `stats` will never be filled in time
// as a result, most of compound type trees with non-empty stats will fail to reify
- // [Eugene++] todo. investigate whether something can be done about this
+ // todo. investigate whether something can be done about this
val att = templ.attachments.get[CompoundTypeTreeOriginalAttachment].getOrElse(CompoundTypeTreeOriginalAttachment(Nil, Nil))
templ.removeAttachment[CompoundTypeTreeOriginalAttachment]
- templ addAttachment att.copy(stats = stats1)
+ templ updateAttachment att.copy(stats = stats1)
for (stat <- stats1 if stat.isDef) {
val member = stat.symbol
if (!(context.owner.ancestors forall
@@ -2644,7 +2683,6 @@ trait Typers extends Modes with Adaptations with Tags {
def includesTargetPos(tree: Tree) =
tree.pos.isRange && context.unit.exists && (tree.pos includes context.unit.targetPos)
val localTarget = stats exists includesTargetPos
- val statsErrors = scala.collection.mutable.LinkedHashSet[AbsTypeError]()
def typedStat(stat: Tree): Tree = {
if (context.owner.isRefinementClass && !treeInfo.isDeclarationOrTypeDef(stat))
OnlyDeclarationsError(stat)
@@ -2663,7 +2701,6 @@ trait Typers extends Modes with Adaptations with Tags {
stat
} else {
val localTyper = if (inBlock || (stat.isDef && !stat.isInstanceOf[LabelDef])) {
- context.flushBuffer()
this
} else newTyper(context.make(stat, exprOwner))
// XXX this creates a spurious dead code warning if an exception is thrown
@@ -2680,7 +2717,6 @@ trait Typers extends Modes with Adaptations with Tags {
"a pure expression does nothing in statement position; " +
"you may be omitting necessary parentheses"
)
- statsErrors ++= localTyper.context.errBuffer
result
}
}
@@ -2742,12 +2778,18 @@ trait Typers extends Modes with Adaptations with Tags {
// this code by associating defaults and companion objects
// with the original tree instead of the new symbol.
def matches(stat: Tree, synt: Tree) = (stat, synt) match {
+ // synt is default arg for stat
case (DefDef(_, statName, _, _, _, _), DefDef(mods, syntName, _, _, _, _)) =>
mods.hasDefaultFlag && syntName.toString.startsWith(statName.toString)
+ // synt is companion module
case (ClassDef(_, className, _, _), ModuleDef(_, moduleName, _)) =>
className.toTermName == moduleName
+ // synt is implicit def for implicit class (#6278)
+ case (ClassDef(cmods, cname, _, _), DefDef(dmods, dname, _, _, _, _)) =>
+ cmods.isImplicit && dmods.isImplicit && cname.toTermName == dname
+
case _ => false
}
@@ -2763,12 +2805,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- val stats1 = withSavedContext(context) {
- val result = stats mapConserve typedStat
- context.flushBuffer()
- result
- }
- context.updateBuffer(statsErrors)
+ val stats1 = stats mapConserve typedStat
if (phase.erasedTypes) stats1
else {
checkNoDoubleDefs(stats1)
@@ -2859,73 +2896,79 @@ trait Typers extends Modes with Adaptations with Tags {
def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
- var fun = fun0
- if (fun.hasSymbol && fun.symbol.isOverloaded) {
- // remove alternatives with wrong number of parameters without looking at types.
- // less expensive than including them in inferMethodAlternatvie (see below).
- def shapeType(arg: Tree): Type = arg match {
- case Function(vparams, body) =>
- functionType(vparams map (vparam => AnyClass.tpe), shapeType(body))
- case AssignOrNamedArg(Ident(name), rhs) =>
- NamedType(name, shapeType(rhs))
- case _ =>
- NothingClass.tpe
- }
- val argtypes = args map shapeType
- val pre = fun.symbol.tpe.prefix
-
- var sym = fun.symbol filter { alt =>
- // must use pt as expected type, not WildcardType (a tempting quick fix to #2665)
- // now fixed by using isWeaklyCompatible in exprTypeArgs
- // TODO: understand why exactly -- some types were not inferred anymore (`ant clean quick.bin` failed)
- // (I had expected inferMethodAlternative to pick up the slack introduced by using WildcardType here)
- //
- // @PP responds: I changed it to pass WildcardType instead of pt and only one line in
- // trunk (excluding scalacheck, which had another) failed to compile. It was this line in
- // Types: "refs = Array(Map(), Map())". I determined that inference fails if there are at
- // least two invariant type parameters. See the test case I checked in to help backstop:
- // pos/isApplicableSafe.scala.
- isApplicableSafe(context.undetparams, followApply(pre.memberType(alt)), argtypes, pt)
- }
- if (sym.isOverloaded) {
- val sym1 = sym filter (alt => {
- // eliminate functions that would result from tupling transforms
- // keeps alternatives with repeated params
- hasExactlyNumParams(followApply(alt.tpe), argtypes.length) ||
- // also keep alts which define at least one default
- alt.tpe.paramss.exists(_.exists(_.hasDefault))
- })
- if (sym1 != NoSymbol) sym = sym1
- }
- if (sym != NoSymbol)
- fun = adapt(fun setSymbol sym setType pre.memberType(sym), forFunMode(mode), WildcardType)
+ def preSelectOverloaded(fun: Tree): Tree = {
+ if (fun.hasSymbol && fun.symbol.isOverloaded) {
+ // remove alternatives with wrong number of parameters without looking at types.
+        // less expensive than including them in inferMethodAlternative (see below).
+ def shapeType(arg: Tree): Type = arg match {
+ case Function(vparams, body) =>
+ functionType(vparams map (vparam => AnyClass.tpe), shapeType(body))
+ case AssignOrNamedArg(Ident(name), rhs) =>
+ NamedType(name, shapeType(rhs))
+ case _ =>
+ NothingClass.tpe
+ }
+ val argtypes = args map shapeType
+ val pre = fun.symbol.tpe.prefix
+
+ var sym = fun.symbol filter { alt =>
+ // must use pt as expected type, not WildcardType (a tempting quick fix to #2665)
+ // now fixed by using isWeaklyCompatible in exprTypeArgs
+ // TODO: understand why exactly -- some types were not inferred anymore (`ant clean quick.bin` failed)
+ // (I had expected inferMethodAlternative to pick up the slack introduced by using WildcardType here)
+ //
+ // @PP responds: I changed it to pass WildcardType instead of pt and only one line in
+ // trunk (excluding scalacheck, which had another) failed to compile. It was this line in
+ // Types: "refs = Array(Map(), Map())". I determined that inference fails if there are at
+ // least two invariant type parameters. See the test case I checked in to help backstop:
+ // pos/isApplicableSafe.scala.
+ isApplicableSafe(context.undetparams, followApply(pre.memberType(alt)), argtypes, pt)
+ }
+ if (sym.isOverloaded) {
+ val sym1 = sym filter (alt => {
+ // eliminate functions that would result from tupling transforms
+ // keeps alternatives with repeated params
+ hasExactlyNumParams(followApply(alt.tpe), argtypes.length) ||
+ // also keep alts which define at least one default
+ alt.tpe.paramss.exists(_.exists(_.hasDefault))
+ })
+ if (sym1 != NoSymbol) sym = sym1
+ }
+ if (sym == NoSymbol) fun
+ else adapt(fun setSymbol sym setType pre.memberType(sym), forFunMode(mode), WildcardType)
+ } else fun
}
+ val fun = preSelectOverloaded(fun0)
+
fun.tpe match {
case OverloadedType(pre, alts) =>
- val undetparams = context.extractUndetparams()
-
- val argtpes = new ListBuffer[Type]
- val amode = forArgMode(fun, mode)
- val args1 = args map {
- case arg @ AssignOrNamedArg(Ident(name), rhs) =>
- // named args: only type the righthand sides ("unknown identifier" errors otherwise)
- val rhs1 = typedArg(rhs, amode, BYVALmode, WildcardType)
- argtpes += NamedType(name, rhs1.tpe.deconst)
- // the assign is untyped; that's ok because we call doTypedApply
- atPos(arg.pos) { new AssignOrNamedArg(arg.lhs , rhs1) }
- case arg =>
- val arg1 = typedArg(arg, amode, BYVALmode, WildcardType)
- argtpes += arg1.tpe.deconst
- arg1
- }
- context.undetparams = undetparams
- if (context.hasErrors)
- setError(tree)
- else {
- inferMethodAlternative(fun, undetparams, argtpes.toList, pt, varArgsOnly = treeInfo.isWildcardStarArgList(args))
- doTypedApply(tree, adapt(fun, forFunMode(mode), WildcardType), args1, mode, pt)
+ def handleOverloaded = {
+ val undetparams = context.extractUndetparams()
+
+ val argtpes = new ListBuffer[Type]
+ val amode = forArgMode(fun, mode)
+ val args1 = args map {
+ case arg @ AssignOrNamedArg(Ident(name), rhs) =>
+ // named args: only type the righthand sides ("unknown identifier" errors otherwise)
+ val rhs1 = typedArg(rhs, amode, BYVALmode, WildcardType)
+ argtpes += NamedType(name, rhs1.tpe.deconst)
+ // the assign is untyped; that's ok because we call doTypedApply
+ atPos(arg.pos) { new AssignOrNamedArg(arg.lhs, rhs1) }
+ case arg =>
+ val arg1 = typedArg(arg, amode, BYVALmode, WildcardType)
+ argtpes += arg1.tpe.deconst
+ arg1
+ }
+ context.undetparams = undetparams
+ if (context.hasErrors)
+ setError(tree)
+ else {
+ inferMethodAlternative(fun, undetparams, argtpes.toList, pt, varArgsOnly = treeInfo.isWildcardStarArgList(args))
+ doTypedApply(tree, adapt(fun, forFunMode(mode), WildcardType), args1, mode, pt)
+ }
}
+ handleOverloaded
case mt @ MethodType(params, _) =>
val paramTypes = mt.paramTypes
@@ -3046,89 +3089,107 @@ trait Typers extends Modes with Adaptations with Tags {
} else {
val tparams = context.extractUndetparams()
if (tparams.isEmpty) { // all type params are defined
- // In order for checkDead not to be misled by the unfortunate special
- // case of AnyRef#synchronized (which is implemented with signature T => T
- // but behaves as if it were (=> T) => T) we need to know what is the actual
- // target of a call. Since this information is no longer available from
- // typedArg, it is recorded here.
- checkDead.updateExpr(fun)
- val args1 = typedArgs(args, forArgMode(fun, mode), paramTypes, formals)
- // instantiate dependent method types, must preserve singleton types where possible (stableTypeFor) -- example use case:
- // val foo = "foo"; def precise(x: String)(y: x.type): x.type = {...}; val bar : foo.type = precise(foo)(foo)
- // precise(foo) : foo.type => foo.type
- val restpe = mt.resultType(args1 map (arg => gen.stableTypeFor(arg) getOrElse arg.tpe))
- def ifPatternSkipFormals(tp: Type) = tp match {
- case MethodType(_, rtp) if (inPatternMode(mode)) => rtp
- case _ => tp
- }
+ def handleMonomorphicCall: Tree = {
+ // In order for checkDead not to be misled by the unfortunate special
+ // case of AnyRef#synchronized (which is implemented with signature T => T
+ // but behaves as if it were (=> T) => T) we need to know what is the actual
+ // target of a call. Since this information is no longer available from
+ // typedArg, it is recorded here.
+ checkDead.updateExpr(fun)
+
+ val args1 =
+ // no expected type when jumping to a match label -- anything goes (this is ok since we're typing the translation of well-typed code)
+ // ... except during erasure: we must take the expected type into account as it drives the insertion of casts!
+ // I've exhausted all other semi-clean approaches I could think of in balancing GADT magic, SI-6145, CPS type-driven transforms and other existential trickiness
+ // (the right thing to do -- packing existential types -- runs into limitations in subtyping existential types,
+ // casting breaks SI-6145,
+ // not casting breaks GADT typing as it requires sneaking ill-typed trees past typer)
+ if (!phase.erasedTypes && fun.symbol.isLabel && treeInfo.isSynthCaseSymbol(fun.symbol))
+ typedArgs(args, forArgMode(fun, mode))
+ else
+ typedArgs(args, forArgMode(fun, mode), paramTypes, formals)
+
+ // instantiate dependent method types, must preserve singleton types where possible (stableTypeFor) -- example use case:
+ // val foo = "foo"; def precise(x: String)(y: x.type): x.type = {...}; val bar : foo.type = precise(foo)(foo)
+ // precise(foo) : foo.type => foo.type
+ val restpe = mt.resultType(args1 map (arg => gen.stableTypeFor(arg) getOrElse arg.tpe))
+ def ifPatternSkipFormals(tp: Type) = tp match {
+ case MethodType(_, rtp) if (inPatternMode(mode)) => rtp
+ case _ => tp
+ }
- // Replace the Delegate-Chainer methods += and -= with corresponding
- // + and - calls, which are translated in the code generator into
- // Combine and Remove
- if (forMSIL) {
- fun match {
- case Select(qual, name) =>
- if (isSubType(qual.tpe, DelegateClass.tpe)
- && (name == encode("+=") || name == encode("-=")))
- {
- val n = if (name == encode("+=")) nme.PLUS else nme.MINUS
- val f = Select(qual, n)
- // the compiler thinks, the PLUS method takes only one argument,
- // but he thinks it's an instance method -> still two ref's on the stack
- // -> translated by backend
- val rhs = treeCopy.Apply(tree, f, args)
- return typed(Assign(qual, rhs))
- }
- case _ => ()
+ // Replace the Delegate-Chainer methods += and -= with corresponding
+ // + and - calls, which are translated in the code generator into
+ // Combine and Remove
+ if (forMSIL) {
+ fun match {
+ case Select(qual, name) =>
+ if (isSubType(qual.tpe, DelegateClass.tpe)
+ && (name == encode("+=") || name == encode("-="))) {
+ val n = if (name == encode("+=")) nme.PLUS else nme.MINUS
+ val f = Select(qual, n)
+ // the compiler thinks, the PLUS method takes only one argument,
+ // but he thinks it's an instance method -> still two ref's on the stack
+ // -> translated by backend
+ val rhs = treeCopy.Apply(tree, f, args)
+ return typed(Assign(qual, rhs))
+ }
+ case _ => ()
+ }
}
- }
- /** This is translating uses of List() into Nil. This is less
- * than ideal from a consistency standpoint, but it shouldn't be
- * altered without due caution.
- * ... this also causes bootstrapping cycles if List_apply is
- * forced during kind-arity checking, so it is guarded by additional
- * tests to ensure we're sufficiently far along.
- */
- if (args.isEmpty && !forInteractive && fun.symbol.isInitialized && ListModule.hasCompleteInfo && (fun.symbol == List_apply))
- atPos(tree.pos)(gen.mkNil setType restpe)
- else
- constfold(treeCopy.Apply(tree, fun, args1) setType ifPatternSkipFormals(restpe))
+ /**
+ * This is translating uses of List() into Nil. This is less
+ * than ideal from a consistency standpoint, but it shouldn't be
+ * altered without due caution.
+ * ... this also causes bootstrapping cycles if List_apply is
+ * forced during kind-arity checking, so it is guarded by additional
+ * tests to ensure we're sufficiently far along.
+ */
+ if (args.isEmpty && !forInteractive && fun.symbol.isInitialized && ListModule.hasCompleteInfo && (fun.symbol == List_apply))
+ atPos(tree.pos)(gen.mkNil setType restpe)
+ else
+ constfold(treeCopy.Apply(tree, fun, args1) setType ifPatternSkipFormals(restpe))
+ }
+ handleMonomorphicCall
} else if (needsInstantiation(tparams, formals, args)) {
//println("needs inst "+fun+" "+tparams+"/"+(tparams map (_.info)))
inferExprInstance(fun, tparams)
doTypedApply(tree, fun, args, mode, pt)
} else {
- assert(!inPatternMode(mode), modeString(mode)) // this case cannot arise for patterns
- val lenientTargs = protoTypeArgs(tparams, formals, mt.resultApprox, pt)
- val strictTargs = map2(lenientTargs, tparams)((targ, tparam) =>
- if (targ == WildcardType) tparam.tpeHK else targ)
- var remainingParams = paramTypes
- def typedArgToPoly(arg: Tree, formal: Type): Tree = { //TR TODO: cleanup
- val lenientPt = formal.instantiateTypeParams(tparams, lenientTargs)
- val newmode =
- if (isByNameParamType(remainingParams.head)) POLYmode
- else POLYmode | BYVALmode
- if (remainingParams.tail.nonEmpty) remainingParams = remainingParams.tail
- val arg1 = typedArg(arg, forArgMode(fun, mode), newmode, lenientPt)
- val argtparams = context.extractUndetparams()
- if (!argtparams.isEmpty) {
- val strictPt = formal.instantiateTypeParams(tparams, strictTargs)
- inferArgumentInstance(arg1, argtparams, strictPt, lenientPt)
- arg1
- } else arg1
- }
- val args1 = map2(args, formals)(typedArgToPoly)
- if (args1 exists {_.isErrorTyped}) duplErrTree
- else {
- debuglog("infer method inst "+fun+", tparams = "+tparams+", args = "+args1.map(_.tpe)+", pt = "+pt+", lobounds = "+tparams.map(_.tpe.bounds.lo)+", parambounds = "+tparams.map(_.info)) //debug
- // define the undetparams which have been fixed by this param list, replace the corresponding symbols in "fun"
- // returns those undetparams which have not been instantiated.
- val undetparams = inferMethodInstance(fun, tparams, args1, pt)
- val result = doTypedApply(tree, fun, args1, mode, pt)
- context.undetparams = undetparams
- result
+ def handlePolymorphicCall = {
+ assert(!inPatternMode(mode), modeString(mode)) // this case cannot arise for patterns
+ val lenientTargs = protoTypeArgs(tparams, formals, mt.resultApprox, pt)
+ val strictTargs = map2(lenientTargs, tparams)((targ, tparam) =>
+ if (targ == WildcardType) tparam.tpeHK else targ)
+ var remainingParams = paramTypes
+ def typedArgToPoly(arg: Tree, formal: Type): Tree = { //TR TODO: cleanup
+ val lenientPt = formal.instantiateTypeParams(tparams, lenientTargs)
+ val newmode =
+ if (isByNameParamType(remainingParams.head)) POLYmode
+ else POLYmode | BYVALmode
+ if (remainingParams.tail.nonEmpty) remainingParams = remainingParams.tail
+ val arg1 = typedArg(arg, forArgMode(fun, mode), newmode, lenientPt)
+ val argtparams = context.extractUndetparams()
+ if (!argtparams.isEmpty) {
+ val strictPt = formal.instantiateTypeParams(tparams, strictTargs)
+ inferArgumentInstance(arg1, argtparams, strictPt, lenientPt)
+ arg1
+ } else arg1
+ }
+ val args1 = map2(args, formals)(typedArgToPoly)
+ if (args1 exists { _.isErrorTyped }) duplErrTree
+ else {
+ debuglog("infer method inst " + fun + ", tparams = " + tparams + ", args = " + args1.map(_.tpe) + ", pt = " + pt + ", lobounds = " + tparams.map(_.tpe.bounds.lo) + ", parambounds = " + tparams.map(_.info)) //debug
+ // define the undetparams which have been fixed by this param list, replace the corresponding symbols in "fun"
+ // returns those undetparams which have not been instantiated.
+ val undetparams = inferMethodInstance(fun, tparams, args1, pt)
+ val result = doTypedApply(tree, fun, args1, mode, pt)
+ context.undetparams = undetparams
+ result
+ }
}
+ handlePolymorphicCall
}
}
@@ -3205,7 +3266,7 @@ trait Typers extends Modes with Adaptations with Tags {
val nbSubPats = args.length
val (formals, formalsExpanded) = extractorFormalTypes(resTp, nbSubPats, fun1.symbol)
- if (formals == null) duplErrorTree(WrongNumberArgsPatternError(tree, fun))
+ if (formals == null) duplErrorTree(WrongNumberOfArgsError(tree, fun))
else {
val args1 = typedArgs(args, mode, formals, formalsExpanded)
// This used to be the following (failing) assert:
@@ -3381,7 +3442,7 @@ trait Typers extends Modes with Adaptations with Tags {
else argss.head
val annScope = annType.decls
.filter(sym => sym.isMethod && !sym.isConstructor && sym.isJavaDefined)
- val names = new collection.mutable.HashSet[Symbol]
+ val names = new scala.collection.mutable.HashSet[Symbol]
names ++= (if (isJava) annScope.iterator
else typedFun.tpe.params.iterator)
val nvPairs = args map {
@@ -3412,7 +3473,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
if (hasError) annotationError
- else AnnotationInfo(annType, List(), nvPairs map {p => (p._1.asInstanceOf[Name], p._2.get)}).setOriginal(Apply(typedFun, args).setPos(ann.pos)) // [Eugene+] why do we need this cast?
+ else AnnotationInfo(annType, List(), nvPairs map {p => (p._1, p._2.get)}).setOriginal(Apply(typedFun, args).setPos(ann.pos))
}
} else if (requireJava) {
reportAnnotationError(NestedAnnotationError(ann, annType))
@@ -3569,9 +3630,9 @@ trait Typers extends Modes with Adaptations with Tags {
def isCapturedExistential(sym: Symbol) =
(sym hasAllFlags (EXISTENTIAL | CAPTURED)) && {
- val start = Statistics.startTimer(isReferencedNanos)
+ val start = if (Statistics.canEnable) Statistics.startTimer(isReferencedNanos) else null
try !isReferencedFrom(context, sym)
- finally Statistics.stopTimer(isReferencedNanos, start)
+ finally if (Statistics.canEnable) Statistics.stopTimer(isReferencedNanos, start)
}
def packCaptured(tpe: Type): Type = {
@@ -3595,8 +3656,8 @@ trait Typers extends Modes with Adaptations with Tags {
while (o != owner && o != NoSymbol && !o.hasPackageFlag) o = o.owner
o == owner && !isVisibleParameter(sym)
}
- var localSyms = collection.immutable.Set[Symbol]()
- var boundSyms = collection.immutable.Set[Symbol]()
+ var localSyms = scala.collection.immutable.Set[Symbol]()
+ var boundSyms = scala.collection.immutable.Set[Symbol]()
def isLocal(sym: Symbol): Boolean =
if (sym == NoSymbol || sym.isRefinementClass || sym.isLocalDummy) false
else if (owner == NoSymbol) tree exists (defines(_, sym))
@@ -3769,7 +3830,8 @@ trait Typers extends Modes with Adaptations with Tags {
case AssignOrNamedArg(Ident(name), rhs) => gen.mkTuple(List(CODE.LIT(name.toString), rhs))
case _ => gen.mkTuple(List(CODE.LIT(""), arg))
}
- typed(treeCopy.Apply(orig, fun, args map argToBinding), mode, pt)
+ val t = treeCopy.Apply(orig, fun, args map argToBinding)
+ wrapErrors(t, _.typed(t, mode, pt))
}
/** Translate selection that does not typecheck according to the normal rules into a selectDynamic/applyDynamic.
@@ -3791,25 +3853,34 @@ trait Typers extends Modes with Adaptations with Tags {
* - simplest solution: have two method calls
*
*/
- def mkInvoke(cxTree: Tree, tree: Tree, qual: Tree, name: Name): Option[Tree] =
+ def mkInvoke(cxTree: Tree, tree: Tree, qual: Tree, name: Name): Option[Tree] = {
+ debuglog(s"mkInvoke($cxTree, $tree, $qual, $name)")
acceptsApplyDynamicWithType(qual, name) map { tp =>
// tp eq NoType => can call xxxDynamic, but not passing any type args (unless specified explicitly by the user)
// in scala-virtualized, when not NoType, tp is passed as type argument (for selection on a staged Struct)
- // strip off type application -- we're not doing much with outer, so don't bother preserving cxTree's attributes etc
- val (outer, explicitTargs) = cxTree match {
- case TypeApply(fun, targs) => (fun, targs)
- case Apply(TypeApply(fun, targs), args) => (Apply(fun, args), targs)
- case t => (t, Nil)
+ // strip off type application -- we're not doing much with outer,
+ // so don't bother preserving cxTree's attributes etc
+ val cxTree1 = cxTree match {
+ case t: ValOrDefDef => t.rhs
+ case t => t
}
+ val (outer, explicitTargs) = cxTree1 match {
+ case TypeApply(fun, targs) => (fun, targs)
+ case Apply(TypeApply(fun, targs), args) => (Apply(fun, args), targs)
+ case t => (t, Nil)
+ }
+ def hasNamedArg(as: List[Tree]) = as.collectFirst{case AssignOrNamedArg(lhs, rhs) =>}.nonEmpty
- @inline def hasNamedArg(as: List[Tree]) = as.collectFirst{case AssignOrNamedArg(lhs, rhs) =>}.nonEmpty
-
+ def desugaredApply = tree match {
+ case Select(`qual`, nme.apply) => true
+ case _ => false
+ }
// note: context.tree includes at most one Apply node
// thus, we can't use it to detect we're going to receive named args in expressions such as:
// qual.sel(a)(a2, arg2 = "a2")
val oper = outer match {
- case Apply(`tree`, as) =>
+ case Apply(q, as) if q == tree || desugaredApply =>
val oper =
if (hasNamedArg(as)) nme.applyDynamicNamed
else nme.applyDynamic
@@ -3827,10 +3898,18 @@ trait Typers extends Modes with Adaptations with Tags {
atPos(qual.pos)(Apply(tappSel, List(Literal(Constant(name.decode)))))
}
+ }
+
+ def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree = {
+ silent(typeTree) match {
+ case SilentResultValue(r) => r
+ case SilentTypeError(err) => DynamicRewriteError(tree, err)
+ }
+ }
}
- @inline final def deindentTyping() = context.typingIndentLevel -= 2
- @inline final def indentTyping() = context.typingIndentLevel += 2
+ final def deindentTyping() = context.typingIndentLevel -= 2
+ final def indentTyping() = context.typingIndentLevel += 2
@inline final def printTyping(s: => String) = {
if (printTypings)
println(context.typingIndent + s.replaceAll("\n", "\n" + context.typingIndent))
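For context on what mkInvoke and wrapErrors feed: selections and applications that fail to type on a scala.Dynamic receiver are rewritten to the conventional selectDynamic/applyDynamic/applyDynamicNamed/updateDynamic calls. A minimal receiver exercising those entry points (illustrative, not library code):

    import scala.language.dynamics
    import scala.collection.mutable

    class Settings extends Dynamic {
      private val data = mutable.Map.empty[String, Any]
      def selectDynamic(name: String): Any = data(name)                       // s.verbose      ~> s.selectDynamic("verbose")
      def updateDynamic(name: String)(value: Any): Unit = data(name) = value  // s.verbose = v  ~> s.updateDynamic("verbose")(v)
      def applyDynamic(name: String)(args: Any*): Any =                       // s.run(1, 2)    ~> s.applyDynamic("run")(1, 2)
        s"$name(${args.mkString(",")})"
      def applyDynamicNamed(name: String)(args: (String, Any)*): Any =        // s.run(x = 1)   ~> s.applyDynamicNamed("run")(("x", 1))
        args.toMap
    }

    object DynamicDemo {
      def main(args: Array[String]): Unit = {
        val s = new Settings
        s.verbose = true
        println(s.verbose)
        println(s.run(1, 2))
        println(s.run(retries = 3))
      }
    }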
@@ -3851,7 +3930,9 @@ trait Typers extends Modes with Adaptations with Tags {
case _ => NoType
}
- def typedAnnotated(ann: Tree, arg1: Tree): Tree = {
+ def typedAnnotated(atd: Annotated): Tree = {
+ val ann = atd.annot
+ val arg1 = typed(atd.arg, mode, pt)
/** mode for typing the annotation itself */
val annotMode = mode & ~TYPEmode | EXPRmode
@@ -3930,7 +4011,9 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- def typedBind(name: Name, body: Tree) =
+ def typedBind(tree: Bind) = {
+ val name = tree.name
+ val body = tree.body
name match {
case name: TypeName => assert(body == EmptyTree, context.unit + " typedBind: " + name.debugString + " " + body + " " + body.getClass)
val sym =
@@ -3974,11 +4057,11 @@ trait Typers extends Modes with Adaptations with Tags {
tree setSymbol sym
treeCopy.Bind(tree, name, body1) setSymbol sym setType body1.tpe
}
+ }
-
- def typedArrayValue(elemtpt: Tree, elems: List[Tree]) = {
- val elemtpt1 = typedType(elemtpt, mode)
- val elems1 = elems mapConserve (elem => typed(elem, mode, elemtpt1.tpe))
+ def typedArrayValue(tree: ArrayValue) = {
+ val elemtpt1 = typedType(tree.elemtpt, mode)
+ val elems1 = tree.elems mapConserve (elem => typed(elem, mode, elemtpt1.tpe))
treeCopy.ArrayValue(tree, elemtpt1, elems1)
.setType(
(if (isFullyDefined(pt) && !phase.erasedTypes) pt
@@ -4016,22 +4099,25 @@ trait Typers extends Modes with Adaptations with Tags {
}
else if(dyna.isDynamicallyUpdatable(lhs1)) {
val rhs1 = typed(rhs, EXPRmode | BYVALmode, WildcardType)
- typed1(Apply(lhs1, List(rhs1)), mode, pt)
+ val t = Apply(lhs1, List(rhs1))
+ dyna.wrapErrors(t, _.typed1(t, mode, pt))
}
else fail()
}
- def typedIf(cond: Tree, thenp: Tree, elsep: Tree) = {
- val cond1 = checkDead(typed(cond, EXPRmode | BYVALmode, BooleanClass.tpe))
+ def typedIf(tree: If) = {
+ val cond1 = checkDead(typed(tree.cond, EXPRmode | BYVALmode, BooleanClass.tpe))
+ val thenp = tree.thenp
+ val elsep = tree.elsep
if (elsep.isEmpty) { // in the future, should be unnecessary
val thenp1 = typed(thenp, UnitClass.tpe)
treeCopy.If(tree, cond1, thenp1, elsep) setType thenp1.tpe
} else {
var thenp1 = typed(thenp, pt)
var elsep1 = typed(elsep, pt)
+ def thenTp = packedType(thenp1, context.owner)
+ def elseTp = packedType(elsep1, context.owner)
- lazy val thenTp = packedType(thenp1, context.owner)
- lazy val elseTp = packedType(elsep1, context.owner)
// println("typedIf: "+(thenp1.tpe, elsep1.tpe, ptOrLub(List(thenp1.tpe, elsep1.tpe)),"\n", thenTp, elseTp, thenTp =:= elseTp))
val (owntype, needAdapt) =
// in principle we should pack the types of each branch before lubbing, but lub doesn't really work for existentials anyway
@@ -4041,9 +4127,9 @@ trait Typers extends Modes with Adaptations with Tags {
if ( opt.virtPatmat && !isPastTyper
             && thenp1.tpe.annotations.isEmpty && elsep1.tpe.annotations.isEmpty // annotated types need to be lubbed regardless (at least, continuations break if you bypass them like this)
&& thenTp =:= elseTp
- ) (thenp1.tpe, false) // use unpacked type
+ ) (thenp1.tpe.deconst, false) // use unpacked type. Important to deconst, as is done in ptOrLub, otherwise `if (???) 0 else 0` evaluates to 0 (SI-6331)
// TODO: skolemize (lub of packed types) when that no longer crashes on files/pos/t4070b.scala
- else ptOrLub(List(thenp1.tpe, elsep1.tpe), pt)
+ else ptOrLub(thenp1.tpe :: elsep1.tpe :: Nil, pt)
if (needAdapt) { //isNumericValueType(owntype)) {
thenp1 = adapt(thenp1, mode, owntype)
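The deconst call matters for exactly the case the comment cites: without it both branches of if (???) 0 else 0 carry the constant type Int(0), so the whole conditional can be folded to 0 even though the condition diverges (SI-6331). A tiny reproduction (illustrative):

    object DeconstDemo {
      // with the fix this throws NotImplementedError at runtime;
      // before SI-6331 was fixed the expression could be folded to the constant 0
      def zeroOrZero(): Int = if (???) 0 else 0
      def main(args: Array[String]): Unit =
        try println(zeroOrZero())
        catch { case _: NotImplementedError => println("condition was evaluated, as expected") }
    }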
@@ -4056,7 +4142,9 @@ trait Typers extends Modes with Adaptations with Tags {
// under -Xexperimental (and not -Xoldpatmat), and when there's a suitable __match in scope, virtualize the pattern match
// otherwise, type the Match and leave it until phase `patmat` (immediately after typer)
// empty-selector matches are transformed into synthetic PartialFunction implementations when the expected type demands it
- def typedVirtualizedMatch(tree: Tree, selector: Tree, cases: List[CaseDef]): Tree =
+ def typedVirtualizedMatch(tree: Match): Tree = {
+ val selector = tree.selector
+ val cases = tree.cases
if (selector == EmptyTree) {
if (newPatternMatching && (pt.typeSymbol == PartialFunctionClass)) (new MatchFunTyper(tree, cases, mode, pt)).translated
else {
@@ -4073,8 +4161,10 @@ trait Typers extends Modes with Adaptations with Tags {
}
} else
virtualizedMatch(typedMatch(selector, cases, mode, pt, tree), mode, pt)
+ }
- def typedReturn(expr: Tree) = {
+ def typedReturn(tree: Return) = {
+ val expr = tree.expr
val enclMethod = context.enclMethod
if (enclMethod == NoContext ||
enclMethod.owner.isConstructor ||
@@ -4102,7 +4192,8 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- def typedNew(tpt: Tree) = {
+ def typedNew(tree: New) = {
+ val tpt = tree.tpt
val tpt1 = {
val tpt0 = typedTypeConstructor(tpt)
if (checkStablePrefixClassType(tpt0))
@@ -4210,10 +4301,10 @@ trait Typers extends Modes with Adaptations with Tags {
* insert an implicit conversion.
*/
def tryTypedApply(fun: Tree, args: List[Tree]): Tree = {
- val start = Statistics.startTimer(failedApplyNanos)
+ val start = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null
def onError(typeError: AbsTypeError): Tree = {
- Statistics.stopTimer(failedApplyNanos, start)
+ if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, start)
// If the problem is with raw types, convert to existentials and try again.
// See #4712 for a case where this situation arises,
@@ -4269,15 +4360,15 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- def typedApply(fun: Tree, args: List[Tree]) = {
+ def normalTypedApply(tree: Tree, fun: Tree, args: List[Tree]) = {
val stableApplication = (fun.symbol ne null) && fun.symbol.isMethod && fun.symbol.isStable
if (stableApplication && isPatternMode) {
// treat stable function applications f() as expressions.
typed1(tree, mode & ~PATTERNmode | EXPRmode, pt)
} else {
val funpt = if (isPatternMode) pt else WildcardType
- val appStart = Statistics.startTimer(failedApplyNanos)
- val opeqStart = Statistics.startTimer(failedOpEqNanos)
+ val appStart = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null
+ val opeqStart = if (Statistics.canEnable) Statistics.startTimer(failedOpEqNanos) else null
def onError(reportError: => Tree): Tree = {
fun match {
@@ -4285,14 +4376,14 @@ trait Typers extends Modes with Adaptations with Tags {
if !isPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) =>
val qual1 = typedQualifier(qual)
if (treeInfo.isVariableOrGetter(qual1)) {
- Statistics.stopTimer(failedOpEqNanos, opeqStart)
+ if (Statistics.canEnable) Statistics.stopTimer(failedOpEqNanos, opeqStart)
convertToAssignment(fun, qual1, name, args)
} else {
- Statistics.stopTimer(failedApplyNanos, appStart)
+ if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
reportError
}
case _ =>
- Statistics.stopTimer(failedApplyNanos, appStart)
+ if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
reportError
}
}
@@ -4301,7 +4392,7 @@ trait Typers extends Modes with Adaptations with Tags {
if ((mode & EXPRmode) != 0) tree else context.tree) match {
case SilentResultValue(fun1) =>
val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1
- Statistics.incCounter(typedApplyCount)
+ if (Statistics.canEnable) Statistics.incCounter(typedApplyCount)
def isImplicitMethod(tpe: Type) = tpe match {
case mt: MethodType => mt.isImplicit
case _ => false
@@ -4340,6 +4431,38 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
+ def typedApply(tree: Apply) = {
+ val fun = tree.fun
+ val args = tree.args
+ fun match {
+ case Block(stats, expr) =>
+ typed1(atPos(tree.pos)(Block(stats, Apply(expr, args) setPos tree.pos.makeTransparent)), mode, pt)
+ case _ =>
+ normalTypedApply(tree, fun, args) match {
+ case Apply(Select(New(tpt), name), args)
+ if (tpt.tpe != null &&
+ tpt.tpe.typeSymbol == ArrayClass &&
+ args.length == 1 &&
+ erasure.GenericArray.unapply(tpt.tpe).isDefined) => // !!! todo simplify by using extractor
+ // convert new Array[T](len) to evidence[ClassTag[T]].newArray(len)
+ // convert new Array^N[T](len) for N > 1 to evidence[ClassTag[Array[...Array[T]...]]].newArray(len), where Array HK gets applied (N-1) times
+ // [Eugene] no more MaxArrayDims. ClassTags are flexible enough to allow creation of arrays of arbitrary dimensionality (w.r.t JVM restrictions)
+ val Some((level, componentType)) = erasure.GenericArray.unapply(tpt.tpe)
+ val tagType = List.iterate(componentType, level)(tpe => appliedType(ArrayClass.toTypeConstructor, List(tpe))).last
+ val newArrayApp = atPos(tree.pos) {
+ val tag = resolveClassTag(tree.pos, tagType)
+ if (tag.isEmpty) MissingClassTagError(tree, tagType)
+ else new ApplyToImplicitArgs(Select(tag, nme.newArray), args)
+ }
+ typed(newArrayApp, mode, pt)
+ case Apply(Select(fun, nme.apply), _) if treeInfo.isSuperConstrCall(fun) => //SI-5696
+ TooManyArgumentListsForConstructor(tree)
+ case tree1 =>
+ tree1
+ }
+ }
+ }
+
def convertToAssignment(fun: Tree, qual: Tree, name: Name, args: List[Tree]): Tree = {
val prefix = name.toTermName stripSuffix nme.EQL
def mkAssign(vble: Tree): Tree =
@@ -4385,8 +4508,9 @@ trait Typers extends Modes with Adaptations with Tags {
typed1(tree1, mode, pt)
}
- def typedSuper(qual: Tree, mix: TypeName) = {
- val qual1 = typed(qual)
+ def typedSuper(tree: Super) = {
+ val mix = tree.mix
+ val qual1 = typed(tree.qual)
val clazz = qual1 match {
case This(_) => qual1.symbol
@@ -4429,12 +4553,13 @@ trait Typers extends Modes with Adaptations with Tags {
treeCopy.Super(tree, qual1, mix) setType SuperType(clazz.thisType, owntype)
}
- def typedThis(qual: Name) = tree.symbol orElse qualifyingClass(tree, qual, packageOK = false) match {
- case NoSymbol => tree
- case clazz =>
- tree setSymbol clazz setType clazz.thisType.underlying
- if (isStableContext(tree, mode, pt)) tree setType clazz.thisType else tree
- }
+ def typedThis(tree: This) =
+ tree.symbol orElse qualifyingClass(tree, tree.qual, packageOK = false) match {
+ case NoSymbol => tree
+ case clazz =>
+ tree setSymbol clazz setType clazz.thisType.underlying
+ if (isStableContext(tree, mode, pt)) tree setType clazz.thisType else tree
+ }
/** Attribute a selection where <code>tree</code> is <code>qual.name</code>.
* <code>qual</code> is already attributed.
@@ -4443,8 +4568,10 @@ trait Typers extends Modes with Adaptations with Tags {
* @param name ...
* @return ...
*/
- def typedSelect(qual: Tree, name: Name): Tree = {
- def asDynamicCall = dyna.mkInvoke(context.tree, tree, qual, name) map (typed1(_, mode, pt))
+ def typedSelect(tree: Tree, qual: Tree, name: Name): Tree = {
+ def asDynamicCall = dyna.mkInvoke(context.tree, tree, qual, name) map { t =>
+ dyna.wrapErrors(t, (_.typed1(t, mode, pt)))
+ }
val sym = tree.symbol orElse member(qual, name) orElse {
// symbol not found? --> try to convert implicitly to a type that does have the required
@@ -4461,41 +4588,43 @@ trait Typers extends Modes with Adaptations with Tags {
qual.tpe = tree.symbol.owner.tpe
if (!reallyExists(sym)) {
- if (context.owner.enclosingTopLevelClass.isJavaDefined && name.isTypeName) {
- val tree1 = atPos(tree.pos) { gen.convertToSelectFromType(qual, name) }
- if (tree1 != EmptyTree) return typed1(tree1, mode, pt)
- }
+ def handleMissing: Tree = {
+ if (context.owner.enclosingTopLevelClass.isJavaDefined && name.isTypeName) {
+ val tree1 = atPos(tree.pos) { gen.convertToSelectFromType(qual, name) }
+ if (tree1 != EmptyTree) return typed1(tree1, mode, pt)
+ }
- // try to expand according to Dynamic rules.
- asDynamicCall foreach (x => return x)
+ // try to expand according to Dynamic rules.
+ asDynamicCall foreach (x => return x)
- debuglog(
- "qual = "+qual+":"+qual.tpe+
- "\nSymbol="+qual.tpe.termSymbol+"\nsymbol-info = "+qual.tpe.termSymbol.info+
- "\nscope-id = "+qual.tpe.termSymbol.info.decls.hashCode()+"\nmembers = "+qual.tpe.members+
- "\nname = "+name+"\nfound = "+sym+"\nowner = "+context.enclClass.owner
- )
+ debuglog(
+ "qual = " + qual + ":" + qual.tpe +
+ "\nSymbol=" + qual.tpe.termSymbol + "\nsymbol-info = " + qual.tpe.termSymbol.info +
+ "\nscope-id = " + qual.tpe.termSymbol.info.decls.hashCode() + "\nmembers = " + qual.tpe.members +
+ "\nname = " + name + "\nfound = " + sym + "\nowner = " + context.enclClass.owner)
- def makeInteractiveErrorTree = {
- val tree1 = tree match {
- case Select(_, _) => treeCopy.Select(tree, qual, name)
- case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
+ def makeInteractiveErrorTree = {
+ val tree1 = tree match {
+ case Select(_, _) => treeCopy.Select(tree, qual, name)
+ case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
+ }
+ setError(tree1)
}
- setError(tree1)
- }
- if (name == nme.ERROR && forInteractive)
- return makeInteractiveErrorTree
+ if (name == nme.ERROR && forInteractive)
+ return makeInteractiveErrorTree
- if (!qual.tpe.widen.isErroneous) {
- if ((mode & QUALmode) != 0) {
- val lastTry = rootMirror.missingHook(qual.tpe.typeSymbol, name)
- if (lastTry != NoSymbol) return typed1(tree setSymbol lastTry, mode, pt)
+ if (!qual.tpe.widen.isErroneous) {
+ if ((mode & QUALmode) != 0) {
+ val lastTry = rootMirror.missingHook(qual.tpe.typeSymbol, name)
+ if (lastTry != NoSymbol) return typed1(tree setSymbol lastTry, mode, pt)
+ }
+ NotAMemberError(tree, qual, name)
}
- NotAMemberError(tree, qual, name)
- }
- if (forInteractive) makeInteractiveErrorTree else setError(tree)
+ if (forInteractive) makeInteractiveErrorTree else setError(tree)
+ }
+ handleMissing
} else {
val tree1 = tree match {
case Select(_, _) => treeCopy.Select(tree, qual, name)
@@ -4551,6 +4680,49 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
+ def typedSelectOrSuperCall(tree: Select) = {
+ val qual = tree.qualifier
+ val name = tree.name
+ qual match {
+ case _: Super if name == nme.CONSTRUCTOR =>
+ val qual1 =
+ typed(qual, EXPRmode | QUALmode | POLYmode | SUPERCONSTRmode, WildcardType)
+ // the qualifier type of a supercall constructor is its first parent class
+ typedSelect(tree, qual1, nme.CONSTRUCTOR)
+ case _ =>
+ if (Statistics.canEnable) Statistics.incCounter(typedSelectCount)
+ var qual1 = checkDead(typedQualifier(qual, mode))
+ if (name.isTypeName) qual1 = checkStable(qual1)
+
+ val tree1 = // temporarily use `filter` and an alternative for `withFilter`
+ if (name == nme.withFilter)
+ silent(_ => typedSelect(tree, qual1, name)) match {
+ case SilentResultValue(result) =>
+ result
+ case _ =>
+ silent(_ => typed1(Select(qual1, nme.filter) setPos tree.pos, mode, pt)) match {
+ case SilentResultValue(result2) =>
+ unit.deprecationWarning(
+ tree.pos, "`withFilter' method does not yet exist on " + qual1.tpe.widen +
+ ", using `filter' method instead")
+ result2
+ case SilentTypeError(err) =>
+ WithFilterError(tree, err)
+ }
+ }
+ else
+ typedSelect(tree, qual1, name)
+
+ if (tree.isInstanceOf[PostfixSelect])
+ checkFeature(tree.pos, PostfixOpsFeature, name.decode)
+ if (tree1.symbol != null && tree1.symbol.isOnlyRefinementMember)
+ checkFeature(tree1.pos, ReflectiveCallsFeature, tree1.symbol.toString)
+
+ if (qual1.hasSymbolWhich(_.isRootPackage)) treeCopy.Ident(tree1, name)
+ else tree1
+ }
+ }
+
/** Attribute an identifier consisting of a simple name or an outer reference.
*
* @param tree The tree representing the identifier.
@@ -4558,21 +4730,17 @@ trait Typers extends Modes with Adaptations with Tags {
* Transformations: (1) Prefix class members with this.
* (2) Change imported symbols to selections
*/
- def typedIdent(name: Name): Tree = {
+ def typedIdent(tree: Tree, name: Name): Tree = {
var errorContainer: AbsTypeError = null
- @inline
def ambiguousError(msg: String) = {
assert(errorContainer == null, "Cannot set ambiguous error twice for identifier")
errorContainer = AmbiguousIdentError(tree, name, msg)
}
- @inline
def identError(tree: AbsTypeError) = {
assert(errorContainer == null, "Cannot set ambiguous error twice for identifier")
errorContainer = tree
}
- val fingerPrint: Long = name.fingerPrint
-
var defSym: Symbol = tree.symbol // the directly found symbol
var pre: Type = NoPrefix // the prefix type of defSym, if a class member
var qual: Tree = EmptyTree // the qualifier tree if transformed tree is a select
@@ -4610,10 +4778,7 @@ trait Typers extends Modes with Adaptations with Tags {
var cx = startingIdentContext
while (defSym == NoSymbol && cx != NoContext && (cx.scope ne null)) { // cx.scope eq null arises during FixInvalidSyms in Duplicators
pre = cx.enclClass.prefix
- defEntry = {
- val scope = cx.scope
- if ((fingerPrint & scope.fingerPrints) != 0) scope.lookupEntry(name) else null
- }
+ defEntry = cx.scope.lookupEntry(name)
if ((defEntry ne null) && qualifies(defEntry.sym)) {
// Right here is where SI-1987, overloading in package objects, can be
// seen to go wrong. There is an overloaded symbol, but when referring
@@ -4627,8 +4792,13 @@ trait Typers extends Modes with Adaptations with Tags {
if (isInPackageObject(defEntry.sym, pre.typeSymbol)) {
defSym = pre.member(defEntry.sym.name)
if (defSym ne defEntry.sym) {
- log("!!! Overloaded package object member resolved incorrectly.\n Discarded: " +
- defEntry.sym.defString + "\n Using: " + defSym.defString)
+ qual = gen.mkAttributedQualifier(pre)
+ log(s"""
+ | !!! Overloaded package object member resolved incorrectly.
+ | prefix: $pre
+ | Discarded: ${defEntry.sym.defString}
+ | Using: ${defSym.defString}
+ """.stripMargin)
}
}
else
@@ -4801,7 +4971,18 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- def typedCompoundTypeTree(templ: Template) = {
+ def typedIdentOrWildcard(tree: Ident) = {
+ val name = tree.name
+ if (Statistics.canEnable) Statistics.incCounter(typedIdentCount)
+ if ((name == nme.WILDCARD && (mode & (PATTERNmode | FUNmode)) == PATTERNmode) ||
+ (name == tpnme.WILDCARD && (mode & TYPEmode) != 0))
+ tree setType makeFullyDefined(pt)
+ else
+ typedIdent(tree, name)
+ }
+
+ def typedCompoundTypeTree(tree: CompoundTypeTree) = {
+ val templ = tree.templ
val parents1 = templ.parents mapConserve (typedType(_, mode))
if (parents1 exists (_.isErrorTyped)) tree setType ErrorType
else {
@@ -4809,12 +4990,14 @@ trait Typers extends Modes with Adaptations with Tags {
//Console.println("Owner: " + context.enclClass.owner + " " + context.enclClass.owner.id)
val self = refinedType(parents1 map (_.tpe), context.enclClass.owner, decls, templ.pos)
newTyper(context.make(templ, self.typeSymbol, decls)).typedRefinement(templ)
- templ addAttachment CompoundTypeTreeOriginalAttachment(parents1, Nil) // stats are set elsewhere
+ templ updateAttachment CompoundTypeTreeOriginalAttachment(parents1, Nil) // stats are set elsewhere
tree setType self
}
}
- def typedAppliedTypeTree(tpt: Tree, args: List[Tree]) = {
+ def typedAppliedTypeTree(tree: AppliedTypeTree) = {
+ val tpt = tree.tpt
+ val args = tree.args
val tpt1 = typed1(tpt, mode | FUNmode | TAPPmode, WildcardType)
if (tpt1.isErrorTyped) {
tpt1
@@ -4865,370 +5048,311 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- // begin typed1
val sym: Symbol = tree.symbol
if ((sym ne null) && (sym ne NoSymbol)) sym.initialize
- //if (settings.debug.value && tree.isDef) log("typing definition of "+sym);//DEBUG
- tree match {
- case PackageDef(pid, stats) =>
- val pid1 = typedQualifier(pid).asInstanceOf[RefTree]
- assert(sym.moduleClass ne NoSymbol, sym)
- // complete lazy annotations
- val annots = sym.annotations
- val stats1 = newTyper(context.make(tree, sym.moduleClass, sym.info.decls))
- .typedStats(stats, NoSymbol)
- treeCopy.PackageDef(tree, pid1, stats1) setType NoType
-
- case tree @ ClassDef(_, _, _, _) =>
- newTyper(context.makeNewScope(tree, sym)).typedClassDef(tree)
-
- case tree @ ModuleDef(_, _, _) =>
- newTyper(context.makeNewScope(tree, sym.moduleClass)).typedModuleDef(tree)
-
- case vdef @ ValDef(_, _, _, _) =>
- typedValDef(vdef)
-
- case ddef @ DefDef(_, _, _, _, _, _) =>
- // flag default getters for constructors. An actual flag would be nice. See SI-5543.
- //val flag = ddef.mods.hasDefaultFlag && ddef.mods.hasFlag(PRESUPER)
- val flag = ddef.mods.hasDefaultFlag && sym.owner.isModuleClass &&
- nme.defaultGetterToMethod(sym.name) == nme.CONSTRUCTOR
- newTyper(context.makeNewScope(tree, sym)).constrTyperIf(flag).typedDefDef(ddef)
-
- case tdef @ TypeDef(_, _, _, _) =>
- typedTypeDef(tdef)
- case ldef @ LabelDef(_, _, _) =>
- labelTyper(ldef).typedLabelDef(ldef)
-
- case ddef @ DocDef(comment, defn) =>
- if (forScaladoc && (sym ne null) && (sym ne NoSymbol)) {
- docComments(sym) = comment
- comment.defineVariables(sym)
- val typer1 = newTyper(context.makeNewScope(tree, context.owner))
- for (useCase <- comment.useCases) {
- typer1.silent(_.typedUseCase(useCase)) match {
- case SilentTypeError(err) =>
- unit.warning(useCase.pos, err.errMsg)
- case _ =>
- }
- for (useCaseSym <- useCase.defined) {
- if (sym.name != useCaseSym.name)
- unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode)
- }
+ def typedPackageDef(pdef: PackageDef) = {
+ val pid1 = typedQualifier(pdef.pid).asInstanceOf[RefTree]
+ assert(sym.moduleClass ne NoSymbol, sym)
+ // complete lazy annotations
+ val annots = sym.annotations
+ val stats1 = newTyper(context.make(tree, sym.moduleClass, sym.info.decls))
+ .typedStats(pdef.stats, NoSymbol)
+ treeCopy.PackageDef(tree, pid1, stats1) setType NoType
+ }
+
+ def typedDocDef(docdef: DocDef) = {
+ val comment = docdef.comment
+ if (forScaladoc && (sym ne null) && (sym ne NoSymbol)) {
+ docComments(sym) = comment
+ comment.defineVariables(sym)
+ val typer1 = newTyper(context.makeNewScope(tree, context.owner))
+ for (useCase <- comment.useCases) {
+ typer1.silent(_.typedUseCase(useCase)) match {
+ case SilentTypeError(err) =>
+ unit.warning(useCase.pos, err.errMsg)
+ case _ =>
+ }
+ for (useCaseSym <- useCase.defined) {
+ if (sym.name != useCaseSym.name)
+ unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode)
}
}
- typed(defn, mode, pt)
+ }
+ typed(docdef.definition, mode, pt)
+ }
+
+ def defDefTyper(ddef: DefDef) = {
+ val flag = ddef.mods.hasDefaultFlag && sym.owner.isModuleClass &&
+ nme.defaultGetterToMethod(sym.name) == nme.CONSTRUCTOR
+ newTyper(context.makeNewScope(ddef, sym)).constrTyperIf(flag)
+ }
+
+ def typedAlternative(alt: Alternative) = {
+ val alts1 = alt.trees mapConserve (alt => typed(alt, mode | ALTmode, pt))
+ treeCopy.Alternative(tree, alts1) setType pt
+ }
+
+ def typedStar(tree: Star) = {
+ if ((mode & STARmode) == 0 && !isPastTyper)
+ StarPatternWithVarargParametersError(tree)
+ treeCopy.Star(tree, typed(tree.elem, mode, pt)) setType makeFullyDefined(pt)
+ }
- case Annotated(constr, arg) =>
- typedAnnotated(constr, typed(arg, mode, pt))
+ def typedUnApply(tree: UnApply) = {
+ val fun1 = typed(tree.fun)
+ val tpes = formalTypes(unapplyTypeList(tree.fun.symbol, fun1.tpe, tree.args.length), tree.args.length)
+ val args1 = map2(tree.args, tpes)(typedPattern)
+ treeCopy.UnApply(tree, fun1, args1) setType pt
+ }
- case tree @ Block(_, _) =>
- typerWithLocalContext(context.makeNewScope(tree, context.owner)){
- _.typedBlock(tree, mode, pt)
+ def typedTry(tree: Try) = {
+ var block1 = typed(tree.block, pt)
+ var catches1 = typedCases(tree.catches, ThrowableClass.tpe, pt)
+
+ for (cdef <- catches1 if cdef.guard.isEmpty) {
+ def warn(name: Name) = context.warning(cdef.pat.pos, s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning.")
+ def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol
+ cdef.pat match {
+ case Bind(name, i @ Ident(_)) if unbound(i) => warn(name)
+ case i @ Ident(name) if unbound(i) => warn(name)
+ case _ =>
}
+ }
- case Alternative(alts) =>
- val alts1 = alts mapConserve (alt => typed(alt, mode | ALTmode, pt))
- treeCopy.Alternative(tree, alts1) setType pt
+ val finalizer1 =
+ if (tree.finalizer.isEmpty) tree.finalizer
+ else typed(tree.finalizer, UnitClass.tpe)
+ val (owntype, needAdapt) = ptOrLub(block1.tpe :: (catches1 map (_.tpe)), pt)
+ if (needAdapt) {
+ block1 = adapt(block1, mode, owntype)
+ catches1 = catches1 map (adaptCase(_, mode, owntype))
+ }
- case Star(elem) =>
- if ((mode & STARmode) == 0 && !isPastTyper)
- StarPatternWithVarargParametersError(tree)
- treeCopy.Star(tree, typed(elem, mode, pt)) setType makeFullyDefined(pt)
+ treeCopy.Try(tree, block1, catches1, finalizer1) setType owntype
+ }
- case Bind(name, body) =>
- typedBind(name, body)
+ def typedThrow(tree: Throw) = {
+ val expr1 = typed(tree.expr, EXPRmode | BYVALmode, ThrowableClass.tpe)
+ treeCopy.Throw(tree, expr1) setType NothingClass.tpe
+ }
- case UnApply(fun, args) =>
- val fun1 = typed(fun)
- val tpes = formalTypes(unapplyTypeList(fun.symbol, fun1.tpe, args.length), args.length)
- val args1 = map2(args, tpes)(typedPattern)
- treeCopy.UnApply(tree, fun1, args1) setType pt
+ def typedTyped(tree: Typed) = {
+ val expr = tree.expr
+ val tpt = tree.tpt
+ tpt match {
+ case Function(List(), EmptyTree) =>
+ // find out whether the programmer is trying to eta-expand a macro def
+ // to do that we need to typecheck the tree first (we need a symbol of the eta-expandee)
+ // that typecheck must not trigger macro expansions, so we explicitly prohibit them
+ // Q: "but, " - you may ask - ", `typed1` doesn't call adapt, which does macro expansion, so why explicit check?"
+ // A: solely for robustness reasons. this mechanism might change in the future, which might break unprotected code
+ val exprTyped = context.withMacrosDisabled(typed1(expr, mode, pt))
+ exprTyped match {
+ case macroDef if macroDef.symbol != null && macroDef.symbol.isTermMacro && !macroDef.symbol.isErroneous =>
+ MacroEtaError(exprTyped)
+ case _ =>
+ typedEta(checkDead(exprTyped))
+ }
- case ArrayValue(elemtpt, elems) =>
- typedArrayValue(elemtpt, elems)
+ case Ident(tpnme.WILDCARD_STAR) =>
+ val exprTyped = typed(expr, onlyStickyModes(mode), WildcardType)
+ def subArrayType(pt: Type) =
+ if (isPrimitiveValueClass(pt.typeSymbol) || !isFullyDefined(pt)) arrayType(pt)
+ else {
+ val tparam = context.owner freshExistential "" setInfo TypeBounds.upper(pt)
+ newExistentialType(List(tparam), arrayType(tparam.tpe))
+ }
- case tree @ Function(_, _) =>
- if (tree.symbol == NoSymbol)
- tree.symbol = context.owner.newAnonymousFunctionValue(tree.pos)
- typerWithLocalContext(context.makeNewScope(tree, tree.symbol))(_.typedFunction(tree, mode, pt))
+ val (exprAdapted, baseClass) = exprTyped.tpe.typeSymbol match {
+ case ArrayClass => (adapt(exprTyped, onlyStickyModes(mode), subArrayType(pt)), ArrayClass)
+ case _ => (adapt(exprTyped, onlyStickyModes(mode), seqType(pt)), SeqClass)
+ }
+ exprAdapted.tpe.baseType(baseClass) match {
+ case TypeRef(_, _, List(elemtp)) =>
+ treeCopy.Typed(tree, exprAdapted, tpt setType elemtp) setType elemtp
+ case _ =>
+ setError(tree)
+ }
- case Assign(lhs, rhs) =>
- typedAssign(lhs, rhs)
+ case _ =>
+ val tptTyped = typedType(tpt, mode)
+ val exprTyped = typed(expr, onlyStickyModes(mode), tptTyped.tpe.deconst)
+ val treeTyped = treeCopy.Typed(tree, exprTyped, tptTyped)
+
+ if (isPatternMode) {
+ val uncheckedTypeExtractor = extractorForUncheckedType(tpt.pos, tptTyped.tpe)
- case AssignOrNamedArg(lhs, rhs) => // called by NamesDefaults in silent typecheck
- typedAssign(lhs, rhs)
+ // make fully defined to avoid bounded wildcard types that may be in pt from calling dropExistential (SI-2038)
+ val ptDefined = if (isFullyDefined(pt)) pt else makeFullyDefined(pt)
+ val ownType = inferTypedPattern(tptTyped, tptTyped.tpe, ptDefined, canRemedy = uncheckedTypeExtractor.nonEmpty)
+ treeTyped setType ownType
- case If(cond, thenp, elsep) =>
- typedIf(cond, thenp, elsep)
+ uncheckedTypeExtractor match {
+ case None => treeTyped
+ case Some(extractor) => wrapClassTagUnapply(treeTyped, extractor, tptTyped.tpe)
+ }
+ } else
+ treeTyped setType tptTyped.tpe
+ }
+ }
- case tree @ Match(selector, cases) =>
- typedVirtualizedMatch(tree, selector, cases)
+ def typedTypeApply(tree: TypeApply) = {
+ val fun = tree.fun
+ val args = tree.args
+ // @M: kind-arity checking is done here and in adapt, full kind-checking is in checkKindBounds (in Infer)
+ //@M! we must type fun in order to type the args, as that requires the kinds of fun's type parameters.
+ // However, args should apparently be done first, to save context.undetparams. Unfortunately, the args
+ // *really* have to be typed *after* fun. We escape from this classic Catch-22 by simply saving&restoring undetparams.
- case Return(expr) =>
- typedReturn(expr)
+ // @M TODO: the compiler still bootstraps&all tests pass when this is commented out..
+ //val undets = context.undetparams
- case Try(block, catches, finalizer) =>
- var block1 = typed(block, pt)
- var catches1 = typedCases(catches, ThrowableClass.tpe, pt)
+ // @M: fun is typed in TAPPmode because it is being applied to its actual type parameters
+ val fun1 = typed(fun, forFunMode(mode) | TAPPmode, WildcardType)
+ val tparams = fun1.symbol.typeParams
- for (cdef <- catches1 if cdef.guard.isEmpty) {
- def warn(name: Name) = context.warning(cdef.pat.pos, s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning.")
- def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol
- cdef.pat match {
- case Bind(name, i@Ident(_)) if unbound(i) => warn(name)
- case i@Ident(name) if unbound(i) => warn(name)
- case _ =>
- }
- }
+ //@M TODO: val undets_fun = context.undetparams ?
+ // "do args first" (by restoring the context.undetparams) in order to maintain context.undetparams on the function side.
- val finalizer1 = if (finalizer.isEmpty) finalizer
- else typed(finalizer, UnitClass.tpe)
- val (owntype, needAdapt) = ptOrLub(block1.tpe :: (catches1 map (_.tpe)), pt)
- if (needAdapt) {
- block1 = adapt(block1, mode, owntype)
- catches1 = catches1 map (adaptCase(_, mode, owntype))
- }
+ // @M TODO: the compiler still bootstraps when this is commented out.. TODO: run tests
+ //context.undetparams = undets
- treeCopy.Try(tree, block1, catches1, finalizer1) setType owntype
-
- case Throw(expr) =>
- val expr1 = typed(expr, EXPRmode | BYVALmode, ThrowableClass.tpe)
- treeCopy.Throw(tree, expr1) setType NothingClass.tpe
-
- case New(tpt: Tree) =>
- typedNew(tpt)
-
- case Typed(expr, Function(List(), EmptyTree)) =>
- // find out whether the programmer is trying to eta-expand a macro def
- // to do that we need to typecheck the tree first (we need a symbol of the eta-expandee)
- // that typecheck must not trigger macro expansions, so we explicitly prohibit them
- // Q: "but, " - you may ask - ", `typed1` doesn't call adapt, which does macro expansion, so why explicit check?"
- // A: solely for robustness reasons. this mechanism might change in the future, which might break unprotected code
- val expr1 = context.withMacrosDisabled(typed1(expr, mode, pt))
- expr1 match {
- case macroDef if macroDef.symbol != null && macroDef.symbol.isTermMacro && !macroDef.symbol.isErroneous =>
- MacroEtaError(expr1)
- case _ =>
- typedEta(checkDead(expr1))
- }
+ // @M maybe the well-kindedness check should be done when checking the type arguments conform to the type parameters' bounds?
+ val args1 = if (sameLength(args, tparams)) map2Conserve(args, tparams) {
+ //@M! the polytype denotes the expected kind
+ (arg, tparam) => typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyClass.tpe))
+ }
+ else {
+ //@M this branch is correctly hit for an overloaded polymorphic type. It also has to handle erroneous cases.
+ // Until the right alternative for an overloaded method is known, be very liberal,
+ // typedTypeApply will find the right alternative and then do the same check as
+ // in the then-branch above. (see pos/tcpoly_overloaded.scala)
+ // this assert is too strict: be tolerant for errors like trait A { def foo[m[x], g]=error(""); def x[g] = foo[g/*ERR: missing argument type*/] }
+ //assert(fun1.symbol.info.isInstanceOf[OverloadedType] || fun1.symbol.isError) //, (fun1.symbol,fun1.symbol.info,fun1.symbol.info.getClass,args,tparams))
+ args mapConserve (typedHigherKindedType(_, mode))
+ }
- case Typed(expr0, tpt @ Ident(tpnme.WILDCARD_STAR)) =>
- val expr = typed(expr0, onlyStickyModes(mode), WildcardType)
- def subArrayType(pt: Type) =
- if (isPrimitiveValueClass(pt.typeSymbol) || !isFullyDefined(pt)) arrayType(pt)
- else {
- val tparam = context.owner freshExistential "" setInfo TypeBounds.upper(pt)
- newExistentialType(List(tparam), arrayType(tparam.tpe))
- }
+ //@M TODO: context.undetparams = undets_fun ?
+ Typer.this.typedTypeApply(tree, mode, fun1, args1)
+ }
- val (expr1, baseClass) = expr.tpe.typeSymbol match {
- case ArrayClass => (adapt(expr, onlyStickyModes(mode), subArrayType(pt)), ArrayClass)
- case _ => (adapt(expr, onlyStickyModes(mode), seqType(pt)), SeqClass)
- }
- expr1.tpe.baseType(baseClass) match {
- case TypeRef(_, _, List(elemtp)) =>
- treeCopy.Typed(tree, expr1, tpt setType elemtp) setType elemtp
- case _ =>
- setError(tree)
- }
+ def typedApplyDynamic(tree: ApplyDynamic) = {
+ assert(phase.erasedTypes)
+ val reflectiveCalls = !(settings.refinementMethodDispatch.value == "invoke-dynamic")
+ val qual1 = typed(tree.qual, AnyRefClass.tpe)
+ val args1 = tree.args mapConserve (arg => if (reflectiveCalls) typed(arg, AnyRefClass.tpe) else typed(arg))
+ treeCopy.ApplyDynamic(tree, qual1, args1) setType (if (reflectiveCalls) AnyRefClass.tpe else tree.symbol.info.resultType)
+ }
- case Typed(expr, tpt) =>
- val tptTyped = typedType(tpt, mode)
- val exprTyped = typed(expr, onlyStickyModes(mode), tptTyped.tpe.deconst)
- val treeTyped = treeCopy.Typed(tree, exprTyped, tptTyped)
-
- if (isPatternMode) {
- val uncheckedTypeExtractor = extractorForUncheckedType(tpt.pos, tptTyped.tpe)
- val ownType = inferTypedPattern(tptTyped, tptTyped.tpe, pt, canRemedy = uncheckedTypeExtractor.nonEmpty)
- // println(s"Typed($expr, ${tpt.tpe}) : $pt --> $ownType (${isFullyDefined(ownType)}, ${makeFullyDefined(ownType)})")
- // make fully defined to avoid bounded wildcard types that may be in pt from calling dropExistential (SI-2038)
- treeTyped setType (if (isFullyDefined(ownType)) ownType else makeFullyDefined(ownType)) //ownType
-
- uncheckedTypeExtractor match {
- case None => treeTyped
- case Some(extractor) => wrapClassTagUnapply(treeTyped, extractor, tptTyped.tpe)
- }
- } else
- treeTyped setType tptTyped.tpe
-
- case TypeApply(fun, args) =>
- // @M: kind-arity checking is done here and in adapt, full kind-checking is in checkKindBounds (in Infer)
- //@M! we must type fun in order to type the args, as that requires the kinds of fun's type parameters.
- // However, args should apparently be done first, to save context.undetparams. Unfortunately, the args
- // *really* have to be typed *after* fun. We escape from this classic Catch-22 by simply saving&restoring undetparams.
-
- // @M TODO: the compiler still bootstraps&all tests pass when this is commented out..
- //val undets = context.undetparams
-
- // @M: fun is typed in TAPPmode because it is being applied to its actual type parameters
- val fun1 = typed(fun, forFunMode(mode) | TAPPmode, WildcardType)
- val tparams = fun1.symbol.typeParams
-
- //@M TODO: val undets_fun = context.undetparams ?
- // "do args first" (by restoring the context.undetparams) in order to maintain context.undetparams on the function side.
-
- // @M TODO: the compiler still bootstraps when this is commented out.. TODO: run tests
- //context.undetparams = undets
-
- // @M maybe the well-kindedness check should be done when checking the type arguments conform to the type parameters' bounds?
- val args1 = if (sameLength(args, tparams)) map2Conserve(args, tparams) {
- //@M! the polytype denotes the expected kind
- (arg, tparam) => typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyClass.tpe))
- } else {
- //@M this branch is correctly hit for an overloaded polymorphic type. It also has to handle erroneous cases.
- // Until the right alternative for an overloaded method is known, be very liberal,
- // typedTypeApply will find the right alternative and then do the same check as
- // in the then-branch above. (see pos/tcpoly_overloaded.scala)
- // this assert is too strict: be tolerant for errors like trait A { def foo[m[x], g]=error(""); def x[g] = foo[g/*ERR: missing argument type*/] }
- //assert(fun1.symbol.info.isInstanceOf[OverloadedType] || fun1.symbol.isError) //, (fun1.symbol,fun1.symbol.info,fun1.symbol.info.getClass,args,tparams))
- args mapConserve (typedHigherKindedType(_, mode))
- }
+ def typedReferenceToBoxed(tree: ReferenceToBoxed) = {
+ val id = tree.ident
+ val id1 = typed1(id, mode, pt) match { case id: Ident => id }
+ // [Eugene] am I doing it right?
+ val erasedTypes = phaseId(currentPeriod) >= currentRun.erasurePhase.id
+ val tpe = capturedVariableType(id.symbol, erasedTypes = erasedTypes)
+ treeCopy.ReferenceToBoxed(tree, id1) setType tpe
+ }
- //@M TODO: context.undetparams = undets_fun ?
- typedTypeApply(tree, mode, fun1, args1)
+ def typedLiteral(tree: Literal) = {
+ val value = tree.value
+ tree setType (
+ if (value.tag == UnitTag) UnitClass.tpe
+ else ConstantType(value))
+ }
- case Apply(Block(stats, expr), args) =>
- typed1(atPos(tree.pos)(Block(stats, Apply(expr, args) setPos tree.pos.makeTransparent)), mode, pt)
+ def typedSingletonTypeTree(tree: SingletonTypeTree) = {
+ val ref1 = checkStable(
+ context.withImplicitsDisabled(
+ typed(tree.ref, EXPRmode | QUALmode | (mode & TYPEPATmode), AnyRefClass.tpe)
+ )
+ )
+ tree setType ref1.tpe.resultType
+ }
- case Apply(fun, args) =>
- typedApply(fun, args) match {
- case Apply(Select(New(tpt), name), args)
- if (tpt.tpe != null &&
- tpt.tpe.typeSymbol == ArrayClass &&
- args.length == 1 &&
- erasure.GenericArray.unapply(tpt.tpe).isDefined) => // !!! todo simplify by using extractor
- // convert new Array[T](len) to evidence[ClassTag[T]].newArray(len)
- // convert new Array^N[T](len) for N > 1 to evidence[ClassTag[Array[...Array[T]...]]].newArray(len), where Array HK gets applied (N-1) times
- // [Eugene] no more MaxArrayDims. ClassTags are flexible enough to allow creation of arrays of arbitrary dimensionality (w.r.t JVM restrictions)
- val Some((level, componentType)) = erasure.GenericArray.unapply(tpt.tpe)
- val tagType = List.iterate(componentType, level)(tpe => appliedType(ArrayClass.toTypeConstructor, List(tpe))).last
- val newArrayApp = atPos(tree.pos) {
- val tag = resolveClassTag(tree.pos, tagType)
- if (tag.isEmpty) MissingClassTagError(tree, tagType)
- else new ApplyToImplicitArgs(Select(tag, nme.newArray), args)
- }
- typed(newArrayApp, mode, pt)
- case Apply(Select(fun, nme.apply), _) if treeInfo.isSuperConstrCall(fun) => //SI-5696
- TooManyArgumentListsForConstructor(tree)
- case tree1 =>
- tree1
- }
+ def typedSelectFromTypeTree(tree: SelectFromTypeTree) = {
+ val qual1 = typedType(tree.qualifier, mode)
+ if (qual1.tpe.isVolatile) TypeSelectionFromVolatileTypeError(tree, qual1)
+ else typedSelect(tree, qual1, tree.name)
+ }
- case ApplyDynamic(qual, args) =>
- assert(phase.erasedTypes)
- val reflectiveCalls = !(settings.refinementMethodDispatch.value == "invoke-dynamic")
- val qual1 = typed(qual, AnyRefClass.tpe)
- val args1 = args mapConserve (arg => if (reflectiveCalls) typed(arg, AnyRefClass.tpe) else typed(arg))
- treeCopy.ApplyDynamic(tree, qual1, args1) setType (if (reflectiveCalls) AnyRefClass.tpe else tree.symbol.info.resultType)
-
- case Super(qual, mix) =>
- typedSuper(qual, mix)
-
- case This(qual) =>
- typedThis(qual)
-
- case Select(qual @ Super(_, _), nme.CONSTRUCTOR) =>
- val qual1 =
- typed(qual, EXPRmode | QUALmode | POLYmode | SUPERCONSTRmode, WildcardType)
- // the qualifier type of a supercall constructor is its first parent class
- typedSelect(qual1, nme.CONSTRUCTOR)
-
- case Select(qual, name) =>
- Statistics.incCounter(typedSelectCount)
- var qual1 = checkDead(typedQualifier(qual, mode))
- if (name.isTypeName) qual1 = checkStable(qual1)
-
- val tree1 = // temporarily use `filter` and an alternative for `withFilter`
- if (name == nme.withFilter)
- silent(_ => typedSelect(qual1, name)) match {
- case SilentResultValue(result) =>
- result
- case _ =>
- silent(_ => typed1(Select(qual1, nme.filter) setPos tree.pos, mode, pt)) match {
- case SilentResultValue(result2) =>
- unit.deprecationWarning(
- tree.pos, "`withFilter' method does not yet exist on "+qual1.tpe.widen+
- ", using `filter' method instead")
- result2
- case SilentTypeError(err) =>
- WithFilterError(tree, err)
- }
- }
- else
- typedSelect(qual1, name)
+ def typedTypeBoundsTree(tree: TypeBoundsTree) = {
+ val lo1 = typedType(tree.lo, mode)
+ val hi1 = typedType(tree.hi, mode)
+ treeCopy.TypeBoundsTree(tree, lo1, hi1) setType TypeBounds(lo1.tpe, hi1.tpe)
+ }
- if (tree.isInstanceOf[PostfixSelect])
- checkFeature(tree.pos, PostfixOpsFeature, name.decode)
- if (tree1.symbol != null && tree1.symbol.isOnlyRefinementMember)
- checkFeature(tree1.pos, ReflectiveCallsFeature, tree1.symbol.toString)
+ def typedExistentialTypeTree(tree: ExistentialTypeTree) = {
+ val tree1 = typerWithLocalContext(context.makeNewScope(tree, context.owner)){
+ _.typedExistentialTypeTree(tree, mode)
+ }
+ checkExistentialsFeature(tree1.pos, tree1.tpe, "the existential type")
+ tree1
+ }
- if (qual1.hasSymbolWhich(_.isRootPackage)) treeCopy.Ident(tree1, name)
- else tree1
+ def typedTypeTree(tree: TypeTree) = {
+ if (tree.original != null)
+ tree setType typedType(tree.original, mode).tpe
+ else
+ // we should get here only when something before failed
+ // and we try again (@see tryTypedApply). In that case we can assign
+ // whatever type to tree; we just have to survive until a real error message is issued.
+ tree setType AnyClass.tpe
+ }
+ def typedFunction(fun: Function) = {
+ if (fun.symbol == NoSymbol)
+ fun.symbol = context.owner.newAnonymousFunctionValue(fun.pos)
- case Ident(name) =>
- Statistics.incCounter(typedIdentCount)
- if ((name == nme.WILDCARD && (mode & (PATTERNmode | FUNmode)) == PATTERNmode) ||
- (name == tpnme.WILDCARD && (mode & TYPEmode) != 0))
- tree setType makeFullyDefined(pt)
- else
- typedIdent(name)
-
- case ReferenceToBoxed(idt @ Ident(_)) =>
- val id1 = typed1(idt, mode, pt) match { case id: Ident => id }
- // [Eugene] am I doing it right?
- val erasedTypes = phaseId(currentPeriod) >= currentRun.erasurePhase.id
- val tpe = capturedVariableType(idt.symbol, erasedTypes = erasedTypes)
- treeCopy.ReferenceToBoxed(tree, id1) setType tpe
-
- case Literal(value) =>
- tree setType (
- if (value.tag == UnitTag) UnitClass.tpe
- else ConstantType(value))
-
- case SingletonTypeTree(ref) =>
- val ref1 = checkStable(
- typed(ref, EXPRmode | QUALmode | (mode & TYPEPATmode), AnyRefClass.tpe))
- tree setType ref1.tpe.resultType
-
- case SelectFromTypeTree(qual, selector) =>
- val qual1 = typedType(qual, mode)
- if (qual1.tpe.isVolatile) TypeSelectionFromVolatileTypeError(tree, qual1)
- else typedSelect(qual1, selector)
-
- case CompoundTypeTree(templ) =>
- typedCompoundTypeTree(templ)
-
- case AppliedTypeTree(tpt, args) =>
- typedAppliedTypeTree(tpt, args)
-
- case TypeBoundsTree(lo, hi) =>
- val lo1 = typedType(lo, mode)
- val hi1 = typedType(hi, mode)
- treeCopy.TypeBoundsTree(tree, lo1, hi1) setType TypeBounds(lo1.tpe, hi1.tpe)
-
- case etpt @ ExistentialTypeTree(_, _) =>
- val tree1 = typerWithLocalContext(context.makeNewScope(tree, context.owner)){
- _.typedExistentialTypeTree(etpt, mode)
- }
- checkExistentialsFeature(tree1.pos, tree1.tpe, "the existential type")
- tree1
+ typerWithLocalContext(context.makeNewScope(fun, fun.symbol))(_.typedFunction(fun, mode, pt))
+ }
- case dc@TypeTreeWithDeferredRefCheck() => dc // TODO: should we re-type the wrapped tree? then we need to change TypeTreeWithDeferredRefCheck's representation to include the wrapped tree explicitly (instead of in its closure)
- case tpt @ TypeTree() =>
- if (tpt.original != null)
- tree setType typedType(tpt.original, mode).tpe
- else
- // we should get here only when something before failed
- // and we try again (@see tryTypedApply). In that case we can assign
- // whatever type to tree; we just have to survive until a real error message is issued.
- tree setType AnyClass.tpe
- case Import(expr, selectors) =>
- assert(forInteractive, "!forInteractive") // should not happen in normal circumstances.
- tree setType tree.symbol.tpe
- case _ =>
- abort("unexpected tree: " + tree.getClass + "\n" + tree)//debug
+ // begin typed1
+ //if (settings.debug.value && tree.isDef) log("typing definition of "+sym);//DEBUG
+ tree match {
+ case tree: Ident => typedIdentOrWildcard(tree)
+ case tree: Select => typedSelectOrSuperCall(tree)
+ case tree: Apply => typedApply(tree)
+ case tree: TypeTree => typedTypeTree(tree)
+ case tree: Literal => typedLiteral(tree)
+ case tree: This => typedThis(tree)
+ case tree: ValDef => typedValDef(tree)
+ case tree: DefDef => defDefTyper(tree).typedDefDef(tree)
+ case tree: Block => typerWithLocalContext(context.makeNewScope(tree, context.owner))(_.typedBlock(tree, mode, pt))
+ case tree: If => typedIf(tree)
+ case tree: TypeApply => typedTypeApply(tree)
+ case tree: AppliedTypeTree => typedAppliedTypeTree(tree)
+ case tree: Bind => typedBind(tree)
+ case tree: Function => typedFunction(tree)
+ case tree: Match => typedVirtualizedMatch(tree)
+ case tree: New => typedNew(tree)
+ case tree: Assign => typedAssign(tree.lhs, tree.rhs)
+ case tree: AssignOrNamedArg => typedAssign(tree.lhs, tree.rhs) // called by NamesDefaults in silent typecheck
+ case tree: Super => typedSuper(tree)
+ case tree: TypeBoundsTree => typedTypeBoundsTree(tree)
+ case tree: Typed => typedTyped(tree)
+ case tree: ClassDef => newTyper(context.makeNewScope(tree, sym)).typedClassDef(tree)
+ case tree: ModuleDef => newTyper(context.makeNewScope(tree, sym.moduleClass)).typedModuleDef(tree)
+ case tree: TypeDef => typedTypeDef(tree)
+ case tree: LabelDef => labelTyper(tree).typedLabelDef(tree)
+ case tree: PackageDef => typedPackageDef(tree)
+ case tree: DocDef => typedDocDef(tree)
+ case tree: Annotated => typedAnnotated(tree)
+ case tree: SingletonTypeTree => typedSingletonTypeTree(tree)
+ case tree: SelectFromTypeTree => typedSelectFromTypeTree(tree)
+ case tree: CompoundTypeTree => typedCompoundTypeTree(tree)
+ case tree: ExistentialTypeTree => typedExistentialTypeTree(tree)
+ case tree: Return => typedReturn(tree)
+ case tree: Try => typedTry(tree)
+ case tree: Throw => typedThrow(tree)
+ case tree: Alternative => typedAlternative(tree)
+ case tree: Star => typedStar(tree)
+ case tree: UnApply => typedUnApply(tree)
+ case tree: ArrayValue => typedArrayValue(tree)
+ case tree: ApplyDynamic => typedApplyDynamic(tree)
+ case tree: ReferenceToBoxed => typedReferenceToBoxed(tree)
+ case tree: TypeTreeWithDeferredRefCheck => tree // TODO: retype the wrapped tree? TTWDRC would have to change to hold the wrapped tree (not a closure)
+ case tree: Import => assert(forInteractive, "!forInteractive") ; tree setType tree.symbol.tpe // should not happen in normal circumstances.
+ case _ => abort(s"unexpected tree: ${tree.getClass}\n$tree")
}
}
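// --- Sketch (not part of the patch): the dispatch style the hunk above moves to.
// The old code matched `case Apply(fun, args) => typedApply(fun, args)`; the new
// code matches on the node class and hands the whole tree to a helper that
// extracts its own fields. Hypothetical node classes stand in for compiler trees.
object DispatchSketch extends App {
  sealed trait Node
  final case class ApplyNode(fun: Node, args: List[Node]) extends Node
  final case class IdentNode(name: String) extends Node

  def typedApplyNode(t: ApplyNode): String = s"apply with ${t.args.size} arg(s)"
  def typedIdentNode(t: IdentNode): String = s"ident ${t.name}"

  def typedNode(tree: Node): String = tree match {
    case t: ApplyNode => typedApplyNode(t)   // helper reads t.fun / t.args itself
    case t: IdentNode => typedIdentNode(t)
  }

  println(typedNode(ApplyNode(IdentNode("f"), List(IdentNode("x")))))  // apply with 1 arg(s)
}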
@@ -5243,8 +5367,8 @@ trait Typers extends Modes with Adaptations with Tags {
indentTyping()
var alreadyTyped = false
- val startByType = Statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass))
- Statistics.incCounter(visitsByType, tree.getClass)
+ val startByType = if (Statistics.canEnable) Statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null
+ if (Statistics.canEnable) Statistics.incCounter(visitsByType, tree.getClass)
try {
if (context.retyping &&
(tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< pt))) {
@@ -5299,7 +5423,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
finally {
deindentTyping()
- Statistics.popTimer(byTypeStack, startByType)
+ if (Statistics.canEnable) Statistics.popTimer(byTypeStack, startByType)
}
}
@@ -5371,7 +5495,7 @@ trait Typers extends Modes with Adaptations with Tags {
// as a compromise, context.enrichmentEnabled tells adaptToMember to go ahead and enrich,
// but arbitrary conversions (in adapt) are disabled
// TODO: can we achieve the pattern matching bit of the string interpolation SIP without this?
- context.withImplicitsDisabledAllowEnrichment(typed(tree, PATTERNmode, pt))
+ typingInPattern(context.withImplicitsDisabledAllowEnrichment(typed(tree, PATTERNmode, pt)))
}
/** Types a (fully parameterized) type tree */
@@ -5453,7 +5577,7 @@ trait Typers extends Modes with Adaptations with Tags {
val isMacroBodyOkay = !tree.symbol.isErroneous && !(tree1 exists (_.isErroneous))
val shouldInheritMacroImplReturnType = ddef.tpt.isEmpty
- if (isMacroBodyOkay && shouldInheritMacroImplReturnType) computeMacroDefTypeFromMacroImpl(ddef, tree.symbol, tree1.symbol) else AnyClass.tpe
+ if (isMacroBodyOkay && shouldInheritMacroImplReturnType) computeMacroDefTypeFromMacroImpl(ddef, tree1.symbol) else AnyClass.tpe
}
def transformedOr(tree: Tree, op: => Tree): Tree = transformed.get(tree) match {
@@ -5476,8 +5600,8 @@ trait Typers extends Modes with Adaptations with Tags {
}
object TypersStats {
- import reflect.internal.TypesStats._
- import reflect.internal.BaseTypeSeqsStats._
+ import scala.reflect.internal.TypesStats._
+ import scala.reflect.internal.BaseTypeSeqsStats._
val typedIdentCount = Statistics.newCounter("#typechecked identifiers")
val typedSelectCount = Statistics.newCounter("#typechecked selections")
val typedApplyCount = Statistics.newCounter("#typechecked applications")
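// --- Sketch (not part of the patch): the `if (Statistics.canEnable)` idiom used
// throughout the hunks above. Every timer/counter call is guarded by one static
// test so that disabled builds allocate and measure nothing. `Stats` below is a
// stand-in, not the compiler's Statistics object.
object StatsGuardSketch extends App {
  object Stats {
    final val canEnable = false                      // flip to true to measure
    def startTimer(): Long = System.nanoTime()
    def stopTimer(start: Long): Unit = println(s"elapsed ${System.nanoTime() - start} ns")
  }

  def typedSomething(): Int = {
    val start  = if (Stats.canEnable) Stats.startTimer() else 0L   // null start in the real code
    val result = (1 to 1000).sum                     // the work being timed
    if (Stats.canEnable) Stats.stopTimer(start)
    result
  }

  println(typedSomething())                          // 500500
}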
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 4871ef199c..5db1863f67 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -115,7 +115,7 @@ trait Unapplies extends ast.TreeDSL
Modifiers(OVERRIDE | FINAL),
nme.toString_,
Nil,
- List(Nil),
+ ListOfNil,
TypeTree(),
Literal(Constant(cdef.name.decode)))
@@ -126,7 +126,7 @@ trait Unapplies extends ast.TreeDSL
ModuleDef(
Modifiers(cdef.mods.flags & AccessFlags | SYNTHETIC, cdef.mods.privateWithin),
cdef.name.toTermName,
- Template(parents, emptyValDef, NoMods, Nil, List(Nil), body, cdef.impl.pos.focus))
+ Template(parents, emptyValDef, NoMods, Nil, ListOfNil, body, cdef.impl.pos.focus))
}
private val caseMods = Modifiers(SYNTHETIC | CASE)
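// --- Sketch (not part of the patch): assumed shape of the shared constant that
// replaces the per-call `List(Nil)` above; the real definition lives elsewhere
// in the compiler and may differ in spelling.
object ListOfNilSketch {
  val ListOfNil: List[List[Nothing]] = List(Nil)     // one allocation, reused everywhere
}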
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index 1c7a723f7f..65aba2b721 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -160,9 +160,9 @@ object ClassPath {
override def isValidName(name: String) = !isTraitImplementation(name)
}
- @inline private def endsClass(s: String) = s.length > 6 && s.substring(s.length - 6) == ".class"
- @inline private def endsScala(s: String) = s.length > 6 && s.substring(s.length - 6) == ".scala"
- @inline private def endsJava(s: String) = s.length > 5 && s.substring(s.length - 5) == ".java"
+ private def endsClass(s: String) = s.length > 6 && s.substring(s.length - 6) == ".class"
+ private def endsScala(s: String) = s.length > 6 && s.substring(s.length - 6) == ".scala"
+ private def endsJava(s: String) = s.length > 5 && s.substring(s.length - 5) == ".java"
/** From the source file to its identifier.
*/
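// --- Sketch (not part of the patch): the suffix helpers above, now plain methods
// after dropping @inline, compute the same predicate as `endsWith`; shown only to
// spell out what the substring comparison does.
object SuffixSketch extends App {
  def endsClass(s: String): Boolean = s.length > 6 && s.endsWith(".class")
  println(endsClass("Foo.class"))   // true
  println(endsClass(".class"))      // false: the name must be longer than the suffix
}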
diff --git a/src/compiler/scala/tools/nsc/util/Exceptional.scala b/src/compiler/scala/tools/nsc/util/Exceptional.scala
index 90d032518a..34344263e8 100644
--- a/src/compiler/scala/tools/nsc/util/Exceptional.scala
+++ b/src/compiler/scala/tools/nsc/util/Exceptional.scala
@@ -4,7 +4,7 @@ package util
import java.util.concurrent.ExecutionException
import java.lang.reflect.{ InvocationTargetException, UndeclaredThrowableException }
import scala.reflect.internal.util.StringOps._
-import language.implicitConversions
+import scala.language.implicitConversions
object Exceptional {
def unwrap(x: Throwable): Throwable = x match {
diff --git a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
index 25caae9ecb..9de3a2427f 100644
--- a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
+++ b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
@@ -14,7 +14,7 @@ import java.net.URL
import scala.reflect.runtime.ReflectionUtils.unwrapHandler
import ScalaClassLoader._
import scala.util.control.Exception.{ catching }
-import language.implicitConversions
+import scala.language.implicitConversions
import scala.reflect.{ ClassTag, classTag }
trait HasClassPath {
diff --git a/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala b/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala
index 8cdb96c586..3682b9fb54 100644
--- a/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala
+++ b/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala
@@ -6,13 +6,13 @@
package scala.tools.nsc
package util
-import reflect.internal.util.Statistics
+import scala.reflect.internal.util.Statistics
abstract class StatisticsInfo {
val global: Global
import global._
- import reflect.internal.TreesStats.nodeByType
+ import scala.reflect.internal.TreesStats.nodeByType
val phasesShown = List("parser", "typer", "patmat", "erasure", "cleanup")
@@ -35,4 +35,4 @@ abstract class StatisticsInfo {
for (q <- quants if q.showAt(phase.name)) inform(q.line)
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala
index 876fb18578..780e3eab88 100644
--- a/src/compiler/scala/tools/nsc/util/package.scala
+++ b/src/compiler/scala/tools/nsc/util/package.scala
@@ -9,7 +9,7 @@ import java.io.{ OutputStream, PrintStream, ByteArrayOutputStream, PrintWriter,
package object util {
- implicit def postfixOps = language.postfixOps // make all postfix ops in this package compile without warning
+ implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
// forwarder for old code that builds against 2.9 and 2.10
val Chars = scala.reflect.internal.Chars
diff --git a/src/compiler/scala/tools/reflect/FastTrack.scala b/src/compiler/scala/tools/reflect/FastTrack.scala
index f84877cccb..d35ac43424 100644
--- a/src/compiler/scala/tools/reflect/FastTrack.scala
+++ b/src/compiler/scala/tools/reflect/FastTrack.scala
@@ -13,34 +13,29 @@ trait FastTrack {
import global._
import definitions._
- import language.implicitConversions
+ import scala.language.implicitConversions
private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } = new { val c: c0.type = c0 } with Taggers
private implicit def context2macroimplementations(c0: MacroContext): MacroImplementations { val c: c0.type } = new { val c: c0.type = c0 } with MacroImplementations
- implicit def fastTrackEntry2MacroRuntime(entry: FastTrackEntry): MacroRuntime = args => entry.run(args)
+ implicit def fastTrackEntry2MacroRuntime(entry: FastTrackEntry): MacroRuntime = args => entry.run(args.c)
type FastTrackExpander = PartialFunction[(MacroContext, Tree), Tree]
case class FastTrackEntry(sym: Symbol, expander: FastTrackExpander) {
- def validate(argss: List[List[Any]]): Boolean = {
- val c = argss.flatten.apply(0).asInstanceOf[MacroContext]
- val isValid = expander isDefinedAt (c, c.expandee)
- isValid
- }
- def run(args: List[Any]): Any = {
- val c = args(0).asInstanceOf[MacroContext]
+ def validate(c: MacroContext): Boolean = expander.isDefinedAt((c, c.expandee))
+ def run(c: MacroContext): Any = {
val result = expander((c, c.expandee))
- c.Expr[Nothing](result)(c.AbsTypeTag.Nothing)
+ c.Expr[Nothing](result)(c.WeakTypeTag.Nothing)
}
}
lazy val fastTrack: Map[Symbol, FastTrackEntry] = {
var registry = Map[Symbol, FastTrackEntry]()
implicit class BindTo(sym: Symbol) { def bindTo(expander: FastTrackExpander): Unit = if (sym != NoSymbol) registry += sym -> FastTrackEntry(sym, expander) }
- MacroInternal_materializeClassTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeClassTag(u, tt.tpe) }
- MacroInternal_materializeAbsTypeTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeTypeTag(u, EmptyTree, tt.tpe, concrete = false) }
- MacroInternal_materializeTypeTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeTypeTag(u, EmptyTree, tt.tpe, concrete = true) }
- BaseUniverseReify bindTo { case (c, Apply(TypeApply(_, List(tt)), List(expr))) => c.materializeExpr(c.prefix.tree, EmptyTree, expr) }
+ materializeClassTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List())) => c.materializeClassTag(tt.tpe) }
+ materializeWeakTypeTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeTypeTag(u, EmptyTree, tt.tpe, concrete = false) }
+ materializeTypeTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeTypeTag(u, EmptyTree, tt.tpe, concrete = true) }
+ ApiUniverseReify bindTo { case (c, Apply(TypeApply(_, List(tt)), List(expr))) => c.materializeExpr(c.prefix.tree, EmptyTree, expr) }
ReflectRuntimeCurrentMirror bindTo { case (c, _) => scala.reflect.runtime.Macros.currentMirror(c).tree }
StringContext_f bindTo { case (c, app@Apply(Select(Apply(_, parts), _), args)) => c.macro_StringInterpolation_f(parts, args, app.pos) }
registry
}
-}
\ No newline at end of file
+}
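// --- Sketch (not part of the patch): the reworked FastTrackEntry boils down to a
// partial function over (context, expandee); `validate` asks isDefinedAt and `run`
// applies it. C and T below are stand-ins for MacroContext and Tree.
object FastTrackSketch extends App {
  final case class Entry[C, T](expander: PartialFunction[(C, T), T]) {
    def validate(c: C, expandee: T): Boolean = expander.isDefinedAt((c, expandee))
    def run(c: C, expandee: T): T            = expander((c, expandee))
  }

  val doubleNonNegative = Entry[String, Int]({ case (_, n) if n >= 0 => n * 2 })
  println(doubleNonNegative.validate("ctx", 21))     // true
  println(doubleNonNegative.run("ctx", 21))          // 42
}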
diff --git a/src/compiler/scala/tools/reflect/FrontEnd.scala b/src/compiler/scala/tools/reflect/FrontEnd.scala
new file mode 100644
index 0000000000..f0d3d5973d
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/FrontEnd.scala
@@ -0,0 +1,50 @@
+package scala.tools
+package reflect
+
+import scala.reflect.internal.util.Position
+
+trait FrontEnd {
+ object severity extends Enumeration
+ class Severity(val id: Int) extends severity.Value {
+ var count: Int = 0
+ override def toString() = this match {
+ case INFO => "INFO"
+ case WARNING => "WARNING"
+ case ERROR => "ERROR"
+ case _ => "<unknown>"
+ }
+ }
+ val INFO = new Severity(0)
+ val WARNING = new Severity(1)
+ val ERROR = new Severity(2)
+
+ def hasErrors = ERROR.count > 0
+ def hasWarnings = WARNING.count > 0
+
+ case class Info(val pos: Position, val msg: String, val severity: Severity)
+ val infos = new scala.collection.mutable.LinkedHashSet[Info]
+
+ /** Handles incoming info */
+ def log(pos: Position, msg: String, severity: Severity) {
+ infos += new Info(pos, msg, severity)
+ severity.count += 1
+ display(infos.last)
+ }
+
+ /** Displays incoming info */
+ def display(info: Info): Unit
+
+ /** Services a request to drop into interactive mode */
+ def interactive(): Unit
+
+ /** Refreshes the UI */
+ def flush(): Unit = {}
+
+ /** Resets the reporter */
+ def reset(): Unit = {
+ INFO.count = 0
+ WARNING.count = 0
+ ERROR.count = 0
+ infos.clear()
+ }
+}
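// --- Sketch (not part of the patch): a minimal concrete reporter against the new
// FrontEnd trait above. Only `display` and `interactive` are abstract; this one
// prints to stderr and has no interactive prompt. It assumes the trait lands at
// scala.tools.reflect.FrontEnd exactly as added in this diff.
import scala.tools.reflect.FrontEnd

class StderrFrontEnd extends FrontEnd {
  def display(info: Info): Unit =
    Console.err.println(s"[${info.severity}] ${info.pos}: ${info.msg}")
  def interactive(): Unit = ()                       // drop-to-interactive is a no-op here
}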
diff --git a/src/compiler/scala/tools/reflect/FrontEnds.scala b/src/compiler/scala/tools/reflect/FrontEnds.scala
deleted file mode 100644
index d8f07fb2e5..0000000000
--- a/src/compiler/scala/tools/reflect/FrontEnds.scala
+++ /dev/null
@@ -1,78 +0,0 @@
-package scala.tools
-package reflect
-
-import scala.tools.nsc.reporters._
-import scala.tools.nsc.Settings
-import scala.reflect.ClassTag
-
-trait FrontEnds extends scala.reflect.api.FrontEnds {
-
- type Position = scala.reflect.internal.util.Position
-
- def mkConsoleFrontEnd(minSeverity: Int = 1): FrontEnd = {
- val settings = new Settings()
- if (minSeverity <= 0) settings.verbose.value = true
- if (minSeverity > 1) settings.nowarn.value = true
- wrapReporter(new ConsoleReporter(settings))
- }
-
- abstract class FrontEndToReporterProxy(val frontEnd: FrontEnd) extends AbstractReporter {
- import frontEnd.{Severity => ApiSeverity}
- val API_INFO = frontEnd.INFO
- val API_WARNING = frontEnd.WARNING
- val API_ERROR = frontEnd.ERROR
-
- type NscSeverity = Severity
- val NSC_INFO = INFO
- val NSC_WARNING = WARNING
- val NSC_ERROR = ERROR
-
- def display(pos: Position, msg: String, nscSeverity: NscSeverity): Unit =
- frontEnd.log(pos, msg, nscSeverity match {
- case NSC_INFO => API_INFO
- case NSC_WARNING => API_WARNING
- case NSC_ERROR => API_ERROR
- })
-
- def displayPrompt(): Unit =
- frontEnd.interactive()
- }
-
- def wrapFrontEnd(frontEnd: FrontEnd): Reporter = new FrontEndToReporterProxy(frontEnd) {
- val settings = new Settings()
- settings.verbose.value = true
- settings.nowarn.value = false
- }
-
- class ReporterToFrontEndProxy(val reporter: Reporter) extends FrontEnd {
- val API_INFO = INFO
- val API_WARNING = WARNING
- val API_ERROR = ERROR
-
- override def hasErrors = reporter.hasErrors
- override def hasWarnings = reporter.hasWarnings
-
- def display(info: Info): Unit = info.severity match {
- case API_INFO => reporter.info(info.pos, info.msg, false)
- case API_WARNING => reporter.warning(info.pos, info.msg)
- case API_ERROR => reporter.error(info.pos, info.msg)
- }
-
- def interactive(): Unit = reporter match {
- case reporter: AbstractReporter => reporter.displayPrompt()
- case _ => // do nothing
- }
-
- override def flush(): Unit = {
- super.flush()
- reporter.flush()
- }
-
- override def reset(): Unit = {
- super.reset()
- reporter.reset()
- }
- }
-
- def wrapReporter(reporter: Reporter): FrontEnd = new ReporterToFrontEndProxy(reporter)
-}
diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/reflect/MacroImplementations.scala
index 40ceefcc70..48a4811744 100644
--- a/src/compiler/scala/tools/reflect/MacroImplementations.scala
+++ b/src/compiler/scala/tools/reflect/MacroImplementations.scala
@@ -6,11 +6,12 @@ import scala.collection.mutable.ListBuffer
import scala.collection.mutable.Stack
abstract class MacroImplementations {
- val c: Context
+ val c: Context
- import c.universe.{Position => SPosition, _}
+ import c.universe._
+ import definitions._
- def macro_StringInterpolation_f(parts: List[Tree], args: List[Tree], origApplyPos: SPosition): Tree = {
+ def macro_StringInterpolation_f(parts: List[Tree], args: List[Tree], origApplyPos: c.universe.Position): Tree = {
// the parts all have the same position information (as the expression is generated by the compiler)
// the args have correct position information
diff --git a/src/compiler/scala/tools/reflect/StdTags.scala b/src/compiler/scala/tools/reflect/StdTags.scala
index 0704189ddc..a3bc9b9bd1 100644
--- a/src/compiler/scala/tools/reflect/StdTags.scala
+++ b/src/compiler/scala/tools/reflect/StdTags.scala
@@ -3,21 +3,21 @@ package reflect
import java.lang.{Class => jClass}
import scala.reflect.{ClassTag, classTag}
-import scala.reflect.base.{MirrorOf, TypeCreator, Universe => BaseUniverse}
+import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse}
// [Eugene++] Before 2.10 is released, I suggest we don't rely on automated type tag generation
// sure, it's convenient, but then refactoring reflection / reification becomes a pain
// `ClassTag` tags are fine, because they don't need a reifier to be generated
trait StdTags {
- val u: BaseUniverse with Singleton
- val m: MirrorOf[u.type]
+ val u: ApiUniverse with Singleton
+ val m: Mirror[u.type]
lazy val tagOfListOfString: u.TypeTag[List[String]] =
u.TypeTag[List[String]](
m,
new TypeCreator {
- def apply[U <: BaseUniverse with Singleton](m: MirrorOf[U]): U # Type = {
+ def apply[U <: ApiUniverse with Singleton](m: Mirror[U]): U # Type = {
val u = m.universe
val pre = u.ThisType(m.staticPackage("scala.collection.immutable").moduleClass.asInstanceOf[u.Symbol])
u.TypeRef(pre, u.definitions.ListClass, List(u.definitions.StringClass.toTypeConstructor))
@@ -28,7 +28,7 @@ trait StdTags {
u.TypeTag[T](
m,
new TypeCreator {
- def apply[U <: BaseUniverse with Singleton](m: MirrorOf[U]): U # Type =
+ def apply[U <: ApiUniverse with Singleton](m: Mirror[U]): U # Type =
m.staticClass(classTag[T].runtimeClass.getName).toTypeConstructor.asInstanceOf[U # Type]
})
lazy val tagOfInt = u.TypeTag.Int
diff --git a/src/compiler/scala/tools/reflect/ToolBox.scala b/src/compiler/scala/tools/reflect/ToolBox.scala
index 2505c1afb7..ab814b617d 100644
--- a/src/compiler/scala/tools/reflect/ToolBox.scala
+++ b/src/compiler/scala/tools/reflect/ToolBox.scala
@@ -1,10 +1,7 @@
package scala.tools
package reflect
-import scala.reflect.api.Universe
-import scala.reflect.base.MirrorOf
-
-trait ToolBox[U <: Universe] {
+trait ToolBox[U <: scala.reflect.api.Universe] {
/** Underlying universe of a ToolBox
*/
@@ -12,12 +9,15 @@ trait ToolBox[U <: Universe] {
/** Underlying mirror of a ToolBox
*/
- val mirror: MirrorOf[u.type]
+ val mirror: u.Mirror
/** Front end of the toolbox.
*
* Accumulates and displays warnings and errors, can drop to interactive mode (if supported).
* The latter can be useful to study the typechecker or to debug complex macros.
+ *
+ * [[scala.tools.reflect]] provides two predefined front ends that can be created using
+ * [[scala.tools.reflect.mkSilentFrontEnd]] and [[scala.tools.reflect.mkConsoleFrontEnd]].
*/
def frontEnd: FrontEnd
@@ -80,18 +80,23 @@ trait ToolBox[U <: Universe] {
def resetLocalAttrs(tree: u.Tree): u.Tree
/** .. */
- def parseExpr(code: String): u.Tree
+ def parse(code: String): u.Tree
- /** Compiles and runs a tree using this ToolBox.
+ /** Compiles a tree using this ToolBox.
*
* If the tree has unresolved type variables (represented as instances of `FreeTypeSymbol` symbols),
* then they all have to be resolved first using `Tree.substituteTypes`, or an error occurs.
*
* This spawns the compiler at the Namer phase, and pipelines the tree through that compiler.
- * Currently `runExpr` does not accept trees that already typechecked, because typechecking isn't idempotent.
+ * Currently `compile` does not accept trees that already typechecked, because typechecking isn't idempotent.
* For more info, take a look at https://issues.scala-lang.org/browse/SI-5464.
*/
- def runExpr(tree: u.Tree): Any
+ def compile(tree: u.Tree): () => Any
+
+ /** Compiles and runs a tree using this ToolBox.
+ * Is equivalent to `compile(tree)()`.
+ */
+ def eval(tree: u.Tree): Any
}
/** Represents an error during toolboxing
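
Taken together, the renames in this file give the ToolBox the API that ships with 2.10: `parse` replaces `parseExpr`, `compile` returns a reusable thunk, and `eval` is shorthand for `compile(tree)()`. A rough usage sketch, assuming the standard `mkToolBox()` entry point from `scala.tools.reflect`:

    import scala.reflect.runtime.{currentMirror => cm}
    import scala.tools.reflect.ToolBox   // brings the mkToolBox factory into scope

    val tb    = cm.mkToolBox()
    val tree  = tb.parse("1 to 3 map (_ * 2)")   // was parseExpr
    val thunk = tb.compile(tree)                 // compile once...
    println(thunk())                             // ...run many times: Vector(2, 4, 6)
    println(tb.eval(tree))                       // eval == compile(tree)()
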
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index eeec973299..95135b84e0 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -14,10 +14,6 @@ import java.lang.{Class => jClass}
import scala.compat.Platform.EOL
import scala.reflect.NameTransformer
import scala.reflect.api.JavaUniverse
-import scala.reflect.base.MirrorOf
-
-// [Eugene++ to Martin] by the way, toolboxes are unable to compile anything that involves packages
-// is this intentional?
abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
@@ -29,7 +25,9 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
private class ToolBoxImpl(val frontEnd: FrontEnd, val options: String) extends ToolBox[U] { toolBoxSelf =>
val u: factorySelf.u.type = factorySelf.u
- val mirror: u.Mirror = factorySelf.mirror
+
+ lazy val classLoader = new AbstractFileClassLoader(virtualDirectory, factorySelf.mirror.classLoader)
+ lazy val mirror: u.Mirror = u.runtimeMirror(classLoader)
class ToolBoxGlobal(settings: scala.tools.nsc.Settings, reporter: Reporter)
extends ReflectGlobal(settings, reporter, toolBoxSelf.classLoader) {
@@ -48,7 +46,21 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
newTermName("__wrapper$" + wrapCount + "$" + java.util.UUID.randomUUID.toString.replace("-", ""))
}
- def verifyExpr(expr: Tree): Unit = {
+ // should be called after every use of ToolBoxGlobal in order to prevent leaks
+ // there's the `withCleanupCaches` method defined below, which provides a convenient interface for that
+ def cleanupCaches(): Unit = {
+ perRunCaches.clearAll()
+ undoLog.clear()
+ analyzer.lastTreeToTyper = EmptyTree
+ lastSeenSourceFile = NoSourceFile
+ lastSeenContext = null
+ }
+
+ def withCleanupCaches[T](body: => T): T =
+ try body
+ finally cleanupCaches()
+
+ def verify(expr: Tree): Unit = {
// Previously toolboxes used to typecheck their inputs before compiling.
// Actually, the initial demo by Martin first typechecked the reified tree,
// then ran it, which typechecked it again, and only then launched the
@@ -71,9 +83,17 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
}
}
- def extractFreeTerms(expr0: Tree, wrapFreeTermRefs: Boolean): (Tree, collection.mutable.LinkedHashMap[FreeTermSymbol, TermName]) = {
+ def wrapIntoTerm(tree: Tree): Tree =
+ if (!tree.isTerm) Block(List(tree), Literal(Constant(()))) else tree
+
+ def unwrapFromTerm(tree: Tree): Tree = tree match {
+ case Block(List(tree), Literal(Constant(()))) => tree
+ case tree => tree
+ }
+
+ def extractFreeTerms(expr0: Tree, wrapFreeTermRefs: Boolean): (Tree, scala.collection.mutable.LinkedHashMap[FreeTermSymbol, TermName]) = {
val freeTerms = expr0.freeTerms
- val freeTermNames = collection.mutable.LinkedHashMap[FreeTermSymbol, TermName]()
+ val freeTermNames = scala.collection.mutable.LinkedHashMap[FreeTermSymbol, TermName]()
freeTerms foreach (ft => {
var name = ft.name.toString
val namesakes = freeTerms takeWhile (_ != ft) filter (ft2 => ft != ft2 && ft.name == ft2.name)
@@ -98,12 +118,12 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
}
def transformDuringTyper(expr0: Tree, withImplicitViewsDisabled: Boolean, withMacrosDisabled: Boolean)(transform: (analyzer.Typer, Tree) => Tree): Tree = {
- verifyExpr(expr0)
+ verify(expr0)
// need to wrap the expr, because otherwise you won't be able to typecheck macros against something that contains free vars
var (expr, freeTerms) = extractFreeTerms(expr0, wrapFreeTermRefs = false)
val dummies = freeTerms.map{ case (freeTerm, name) => ValDef(NoMods, name, TypeTree(freeTerm.info), Select(Ident(PredefModule), newTermName("$qmark$qmark$qmark"))) }.toList
- expr = Block(dummies, expr)
+ expr = Block(dummies, wrapIntoTerm(expr))
// [Eugene] how can we implement that?
// !!! Why is this is in the empty package? If it's only to make
@@ -112,7 +132,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val ownerClass = rootMirror.EmptyPackageClass.newClassSymbol(newTypeName("<expression-owner>"))
build.setTypeSignature(ownerClass, ClassInfoType(List(ObjectClass.tpe), newScope, ownerClass))
val owner = ownerClass.newLocalDummy(expr.pos)
- var currentTyper = typer.atOwner(expr, owner)
+ var currentTyper = analyzer.newTyper(analyzer.rootContext(NoCompilationUnit, EmptyTree).make(expr, owner))
val wrapper1 = if (!withImplicitViewsDisabled) (currentTyper.context.withImplicitsEnabled[Tree] _) else (currentTyper.context.withImplicitsDisabled[Tree] _)
val wrapper2 = if (!withMacrosDisabled) (currentTyper.context.withMacrosEnabled[Tree] _) else (currentTyper.context.withMacrosDisabled[Tree] _)
def wrapper (tree: => Tree) = wrapper1(wrapper2(tree))
@@ -138,10 +158,11 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
}
}.transform(unwrapped)
new TreeTypeSubstituter(dummies1 map (_.symbol), dummies1 map (dummy => SingleType(NoPrefix, invertedIndex(dummy.symbol.name)))).traverse(unwrapped)
+ unwrapped = if (expr0.isTerm) unwrapped else unwrapFromTerm(unwrapped)
unwrapped
}
- def typeCheckExpr(expr: Tree, pt: Type, silent: Boolean, withImplicitViewsDisabled: Boolean, withMacrosDisabled: Boolean): Tree =
+ def typeCheck(expr: Tree, pt: Type, silent: Boolean, withImplicitViewsDisabled: Boolean, withMacrosDisabled: Boolean): Tree =
transformDuringTyper(expr, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)(
(currentTyper, expr) => {
trace("typing (implicit views = %s, macros = %s): ".format(!withImplicitViewsDisabled, !withMacrosDisabled))(showAttributed(expr, true, true, settings.Yshowsymkinds.value))
@@ -171,10 +192,14 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
}
})
- def compileExpr(expr: Tree): (Object, java.lang.reflect.Method) = {
- verifyExpr(expr)
+ def compile(expr0: Tree): () => Any = {
+ val expr = wrapIntoTerm(expr0)
- def wrapExpr(expr0: Tree): Tree = {
+ val freeTerms = expr.freeTerms // need to calculate them here, because later on they will be erased
+ val thunks = freeTerms map (fte => () => fte.value) // need to be lazy in order not to distort evaluation order
+ verify(expr)
+
+ def wrap(expr0: Tree): ModuleDef = {
val (expr, freeTerms) = extractFreeTerms(expr0, wrapFreeTermRefs = true)
val (obj, mclazz) = rootMirror.EmptyPackageClass.newModuleAndClassSymbol(
@@ -187,10 +212,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val meth = obj.moduleClass.newMethod(newTermName(wrapperMethodName))
def makeParam(schema: (FreeTermSymbol, TermName)) = {
val (fv, name) = schema
- // [Eugene] conventional way of doing this?
- val underlying = fv.tpe.resultType
- val tpe = appliedType(definitions.FunctionClass(0).tpe, List(underlying))
- meth.newValueParameter(name) setInfo tpe
+ meth.newValueParameter(name) setInfo appliedType(definitions.FunctionClass(0).tpe, List(fv.tpe.resultType))
}
meth setInfo MethodType(freeTerms.map(makeParam).toList, AnyClass.tpe)
minfo.decls enter meth
@@ -215,11 +237,11 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
var cleanedUp = resetLocalAttrs(moduledef)
trace("cleaned up: ")(showAttributed(cleanedUp, true, true, settings.Yshowsymkinds.value))
- cleanedUp
+ cleanedUp.asInstanceOf[ModuleDef]
}
- val mdef = wrapExpr(expr)
- val pdef = PackageDef(Ident(nme.EMPTY_PACKAGE_NAME), List(mdef))
+ val mdef = wrap(expr)
+ val pdef = PackageDef(Ident(mdef.name), List(mdef))
val unit = new CompilationUnit(NoSourceFile)
unit.body = pdef
@@ -235,12 +257,6 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val jmeth = jclazz.getDeclaredMethods.find(_.getName == wrapperMethodName).get
val jfield = jclazz.getDeclaredFields.find(_.getName == NameTransformer.MODULE_INSTANCE_NAME).get
val singleton = jfield.get(null)
- (singleton, jmeth)
- }
-
- def runExpr(expr: Tree): Any = {
- val freeTerms = expr.freeTerms // need to calculate them here, because later on they will be erased
- val thunks = freeTerms map (fte => () => fte.value) // need to be lazy in order not to distort evaluation order
// @odersky writes: Not sure we will be able to drop this. I forgot the reason why we dereference () functions,
// but there must have been one. So I propose to leave old version in comments to be resurrected if the problem resurfaces.
@@ -254,13 +270,14 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
// val applyMeth = result.getClass.getMethod("apply")
// applyMeth.invoke(result)
// }
- val (singleton, jmeth) = compileExpr(expr)
- val result = jmeth.invoke(singleton, thunks map (_.asInstanceOf[AnyRef]): _*)
- if (jmeth.getReturnType == java.lang.Void.TYPE) ()
- else result
+ () => {
+ val result = jmeth.invoke(singleton, thunks map (_.asInstanceOf[AnyRef]): _*)
+ if (jmeth.getReturnType == java.lang.Void.TYPE) ()
+ else result
+ }
}
- def parseExpr(code: String): Tree = {
+ def parse(code: String): Tree = {
val run = new Run
reporter.reset()
val wrappedCode = "object wrapper {" + EOL + code + EOL + "}"
@@ -317,7 +334,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val errorFn: String => Unit = msg => frontEnd.log(scala.reflect.internal.util.NoPosition, msg, frontEnd.ERROR)
val command = new CompilerCommand(arguments.toList, errorFn)
command.settings.outputDirs setSingleOutput virtualDirectory
- val instance = new ToolBoxGlobal(command.settings, new FrontEndToReporterProxy(frontEnd) { val settings = command.settings })
+ val instance = new ToolBoxGlobal(command.settings, frontEndToReporter(frontEnd, command.settings))
if (frontEnd.hasErrors) {
var msg = "reflective compilation has failed: cannot initialize the compiler: " + EOL + EOL
msg += frontEnd.infos map (_.msg) mkString EOL
@@ -333,15 +350,14 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
lazy val importer = compiler.mkImporter(u)
lazy val exporter = importer.reverse
- lazy val classLoader = new AbstractFileClassLoader(virtualDirectory, mirror.classLoader)
- def typeCheck(tree: u.Tree, expectedType: u.Type, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): u.Tree = {
+ def typeCheck(tree: u.Tree, expectedType: u.Type, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): u.Tree = compiler.withCleanupCaches {
if (compiler.settings.verbose.value) println("importing "+tree+", expectedType = "+expectedType)
var ctree: compiler.Tree = importer.importTree(tree)
var cexpectedType: compiler.Type = importer.importType(expectedType)
if (compiler.settings.verbose.value) println("typing "+ctree+", expectedType = "+expectedType)
- val ttree: compiler.Tree = compiler.typeCheckExpr(ctree, cexpectedType, silent = silent, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)
+ val ttree: compiler.Tree = compiler.typeCheck(ctree, cexpectedType, silent = silent, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)
val uttree = exporter.importTree(ttree)
uttree
}
@@ -355,7 +371,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
inferImplicit(tree, viewTpe, isView = true, silent = silent, withMacrosDisabled = withMacrosDisabled, pos = pos)
}
- private def inferImplicit(tree: u.Tree, pt: u.Type, isView: Boolean, silent: Boolean, withMacrosDisabled: Boolean, pos: u.Position): u.Tree = {
+ private def inferImplicit(tree: u.Tree, pt: u.Type, isView: Boolean, silent: Boolean, withMacrosDisabled: Boolean, pos: u.Position): u.Tree = compiler.withCleanupCaches {
if (compiler.settings.verbose.value) println("importing "+pt, ", tree = "+tree+", pos = "+pos)
var ctree: compiler.Tree = importer.importTree(tree)
var cpt: compiler.Type = importer.importType(pt)
@@ -384,20 +400,22 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
def showAttributed(tree: u.Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String =
compiler.showAttributed(importer.importTree(tree), printTypes, printIds, printKinds)
- def parseExpr(code: String): u.Tree = {
+ def parse(code: String): u.Tree = {
if (compiler.settings.verbose.value) println("parsing "+code)
- val ctree: compiler.Tree = compiler.parseExpr(code)
+ val ctree: compiler.Tree = compiler.parse(code)
val utree = exporter.importTree(ctree)
utree
}
- def runExpr(tree: u.Tree): Any = {
+ def compile(tree: u.Tree): () => Any = {
if (compiler.settings.verbose.value) println("importing "+tree)
var ctree: compiler.Tree = importer.importTree(tree)
- if (compiler.settings.verbose.value) println("running "+ctree)
- compiler.runExpr(ctree)
+ if (compiler.settings.verbose.value) println("compiling "+ctree)
+ compiler.compile(ctree)
}
+
+ def eval(tree: u.Tree): Any = compile(tree)()
}
}
diff --git a/src/compiler/scala/tools/reflect/package.scala b/src/compiler/scala/tools/reflect/package.scala
index 3008930823..8a1e3628e2 100644
--- a/src/compiler/scala/tools/reflect/package.scala
+++ b/src/compiler/scala/tools/reflect/package.scala
@@ -6,9 +6,12 @@
package scala.tools
import scala.reflect.api.JavaUniverse
-import language.implicitConversions
+import scala.reflect.internal.util.Position
+import scala.language.implicitConversions
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
-package object reflect extends FrontEnds {
+package object reflect {
// [todo: can we generalize this?
import scala.reflect.runtime.{universe => ru}
implicit def ToolBox(mirror0: ru.Mirror): ToolBoxFactory[ru.type] =
@@ -17,9 +20,92 @@ package object reflect extends FrontEnds {
}
// todo. replace this with an implicit class, once the pesky warning is gone
+ // we don't provide `Eval` for trees, because it's unclear where to get an evaluation mirror from
implicit def Eval[T](expr: JavaUniverse # Expr[T]): Eval[T] = new Eval[T](expr)
- // we don't provide `Eval` for trees, because it's unclear where to get an evaluation mirror from
+ /** Creates a UI-less front end that simply accumulates all incoming messages
+ */
+ def mkSilentFrontEnd(): FrontEnd = new FrontEnd {
+ def display(info: Info) {}
+ def interactive() {}
+ }
+
+ /** Creates a front end that prints messages to the console according to the settings.
+ *
+ * `minSeverity` determines the minimum severity of the messages to be printed.
+ * 0 stands for INFO, 1 stands for WARNING and 2 stands for ERROR.
+ */
+ // todo. untangle warningsAsErrors from Reporters. I don't feel like moving this flag here!
+ def mkConsoleFrontEnd(minSeverity: Int = 1): FrontEnd = {
+ val settings = new Settings()
+ if (minSeverity <= 0) settings.verbose.value = true
+ if (minSeverity > 1) settings.nowarn.value = true
+ reporterToFrontEnd(new ConsoleReporter(settings))
+ }
+
+ private[reflect] def reporterToFrontEnd(reporter: Reporter): FrontEnd = new FrontEnd {
+ val API_INFO = INFO
+ val API_WARNING = WARNING
+ val API_ERROR = ERROR
+
+ override def hasErrors = reporter.hasErrors
+ override def hasWarnings = reporter.hasWarnings
+
+ def display(info: Info): Unit = info.severity match {
+ case API_INFO => reporter.info(info.pos, info.msg, false)
+ case API_WARNING => reporter.warning(info.pos, info.msg)
+ case API_ERROR => reporter.error(info.pos, info.msg)
+ }
+
+ def interactive(): Unit = reporter match {
+ case reporter: AbstractReporter => reporter.displayPrompt()
+ case _ => // do nothing
+ }
+
+ override def flush(): Unit = {
+ super.flush()
+ reporter.flush()
+ }
+
+ override def reset(): Unit = {
+ super.reset()
+ reporter.reset()
+ }
+ }
+
+ private[reflect] def frontEndToReporter(frontEnd: FrontEnd, settings0: Settings): Reporter = new AbstractReporter {
+ val settings = settings0
+
+ import frontEnd.{Severity => ApiSeverity}
+ val API_INFO = frontEnd.INFO
+ val API_WARNING = frontEnd.WARNING
+ val API_ERROR = frontEnd.ERROR
+
+ type NscSeverity = Severity
+ val NSC_INFO = INFO
+ val NSC_WARNING = WARNING
+ val NSC_ERROR = ERROR
+
+ def display(pos: Position, msg: String, nscSeverity: NscSeverity): Unit =
+ frontEnd.log(pos, msg, nscSeverity match {
+ case NSC_INFO => API_INFO
+ case NSC_WARNING => API_WARNING
+ case NSC_ERROR => API_ERROR
+ })
+
+ def displayPrompt(): Unit =
+ frontEnd.interactive()
+
+ override def flush(): Unit = {
+ super.flush()
+ frontEnd.flush()
+ }
+
+ override def reset(): Unit = {
+ super.reset()
+ frontEnd.reset()
+ }
+ }
}
package reflect {
@@ -27,7 +113,7 @@ package reflect {
def eval: T = {
val factory = new ToolBoxFactory[JavaUniverse](expr.mirror.universe) { val mirror = expr.mirror.asInstanceOf[this.u.Mirror] }
val toolBox = factory.mkToolBox()
- toolBox.runExpr(expr.tree.asInstanceOf[toolBox.u.Tree]).asInstanceOf[T]
+ toolBox.eval(expr.tree.asInstanceOf[toolBox.u.Tree]).asInstanceOf[T]
}
}
}
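
The two factories added here make it easy to keep toolbox diagnostics off the console. A hedged sketch of collecting messages with a silent front end (assuming `mkToolBox` accepts a `frontEnd` argument, as the ToolBoxFactory changes above suggest):

    import scala.reflect.runtime.{currentMirror => cm}
    import scala.tools.reflect.{ToolBox, ToolBoxError, mkSilentFrontEnd}

    val silent = mkSilentFrontEnd()
    val tb     = cm.mkToolBox(frontEnd = silent)
    try tb.compile(tb.parse("""val x: Int = "not an Int""""))
    catch { case _: ToolBoxError => () }          // the failure is also recorded in the front end
    silent.infos foreach (i => println(s"${i.severity}: ${i.msg}"))
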
diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala
index ce149a5aa0..70f71a222a 100644
--- a/src/compiler/scala/tools/util/Javap.scala
+++ b/src/compiler/scala/tools/util/Javap.scala
@@ -12,7 +12,7 @@ import scala.tools.nsc.util.ScalaClassLoader
import java.io.{ InputStream, PrintWriter, ByteArrayInputStream, FileNotFoundException }
import scala.tools.nsc.io.File
import Javap._
-import language.reflectiveCalls
+import scala.language.reflectiveCalls
trait Javap {
def loader: ScalaClassLoader
diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala
index 739878c282..f6dc92f96e 100644
--- a/src/compiler/scala/tools/util/PathResolver.scala
+++ b/src/compiler/scala/tools/util/PathResolver.scala
@@ -13,7 +13,7 @@ import nsc.util.{ ClassPath, JavaClassPath, ScalaClassLoader }
import nsc.io.{ File, Directory, Path, AbstractFile }
import ClassPath.{ JavaContext, DefaultJavaContext, join, split }
import PartialFunction.condOpt
-import language.postfixOps
+import scala.language.postfixOps
// Loosely based on the draft specification at:
// https://wiki.scala-lang.org/display/SW/Classpath
diff --git a/src/compiler/scala/tools/util/VerifyClass.scala b/src/compiler/scala/tools/util/VerifyClass.scala
index e0e089d0b2..d208a9f9c2 100644
--- a/src/compiler/scala/tools/util/VerifyClass.scala
+++ b/src/compiler/scala/tools/util/VerifyClass.scala
@@ -2,7 +2,7 @@ package scala.tools.util
import scala.tools.nsc.io._
import java.net.URLClassLoader
-import collection.JavaConverters._
+import scala.collection.JavaConverters._
object VerifyClass {
diff --git a/src/continuations/library/scala/util/continuations/ControlContext.scala b/src/continuations/library/scala/util/continuations/ControlContext.scala
index 910ca60eb0..0ad880f26a 100644
--- a/src/continuations/library/scala/util/continuations/ControlContext.scala
+++ b/src/continuations/library/scala/util/continuations/ControlContext.scala
@@ -8,7 +8,7 @@
package scala.util.continuations
-import annotation.{ Annotation, StaticAnnotation, TypeConstraint }
+import scala.annotation.{ Annotation, StaticAnnotation, TypeConstraint }
/** This annotation is used to mark a parameter as part of a continuation
* context.
@@ -101,7 +101,7 @@ final class ControlContext[+A,-B,+C](val fun: (A => B, Exception => B) => C, val
@noinline final def map[A1](f: A => A1): ControlContext[A1,B,C] = {
if (fun eq null)
try {
- new ControlContext(null, f(x)) // TODO: only alloc if f(x) != x
+ new ControlContext[A1,B,C](null, f(x)) // TODO: only alloc if f(x) != x
} catch {
case ex: Exception =>
new ControlContext((k: A1 => B, thr: Exception => B) => thr(ex).asInstanceOf[C], null.asInstanceOf[A1])
diff --git a/src/detach/library/scala/remoting/Channel.scala b/src/detach/library/scala/remoting/Channel.scala
index 54b8fb100e..18944a152d 100644
--- a/src/detach/library/scala/remoting/Channel.scala
+++ b/src/detach/library/scala/remoting/Channel.scala
@@ -116,7 +116,7 @@ class Channel protected (socket: Socket) {
* the expected type.
*/
@throws(classOf[ChannelException])
- def receive[T](implicit expected: reflect.ClassTag[T]): T = {
+ def receive[T](implicit expected: scala.reflect.ClassTag[T]): T = {
val found = in.readObject().asInstanceOf[reflect.ClassTag[_]]
info("receive: found="+found+", expected="+expected)
import scala.reflect.ClassTag
@@ -144,11 +144,11 @@ class Channel protected (socket: Socket) {
/** <code>?</code> method may throw either an
* <code>ClassNotFoundException</code> or an <code>IOException</code>.
*/
- def ?[T](implicit t: reflect.ClassTag[T]): T = receive[T](t)
+ def ?[T](implicit t: scala.reflect.ClassTag[T]): T = receive[T](t)
/** <code>send</code> method may throw an <code>IOException</code>.
*/
- def send[T](x: T)(implicit t: reflect.ClassTag[T]) {
+ def send[T](x: T)(implicit t: scala.reflect.ClassTag[T]) {
out writeObject t
x match {
case x: Unit => // nop
@@ -168,7 +168,7 @@ class Channel protected (socket: Socket) {
/** <code>!</code> method may throw an <code>IOException</code>.
*/
- def ![T](x: T)(implicit m: reflect.ClassTag[T]) { send(x)(m) }
+ def ![T](x: T)(implicit m: scala.reflect.ClassTag[T]) { send(x)(m) }
def close() {
try { socket.close() }
diff --git a/src/ensime/.ensime.SAMPLE b/src/ensime/.ensime.SAMPLE
new file mode 100644
index 0000000000..10801816b7
--- /dev/null
+++ b/src/ensime/.ensime.SAMPLE
@@ -0,0 +1,17 @@
+(
+ :disable-source-load-on-startup t
+ :disable-scala-jars-on-classpath t
+ :root-dir "c:/Projects/Kepler"
+ :sources (
+ "c:/Projects/Kepler/src/library"
+ "c:/Projects/Kepler/src/reflect"
+ "c:/Projects/Kepler/src/compiler"
+ )
+ :compile-deps (
+ "c:/Projects/Kepler/build/asm/classes"
+ "c:/Projects/Kepler/build/locker/classes/library"
+ "c:/Projects/Kepler/build/locker/classes/reflect"
+ "c:/Projects/Kepler/build/locker/classes/compiler"
+ )
+ :target "c:/Projects/Kepler/build/classes"
+) \ No newline at end of file
diff --git a/src/ensime/README.md b/src/ensime/README.md
new file mode 100644
index 0000000000..302d47b8a7
--- /dev/null
+++ b/src/ensime/README.md
@@ -0,0 +1,11 @@
+Ensime project files
+=====================
+
+Rename .ensime.SAMPLE to .ensime and replace sample paths with real paths to your sources and build results.
+After that you're good to go with one of the ENSIME-enabled text editors.
+
+Editors that know how to talk to ENSIME servers:
+1) Emacs via https://github.com/aemoncannon/ensime
+2) jEdit via https://github.com/djspiewak/ensime-sidekick
+3) TextMate via https://github.com/mads379/ensime.tmbundle
+4) Sublime Text 2 via https://github.com/sublimescala/sublime-ensime
diff --git a/src/library/scala/App.scala b/src/library/scala/App.scala
index 85d2f9075e..a1e5e74e2f 100644
--- a/src/library/scala/App.scala
+++ b/src/library/scala/App.scala
@@ -22,6 +22,16 @@ import scala.collection.mutable.ListBuffer
*
* `args` returns the current command line arguments as an array.
*
+ * ==Caveats==
+ *
+ * '''''It should be noted that this trait is implemented using the [[DelayedInit]]
+ * functionality, which means that fields of the object will not have been initialized
+ * before the main method has been executed.'''''
+ *
+ * It should also be noted that the `main` method will not normally need to be overridden:
+ * the purpose is to turn the whole class body into the “main method”. You should only
+ * choose to override it if you know what you are doing.
+ *
* @author Martin Odersky
* @version 2.1, 15/02/2011
*/
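
A tiny illustration of the caveat documented above: with `App` the whole object body, including field initializers, becomes the delayed "main method" body, so nothing is initialized until `main` is invoked.

    object Echo extends App {
      val greeting = "hello"                        // initialized only when main() runs (DelayedInit)
      println(s"$greeting: ${args.mkString(" ")}")  // args is available here
    }
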
diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala
index 7511b5309d..0b8550be37 100644
--- a/src/library/scala/Array.scala
+++ b/src/library/scala/Array.scala
@@ -11,7 +11,7 @@ package scala
import scala.collection.generic._
import scala.collection.{ mutable, immutable }
import mutable.{ ArrayBuilder, ArraySeq }
-import compat.Platform.arraycopy
+import scala.compat.Platform.arraycopy
import scala.reflect.ClassTag
import scala.runtime.ScalaRunTime.{ array_apply, array_update }
@@ -48,6 +48,16 @@ class FallbackArrayBuilding {
* @version 1.0
*/
object Array extends FallbackArrayBuilding {
+ val emptyBooleanArray = new Array[Boolean](0)
+ val emptyByteArray = new Array[Byte](0)
+ val emptyCharArray = new Array[Char](0)
+ val emptyDoubleArray = new Array[Double](0)
+ val emptyFloatArray = new Array[Float](0)
+ val emptyIntArray = new Array[Int](0)
+ val emptyLongArray = new Array[Long](0)
+ val emptyShortArray = new Array[Short](0)
+ val emptyObjectArray = new Array[Object](0)
+
implicit def canBuildFrom[T](implicit t: ClassTag[T]): CanBuildFrom[Array[_], T, Array[T]] =
new CanBuildFrom[Array[_], T, Array[T]] {
def apply(from: Array[_]) = ArrayBuilder.make[T]()(t)
@@ -511,5 +521,5 @@ final class Array[T](_length: Int) extends java.io.Serializable with java.lang.C
*
* @return A clone of the Array.
*/
- override def clone: Array[T] = throw new Error()
+ override def clone(): Array[T] = throw new Error()
}
diff --git a/src/library/scala/Boolean.scala b/src/library/scala/Boolean.scala
index 014928d986..440e546f19 100644
--- a/src/library/scala/Boolean.scala
+++ b/src/library/scala/Boolean.scala
@@ -10,7 +10,7 @@
package scala
-import language.implicitConversions
+import scala.language.implicitConversions
/** `Boolean` (equivalent to Java's `boolean` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Boolean` are not
diff --git a/src/library/scala/BoxingConversions.scala b/src/library/scala/BoxingConversions.scala
deleted file mode 100644
index fd1bd6c121..0000000000
--- a/src/library/scala/BoxingConversions.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-package scala
-abstract class BoxingConversions[Boxed, Unboxed] {
- def box(x: Unboxed): Boxed
- def unbox(x: Boxed): Unboxed
-}
diff --git a/src/library/scala/Byte.scala b/src/library/scala/Byte.scala
index 6f54f6cedf..df0d2c73b1 100644
--- a/src/library/scala/Byte.scala
+++ b/src/library/scala/Byte.scala
@@ -10,7 +10,7 @@
package scala
-import language.implicitConversions
+import scala.language.implicitConversions
/** `Byte`, a 8-bit signed integer (equivalent to Java's `byte` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Byte` are not
diff --git a/src/library/scala/Char.scala b/src/library/scala/Char.scala
index b681ae1693..1fa0c0d9e8 100644
--- a/src/library/scala/Char.scala
+++ b/src/library/scala/Char.scala
@@ -10,7 +10,7 @@
package scala
-import language.implicitConversions
+import scala.language.implicitConversions
/** `Char`, a 16-bit unsigned integer (equivalent to Java's `char` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Char` are not
diff --git a/src/library/scala/Double.scala b/src/library/scala/Double.scala
index 510de92a2a..f058d7c26b 100644
--- a/src/library/scala/Double.scala
+++ b/src/library/scala/Double.scala
@@ -10,7 +10,7 @@
package scala
-import language.implicitConversions
+import scala.language.implicitConversions
/** `Double`, a 64-bit IEEE-754 floating point number (equivalent to Java's `double` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Double` are not
diff --git a/src/library/scala/Dynamic.scala b/src/library/scala/Dynamic.scala
index faf834d310..3bcb2f1c90 100644
--- a/src/library/scala/Dynamic.scala
+++ b/src/library/scala/Dynamic.scala
@@ -9,11 +9,11 @@
package scala
/** A marker trait that enables dynamic invocations. Instances `x` of this
- * trait allow method invocations `x.meth(args)` for arbitrary method
- * names `meth` and argument lists `args` as well as field accesses
+ * trait allow method invocations `x.meth(args)` for arbitrary method
+ * names `meth` and argument lists `args` as well as field accesses
* `x.field` for arbitrary field names `field`.
*
- * If a call is not natively supported by `x` (i.e. if type checking
+ * If a call is not natively supported by `x` (i.e. if type checking
* fails), it is rewritten according to the following rules:
*
* {{{
@@ -23,12 +23,12 @@ package scala
* foo.field ~~> foo.selectDynamic("field")
* foo.varia = 10 ~~> foo.updateDynamic("varia")(10)
* foo.arr(10) = 13 ~~> foo.selectDynamic("arr").update(10, 13)
- * foo.arr(10) ~~> foo.applyDynamics("arr")(10)
+ * foo.arr(10) ~~> foo.applyDynamic("arr")(10)
* }}}
*
* As of Scala 2.10, defining direct or indirect subclasses of this trait
* is only possible if the language feature `dynamics` is enabled.
*/
-trait Dynamic
+trait Dynamic extends Any
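
A small sketch of a `Dynamic` subclass that exercises the rewrite rules listed above (the `Bag` name is illustrative; `import scala.language.dynamics` is required at the definition site):

    import scala.language.dynamics

    class Bag extends Dynamic {
      private val data = scala.collection.mutable.Map.empty[String, Any]
      def selectDynamic(field: String): Any                = data(field)           // bag.field
      def updateDynamic(field: String)(value: Any): Unit   = data(field) = value   // bag.field = value
      def applyDynamic(method: String)(args: Any*): String =
        s"$method(${args.mkString(", ")})"                                         // bag.m(args)
    }

    val bag = new Bag
    bag.answer = 42               // ~~> bag.updateDynamic("answer")(42)
    println(bag.answer)           // ~~> bag.selectDynamic("answer")        prints 42
    println(bag.compute(1, 2))    // ~~> bag.applyDynamic("compute")(1, 2)  prints compute(1, 2)
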
diff --git a/src/library/scala/Float.scala b/src/library/scala/Float.scala
index b9c116da0b..d942acec23 100644
--- a/src/library/scala/Float.scala
+++ b/src/library/scala/Float.scala
@@ -10,7 +10,7 @@
package scala
-import language.implicitConversions
+import scala.language.implicitConversions
/** `Float`, a 32-bit IEEE-754 floating point number (equivalent to Java's `float` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Float` are not
diff --git a/src/library/scala/Function.scala b/src/library/scala/Function.scala
index 270581a3aa..d470f4c966 100644
--- a/src/library/scala/Function.scala
+++ b/src/library/scala/Function.scala
@@ -28,11 +28,11 @@ object Function {
/** Turns a function `A => Option[B]` into a `PartialFunction[A, B]`.
*
- * TODO: check if the paragraph below is still correct
* '''Important note''': this transformation implies the original function
- * will be called 2 or more times on each logical invocation, because the
+ * may be called 2 or more times on each logical invocation, because the
* only way to supply an implementation of `isDefinedAt` is to call the
* function and examine the return value.
+ * See also [[scala.PartialFunction]], method `applyOrElse`.
*
* @param f a function `T => Option[R]`
* @return a partial function defined for those inputs where
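
A short illustration of the note above (assuming this is the scaladoc of `Function.unlift`): going through the `isDefinedAt`/`apply` pair evaluates the source function twice, while `applyOrElse` on the lifted partial function evaluates it once.

    val parse: String => Option[Int] =
      s => { println(s"parsing $s"); scala.util.Try(s.toInt).toOption }
    val pf: PartialFunction[String, Int] = Function.unlift(parse)

    if (pf.isDefinedAt("42")) println(pf("42"))       // "parsing 42" is printed twice
    println(pf.applyOrElse("42", (_: String) => -1))  // "parsing 42" is printed once
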
diff --git a/src/library/scala/Int.scala b/src/library/scala/Int.scala
index b2a4f93253..ae36413469 100644
--- a/src/library/scala/Int.scala
+++ b/src/library/scala/Int.scala
@@ -10,7 +10,7 @@
package scala
-import language.implicitConversions
+import scala.language.implicitConversions
/** `Int`, a 32-bit signed integer (equivalent to Java's `int` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Int` are not
diff --git a/src/library/scala/Long.scala b/src/library/scala/Long.scala
index 40932a65a7..4ee9383c2a 100644
--- a/src/library/scala/Long.scala
+++ b/src/library/scala/Long.scala
@@ -10,7 +10,7 @@
package scala
-import language.implicitConversions
+import scala.language.implicitConversions
/** `Long`, a 64-bit signed integer (equivalent to Java's `long` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Long` are not
diff --git a/src/library/scala/LowPriorityImplicits.scala b/src/library/scala/LowPriorityImplicits.scala
index 491cd417a3..7697a7367a 100644
--- a/src/library/scala/LowPriorityImplicits.scala
+++ b/src/library/scala/LowPriorityImplicits.scala
@@ -12,7 +12,7 @@ import scala.collection.{ mutable, immutable, generic }
import mutable.WrappedArray
import immutable.WrappedString
import generic.CanBuildFrom
-import language.implicitConversions
+import scala.language.implicitConversions
/** The `LowPriorityImplicits` class provides implicit values that
* are valid in all Scala compilation units without explicit qualification,
@@ -27,15 +27,20 @@ class LowPriorityImplicits {
* any potential conflicts. Conflicts do exist because the wrappers
* need to implement ScalaNumber in order to have a symmetric equals
* method, but that implies implementing java.lang.Number as well.
+ *
+ * Note - these are inlined because they are value classes, but
+ * the call to xxxWrapper is not eliminated even though it does nothing.
+ * Even inlined, every call site does a no-op retrieval of Predef's MODULE$,
+ * because loading Predef may have side effects!
*/
- implicit def byteWrapper(x: Byte) = new runtime.RichByte(x)
- implicit def shortWrapper(x: Short) = new runtime.RichShort(x)
- implicit def intWrapper(x: Int) = new runtime.RichInt(x)
- implicit def charWrapper(c: Char) = new runtime.RichChar(c)
- implicit def longWrapper(x: Long) = new runtime.RichLong(x)
- implicit def floatWrapper(x: Float) = new runtime.RichFloat(x)
- implicit def doubleWrapper(x: Double) = new runtime.RichDouble(x)
- implicit def booleanWrapper(x: Boolean) = new runtime.RichBoolean(x)
+ @inline implicit def byteWrapper(x: Byte) = new runtime.RichByte(x)
+ @inline implicit def shortWrapper(x: Short) = new runtime.RichShort(x)
+ @inline implicit def intWrapper(x: Int) = new runtime.RichInt(x)
+ @inline implicit def charWrapper(c: Char) = new runtime.RichChar(c)
+ @inline implicit def longWrapper(x: Long) = new runtime.RichLong(x)
+ @inline implicit def floatWrapper(x: Float) = new runtime.RichFloat(x)
+ @inline implicit def doubleWrapper(x: Double) = new runtime.RichDouble(x)
+ @inline implicit def booleanWrapper(x: Boolean) = new runtime.RichBoolean(x)
// These eight implicits exist solely to exclude Null from the domain of
// the boxed types, so that e.g. "var x: Int = null" is a compile time
diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala
index f651461fe6..880f3f4623 100644
--- a/src/library/scala/Option.scala
+++ b/src/library/scala/Option.scala
@@ -10,7 +10,7 @@ package scala
object Option {
- import language.implicitConversions
+ import scala.language.implicitConversions
/** An implicit conversion that converts an option to an iterable value
*/
@@ -196,7 +196,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
/** Necessary to keep $option from being implicitly converted to
* [[scala.collection.Iterable]] in `for` comprehensions.
*/
- def withFilter(p: A => Boolean): WithFilter = new WithFilter(p)
+ @inline final def withFilter(p: A => Boolean): WithFilter = new WithFilter(p)
/** We need a whole WithFilter class to honor the "doesn't create a new
* collection" contract even though it seems unlikely to matter much in a
@@ -246,7 +246,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
* @return the result of applying `pf` to this $option's
* value (if possible), or $none.
*/
- def collect[B](pf: PartialFunction[A, B]): Option[B] =
+ @inline final def collect[B](pf: PartialFunction[A, B]): Option[B] =
if (!isEmpty && pf.isDefinedAt(this.get)) Some(pf(this.get)) else None
/** Returns this $option if it is nonempty,
@@ -266,7 +266,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
* if it is nonempty, or the empty list if the $option is empty.
*/
def toList: List[A] =
- if (isEmpty) List() else List(this.get)
+ if (isEmpty) List() else new ::(this.get, Nil)
/** Returns a [[scala.util.Left]] containing the given
* argument `left` if this $option is empty, or
diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala
index 7154b8da34..ce109626cc 100644
--- a/src/library/scala/PartialFunction.scala
+++ b/src/library/scala/PartialFunction.scala
@@ -67,7 +67,7 @@ trait PartialFunction[-A, +B] extends (A => B) { self =>
* of this partial function and `that`. The resulting partial function
* takes `x` to `this(x)` where `this` is defined, and to `that(x)` where it is not.
*/
- def orElse[A1 <: A, B1 >: B](that: PartialFunction[A1, B1]) : PartialFunction[A1, B1] =
+ def orElse[A1 <: A, B1 >: B](that: PartialFunction[A1, B1]): PartialFunction[A1, B1] =
new OrElse[A1, B1] (this, that)
//TODO: why not overload it with orElse(that: F1): F1?
@@ -78,10 +78,8 @@ trait PartialFunction[-A, +B] extends (A => B) { self =>
* @return a partial function with the same domain as this partial function, which maps
* arguments `x` to `k(this(x))`.
*/
- override def andThen[C](k: B => C) : PartialFunction[A, C] = new PartialFunction[A, C] {
- def isDefinedAt(x: A): Boolean = self isDefinedAt x
- def apply(x: A): C = k(self(x))
- }
+ override def andThen[C](k: B => C): PartialFunction[A, C] =
+ new AndThen[A, B, C] (this, k)
 /** Turns this partial function into a plain function returning an `Option` result.
* @see Function.unlift
@@ -90,28 +88,54 @@ trait PartialFunction[-A, +B] extends (A => B) { self =>
*/
def lift: A => Option[B] = new Lifted(this)
- /**
- * TODO: comment
+ /** Applies this partial function to the given argument when it is contained in the function domain.
+ * Applies the fallback function where this partial function is not defined.
+ *
+ * Note that the expression `pf.applyOrElse(x, default)` is equivalent to
+ * {{{ if(pf isDefinedAt x) pf(x) else default(x) }}}
+ * except that the `applyOrElse` method can be implemented more efficiently.
+ * For all partial function literals the compiler generates an `applyOrElse` implementation which
+ * avoids double evaluation of pattern matchers and guards.
+ * This makes `applyOrElse` the basis of efficient implementations for many operations and scenarios, such as:
+ *
+ * - combining partial functions into `orElse`/`andThen` chains does not lead to
+ * excessive `apply`/`isDefinedAt` evaluation
+ * - `lift` and `unlift` do not evaluate source functions twice on each invocation
+ * - `runWith` allows efficient imperative-style combining of partial functions
+ * with conditionally applied actions
+ *
+ * For non-literal partial function classes with a nontrivial `isDefinedAt` method
+ * it is recommended to override `applyOrElse` with a custom implementation that avoids
+ * double `isDefinedAt` evaluation. This may result in better performance
+ * and more predictable behavior w.r.t. side effects.
+ *
+ * @param x the function argument
+ * @param default the fallback function
+ * @return the result of this function or fallback function application.
* @since 2.10
*/
def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 =
if (isDefinedAt(x)) apply(x) else default(x)
- /**
- * TODO: comment
- * @since 2.10
- */
- def run[U](x: A)(action: B => U): Boolean =
- applyOrElse(x, fallbackToken) match {
- case FallbackToken => false
- case z => action(z); true
- }
-
- /**
- * TODO: comment
+ /** Composes this partial function with an action function which
+ * gets applied to results of this partial function.
+ * The action function is invoked only for its side effects; its result is ignored.
+ *
+ * Note that the expression `pf.runWith(action)(x)` is equivalent to
+ * {{{ if(pf isDefinedAt x) { action(pf(x)); true } else false }}}
+ * except that `runWith` is implemented via `applyOrElse` and thus potentially more efficient.
+ * Using `runWith` avoids double evaluation of pattern matchers and guards for partial function literals.
+ * @see `applyOrElse`.
+ *
+ * @param action the action function
+ * @return a function which maps arguments `x` to `isDefinedAt(x)`. The resulting function
+ * runs `action(this(x))` where `this` is defined.
* @since 2.10
*/
- def runWith[U](action: B => U): A => Boolean = { x => run(x)(action) }
+ def runWith[U](action: B => U): A => Boolean = { x =>
+ val z = applyOrElse(x, checkFallback[B])
+ if (!fallbackOccurred(z)) { action(z); true } else false
+ }
}
/** A few handy operations which leverage the extra bit of information
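
A quick usage sketch of the two entry points documented in the hunk above, `applyOrElse` and `runWith`:

    val halveEven: PartialFunction[Int, Int] = { case n if n % 2 == 0 => n / 2 }

    // applyOrElse: apply where defined, otherwise fall back -- one pattern-match evaluation
    halveEven.applyOrElse(10, (_: Int) => -1)   // 5
    halveEven.applyOrElse(7,  (_: Int) => -1)   // -1

    // runWith: run a side effect where defined, report whether it ran
    val printHalf: Int => Boolean = halveEven runWith (h => println(s"half = $h"))
    printHalf(10)   // prints "half = 5", returns true
    printHalf(7)    // prints nothing,    returns false
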
@@ -132,16 +156,15 @@ trait PartialFunction[-A, +B] extends (A => B) { self =>
object PartialFunction {
/** Composite function produced by `PartialFunction#orElse` method
*/
- private final class OrElse[-A, +B] (f1: PartialFunction[A, B], f2: PartialFunction[A, B]) extends PartialFunction[A, B] {
+ private class OrElse[-A, +B] (f1: PartialFunction[A, B], f2: PartialFunction[A, B]) extends PartialFunction[A, B] {
def isDefinedAt(x: A) = f1.isDefinedAt(x) || f2.isDefinedAt(x)
def apply(x: A): B = f1.applyOrElse(x, f2)
- override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 =
- f1.applyOrElse(x, fallbackToken) match {
- case FallbackToken => f2.applyOrElse(x, default)
- case z => z
- }
+ override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = {
+ val z = f1.applyOrElse(x, checkFallback[B])
+ if (!fallbackOccurred(z)) z else f2.applyOrElse(x, default)
+ }
override def orElse[A1 <: A, B1 >: B](that: PartialFunction[A1, B1]) =
new OrElse[A1, B1] (f1, f2 orElse that)
@@ -150,23 +173,61 @@ object PartialFunction {
new OrElse[A, C] (f1 andThen k, f2 andThen k)
}
- private[scala] lazy val FallbackToken: PartialFunction[Any, PartialFunction[Any, Nothing]] = { case _ => FallbackToken.asInstanceOf[PartialFunction[Any, Nothing]] }
- private[scala] final def fallbackToken[B] = FallbackToken.asInstanceOf[PartialFunction[Any, B]]
- //TODO: check generated code for PF literal here
+ /** Composite function produced by `PartialFunction#andThen` method
+ */
+ private class AndThen[-A, B, +C] (pf: PartialFunction[A, B], k: B => C) extends PartialFunction[A, C] {
+ def isDefinedAt(x: A) = pf.isDefinedAt(x)
+
+ def apply(x: A): C = k(pf(x))
+
+ override def applyOrElse[A1 <: A, C1 >: C](x: A1, default: A1 => C1): C1 = {
+ val z = pf.applyOrElse(x, checkFallback[B])
+ if (!fallbackOccurred(z)) k(z) else default(x)
+ }
+ }
+
+ /** To implement patterns like {{{ if(pf isDefinedAt x) f1(pf(x)) else f2(x) }}} efficiently
+ * the following trick is used:
+ *
+ * To avoid double evaluation of pattern matchers & guards, the `applyOrElse` method is used here
+ * instead of the `isDefinedAt`/`apply` pair.
+ *
+ * After a call to `applyOrElse` we need both the result it returned and
+ * whether the function's argument was contained in its domain. The only degree of freedom we have here
+ * to achieve this goal is the continuation argument (`default`) of the `applyOrElse` method.
+ * The obvious way is to throw an exception from the `default` function and to catch it after
+ * calling `applyOrElse`, but that is somewhat inefficient.
+ *
+ * The only efficient way to do this is for the `default` function to return a unique marker object
+ * that can never be returned by any other (regular or partial) function. This way, after calling `applyOrElse`,
+ * a single reference comparison is enough to tell whether `pf isDefinedAt x` or not.
+ *
+ * This correctly interacts with specialization, as the return type of `applyOrElse`
+ * (which is a parameterized upper bound) can never be specialized.
+ *
+ * Here `fallback_pf` serves as both the unique marker object and the special fallback function that returns it.
+ */
+ private[this] val fallback_pf: PartialFunction[Any, Any] = { case _ => fallback_pf }
+ private def checkFallback[B] = fallback_pf.asInstanceOf[PartialFunction[Any, B]]
+ private def fallbackOccurred[B](x: B) = (fallback_pf eq x.asInstanceOf[AnyRef])
- private[scala] final class Lifted[-A, +B] (val pf: PartialFunction[A, B])
- extends runtime.AbstractFunction1[A, Option[B]] {
+ private class Lifted[-A, +B] (val pf: PartialFunction[A, B])
+ extends scala.runtime.AbstractFunction1[A, Option[B]] {
- def apply(x: A): Option[B] = pf.applyOrElse(x, fallbackToken) match {
- case FallbackToken => None
- case z => Some(z)
+ def apply(x: A): Option[B] = {
+ val z = pf.applyOrElse(x, checkFallback[B])
+ if (!fallbackOccurred(z)) Some(z) else None
}
}
- private final class Unlifted[A, B] (f: A => Option[B]) extends runtime.AbstractPartialFunction[A, B] {
+ private class Unlifted[A, B] (f: A => Option[B]) extends scala.runtime.AbstractPartialFunction[A, B] {
def isDefinedAt(x: A): Boolean = f(x).isDefined
- override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 =
- f(x) getOrElse default(x) //TODO: check generated code and inline getOrElse if needed
+
+ override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = {
+ val z = f(x)
+ if (!z.isEmpty) z.get else default(x)
+ }
+
override def lift = f
}
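
The sentinel technique described in the comment above is not specific to this file; here is a hedged, self-contained sketch of the same idea (all names are illustrative only):

    object Marker   // a value no well-behaved function will ever return

    // Returns the first result among `pfs` defined at `x`, evaluating each pf only once.
    def firstDefined[A, B](pfs: List[PartialFunction[A, B]], x: A): Option[B] = {
      val viaMarker: A => Any = _ => Marker            // plays the role of fallback_pf
      pfs.iterator
         .map(pf => pf.applyOrElse(x, viaMarker))      // no separate isDefinedAt call
         .find(_.asInstanceOf[AnyRef] ne Marker)       // one reference comparison per pf
         .map(_.asInstanceOf[B])
    }
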
@@ -178,23 +239,21 @@ object PartialFunction {
/** Converts ordinary function to partial one
* @since 2.10
*/
- //TODO: check generated code for PF literal here
def apply[A, B](f: A => B): PartialFunction[A, B] = { case x => f(x) }
- private[this] final val constFalse: Any => Boolean = { _ => false}
+ private[this] val constFalse: Any => Boolean = { _ => false}
- private[this] final val empty_pf: PartialFunction[Any, Nothing] = new PartialFunction[Any, Nothing] {
+ private[this] val empty_pf: PartialFunction[Any, Nothing] = new PartialFunction[Any, Nothing] {
def isDefinedAt(x: Any) = false
def apply(x: Any) = throw new MatchError(x)
override def orElse[A1, B1](that: PartialFunction[A1, B1]) = that
override def andThen[C](k: Nothing => C) = this
override val lift = (x: Any) => None
- override def run[U](x: Any)(action: Nothing => U) = false
override def runWith[U](action: Nothing => U) = constFalse
}
- /**
- * TODO: comment
+ /** The partial function with empty domain.
+ * Any attempt to invoke the empty partial function throws a [[scala.MatchError]] exception.
* @since 2.10
*/
def empty[A, B] : PartialFunction[A, B] = empty_pf
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index f3d36473dd..4792de6bf5 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -12,9 +12,9 @@ import scala.collection.{ mutable, immutable, generic }
import immutable.StringOps
import mutable.ArrayOps
import generic.CanBuildFrom
-import annotation.{ elidable, implicitNotFound }
-import annotation.elidable.ASSERTION
-import language.{implicitConversions, existentials}
+import scala.annotation.{ elidable, implicitNotFound }
+import scala.annotation.elidable.ASSERTION
+import scala.language.{implicitConversions, existentials}
/** The `Predef` object provides definitions that are accessible in all Scala
* compilation units without explicit qualification.
@@ -301,7 +301,7 @@ object Predef extends LowPriorityImplicits {
implicit def exceptionWrapper(exc: Throwable) = new runtime.RichException(exc)
implicit def tuple2ToZippedOps[T1, T2](x: (T1, T2)) = new runtime.Tuple2Zipped.Ops(x)
implicit def tuple3ToZippedOps[T1, T2, T3](x: (T1, T2, T3)) = new runtime.Tuple3Zipped.Ops(x)
- implicit def seqToCharSequence(xs: collection.IndexedSeq[Char]): CharSequence = new runtime.SeqCharSequence(xs)
+ implicit def seqToCharSequence(xs: scala.collection.IndexedSeq[Char]): CharSequence = new runtime.SeqCharSequence(xs)
implicit def arrayToCharSequence(xs: Array[Char]): CharSequence = new runtime.ArrayCharSequence(xs, 0, xs.length)
implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = (xs match {
diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala
index 8c42c60d98..2c6838f6b3 100644
--- a/src/library/scala/Product.scala
+++ b/src/library/scala/Product.scala
@@ -35,7 +35,7 @@ trait Product extends Any with Equals {
/** An iterator over all the elements of this product.
* @return in the default implementation, an `Iterator[Any]`
*/
- def productIterator: Iterator[Any] = new collection.AbstractIterator[Any] {
+ def productIterator: Iterator[Any] = new scala.collection.AbstractIterator[Any] {
private var c: Int = 0
private val cmax = productArity
def hasNext = c < cmax
diff --git a/src/library/scala/SerialVersionUID.scala b/src/library/scala/SerialVersionUID.scala
index 0cb924c3d4..f59aa94bd7 100644
--- a/src/library/scala/SerialVersionUID.scala
+++ b/src/library/scala/SerialVersionUID.scala
@@ -12,4 +12,4 @@ package scala
* Annotation for specifying the `static SerialVersionUID` field
* of a serializable class.
*/
-class SerialVersionUID(uid: Long) extends annotation.StaticAnnotation
+class SerialVersionUID(uid: Long) extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/Short.scala b/src/library/scala/Short.scala
index 687b198a11..35c5fe3ff0 100644
--- a/src/library/scala/Short.scala
+++ b/src/library/scala/Short.scala
@@ -10,7 +10,7 @@
package scala
-import language.implicitConversions
+import scala.language.implicitConversions
/** `Short`, a 16-bit signed integer (equivalent to Java's `short` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Short` are not
diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala
index 723d95a499..17c1d0d2ba 100644
--- a/src/library/scala/StringContext.scala
+++ b/src/library/scala/StringContext.scala
@@ -8,10 +8,43 @@
package scala
-/** A class to support string interpolation.
- * This class supports string interpolation as outlined in Scala SIP-11.
- * It needs to be fully documented once the SIP is accepted.
+/** This class provides the basic mechanism to do String Interpolation.
+ * String Interpolation allows users
+ * to embed variable references directly in *processed* string literals.
+ * Here's an example:
+ * {{{
+ * val name = "James"
+ * println(s"Hello, $name") // Hello, James
+ * }}}
*
+ * Any processed string literal is rewritten as an instantiation and
+ * method call against this class. For example:
+ * {{{
+ * s"Hello, $name"
+ * }}}
+ *
+ * is rewritten to be:
+ *
+ * {{{
+ * new StringContext("Hello, ", "").s(name)
+ * }}}
+ *
+ * By default, this class provides the `raw`, `s` and `f` methods as
+ * available interpolators.
+ *
+ * To provide your own string interpolator, create an implicit class
+ * which adds a method to `StringContext`. Here's an example:
+ * {{{
+ * implicit class JsonHelper(val sc: StringContext) extends AnyVal {
+ * def json(args: Any*): JSONObject = ...
+ * }
+ * val x: JSONObject = json"{ a: $a }"
+ * }}}
+ *
+ * Here the `JsonHelper` extension class implicitly adds the `json` method to
+ * `StringContext` which can be used for `json` string literals.
+ *
+ * @since 2.10.0
* @param parts The parts that make up the interpolated string,
* without the expressions that get inserted by interpolation.
*/
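
Building on the `JsonHelper` outline above, here is a complete toy interpolator (the `sql` name and the naive quoting rule are purely illustrative):

    object Interpolators {
      implicit class SqlHelper(val sc: StringContext) extends AnyVal {
        def sql(args: Any*): String = sc.s(args.map {
          case s: String => "'" + s.replace("'", "''") + "'"   // quote and escape string arguments
          case other     => other
        }: _*)
      }
    }

    import Interpolators._
    val user = "O'Brien"
    println(sql"select * from users where name = $user")
    // select * from users where name = 'O''Brien'
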
@@ -19,7 +52,7 @@ case class StringContext(parts: String*) {
import StringContext._
- /** Checks that the given arguments `args` number one less than the number
+ /** Checks that the length of the given argument `args` is one less than the number
* of `parts` supplied to the enclosing `StringContext`.
* @param `args` The arguments to be checked.
* @throws An `IllegalArgumentException` if this is not the case.
@@ -33,11 +66,25 @@ case class StringContext(parts: String*) {
*
* It inserts its arguments between corresponding parts of the string context.
* It also treats standard escape sequences as defined in the Scala specification.
+ * Here's an example of usage:
+ * {{{
+ * val name = "James"
+ * println(s"Hello, $name") // Hello, James
+ * }}}
+ * In this example, the expression $name is replaced with the `toString` of the
+ * variable `name`.
+ * The `s` interpolator can take the `toString` of any arbitrary expression within
+ * a `${}` block, for example:
+ * {{{
+ * println(s"1 + 1 = ${1 + 1}")
+ * }}}
+ * will print the string `1 + 1 = 2`.
+ *
* @param `args` The arguments to be inserted into the resulting string.
* @throws An `IllegalArgumentException`
* if the number of `parts` in the enclosing `StringContext` does not exceed
* the number of arguments `arg` by exactly 1.
- * @throws A `StringContext.InvalidEscapeException` if if a `parts` string contains a backslash (`\`) character
+ * @throws A `StringContext.InvalidEscapeException` if a `parts` string contains a backslash (`\`) character
* that does not start a valid escape sequence.
*/
def s(args: Any*): String = standardInterpolator(treatEscapes, args)
@@ -47,11 +94,14 @@ case class StringContext(parts: String*) {
* It inserts its arguments between corresponding parts of the string context.
* As opposed to the simple string interpolator `s`, this one does not treat
* standard escape sequences as defined in the Scala specification.
+ *
+ * For example, the raw processed string `raw"a\nb"` is equal to the Scala string `"a\\nb"`.
+ *
* @param `args` The arguments to be inserted into the resulting string.
* @throws An `IllegalArgumentException`
* if the number of `parts` in the enclosing `StringContext` does not exceed
* the number of arguments `arg` by exactly 1.
- * @throws A `StringContext.InvalidEscapeException` if if a `parts` string contains a backslash (`\`) character
+ * @throws A `StringContext.InvalidEscapeException` if a `parts` string contains a backslash (`\`) character
* that does not start a valid escape sequence.
*/
def raw(args: Any*): String = standardInterpolator(identity, args)
@@ -76,6 +126,13 @@ case class StringContext(parts: String*) {
* that starts with a formatting specifier, the expression is formatted according to that
* specifier. All specifiers allowed in Java format strings are handled, and in the same
* way they are treated in Java.
+ *
+ * For example:
+ * {{{
+ * val height = 1.9d
+ * val name = "James"
+ * println(f"$name%s is $height%2.2f meters tall") // James is 1.90 meters tall
+ * }}}
*
* @param `args` The arguments to be inserted into the resulting string.
* @throws An `IllegalArgumentException`
@@ -96,13 +153,14 @@ case class StringContext(parts: String*) {
* string literally. This is achieved by replacing each such occurrence by the
* format specifier `%%`.
*/
- // The implementation is magically hardwired into `scala.tools.reflect.MacroImplementations.macro_StringInterpolation_f`
+ // The implementation is hardwired to `scala.tools.reflect.MacroImplementations.macro_StringInterpolation_f`
+ // using the mechanism implemented in `scala.tools.reflect.FastTrack`
def f(args: Any*): String = ??? // macro
}
object StringContext {
- /** An exception that is thrown if a string contains a backslash (`\`) character that
+ /** An exception that is thrown if a string contains a backslash (`\`) character
* that does not start a valid escape sequence.
* @param str The offending string
* @param idx The index of the offending backslash character in `str`.
diff --git a/src/library/scala/Unit.scala b/src/library/scala/Unit.scala
index 4156071f29..dc67e60314 100644
--- a/src/library/scala/Unit.scala
+++ b/src/library/scala/Unit.scala
@@ -10,7 +10,7 @@
package scala
-import language.implicitConversions
+import scala.language.implicitConversions
/** `Unit` is a subtype of [[scala.AnyVal]]. There is only one value of type
diff --git a/src/library/scala/annotation/bridge.scala b/src/library/scala/annotation/bridge.scala
index a56129fb96..c3a7f47e62 100644
--- a/src/library/scala/annotation/bridge.scala
+++ b/src/library/scala/annotation/bridge.scala
@@ -11,4 +11,4 @@ package scala.annotation
/** If this annotation is present on a method, it will be treated as a bridge method.
*/
@deprecated("Reconsider whether using this annotation will accomplish anything", "2.10.0")
-private[scala] class bridge extends annotation.StaticAnnotation
+private[scala] class bridge extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/cloneable.scala b/src/library/scala/annotation/cloneable.scala
index aa45e8325f..dc2031ba8d 100644
--- a/src/library/scala/annotation/cloneable.scala
+++ b/src/library/scala/annotation/cloneable.scala
@@ -12,4 +12,4 @@ package scala.annotation
* An annotation that designates the class to which it is applied as cloneable
*/
@deprecated("instead of `@cloneable class C`, use `class C extends Cloneable`", "2.10.0")
-class cloneable extends annotation.StaticAnnotation
+class cloneable extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/elidable.scala b/src/library/scala/annotation/elidable.scala
index 18be2450f5..0b4f5ac4b2 100644
--- a/src/library/scala/annotation/elidable.scala
+++ b/src/library/scala/annotation/elidable.scala
@@ -22,7 +22,7 @@ import java.util.logging.Level
* @elidable(123) // annotation priority
* scalac -Xelide-below 456 // command line priority
* }}}
- *
+ *
* The method call will be replaced with an expression which depends on
* the type of the elided expression. In decreasing order of precedence:
*
@@ -33,10 +33,10 @@ import java.util.logging.Level
* T >: Null null
* T >: Nothing Predef.???
* }}}
- *
+ *
* Complete example:
{{{
- import annotation._, elidable._
+ import scala.annotation._, elidable._
object Test extends App {
def expensiveComputation(): Int = { Thread.sleep(1000) ; 172 }
@@ -62,7 +62,7 @@ import java.util.logging.Level
* @author Paul Phillips
* @since 2.8
*/
-final class elidable(final val level: Int) extends annotation.StaticAnnotation {}
+final class elidable(final val level: Int) extends scala.annotation.StaticAnnotation {}
/** This useless appearing code was necessary to allow people to use
* named constants for the elidable annotation. This is what it takes
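To make the priority discussion above concrete, a compact usage sketch; the method name `log` and the numeric threshold are illustrative only:
{{{
import scala.annotation.elidable
import scala.annotation.elidable._

object ElidableDemo extends App {
  // INFO has priority 800; compiling with `scalac -Xelide-below 1000 ...`
  // removes every call to this method at compile time.
  @elidable(INFO) def log(msg: String): Unit = println("INFO: " + msg)

  log("this call disappears when elided") // replaced by () since the result type is Unit
  println("done")
}
}}}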
diff --git a/src/library/scala/annotation/implicitNotFound.scala b/src/library/scala/annotation/implicitNotFound.scala
index 0c6a5d610f..993e99d382 100644
--- a/src/library/scala/annotation/implicitNotFound.scala
+++ b/src/library/scala/annotation/implicitNotFound.scala
@@ -15,4 +15,4 @@ package scala.annotation
* @author Adriaan Moors
* @since 2.8.1
*/
-final class implicitNotFound(msg: String) extends annotation.StaticAnnotation {} \ No newline at end of file
+final class implicitNotFound(msg: String) extends scala.annotation.StaticAnnotation {}
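For context, a brief usage sketch of this annotation; the `Serializer` trait and the message text are made up for the example. When no implicit instance can be found, the compiler reports the annotated message with `${T}` replaced by the actual type argument:
{{{
import scala.annotation.implicitNotFound

@implicitNotFound("No Serializer in scope for ${T}; define an implicit Serializer[${T}].")
trait Serializer[T] {
  def serialize(value: T): String
}

object SerializerDemo {
  def write[T](value: T)(implicit s: Serializer[T]): String = s.serialize(value)

  implicit val intSerializer: Serializer[Int] =
    new Serializer[Int] { def serialize(value: Int) = value.toString }

  // write(42) compiles; write("hi") would fail with the custom message above.
  def main(args: Array[String]): Unit = println(write(42))
}
}}}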
diff --git a/src/library/scala/annotation/meta/beanGetter.scala b/src/library/scala/annotation/meta/beanGetter.scala
index 040a3f415a..48eccf9337 100644
--- a/src/library/scala/annotation/meta/beanGetter.scala
+++ b/src/library/scala/annotation/meta/beanGetter.scala
@@ -10,4 +10,4 @@ package scala.annotation.meta
/**
* Consult the documentation in package [[scala.annotation.meta]].
*/
-final class beanGetter extends annotation.StaticAnnotation
+final class beanGetter extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/beanSetter.scala b/src/library/scala/annotation/meta/beanSetter.scala
index 45ea063169..c9f68449fc 100644
--- a/src/library/scala/annotation/meta/beanSetter.scala
+++ b/src/library/scala/annotation/meta/beanSetter.scala
@@ -10,4 +10,4 @@ package scala.annotation.meta
/**
* Consult the documentation in package [[scala.annotation.meta]].
*/
-final class beanSetter extends annotation.StaticAnnotation
+final class beanSetter extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/companionClass.scala b/src/library/scala/annotation/meta/companionClass.scala
index 8e53f6caf9..d165f37bad 100644
--- a/src/library/scala/annotation/meta/companionClass.scala
+++ b/src/library/scala/annotation/meta/companionClass.scala
@@ -12,6 +12,6 @@ package scala.annotation.meta
* conversion method for it. Annotations `@companionClass` and `@companionMethod`
* control where an annotation on the implicit class will go. By default, annotations
* on an implicit class end up only on the class.
- *
+ *
*/
-final class companionClass extends annotation.StaticAnnotation
+final class companionClass extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/companionMethod.scala b/src/library/scala/annotation/meta/companionMethod.scala
index 379c4f3385..c069b47f04 100644
--- a/src/library/scala/annotation/meta/companionMethod.scala
+++ b/src/library/scala/annotation/meta/companionMethod.scala
@@ -12,6 +12,6 @@ package scala.annotation.meta
* conversion method for it. Annotations `@companionClass` and `@companionMethod`
* control where an annotation on the implicit class will go. By default, annotations
* on an implicit class end up only on the class.
- *
+ *
*/
-final class companionMethod extends annotation.StaticAnnotation
+final class companionMethod extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/companionObject.scala b/src/library/scala/annotation/meta/companionObject.scala
index d329df5c42..5bd58f6365 100644
--- a/src/library/scala/annotation/meta/companionObject.scala
+++ b/src/library/scala/annotation/meta/companionObject.scala
@@ -11,4 +11,4 @@ package scala.annotation.meta
* Currently unused; intended as an annotation target for classes such as case classes
* that automatically generate a companion object
*/
-final class companionObject extends annotation.StaticAnnotation
+final class companionObject extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/field.scala b/src/library/scala/annotation/meta/field.scala
index 78f4a98544..96ed13abc4 100644
--- a/src/library/scala/annotation/meta/field.scala
+++ b/src/library/scala/annotation/meta/field.scala
@@ -10,4 +10,4 @@ package scala.annotation.meta
/**
* Consult the documentation in package [[scala.annotation.meta]].
*/
-final class field extends annotation.StaticAnnotation
+final class field extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/getter.scala b/src/library/scala/annotation/meta/getter.scala
index 07e4512f00..0a28a5bb52 100644
--- a/src/library/scala/annotation/meta/getter.scala
+++ b/src/library/scala/annotation/meta/getter.scala
@@ -10,4 +10,4 @@ package scala.annotation.meta
/**
* Consult the documentation in package [[scala.annotation.meta]].
*/
-final class getter extends annotation.StaticAnnotation
+final class getter extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/languageFeature.scala b/src/library/scala/annotation/meta/languageFeature.scala
index 23acc01b51..2e0ddb91cf 100644
--- a/src/library/scala/annotation/meta/languageFeature.scala
+++ b/src/library/scala/annotation/meta/languageFeature.scala
@@ -10,4 +10,4 @@ package scala.annotation.meta
/**
* An annotation giving particulars for a language feature in object `scala.language`.
*/
-final class languageFeature(feature: String, enableRequired: Boolean) extends annotation.StaticAnnotation
+final class languageFeature(feature: String, enableRequired: Boolean) extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/param.scala b/src/library/scala/annotation/meta/param.scala
index d9ebcc76d3..ef535d79c6 100644
--- a/src/library/scala/annotation/meta/param.scala
+++ b/src/library/scala/annotation/meta/param.scala
@@ -10,4 +10,4 @@ package scala.annotation.meta
/**
* Consult the documentation in package [[scala.annotation.meta]].
*/
-final class param extends annotation.StaticAnnotation
+final class param extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/setter.scala b/src/library/scala/annotation/meta/setter.scala
index c27cee2985..87ee2e28f4 100644
--- a/src/library/scala/annotation/meta/setter.scala
+++ b/src/library/scala/annotation/meta/setter.scala
@@ -10,4 +10,4 @@ package scala.annotation.meta
/**
* Consult the documentation in package [[scala.annotation.meta]].
*/
-final class setter extends annotation.StaticAnnotation
+final class setter extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/migration.scala b/src/library/scala/annotation/migration.scala
index 8ab12a7c8e..f60c827620 100644
--- a/src/library/scala/annotation/migration.scala
+++ b/src/library/scala/annotation/migration.scala
@@ -24,7 +24,7 @@ package scala.annotation
*
* @since 2.8
*/
- private[scala] final class migration(message: String, changedIn: String) extends annotation.StaticAnnotation {
+ private[scala] final class migration(message: String, changedIn: String) extends scala.annotation.StaticAnnotation {
@deprecated("Use the constructor taking two Strings instead.", "2.10")
def this(majorVersion: Int, minorVersion: Int, message: String) = this(message, majorVersion + "." + minorVersion)
- } \ No newline at end of file
+ }
diff --git a/src/library/scala/annotation/serializable.scala b/src/library/scala/annotation/serializable.scala
index 5a0d1261d6..e300ae9010 100644
--- a/src/library/scala/annotation/serializable.scala
+++ b/src/library/scala/annotation/serializable.scala
@@ -12,4 +12,4 @@ package scala.annotation
* An annotation that designates the class to which it is applied as serializable
*/
@deprecated("instead of `@serializable class C`, use `class C extends Serializable`", "2.9.0")
-class serializable extends annotation.StaticAnnotation
+class serializable extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/static.scala b/src/library/scala/annotation/static.scala
deleted file mode 100644
index f2955c756c..0000000000
--- a/src/library/scala/annotation/static.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.annotation
-
-/**
- * An annotation that marks a member in the companion object as static
- * and ensures that the compiler generates static fields/methods for it.
- * This is important for Java interoperability and performance reasons.
- *
- * @since 2.10
- */
-final class static extends StaticAnnotation {
- // TODO document exact semantics above!
-}
diff --git a/src/library/scala/annotation/strictfp.scala b/src/library/scala/annotation/strictfp.scala
index e4efa6e663..4c33ea9678 100644
--- a/src/library/scala/annotation/strictfp.scala
+++ b/src/library/scala/annotation/strictfp.scala
@@ -15,4 +15,4 @@ package scala.annotation
* @version 2.9
* @since 2.9
*/
-class strictfp extends annotation.StaticAnnotation
+class strictfp extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/switch.scala b/src/library/scala/annotation/switch.scala
index ee068f50d4..a867783455 100644
--- a/src/library/scala/annotation/switch.scala
+++ b/src/library/scala/annotation/switch.scala
@@ -26,4 +26,4 @@ package scala.annotation
* @author Paul Phillips
* @since 2.8
*/
-final class switch extends annotation.StaticAnnotation
+final class switch extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/tailrec.scala b/src/library/scala/annotation/tailrec.scala
index 14775535e8..020f0c4325 100644
--- a/src/library/scala/annotation/tailrec.scala
+++ b/src/library/scala/annotation/tailrec.scala
@@ -16,4 +16,4 @@ package scala.annotation
*
* @since 2.8
*/
-final class tailrec extends annotation.StaticAnnotation
+final class tailrec extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/unchecked/uncheckedStable.scala b/src/library/scala/annotation/unchecked/uncheckedStable.scala
index 13b500fe44..8162a3ab11 100644
--- a/src/library/scala/annotation/unchecked/uncheckedStable.scala
+++ b/src/library/scala/annotation/unchecked/uncheckedStable.scala
@@ -12,4 +12,4 @@ package scala.annotation.unchecked
*
* @since 2.7
*/
-final class uncheckedStable extends annotation.StaticAnnotation {}
+final class uncheckedStable extends scala.annotation.StaticAnnotation {}
diff --git a/src/library/scala/annotation/unchecked/uncheckedVariance.scala b/src/library/scala/annotation/unchecked/uncheckedVariance.scala
index 51433be79f..61a0ebc6b8 100644
--- a/src/library/scala/annotation/unchecked/uncheckedVariance.scala
+++ b/src/library/scala/annotation/unchecked/uncheckedVariance.scala
@@ -12,4 +12,4 @@ package scala.annotation.unchecked
*
* @since 2.7
*/
-final class uncheckedVariance extends annotation.StaticAnnotation {}
+final class uncheckedVariance extends scala.annotation.StaticAnnotation {}
diff --git a/src/library/scala/annotation/unspecialized.scala b/src/library/scala/annotation/unspecialized.scala
index 28d9aa169c..717ca1597d 100644
--- a/src/library/scala/annotation/unspecialized.scala
+++ b/src/library/scala/annotation/unspecialized.scala
@@ -14,4 +14,4 @@ package scala.annotation
*
* @since 2.10
*/
-class unspecialized extends annotation.StaticAnnotation
+class unspecialized extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/varargs.scala b/src/library/scala/annotation/varargs.scala
index 1d7a2f7183..b44f8c505e 100644
--- a/src/library/scala/annotation/varargs.scala
+++ b/src/library/scala/annotation/varargs.scala
@@ -14,4 +14,4 @@ package scala.annotation
*
* @since 2.9
*/
-final class varargs extends annotation.StaticAnnotation
+final class varargs extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/beans/BeanDescription.scala b/src/library/scala/beans/BeanDescription.scala
index d6c9b0c736..5e1d3b873e 100644
--- a/src/library/scala/beans/BeanDescription.scala
+++ b/src/library/scala/beans/BeanDescription.scala
@@ -15,5 +15,5 @@ package scala.beans
*
* @author Ross Judson (rjudson@managedobjects.com)
*/
-class BeanDescription(val description: String) extends annotation.Annotation
+class BeanDescription(val description: String) extends scala.annotation.Annotation
diff --git a/src/library/scala/beans/BeanDisplayName.scala b/src/library/scala/beans/BeanDisplayName.scala
index fbbfa08ffc..2c862e3700 100644
--- a/src/library/scala/beans/BeanDisplayName.scala
+++ b/src/library/scala/beans/BeanDisplayName.scala
@@ -14,5 +14,5 @@ package scala.beans
*
* @author Ross Judson (rjudson@managedobjects.com)
*/
-class BeanDisplayName(val name: String) extends annotation.Annotation
+class BeanDisplayName(val name: String) extends scala.annotation.Annotation
diff --git a/src/library/scala/beans/BeanInfo.scala b/src/library/scala/beans/BeanInfo.scala
index 1a1d8defa4..23a55edfc5 100644
--- a/src/library/scala/beans/BeanInfo.scala
+++ b/src/library/scala/beans/BeanInfo.scala
@@ -17,4 +17,4 @@ package scala.beans
*
* @author Ross Judson (rjudson@managedobjects.com)
*/
-class BeanInfo extends annotation.Annotation
+class BeanInfo extends scala.annotation.Annotation
diff --git a/src/library/scala/beans/BeanInfoSkip.scala b/src/library/scala/beans/BeanInfoSkip.scala
index 23adf74924..f08dde99d9 100644
--- a/src/library/scala/beans/BeanInfoSkip.scala
+++ b/src/library/scala/beans/BeanInfoSkip.scala
@@ -15,4 +15,4 @@ package scala.beans
*
* @author Ross Judson (rjudson@managedobjects.com)
*/
-class BeanInfoSkip extends annotation.Annotation
+class BeanInfoSkip extends scala.annotation.Annotation
diff --git a/src/library/scala/beans/BeanProperty.scala b/src/library/scala/beans/BeanProperty.scala
index 4a2fb716c7..ab63e92c6f 100644
--- a/src/library/scala/beans/BeanProperty.scala
+++ b/src/library/scala/beans/BeanProperty.scala
@@ -23,4 +23,4 @@ package scala.beans
* use the `scala.beans.BooleanBeanProperty` annotation instead.
*/
@scala.annotation.meta.field
-class BeanProperty extends annotation.StaticAnnotation
+class BeanProperty extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/beans/BooleanBeanProperty.scala b/src/library/scala/beans/BooleanBeanProperty.scala
index 1c85a88c84..972d8fb77e 100644
--- a/src/library/scala/beans/BooleanBeanProperty.scala
+++ b/src/library/scala/beans/BooleanBeanProperty.scala
@@ -13,4 +13,4 @@ package scala.beans
* named `isFieldName` instead of `getFieldName`.
*/
@scala.annotation.meta.field
-class BooleanBeanProperty extends annotation.StaticAnnotation
+class BooleanBeanProperty extends scala.annotation.StaticAnnotation
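A small sketch of what the two annotations above do to a `var`, following the JavaBeans naming convention described in their documentation:
{{{
import scala.beans.{ BeanProperty, BooleanBeanProperty }

class Person {
  // Generates Java-style accessors getName() and setName(String)
  // alongside the regular Scala getter/setter.
  @BeanProperty var name: String = ""

  // Generates isEnabled() instead of getEnabled(), plus setEnabled(Boolean).
  @BooleanBeanProperty var enabled: Boolean = false
}

object BeanDemo extends App {
  val p = new Person
  p.setName("Ada")
  p.setEnabled(true)
  println(p.getName + " " + p.isEnabled) // Ada true
}
}}}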
diff --git a/src/library/scala/collection/CustomParallelizable.scala b/src/library/scala/collection/CustomParallelizable.scala
index dc634c67d3..a56cb5da59 100644
--- a/src/library/scala/collection/CustomParallelizable.scala
+++ b/src/library/scala/collection/CustomParallelizable.scala
@@ -10,7 +10,7 @@ package scala.collection
import parallel.Combiner
-trait CustomParallelizable[+A, +ParRepr <: Parallel] extends Parallelizable[A, ParRepr] {
+trait CustomParallelizable[+A, +ParRepr <: Parallel] extends Any with Parallelizable[A, ParRepr] {
override def par: ParRepr
override protected[this] def parCombiner: Combiner[A, ParRepr] = throw new UnsupportedOperationException("")
}
diff --git a/src/library/scala/collection/GenIterableViewLike.scala b/src/library/scala/collection/GenIterableViewLike.scala
index 9e3927eaf4..142561df20 100644
--- a/src/library/scala/collection/GenIterableViewLike.scala
+++ b/src/library/scala/collection/GenIterableViewLike.scala
@@ -25,6 +25,7 @@ self =>
def iterator: Iterator[B]
override def foreach[U](f: B => U): Unit = iterator foreach f
override def toString = viewToString
+ override def isEmpty = !iterator.hasNext
}
trait EmptyView extends Transformed[Nothing] with super.EmptyView {
diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala
index b6c90d4d2a..3ea45e3810 100644
--- a/src/library/scala/collection/GenMapLike.scala
+++ b/src/library/scala/collection/GenMapLike.scala
@@ -31,7 +31,7 @@ trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals
// This hash code must be symmetric in the contents but ought not
// collide trivially.
- override def hashCode() = util.hashing.MurmurHash3.mapHash(seq)
+ override def hashCode() = scala.util.hashing.MurmurHash3.mapHash(seq)
/** Returns the value associated with a key, or a default value if the key is not contained in the map.
* @param key the key.
diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala
index a77cb05960..6380e9380a 100644
--- a/src/library/scala/collection/GenSeqLike.scala
+++ b/src/library/scala/collection/GenSeqLike.scala
@@ -116,7 +116,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
- * @return the index of the first element of this $coll that is equal (wrt `==`)
+ * @return the index of the first element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def indexOf(elem: A): Int
@@ -132,12 +132,12 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
* @param from the start index
- * @return the index `>= from` of the first element of this $coll that is equal (wrt `==`)
+ * @return the index `>= from` of the first element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def indexOf(elem: A, from: Int): Int
* @inheritdoc
- *
+ *
* $mayNotTerminateInf
*
*/
@@ -147,7 +147,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
- * @return the index of the last element of this $coll that is equal (wrt `==`)
+ * @return the index of the last element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def lastIndexOf(elem: A): Int
@@ -163,7 +163,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* @param elem the element value to search for.
* @param end the end index.
* @tparam B the type of the element `elem`.
- * @return the index `<= end` of the last element of this $coll that is equal (wrt `==`)
+ * @return the index `<= end` of the last element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def lastIndexOf(elem: A, end: Int): Int
@@ -465,7 +465,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
/** Hashcodes for $Coll produce a value from the hashcodes of all the
* elements of the $coll.
*/
- override def hashCode() = util.hashing.MurmurHash3.seqHash(seq)
+ override def hashCode() = scala.util.hashing.MurmurHash3.seqHash(seq)
/** The equals method for arbitrary sequences. Compares this sequence to
* some other object.
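The hunks above reword the `indexOf`/`lastIndexOf` documentation; a quick illustration of the methods themselves, where equality is the ordinary `==` test as the new wording says:
{{{
object IndexOfDemo extends App {
  val xs = Seq(1, 2, 3, 2, 1)
  println(xs.indexOf(2))        // 1  -- first element equal (by ==) to 2
  println(xs.indexOf(2, 2))     // 3  -- first index >= 2 holding such an element
  println(xs.lastIndexOf(2))    // 3  -- last element equal to 2
  println(xs.lastIndexOf(2, 2)) // 1  -- last index <= 2 holding such an element
  println(xs.indexOf(9))        // -1 -- no matching element
}
}}}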
diff --git a/src/library/scala/collection/GenSetLike.scala b/src/library/scala/collection/GenSetLike.scala
index 18eb31da03..ef5f14ed55 100644
--- a/src/library/scala/collection/GenSetLike.scala
+++ b/src/library/scala/collection/GenSetLike.scala
@@ -127,5 +127,5 @@ extends GenIterableLike[A, Repr]
// Calling map on a set drops duplicates: any hashcode collisions would
// then be dropped before they can be added.
// Hash should be symmetric in set entries, but without trivial collisions.
- override def hashCode() = util.hashing.MurmurHash3.setHash(seq)
+ override def hashCode() = scala.util.hashing.MurmurHash3.setHash(seq)
}
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index 0d51230623..987f124f55 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -10,7 +10,7 @@ package scala.collection
import generic._
-import annotation.migration
+import scala.annotation.migration
/** A template trait for all traversable collections upon which operations
@@ -65,14 +65,14 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* @throws `NoSuchElementException` if the $coll is empty.
*/
def head: A
-
+
/** Optionally selects the first element.
* $orderDependent
* @return the first element of this $coll if it is nonempty,
* `None` if it is empty.
*/
def headOption: Option[A]
-
+
/** Tests whether this $coll can be repeatedly traversed.
* @return `true`
*/
@@ -92,14 +92,14 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* @throws NoSuchElementException If the $coll is empty.
*/
def last: A
-
+
/** Optionally selects the last element.
* $orderDependent
* @return the last element of this $coll$ if it is nonempty,
* `None` if it is empty.
*/
def lastOption: Option[A]
-
+
/** Selects all elements except the last.
* $orderDependent
* @return a $coll consisting of all elements of this $coll
@@ -107,7 +107,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* @throws `UnsupportedOperationException` if the $coll is empty.
*/
def init: Repr
-
+
/** Computes a prefix scan of the elements of the collection.
*
* Note: The neutral element `z` may be applied more than once.
@@ -210,7 +210,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That
/** Builds a new collection by applying a function to all elements of this $coll
- * and using the elements of the resulting collections.
+ * and using the elements of the resulting collections.
*
* @param f the function to apply to each element.
* @tparam B the element type of the returned collection.
@@ -333,7 +333,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
/** Selects first ''n'' elements.
* $orderDependent
- * @param n Tt number of elements to take from this $coll.
+ * @param n the number of elements to take from this $coll.
* @return a $coll consisting only of the first `n` elements of this $coll,
* or else the whole $coll, if it has less than `n` elements.
*/
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index 4e0f71ee8b..a872bc0948 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -11,7 +11,7 @@ package scala.collection
import scala.reflect.ClassTag
import scala.collection.generic.CanBuildFrom
import scala.annotation.unchecked.{ uncheckedVariance => uV }
-import language.higherKinds
+import scala.language.higherKinds
/** A template trait for all traversable-once objects which may be
* traversed in parallel.
@@ -507,7 +507,7 @@ trait GenTraversableOnce[+A] extends Any {
* $willNotTerminateInf
* @return a buffer containing all elements of this $coll.
*/
- def toBuffer[A1 >: A]: collection.mutable.Buffer[A1]
+ def toBuffer[A1 >: A]: scala.collection.mutable.Buffer[A1]
/** Converts this $coll to an unspecified Traversable. Will return
* the same collection if this instance is already Traversable.
@@ -565,7 +565,7 @@ trait GenTraversableOnce[+A] extends Any {
/** Converts this $coll into another by copying all elements.
* @tparam Col The collection type to build.
* @return a new collection containing all elements of this $coll.
- *
+ *
* @usecase def to[Col[_]]: Col[A]
* @inheritdoc
* $willNotTerminateInf
diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala
index f79a9d2c66..7cac6154b9 100644
--- a/src/library/scala/collection/IndexedSeqLike.scala
+++ b/src/library/scala/collection/IndexedSeqLike.scala
@@ -41,7 +41,7 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] {
self =>
def seq: IndexedSeq[A]
- override def hashCode() = util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "indexedSeqHash" ?
+ override def hashCode() = scala.util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "indexedSeqHash" ?
override protected[this] def thisCollection: IndexedSeq[A] = this.asInstanceOf[IndexedSeq[A]]
override protected[this] def toCollection(repr: Repr): IndexedSeq[A] = repr.asInstanceOf[IndexedSeq[A]]
@@ -90,7 +90,7 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] {
override /*IterableLike*/
def iterator: Iterator[A] = new Elements(0, length)
- /** Overridden for efficiency */
+ /* Overridden for efficiency */
override def toBuffer[A1 >: A]: mutable.Buffer[A1] = {
val result = new mutable.ArrayBuffer[A1](size)
copyToBuffer(result)
diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala
index 9d03a11db9..b471c304ab 100755
--- a/src/library/scala/collection/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/IndexedSeqOptimized.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
import generic._
import mutable.ArrayBuffer
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index 2e9599058f..ead5633e00 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-package scala.collection
-
+package scala
+package collection
import generic._
import immutable.{ List, Stream }
-import annotation.unchecked.uncheckedVariance
+import scala.annotation.unchecked.uncheckedVariance
/** A template trait for iterable collections of type `Iterable[A]`.
* $iterableInfo
@@ -171,7 +171,7 @@ self =>
* fewer elements than size.
*/
def sliding(size: Int): Iterator[Repr] = sliding(size, 1)
-
+
/** Groups elements in fixed size blocks by passing a "sliding window"
* over them (as opposed to partitioning them, as is done in grouped.)
* @see [[scala.collection.Iterator]], method `sliding`
@@ -293,6 +293,7 @@ self =>
override /*TraversableLike*/ def view = new IterableView[A, Repr] {
protected lazy val underlying = self.repr
+ override def isEmpty = self.isEmpty
override def iterator = self.iterator
}
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index e0c8b21d09..d9ccb3f011 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -11,7 +11,7 @@ package scala.collection
import generic._
import TraversableView.NoBuilder
import immutable.Stream
-import language.implicitConversions
+import scala.language.implicitConversions
/** A template trait for non-strict views of iterable collections.
* $iterableViewInfo
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 5f369de3b7..e12b8d231c 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -6,10 +6,11 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import mutable.ArrayBuffer
-import annotation.migration
+import scala.annotation.migration
import immutable.Stream
import scala.collection.generic.CanBuildFrom
import scala.annotation.unchecked.{ uncheckedVariance => uV }
@@ -393,7 +394,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
def next() = if (hasNext) { hdDefined = false; hd } else empty.next()
}
-
+
/** Tests whether every element of this iterator relates to the
* corresponding element of another collection by satisfying a test predicate.
*
@@ -758,7 +759,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
*
* @param elem the element to test.
* @return `true` if this iterator produces some value that is
- * is equal (wrt `==`) to `elem`, `false` otherwise.
+ * is equal (as determined by `==`) to `elem`, `false` otherwise.
* @note Reuse: $consumesIterator
*/
def contains(elem: Any): Boolean = exists(_ == elem)
@@ -1140,7 +1141,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
def toStream: Stream[A] =
if (self.hasNext) Stream.cons(self.next, self.toStream)
else Stream.empty[A]
-
+
/** Converts this iterator to a string.
*
diff --git a/src/library/scala/collection/JavaConversions.scala b/src/library/scala/collection/JavaConversions.scala
index a978a9a783..8e4fdf537d 100644
--- a/src/library/scala/collection/JavaConversions.scala
+++ b/src/library/scala/collection/JavaConversions.scala
@@ -22,7 +22,8 @@ import convert._
* scala.collection.mutable.Buffer <=> java.util.List
* scala.collection.mutable.Set <=> java.util.Set
* scala.collection.mutable.Map <=> java.util.{ Map, Dictionary }
- * scala.collection.mutable.ConcurrentMap <=> java.util.concurrent.ConcurrentMap
+ * scala.collection.mutable.ConcurrentMap (deprecated since 2.10) <=> java.util.concurrent.ConcurrentMap
+ * scala.collection.concurrent.Map <=> java.util.concurrent.ConcurrentMap
*}}}
* In all cases, converting from a source type to a target type and back
* again will return the original source object, eg.
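To illustrate the conversion table above, including the documented round-trip guarantee, a short sketch using only conversions that appear in that table:
{{{
import scala.collection.JavaConversions._
import scala.collection.mutable

object JavaConversionsDemo extends App {
  val buf = mutable.Buffer(1, 2, 3)

  // Implicit conversion: mutable.Buffer <=> java.util.List
  val jList: java.util.List[Int] = buf
  jList.add(4)           // mutation through the wrapper is visible in buf
  println(buf)           // Buffer(1, 2, 3, 4)

  // Converting back returns the original source object, as documented.
  val back: mutable.Buffer[Int] = jList
  println(back eq buf)   // true
}
}}}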
diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala
index bfe27ef94a..b873ae964d 100644
--- a/src/library/scala/collection/LinearSeqLike.scala
+++ b/src/library/scala/collection/LinearSeqLike.scala
@@ -13,7 +13,7 @@ import generic._
import mutable.ListBuffer
import immutable.List
import scala.util.control.Breaks._
-import annotation.tailrec
+import scala.annotation.tailrec
/** A template trait for linear sequences of type `LinearSeq[A]`.
*
@@ -50,7 +50,7 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr
def seq: LinearSeq[A]
- override def hashCode() = util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "linearSeqHash" ?
+ override def hashCode() = scala.util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "linearSeqHash" ?
override /*IterableLike*/
def iterator: Iterator[A] = new AbstractIterator[A] {
diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala
index 5e0bd010a6..188e0e8afd 100755
--- a/src/library/scala/collection/LinearSeqOptimized.scala
+++ b/src/library/scala/collection/LinearSeqOptimized.scala
@@ -82,17 +82,16 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
false
}
- override /*TraversableLike*/
- def count(p: A => Boolean): Int = {
+ override /*SeqLike*/
+ def contains(elem: Any): Boolean = {
var these = this
- var cnt = 0
while (!these.isEmpty) {
- if (p(these.head)) cnt += 1
+ if (these.head == elem) return true
these = these.tail
}
- cnt
+ false
}
-
+
override /*IterableLike*/
def find(p: A => Boolean): Option[A] = {
var these = this
@@ -113,7 +112,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
}
acc
}
-
+
override /*IterableLike*/
def foldRight[B](z: B)(f: (A, B) => B): B =
if (this.isEmpty) z
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index ed2a877631..adc92fa687 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -11,7 +11,7 @@ package scala.collection
import generic._
import mutable.{ Builder, MapBuilder }
-import annotation.{migration, bridge}
+import scala.annotation.{migration, bridge}
import parallel.ParMap
/** A template trait for maps, which associate keys with values.
@@ -226,21 +226,21 @@ self =>
*/
def default(key: A): B =
throw new NoSuchElementException("key not found: " + key)
-
+
protected class FilteredKeys(p: A => Boolean) extends AbstractMap[A, B] with DefaultMap[A, B] {
override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv)
def iterator = self.iterator.filter(kv => p(kv._1))
override def contains(key: A) = self.contains(key) && p(key)
def get(key: A) = if (!p(key)) None else self.get(key)
}
-
+
/** Filters this map by retaining only keys satisfying a predicate.
* @param p the predicate used to test keys
* @return an immutable map consisting only of those key value pairs of this map where the key satisfies
* the predicate `p`. The resulting map wraps the original map without copying any elements.
*/
def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p)
-
+
protected class MappedValues[C](f: B => C) extends AbstractMap[A, C] with DefaultMap[A, C] {
override def foreach[D](g: ((A, C)) => D): Unit = for ((k, v) <- self) g((k, f(v)))
def iterator = for ((k, v) <- self.iterator) yield (k, f(v))
@@ -248,7 +248,7 @@ self =>
override def contains(key: A) = self.contains(key)
def get(key: A) = self.get(key).map(f)
}
-
+
/** Transforms this map by applying a function to every retrieved value.
* @param f the function used to transform values of this map.
* @return a map view which maps every key of this map
@@ -319,7 +319,7 @@ self =>
res
}
- /** Overridden for efficiency. */
+ /* Overridden for efficiency. */
override def toSeq: Seq[(A, B)] = toBuffer[(A, B)]
override def toBuffer[C >: (A, B)]: mutable.Buffer[C] = {
val result = new mutable.ArrayBuffer[C](size)
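The `filterKeys` and `mapValues` documentation touched above stresses that both results wrap the original map rather than copying it; a brief example of the two calls:
{{{
object MapViewDemo extends App {
  val m = Map("a" -> 1, "b" -> 2, "c" -> 3)

  // Both results are lazy wrappers over m; entries are produced on access.
  val onlyA   = m.filterKeys(_ == "a")
  val doubled = m.mapValues(_ * 2)

  println(onlyA)   // Map(a -> 1)
  println(doubled) // Map(a -> 2, b -> 4, c -> 6)
}
}}}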
diff --git a/src/library/scala/collection/SeqExtractors.scala b/src/library/scala/collection/SeqExtractors.scala
index de9ff93521..20ea7f54b7 100644
--- a/src/library/scala/collection/SeqExtractors.scala
+++ b/src/library/scala/collection/SeqExtractors.scala
@@ -11,7 +11,7 @@ object +: {
/** An extractor used to init/last deconstruct sequences. */
object :+ {
/** Splits a sequence into init :+ tail.
- * @return Some(init, tail) if sequence is non-empty. None otherwise.
+ * @return Some((init, tail)) if sequence is non-empty. None otherwise.
*/
def unapply[T,Coll <: SeqLike[T, Coll]](
t: Coll with SeqLike[T, Coll]): Option[(Coll, T)] =
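A short illustration of the two sequence extractors defined in this file: `+:` deconstructs head +: tail, and `:+` (documented above) deconstructs init :+ last:
{{{
object SeqExtractorDemo extends App {
  List(1, 2, 3) match {
    case init :+ last => println(s"$init :+ $last") // List(1, 2) :+ 3
    case _            => println("empty")
  }
  List(1, 2, 3) match {
    case head +: tail => println(s"$head +: $tail") // 1 +: List(2, 3)
    case _            => println("empty")
  }
}
}}}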
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index d7418de9c3..cda8b1a0e4 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -6,13 +6,14 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import mutable.{ ListBuffer, ArraySeq }
import immutable.{ List, Range }
import generic._
import parallel.ParSeq
-import scala.math.Ordering
+import scala.math.{ min, max, Ordering }
/** A template trait for sequences of type `Seq[A]`
* $seqInfo
@@ -92,6 +93,8 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
i - len
}
+ override /*IterableLike*/ def isEmpty: Boolean = lengthCompare(0) == 0
+
/** The size of this $coll, equivalent to `length`.
*
* $willNotTerminateInf
@@ -627,6 +630,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
override def view = new SeqView[A, Repr] {
protected lazy val underlying = self.repr
+ override def isEmpty = self.isEmpty
override def iterator = self.iterator
override def length = self.length
override def apply(idx: Int) = self.apply(idx)
@@ -731,8 +735,8 @@ object SeqLike {
*/
private def kmpSearch[B](S: Seq[B], m0: Int, m1: Int, W: Seq[B], n0: Int, n1: Int, forward: Boolean): Int = {
// Check for redundant case when target has single valid element
- @inline def clipR(x: Int, y: Int) = if (x<y) x else -1
- @inline def clipL(x: Int, y: Int) = if (x>y) x else -1
+ def clipR(x: Int, y: Int) = if (x < y) x else -1
+ def clipL(x: Int, y: Int) = if (x > y) x else -1
if (n1 == n0+1) {
if (forward)
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index 04ec4af830..a8ffa7b691 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -11,7 +11,7 @@ package scala.collection
import generic._
import mutable.{ Builder, SetBuilder }
-import annotation.{migration, bridge}
+import scala.annotation.{migration, bridge}
import parallel.ParSet
/** A template trait for sets.
@@ -78,7 +78,7 @@ self =>
protected[this] override def parCombiner = ParSet.newCombiner[A]
- /** Overridden for efficiency. */
+ /* Overridden for efficiency. */
override def toSeq: Seq[A] = toBuffer[A]
override def toBuffer[A1 >: A]: mutable.Buffer[A1] = {
val result = new mutable.ArrayBuffer[A1](size)
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index 641dd095da..ce0b130b86 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -10,10 +10,10 @@ package scala.collection
import generic._
import mutable.{ Builder }
-import annotation.{tailrec, migration, bridge}
-import annotation.unchecked.{ uncheckedVariance => uV }
+import scala.annotation.{tailrec, migration, bridge}
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
import parallel.ParIterable
-import language.higherKinds
+import scala.language.higherKinds
/** A template trait for traversable collections of type `Traversable[A]`.
*
@@ -40,7 +40,7 @@ import language.higherKinds
* a non-strict collection class may defer computation of some of their
* elements until after the instance is available as a value.
* A typical example of a non-strict collection class is a
- * [[scala.collection.immutable/Stream]].
+ * [[scala.collection.immutable.Stream]].
* A more general class of examples are `TraversableViews`.
*
* If a collection is an instance of an ordered collection class, traversing
@@ -86,7 +86,7 @@ trait TraversableLike[+A, +Repr] extends Any
def repr: Repr = this.asInstanceOf[Repr]
final def isTraversableAgain: Boolean = true
-
+
/** The underlying collection seen as an instance of `$Coll`.
* By default this is implemented as the current collection object itself,
* but this can be overridden.
@@ -174,7 +174,7 @@ trait TraversableLike[+A, +Repr] extends Any
*
* @usecase def ++:[B](that: TraversableOnce[B]): $Coll[B]
* @inheritdoc
- *
+ *
* Example:
* {{{
* scala> val x = List(1)
@@ -235,14 +235,19 @@ trait TraversableLike[+A, +Repr] extends Any
(that ++ seq)(breakOut)
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
- val b = bf(repr)
- b.sizeHint(this)
+ def builder = { // extracted to keep method size under 35 bytes, so that it can be JIT-inlined
+ val b = bf(repr)
+ b.sizeHint(this)
+ b
+ }
+ val b = builder
for (x <- this) b += f(x)
b.result
}
def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
- val b = bf(repr)
+ def builder = bf(repr) // extracted to keep method size under 35 bytes, so that it can be JIT-inlined
+ val b = builder
for (x <- this) b ++= f(x).seq
b.result
}
@@ -266,7 +271,12 @@ trait TraversableLike[+A, +Repr] extends Any
* @return a new $coll consisting of all elements of this $coll that do not satisfy the given
* predicate `p`. The order of the elements is preserved.
*/
- def filterNot(p: A => Boolean): Repr = filter(!p(_))
+ def filterNot(p: A => Boolean): Repr = {
+ val b = newBuilder
+ for (x <- this)
+ if (!p(x)) b += x
+ b.result
+ }
def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
@@ -491,7 +501,7 @@ trait TraversableLike[+A, +Repr] extends Any
else sliceWithKnownDelta(n, Int.MaxValue, -n)
def slice(from: Int, until: Int): Repr =
- sliceWithKnownBound(math.max(from, 0), until)
+ sliceWithKnownBound(scala.math.max(from, 0), until)
// Precondition: from >= 0, until > 0, builder already configured for building.
private[this] def sliceInternal(from: Int, until: Int, b: Builder[A, Repr]): Repr = {
@@ -655,6 +665,7 @@ trait TraversableLike[+A, +Repr] extends Any
def view = new TraversableView[A, Repr] {
protected lazy val underlying = self.repr
override def foreach[U](f: A => U) = self foreach f
+ override def isEmpty = self.isEmpty
}
/** Creates a non-strict view of a slice of this $coll.
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index fb73805cc5..d77d278fca 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -10,9 +10,9 @@ package scala.collection
import mutable.{ Buffer, Builder, ListBuffer, ArrayBuffer }
import generic.CanBuildFrom
-import annotation.unchecked.{ uncheckedVariance => uV }
-import language.{implicitConversions, higherKinds}
-import reflect.ClassTag
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
+import scala.language.{implicitConversions, higherKinds}
+import scala.reflect.ClassTag
/** A template trait for collections which can be traversed either once only
* or one or more times.
@@ -374,12 +374,12 @@ object TraversableOnce {
implicit def alternateImplicit[A](trav: TraversableOnce[A]) = new ForceImplicitAmbiguity
implicit def flattenTraversableOnce[A, CC[_]](travs: TraversableOnce[CC[A]])(implicit ev: CC[A] => TraversableOnce[A]) =
new FlattenOps[A](travs map ev)
-
+
/* Functionality reused in Iterator.CanBuildFrom */
private[collection] abstract class BufferedCanBuildFrom[A, Coll[X] <: TraversableOnce[X]] extends generic.CanBuildFrom[Coll[_], A, Coll[A]] {
def bufferToColl[B](buff: ArrayBuffer[B]): Coll[B]
def traversableToColl[B](t: GenTraversable[B]): Coll[B]
-
+
def newIterator: Builder[A, Coll[A]] = new ArrayBuffer[A] mapResult bufferToColl
/** Creates a new builder on request of a collection.
@@ -398,7 +398,7 @@ object TraversableOnce {
*/
def apply() = newIterator
}
-
+
/** With the advent of `TraversableOnce`, it can be useful to have a builder which
* operates on `Iterator`s so they can be treated uniformly along with the collections.
* See `scala.util.Random.shuffle` or `scala.concurrent.Future.sequence` for an example.
@@ -407,10 +407,10 @@ object TraversableOnce {
def bufferToColl[B](buff: ArrayBuffer[B]) = buff.iterator
def traversableToColl[B](t: GenTraversable[B]) = t.seq
}
-
+
/** Evidence for building collections from `TraversableOnce` collections */
implicit def OnceCanBuildFrom[A] = new OnceCanBuildFrom[A]
-
+
class FlattenOps[A](travs: TraversableOnce[TraversableOnce[A]]) {
def flatten: Iterator[A] = new AbstractIterator[A] {
val its = travs.toIterator
diff --git a/src/library/scala/collection/TraversableProxyLike.scala b/src/library/scala/collection/TraversableProxyLike.scala
index 74b30e0faf..b7be87b125 100644
--- a/src/library/scala/collection/TraversableProxyLike.scala
+++ b/src/library/scala/collection/TraversableProxyLike.scala
@@ -12,7 +12,7 @@ package scala.collection
import generic._
import mutable.{Buffer, StringBuilder}
-import reflect.ClassTag
+import scala.reflect.ClassTag
// Methods could be printed by cat TraversableLike.scala | egrep '^ (override )?def'
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index bf4f8205d6..5ee32e90b2 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -11,8 +11,8 @@ package scala.collection
import generic._
import mutable.{ Builder, ArrayBuffer }
import TraversableView.NoBuilder
-import annotation.migration
-import language.implicitConversions
+import scala.annotation.migration
+import scala.language.implicitConversions
trait ViewMkString[+A] {
self: Traversable[A] =>
@@ -59,7 +59,7 @@ trait ViewMkString[+A] {
* $viewInfo
*
* All views for traversable collections are defined by creating a new `foreach` method.
- *
+ *
* @author Martin Odersky
* @version 2.8
* @since 2.8
@@ -162,7 +162,7 @@ trait TraversableViewLike[+A,
// if (b.isInstanceOf[NoBuilder[_]]) newFlatMapped(f).asInstanceOf[That]
// else super.flatMap[B, That](f)(bf)
}
- override def flatten[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]) =
+ override def flatten[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]) =
newFlatMapped(asTraversable)
private[this] implicit def asThis(xs: Transformed[A]): This = xs.asInstanceOf[This]
@@ -193,6 +193,15 @@ trait TraversableViewLike[+A,
override def span(p: A => Boolean): (This, This) = (newTakenWhile(p), newDroppedWhile(p))
override def splitAt(n: Int): (This, This) = (newTaken(n), newDropped(n))
+ // Without this, isEmpty tests go back to the Traversable default, which
+ // involves starting a foreach, which can force the first element of the
+ // view. This is just a backstop - it's overridden at all the "def view"
+ // instantiation points in the collections where the Coll type is known.
+ override def isEmpty = underlying match {
+ case x: GenTraversableOnce[_] => x.isEmpty
+ case _ => super.isEmpty
+ }
+
override def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[This, B, That]): That =
newForced(thisSeq.scanLeft(z)(op)).asInstanceOf[That]
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
index 3d0aa6fd07..82f62f3c85 100644
--- a/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -9,17 +9,14 @@
package scala.collection
package concurrent
-
-
import java.util.concurrent.atomic._
-import collection.immutable.{ ListMap => ImmutableListMap }
-import collection.parallel.mutable.ParTrieMap
-import util.hashing.Hashing
+import scala.collection.immutable.{ ListMap => ImmutableListMap }
+import scala.collection.parallel.mutable.ParTrieMap
+import scala.util.hashing.Hashing
+import scala.util.control.ControlThrowable
import generic._
-import annotation.tailrec
-import annotation.switch
-
-
+import scala.annotation.tailrec
+import scala.annotation.switch
private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends INodeBase[K, V](g) {
import INodeBase._
@@ -28,13 +25,13 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
def this(g: Gen) = this(null, g)
- @inline final def WRITE(nval: MainNode[K, V]) = INodeBase.updater.set(this, nval)
+ def WRITE(nval: MainNode[K, V]) = INodeBase.updater.set(this, nval)
- @inline final def CAS(old: MainNode[K, V], n: MainNode[K, V]) = INodeBase.updater.compareAndSet(this, old, n)
+ def CAS(old: MainNode[K, V], n: MainNode[K, V]) = INodeBase.updater.compareAndSet(this, old, n)
- final def gcasRead(ct: TrieMap[K, V]): MainNode[K, V] = GCAS_READ(ct)
+ def gcasRead(ct: TrieMap[K, V]): MainNode[K, V] = GCAS_READ(ct)
- @inline final def GCAS_READ(ct: TrieMap[K, V]): MainNode[K, V] = {
+ def GCAS_READ(ct: TrieMap[K, V]): MainNode[K, V] = {
val m = /*READ*/mainnode
val prevval = /*READ*/m.prev
if (prevval eq null) m
@@ -73,7 +70,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
}
}
- @inline final def GCAS(old: MainNode[K, V], n: MainNode[K, V], ct: TrieMap[K, V]): Boolean = {
+ def GCAS(old: MainNode[K, V], n: MainNode[K, V], ct: TrieMap[K, V]): Boolean = {
n.WRITE_PREV(old)
if (CAS(old, n)) {
GCAS_Complete(n, ct)
@@ -81,16 +78,15 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
} else false
}
- @inline
private def equal(k1: K, k2: K, ct: TrieMap[K, V]) = ct.equality.equiv(k1, k2)
- @inline private def inode(cn: MainNode[K, V]) = {
+ private def inode(cn: MainNode[K, V]) = {
val nin = new INode[K, V](gen)
nin.WRITE(cn)
nin
}
- final def copyToGen(ngen: Gen, ct: TrieMap[K, V]) = {
+ def copyToGen(ngen: Gen, ct: TrieMap[K, V]) = {
val nin = new INode[K, V](ngen)
val main = GCAS_READ(ct)
nin.WRITE(main)
@@ -101,7 +97,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
*
* @return true if successful, false otherwise
*/
- @tailrec final def rec_insert(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Boolean = {
+ @tailrec def rec_insert(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Boolean = {
val m = GCAS_READ(ct) // use -Yinline!
m match {
@@ -147,7 +143,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
* @param cond null - don't care if the key was there; KEY_ABSENT - key wasn't there; KEY_PRESENT - key was there; other value `v` - key must be bound to `v`
* @return null if unsuccessful, Option[V] otherwise (indicating previous value bound to the key)
*/
- @tailrec final def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = {
+ @tailrec def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = {
val m = GCAS_READ(ct) // use -Yinline!
m match {
@@ -206,7 +202,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
clean(parent, ct, lev - 5)
null
case ln: LNode[K, V] => // 3) an l-node
- @inline def insertln() = {
+ def insertln() = {
val nn = ln.inserted(k, v)
GCAS(ln, nn, ct)
}
@@ -237,7 +233,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
*
* @return null if no value has been found, RESTART if the operation wasn't successful, or any other value otherwise
*/
- @tailrec final def rec_lookup(k: K, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): AnyRef = {
+ @tailrec def rec_lookup(k: K, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): AnyRef = {
val m = GCAS_READ(ct) // use -Yinline!
m match {
@@ -280,7 +276,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
 * @param v if null, will remove the key regardless of the value; otherwise removes only if binding contains that exact key and value
* @return null if not successful, an Option[V] indicating the previous value otherwise
*/
- final def rec_remove(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = {
+ def rec_remove(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = {
val m = GCAS_READ(ct) // use -Yinline!
m match {
@@ -364,9 +360,9 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
}
}
- final def isNullInode(ct: TrieMap[K, V]) = GCAS_READ(ct) eq null
+ def isNullInode(ct: TrieMap[K, V]) = GCAS_READ(ct) eq null
- final def cachedSize(ct: TrieMap[K, V]): Int = {
+ def cachedSize(ct: TrieMap[K, V]): Int = {
val m = GCAS_READ(ct)
m.cachedSize(ct)
}
@@ -451,11 +447,9 @@ extends MainNode[K, V] {
}
-private[collection] final class CNode[K, V](final val bitmap: Int, final val array: Array[BasicNode], final val gen: Gen)
-extends CNodeBase[K, V] {
-
+private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[BasicNode], val gen: Gen) extends CNodeBase[K, V] {
// this should only be called from within read-only snapshots
- final def cachedSize(ct: AnyRef) = {
+ def cachedSize(ct: AnyRef) = {
val currsz = READ_SIZE()
if (currsz != -1) currsz
else {
@@ -489,7 +483,7 @@ extends CNodeBase[K, V] {
sz
}
- final def updatedAt(pos: Int, nn: BasicNode, gen: Gen) = {
+ def updatedAt(pos: Int, nn: BasicNode, gen: Gen) = {
val len = array.length
val narr = new Array[BasicNode](len)
Array.copy(array, 0, narr, 0, len)
@@ -497,7 +491,7 @@ extends CNodeBase[K, V] {
new CNode[K, V](bitmap, narr, gen)
}
- final def removedAt(pos: Int, flag: Int, gen: Gen) = {
+ def removedAt(pos: Int, flag: Int, gen: Gen) = {
val arr = array
val len = arr.length
val narr = new Array[BasicNode](len - 1)
@@ -506,7 +500,7 @@ extends CNodeBase[K, V] {
new CNode[K, V](bitmap ^ flag, narr, gen)
}
- final def insertedAt(pos: Int, flag: Int, nn: BasicNode, gen: Gen) = {
+ def insertedAt(pos: Int, flag: Int, nn: BasicNode, gen: Gen) = {
val len = array.length
val bmp = bitmap
val narr = new Array[BasicNode](len + 1)
@@ -519,7 +513,7 @@ extends CNodeBase[K, V] {
/** Returns a copy of this cnode such that all the i-nodes below it are copied
* to the specified generation `ngen`.
*/
- final def renewed(ngen: Gen, ct: TrieMap[K, V]) = {
+ def renewed(ngen: Gen, ct: TrieMap[K, V]) = {
var i = 0
val arr = array
val len = arr.length
@@ -539,7 +533,7 @@ extends CNodeBase[K, V] {
case _ => inode
}
- final def toContracted(lev: Int): MainNode[K, V] = if (array.length == 1 && lev > 0) array(0) match {
+ def toContracted(lev: Int): MainNode[K, V] = if (array.length == 1 && lev > 0) array(0) match {
case sn: SNode[K, V] => sn.copyTombed
case _ => this
} else this
@@ -550,7 +544,7 @@ extends CNodeBase[K, V] {
// returns the version of this node with at least some null-inodes
// removed (those existing when the op began)
// - if there are only null-i-nodes below, returns null
- final def toCompressed(ct: TrieMap[K, V], lev: Int, gen: Gen) = {
+ def toCompressed(ct: TrieMap[K, V], lev: Int, gen: Gen) = {
var bmp = bitmap
var i = 0
val arr = array
@@ -574,7 +568,7 @@ extends CNodeBase[K, V] {
private[concurrent] def string(lev: Int): String = "CNode %x\n%s".format(bitmap, array.map(_.string(lev + 1)).mkString("\n"))
/* quiescently consistent - don't call concurrently to anything involving a GCAS!! */
- protected def collectElems: Seq[(K, V)] = array flatMap {
+ private def collectElems: Seq[(K, V)] = array flatMap {
case sn: SNode[K, V] => Some(sn.kvPair)
case in: INode[K, V] => in.mainnode match {
case tn: TNode[K, V] => Some(tn.kvPair)
@@ -583,7 +577,7 @@ extends CNodeBase[K, V] {
}
}
- protected def collectLocalElems: Seq[String] = array flatMap {
+ private def collectLocalElems: Seq[String] = array flatMap {
case sn: SNode[K, V] => Some(sn.kvPair._2.toString)
case in: INode[K, V] => Some(in.toString.drop(14) + "(" + in.gen + ")")
}
@@ -647,22 +641,22 @@ extends scala.collection.concurrent.Map[K, V]
def hashing = hashingobj
def equality = equalityobj
@volatile var root = r
-
+
def this(hashf: Hashing[K], ef: Equiv[K]) = this(
INode.newRootNode,
AtomicReferenceFieldUpdater.newUpdater(classOf[TrieMap[K, V]], classOf[AnyRef], "root"),
hashf,
ef
)
-
+
def this() = this(Hashing.default, Equiv.universal)
-
+
/* internal methods */
private def writeObject(out: java.io.ObjectOutputStream) {
out.writeObject(hashf)
out.writeObject(ef)
-
+
val it = iterator
while (it.hasNext) {
val (k, v) = it.next()
@@ -678,7 +672,7 @@ extends scala.collection.concurrent.Map[K, V]
hashingobj = in.readObject().asInstanceOf[Hashing[K]]
equalityobj = in.readObject().asInstanceOf[Equiv[K]]
-
+
var obj: AnyRef = null
do {
obj = in.readObject()
@@ -690,11 +684,11 @@ extends scala.collection.concurrent.Map[K, V]
} while (obj != TrieMapSerializationEnd)
}
- @inline final def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv)
+ def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv)
- final def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort)
+ def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort)
- @inline final def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = {
+ def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = {
val r = /*READ*/root
r match {
case in: INode[K, V] => in
@@ -784,9 +778,9 @@ extends scala.collection.concurrent.Map[K, V]
override def empty: TrieMap[K, V] = new TrieMap[K, V]
- final def isReadOnly = rootupdater eq null
+ def isReadOnly = rootupdater eq null
- final def nonReadOnly = rootupdater ne null
+ def nonReadOnly = rootupdater ne null
/** Returns a snapshot of this TrieMap.
* This operation is lock-free and linearizable.
@@ -797,7 +791,7 @@ extends scala.collection.concurrent.Map[K, V]
* TrieMap is distributed across all the threads doing updates or accesses
* subsequent to the snapshot creation.
*/
- @tailrec final def snapshot(): TrieMap[K, V] = {
+ @tailrec def snapshot(): TrieMap[K, V] = {
val r = RDCSS_READ_ROOT()
val expmain = r.gcasRead(this)
if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new TrieMap(r.copyToGen(new Gen, this), rootupdater, hashing, equality)
@@ -816,34 +810,34 @@ extends scala.collection.concurrent.Map[K, V]
*
* This method is used by other methods such as `size` and `iterator`.
*/
- @tailrec final def readOnlySnapshot(): collection.Map[K, V] = {
+ @tailrec def readOnlySnapshot(): scala.collection.Map[K, V] = {
val r = RDCSS_READ_ROOT()
val expmain = r.gcasRead(this)
if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new TrieMap(r, null, hashing, equality)
else readOnlySnapshot()
}
- @tailrec final override def clear() {
+ @tailrec override def clear() {
val r = RDCSS_READ_ROOT()
if (!RDCSS_ROOT(r, r.gcasRead(this), INode.newRootNode[K, V])) clear()
}
-
- @inline
+
+
def computeHash(k: K) = hashingobj.hash(k)
-
- final def lookup(k: K): V = {
+
+ def lookup(k: K): V = {
val hc = computeHash(k)
lookuphc(k, hc).asInstanceOf[V]
}
- final override def apply(k: K): V = {
+ override def apply(k: K): V = {
val hc = computeHash(k)
val res = lookuphc(k, hc)
if (res eq null) throw new NoSuchElementException
else res.asInstanceOf[V]
}
- final def get(k: K): Option[V] = {
+ def get(k: K): Option[V] = {
val hc = computeHash(k)
Option(lookuphc(k, hc)).asInstanceOf[Option[V]]
}
@@ -853,22 +847,22 @@ extends scala.collection.concurrent.Map[K, V]
insertifhc(key, hc, value, null)
}
- final override def update(k: K, v: V) {
+ override def update(k: K, v: V) {
val hc = computeHash(k)
inserthc(k, hc, v)
}
- final def +=(kv: (K, V)) = {
+ def +=(kv: (K, V)) = {
update(kv._1, kv._2)
this
}
- final override def remove(k: K): Option[V] = {
+ override def remove(k: K): Option[V] = {
val hc = computeHash(k)
removehc(k, null.asInstanceOf[V], hc)
}
- final def -=(k: K) = {
+ def -=(k: K) = {
remove(k)
this
}
@@ -917,20 +911,20 @@ object TrieMap extends MutableMapFactory[TrieMap] {
implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), TrieMap[K, V]] = new MapCanBuildFrom[K, V]
def empty[K, V]: TrieMap[K, V] = new TrieMap[K, V]
-
+
class MangledHashing[K] extends Hashing[K] {
- def hash(k: K) = util.hashing.byteswap32(k.##)
+ def hash(k: K) = scala.util.hashing.byteswap32(k.##)
}
-
+
}
private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: TrieMap[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] {
- var stack = new Array[Array[BasicNode]](7)
- var stackpos = new Array[Int](7)
- var depth = -1
- var subiter: Iterator[(K, V)] = null
- var current: KVNode[K, V] = null
+ private var stack = new Array[Array[BasicNode]](7)
+ private var stackpos = new Array[Int](7)
+ private var depth = -1
+ private var subiter: Iterator[(K, V)] = null
+ private var current: KVNode[K, V] = null
if (mustInit) initialize()
@@ -963,12 +957,12 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct:
current = null
}
- @inline private def checkSubiter() = if (!subiter.hasNext) {
+ private def checkSubiter() = if (!subiter.hasNext) {
subiter = null
advance()
}
- @inline private def initialize() {
+ private def initialize() {
assert(ct.isReadOnly)
val r = ct.RDCSS_READ_ROOT()
@@ -1058,7 +1052,7 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct:
}
-private[concurrent] object RestartException extends util.control.ControlThrowable
+private[concurrent] object RestartException extends ControlThrowable
/** Only used for ctrie serialization. */
@@ -1067,7 +1061,7 @@ private[concurrent] case object TrieMapSerializationEnd
private[concurrent] object Debug {
- import collection._
+ import scala.collection._
lazy val logbuffer = new java.util.concurrent.ConcurrentLinkedQueue[AnyRef]
@@ -1083,13 +1077,3 @@ private[concurrent] object Debug {
}
}
-
-
-
-
-
-
-
-
-
-
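As a usage sketch for the snapshot operations documented above (only the TrieMap API shown in this file is assumed): `snapshot()` yields an independently updatable copy, `readOnlySnapshot()` a cheaper read-only one, and neither sees later updates to the original.

    import scala.collection.concurrent.TrieMap

    val m = TrieMap("a" -> 1, "b" -> 2)
    val snap = m.snapshot()           // lock-free, linearizable snapshot, still updatable
    val ro   = m.readOnlySnapshot()   // read-only snapshot, used internally by size/iterator

    m.put("c", 3)                     // later updates are invisible to both snapshots
    snap.get("c")                     // None
    ro.get("c")                       // None
    snap.put("d", 4)                  // the full snapshot can diverge independently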
diff --git a/src/library/scala/collection/convert/DecorateAsJava.scala b/src/library/scala/collection/convert/DecorateAsJava.scala
index bde13f2830..f999d2fee8 100644
--- a/src/library/scala/collection/convert/DecorateAsJava.scala
+++ b/src/library/scala/collection/convert/DecorateAsJava.scala
@@ -12,7 +12,7 @@ package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import Decorators._
import WrapAsJava._
-import language.implicitConversions
+import scala.language.implicitConversions
/** A collection of decorators that allow converting between
@@ -313,6 +313,6 @@ trait DecorateAsJava {
* @return An object with an `asJava` method that returns a Java
* `ConcurrentMap` view of the argument.
*/
- implicit def asJavaConcurrentMapConverter[A, B](m: concurrent.Map[A, B]): AsJava[juc.ConcurrentMap[A, B]] =
- new AsJava(asJavaConcurrentMap(m))
+ implicit def mapAsJavaConcurrentMapConverter[A, B](m: concurrent.Map[A, B]): AsJava[juc.ConcurrentMap[A, B]] =
+ new AsJava(mapAsJavaConcurrentMap(m))
}
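A hedged sketch of the renamed decorator in use; it assumes the usual `scala.collection.JavaConverters._` import mixes in this trait and that the more specific concurrent-map converter is selected for a `concurrent.Map` argument.

    import java.util.{ concurrent => juc }
    import scala.collection.JavaConverters._
    import scala.collection.concurrent.TrieMap

    val m: scala.collection.concurrent.Map[String, Int] = TrieMap("a" -> 1)
    // picks up mapAsJavaConcurrentMapConverter; the result is a live view, not a copy
    val jm: juc.ConcurrentMap[String, Int] = m.asJava
    jm.putIfAbsent("b", 2)
    m.get("b")                        // Some(2)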
diff --git a/src/library/scala/collection/convert/DecorateAsScala.scala b/src/library/scala/collection/convert/DecorateAsScala.scala
index 539584b148..4ee7e2d1c7 100644
--- a/src/library/scala/collection/convert/DecorateAsScala.scala
+++ b/src/library/scala/collection/convert/DecorateAsScala.scala
@@ -12,7 +12,7 @@ package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import Decorators._
import WrapAsScala._
-import language.implicitConversions
+import scala.language.implicitConversions
trait DecorateAsScala {
/**
diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala
index fcfe402a68..e427afbb33 100644
--- a/src/library/scala/collection/convert/WrapAsJava.scala
+++ b/src/library/scala/collection/convert/WrapAsJava.scala
@@ -11,7 +11,7 @@ package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import Wrappers._
-import language.implicitConversions
+import scala.language.implicitConversions
trait WrapAsJava {
/**
@@ -253,7 +253,7 @@ trait WrapAsJava {
case JConcurrentMapDeprecatedWrapper(wrapped) => wrapped
case _ => new ConcurrentMapDeprecatedWrapper(m)
}
-
+
/**
* Implicitly converts a Scala mutable `concurrent.Map` to a Java
* `ConcurrentMap`.
@@ -269,7 +269,7 @@ trait WrapAsJava {
* @param m The Scala `concurrent.Map` to be converted.
* @return A Java `ConcurrentMap` view of the argument.
*/
- implicit def asJavaConcurrentMap[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = m match {
+ implicit def mapAsJavaConcurrentMap[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = m match {
case JConcurrentMapWrapper(wrapped) => wrapped
case _ => new ConcurrentMapWrapper(m)
}
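A minimal sketch of the renamed implicit view; that `scala.collection.JavaConversions` mixes in this trait is an assumption about the surrounding 2.10 API.

    import java.util.{ concurrent => juc }
    import scala.collection.JavaConversions.mapAsJavaConcurrentMap
    import scala.collection.concurrent.TrieMap

    val scalaMap = TrieMap("a" -> 1)
    val javaView: juc.ConcurrentMap[String, Int] = scalaMap  // implicit view defined above
    scalaMap.put("b", 2)
    javaView.containsKey("b")                                // true: it is a view, not a copy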
diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala
index 49f4d7cd99..6ef4243d0d 100644
--- a/src/library/scala/collection/convert/WrapAsScala.scala
+++ b/src/library/scala/collection/convert/WrapAsScala.scala
@@ -11,9 +11,29 @@ package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import Wrappers._
-import language.implicitConversions
+import scala.language.implicitConversions
-trait WrapAsScala {
+trait LowPriorityWrapAsScala {
+ this: WrapAsScala =>
+ /**
+ * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap.
+ * The returned Scala ConcurrentMap is backed by the provided Java
+ * ConcurrentMap and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * If the Java ConcurrentMap was previously obtained from an implicit or
+ * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
+ * then the original Scala ConcurrentMap will be returned.
+ *
+ * @param m The ConcurrentMap to be converted.
+ * @return A Scala mutable ConcurrentMap view of the argument.
+ */
+ @deprecated("Use `mapAsScalaConcurrentMap` instead, and use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0")
+ implicit def mapAsScalaDeprecatedConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] =
+ asScalaConcurrentMap(m)
+}
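A migration sketch for the deprecation above, again assuming `scala.collection.JavaConversions` mixes in these traits:

    import java.util.concurrent.ConcurrentHashMap
    import scala.collection.JavaConversions._

    val jmap = new ConcurrentHashMap[String, Int]()
    // still compiles via the low-priority implicit above, but with a deprecation warning:
    val old: scala.collection.mutable.ConcurrentMap[String, Int] = jmap
    // preferred target type per the deprecation message:
    val preferred: scala.collection.concurrent.Map[String, Int] = jmap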
+
+trait WrapAsScala extends LowPriorityWrapAsScala {
/**
* Implicitly converts a Java `Iterator` to a Scala `Iterator`.
*
diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala
index 75707b69b0..a459aa15be 100644
--- a/src/library/scala/collection/convert/Wrappers.scala
+++ b/src/library/scala/collection/convert/Wrappers.scala
@@ -96,6 +96,9 @@ private[collection] trait Wrappers {
def remove(i: Int) = underlying.remove(i)
def clear() = underlying.clear()
def result = this
+ // Note: Clone cannot just call underlying.clone because in Java, only specific collections
+ // expose clone methods. Generically, they're protected.
+ override def clone(): JListWrapper[A] = JListWrapper(new ju.ArrayList[A](underlying))
}
class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] {
@@ -149,6 +152,10 @@ private[collection] trait Wrappers {
override def clear() = underlying.clear()
override def empty = JSetWrapper(new ju.HashSet[A])
+ // Note: Clone cannot just call underlying.clone because in Java, only specific collections
+ // expose clone methods. Generically, they're protected.
+ override def clone() =
+ new JSetWrapper[A](new ju.LinkedHashSet[A](underlying))
}
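To illustrate why the clone overrides matter (a sketch; that `asScala` hands back these wrappers for Java lists is an assumption based on the surrounding converters):

    import scala.collection.JavaConverters._

    val jul = new java.util.ArrayList[Int](java.util.Arrays.asList(1, 2, 3))
    val buf  = jul.asScala            // a JListWrapper over jul
    val copy = buf.clone()            // backed by a fresh java.util.ArrayList, per the override
    copy += 4
    jul.size()                        // still 3: mutating the clone leaves jul untouched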
class MapWrapper[A, B](underlying: Map[A, B]) extends ju.AbstractMap[A, B] { self =>
@@ -171,12 +178,12 @@ private[collection] trait Wrappers {
var prev : Option[A] = None
def hasNext = ui.hasNext
-
+
def next() = {
val (k, v) = ui.next
prev = Some(k)
new ju.Map.Entry[A, B] {
- import util.hashing.byteswap32
+ import scala.util.hashing.byteswap32
def getKey = k
def getValue = v
def setValue(v1 : B) = self.put(k, v1)
diff --git a/src/library/scala/collection/generic/ClassTagTraversableFactory.scala b/src/library/scala/collection/generic/ClassTagTraversableFactory.scala
index 95835d3e90..c9c75a5f23 100644
--- a/src/library/scala/collection/generic/ClassTagTraversableFactory.scala
+++ b/src/library/scala/collection/generic/ClassTagTraversableFactory.scala
@@ -9,8 +9,8 @@
package scala.collection
package generic
-import language.higherKinds
-import reflect.ClassTag
+import scala.language.higherKinds
+import scala.reflect.ClassTag
/** A template for companion objects of `ClassTagTraversable` and
* subclasses thereof.
diff --git a/src/library/scala/collection/generic/FilterMonadic.scala b/src/library/scala/collection/generic/FilterMonadic.scala
index d79112d616..cebb4e69d3 100755
--- a/src/library/scala/collection/generic/FilterMonadic.scala
+++ b/src/library/scala/collection/generic/FilterMonadic.scala
@@ -14,7 +14,7 @@ package scala.collection.generic
*/
trait FilterMonadic[+A, +Repr] extends Any {
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
- def flatMap[B, That](f: A => collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
+ def flatMap[B, That](f: A => scala.collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
def foreach[U](f: A => U): Unit
def withFilter(p: A => Boolean): FilterMonadic[A, Repr]
}
diff --git a/src/library/scala/collection/generic/GenMapFactory.scala b/src/library/scala/collection/generic/GenMapFactory.scala
index 31fe4e100d..6ce99646e8 100644
--- a/src/library/scala/collection/generic/GenMapFactory.scala
+++ b/src/library/scala/collection/generic/GenMapFactory.scala
@@ -10,7 +10,7 @@ package scala.collection
package generic
import mutable.{Builder, MapBuilder}
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of `Map` and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/GenSeqFactory.scala b/src/library/scala/collection/generic/GenSeqFactory.scala
index 19eeba9b1d..bb352f707c 100644
--- a/src/library/scala/collection/generic/GenSeqFactory.scala
+++ b/src/library/scala/collection/generic/GenSeqFactory.scala
@@ -11,7 +11,7 @@
package scala.collection
package generic
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of Seq and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/GenSetFactory.scala b/src/library/scala/collection/generic/GenSetFactory.scala
index 4f812b337c..526927ce26 100644
--- a/src/library/scala/collection/generic/GenSetFactory.scala
+++ b/src/library/scala/collection/generic/GenSetFactory.scala
@@ -12,7 +12,7 @@ package scala.collection
package generic
import mutable.Builder
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of `Set` and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/GenTraversableFactory.scala b/src/library/scala/collection/generic/GenTraversableFactory.scala
index 2aaf93de05..6b347db7a0 100644
--- a/src/library/scala/collection/generic/GenTraversableFactory.scala
+++ b/src/library/scala/collection/generic/GenTraversableFactory.scala
@@ -10,7 +10,7 @@
package scala.collection
package generic
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of `Traversable` and subclasses thereof.
* This class provides a set of operations to create `$Coll` objects.
diff --git a/src/library/scala/collection/generic/GenericClassTagCompanion.scala b/src/library/scala/collection/generic/GenericClassTagCompanion.scala
index 8cce592627..fd5a3bae4c 100644
--- a/src/library/scala/collection/generic/GenericClassTagCompanion.scala
+++ b/src/library/scala/collection/generic/GenericClassTagCompanion.scala
@@ -10,8 +10,8 @@ package scala.collection
package generic
import mutable.Builder
-import language.higherKinds
-import reflect.ClassTag
+import scala.language.higherKinds
+import scala.reflect.ClassTag
/** This class represents companions of classes which require ClassTags
* for their element types.
diff --git a/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala b/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala
index d368d0007b..d5d6c53c1e 100644
--- a/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala
@@ -10,9 +10,9 @@ package scala.collection
package generic
import mutable.Builder
-import annotation.unchecked.uncheckedVariance
-import language.higherKinds
-import reflect.ClassTag
+import scala.annotation.unchecked.uncheckedVariance
+import scala.language.higherKinds
+import scala.reflect.ClassTag
/** This trait represents collections classes which require class
* tags for their element types.
diff --git a/src/library/scala/collection/generic/GenericCompanion.scala b/src/library/scala/collection/generic/GenericCompanion.scala
index 1844542315..d4e77f68f5 100644
--- a/src/library/scala/collection/generic/GenericCompanion.scala
+++ b/src/library/scala/collection/generic/GenericCompanion.scala
@@ -10,7 +10,7 @@ package scala.collection
package generic
import mutable.Builder
-import language.higherKinds
+import scala.language.higherKinds
/** A template class for companion objects of "regular" collection classes
* that represent an unconstrained higher-kinded type. Typically
diff --git a/src/library/scala/collection/generic/GenericOrderedCompanion.scala b/src/library/scala/collection/generic/GenericOrderedCompanion.scala
index 290dc435c8..ba432f012a 100644
--- a/src/library/scala/collection/generic/GenericOrderedCompanion.scala
+++ b/src/library/scala/collection/generic/GenericOrderedCompanion.scala
@@ -10,7 +10,7 @@ package scala.collection
package generic
import mutable.Builder
-import language.higherKinds
+import scala.language.higherKinds
/** This class represents companions of classes which require the ordered trait
* for their element types.
diff --git a/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala b/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala
index 6e04420315..b041670161 100644
--- a/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala
@@ -12,8 +12,8 @@ package scala.collection
package generic
import mutable.Builder
-import annotation.unchecked.uncheckedVariance
-import language.higherKinds
+import scala.annotation.unchecked.uncheckedVariance
+import scala.language.higherKinds
/** This trait represents collections classes which require
* ordered element types.
diff --git a/src/library/scala/collection/generic/GenericParCompanion.scala b/src/library/scala/collection/generic/GenericParCompanion.scala
index 484da5c6d9..aea7d8f25a 100644
--- a/src/library/scala/collection/generic/GenericParCompanion.scala
+++ b/src/library/scala/collection/generic/GenericParCompanion.scala
@@ -11,7 +11,7 @@ package scala.collection.generic
import scala.collection.parallel.Combiner
import scala.collection.parallel.ParIterable
import scala.collection.parallel.ParMap
-import language.higherKinds
+import scala.language.higherKinds
/** A template class for companion objects of parallel collection classes.
* They should be mixed in together with `GenericCompanion` type.
diff --git a/src/library/scala/collection/generic/GenericParTemplate.scala b/src/library/scala/collection/generic/GenericParTemplate.scala
index fc1c3f5eaa..3dfdc98133 100644
--- a/src/library/scala/collection/generic/GenericParTemplate.scala
+++ b/src/library/scala/collection/generic/GenericParTemplate.scala
@@ -13,8 +13,8 @@ import scala.collection.parallel.ParIterable
import scala.collection.parallel.ParMap
import scala.collection.parallel.TaskSupport
-import annotation.unchecked.uncheckedVariance
-import language.higherKinds
+import scala.annotation.unchecked.uncheckedVariance
+import scala.language.higherKinds
/** A template trait for collections having a companion.
*
@@ -29,7 +29,7 @@ extends GenericTraversableTemplate[A, CC]
{
def companion: GenericCompanion[CC] with GenericParCompanion[CC]
- protected[this] override def newBuilder: collection.mutable.Builder[A, CC[A]] = newCombiner
+ protected[this] override def newBuilder: scala.collection.mutable.Builder[A, CC[A]] = newCombiner
protected[this] override def newCombiner: Combiner[A, CC[A]] = {
val cb = companion.newCombiner[A]
diff --git a/src/library/scala/collection/generic/GenericSeqCompanion.scala b/src/library/scala/collection/generic/GenericSeqCompanion.scala
index 90063c1ca2..63fca78a98 100644
--- a/src/library/scala/collection/generic/GenericSeqCompanion.scala
+++ b/src/library/scala/collection/generic/GenericSeqCompanion.scala
@@ -10,7 +10,7 @@
package scala.collection
package generic
-import language.higherKinds
+import scala.language.higherKinds
trait GenericSeqCompanion[CC[X] <: Traversable[X]]
- extends GenericCompanion[CC] \ No newline at end of file
+ extends GenericCompanion[CC]
diff --git a/src/library/scala/collection/generic/GenericSetTemplate.scala b/src/library/scala/collection/generic/GenericSetTemplate.scala
index 221bcfb379..cf7259100d 100644
--- a/src/library/scala/collection/generic/GenericSetTemplate.scala
+++ b/src/library/scala/collection/generic/GenericSetTemplate.scala
@@ -8,7 +8,7 @@
package scala.collection
package generic
-import language.higherKinds
+import scala.language.higherKinds
/**
* @since 2.8
*/
diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
index 7cb0e812d8..62e7061237 100644
--- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
@@ -12,9 +12,9 @@ package scala.collection
package generic
import mutable.Builder
-import annotation.migration
-import annotation.unchecked.uncheckedVariance
-import language.higherKinds
+import scala.annotation.migration
+import scala.annotation.unchecked.uncheckedVariance
+import scala.language.higherKinds
/** A template class for companion objects of ``regular`` collection classes
* that represent an unconstrained higher-kinded type.
@@ -128,7 +128,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
* @usecase def flatten[B]: $Coll[B]
*
* @inheritdoc
- *
+ *
* The resulting collection's type will be guided by the
* static type of $coll. For example:
*
@@ -138,7 +138,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
*
* val ys = Set(List(1, 2, 3), List(3, 2, 1))
* // ys == Set(1, 2, 3)
- * }}}
+ * }}}
*/
def flatten[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]): CC[B] = {
val b = genericBuilder[B]
diff --git a/src/library/scala/collection/generic/ImmutableMapFactory.scala b/src/library/scala/collection/generic/ImmutableMapFactory.scala
index d893188e92..9448222568 100644
--- a/src/library/scala/collection/generic/ImmutableMapFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableMapFactory.scala
@@ -10,7 +10,7 @@
package scala.collection
package generic
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of `immutable.Map` and subclasses thereof.
* @author Martin Odersky
diff --git a/src/library/scala/collection/generic/ImmutableSetFactory.scala b/src/library/scala/collection/generic/ImmutableSetFactory.scala
index 7bd5bf2ef8..b6dc85470f 100644
--- a/src/library/scala/collection/generic/ImmutableSetFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSetFactory.scala
@@ -10,7 +10,7 @@ package scala.collection
package generic
import mutable.{ Builder, SetBuilder }
-import language.higherKinds
+import scala.language.higherKinds
abstract class ImmutableSetFactory[CC[X] <: immutable.Set[X] with SetLike[X, CC[X]]]
extends SetFactory[CC] {
diff --git a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala b/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
index f415a52b4d..19c52b77ed 100644
--- a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
@@ -11,7 +11,7 @@
package scala.collection
package generic
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of `SortedMap` and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala b/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
index 1317bb4796..64f35c35c4 100644
--- a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
@@ -11,7 +11,7 @@
package scala.collection
package generic
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of `SortedSet` and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/IsTraversableLike.scala b/src/library/scala/collection/generic/IsTraversableLike.scala
index 7288322903..efa9178740 100644
--- a/src/library/scala/collection/generic/IsTraversableLike.scala
+++ b/src/library/scala/collection/generic/IsTraversableLike.scala
@@ -42,7 +42,7 @@ trait IsTraversableLike[Repr] {
}
object IsTraversableLike {
- import language.higherKinds
+ import scala.language.higherKinds
implicit val stringRepr: IsTraversableLike[String] { type A = Char } =
new IsTraversableLike[String] {
@@ -50,7 +50,7 @@ object IsTraversableLike {
val conversion = implicitly[String => GenTraversableLike[Char, String]]
}
- implicit def genTraversableLikeRepr[C[_], A0](implicit conv: C[A0] => GenTraversableLike[A0,C[A0]]): IsTraversableLike[C[A0]] { type A = A0 } =
+ implicit def genTraversableLikeRepr[C[_], A0](implicit conv: C[A0] => GenTraversableLike[A0,C[A0]]): IsTraversableLike[C[A0]] { type A = A0 } =
new IsTraversableLike[C[A0]] {
type A = A0
val conversion = conv
diff --git a/src/library/scala/collection/generic/IsTraversableOnce.scala b/src/library/scala/collection/generic/IsTraversableOnce.scala
index b336553231..49675b4d5e 100644
--- a/src/library/scala/collection/generic/IsTraversableOnce.scala
+++ b/src/library/scala/collection/generic/IsTraversableOnce.scala
@@ -45,7 +45,7 @@ trait IsTraversableOnce[Repr] {
}
object IsTraversableOnce {
- import language.higherKinds
+ import scala.language.higherKinds
implicit val stringRepr: IsTraversableOnce[String] { type A = Char } =
new IsTraversableOnce[String] {
@@ -53,7 +53,7 @@ object IsTraversableOnce {
val conversion = implicitly[String => GenTraversableOnce[Char]]
}
- implicit def genTraversableLikeRepr[C[_], A0](implicit conv: C[A0] => GenTraversableOnce[A0]): IsTraversableOnce[C[A0]] { type A = A0 } =
+ implicit def genTraversableLikeRepr[C[_], A0](implicit conv: C[A0] => GenTraversableOnce[A0]): IsTraversableOnce[C[A0]] { type A = A0 } =
new IsTraversableOnce[C[A0]] {
type A = A0
val conversion = conv
diff --git a/src/library/scala/collection/generic/IterableForwarder.scala b/src/library/scala/collection/generic/IterableForwarder.scala
index 89b67a6c18..d1ba252ba7 100644
--- a/src/library/scala/collection/generic/IterableForwarder.scala
+++ b/src/library/scala/collection/generic/IterableForwarder.scala
@@ -11,7 +11,7 @@
package scala.collection.generic
import scala.collection._
-import collection.mutable.Buffer
+import scala.collection.mutable.Buffer
/** This trait implements a forwarder for iterable objects. It forwards
* all calls to a different iterable object, except for
diff --git a/src/library/scala/collection/generic/MapFactory.scala b/src/library/scala/collection/generic/MapFactory.scala
index ce44ae9bf4..cbf5e06202 100644
--- a/src/library/scala/collection/generic/MapFactory.scala
+++ b/src/library/scala/collection/generic/MapFactory.scala
@@ -11,7 +11,7 @@ package generic
import mutable.{Builder, MapBuilder}
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of `Map` and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/MutableMapFactory.scala b/src/library/scala/collection/generic/MutableMapFactory.scala
index 8b38b4ddd5..3b3d6d1946 100644
--- a/src/library/scala/collection/generic/MutableMapFactory.scala
+++ b/src/library/scala/collection/generic/MutableMapFactory.scala
@@ -12,7 +12,7 @@ package scala.collection
package generic
import mutable.Builder
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of `mutable.Map` and subclasses thereof.
* @author Martin Odersky
diff --git a/src/library/scala/collection/generic/MutableSetFactory.scala b/src/library/scala/collection/generic/MutableSetFactory.scala
index f130489814..516cbd722d 100644
--- a/src/library/scala/collection/generic/MutableSetFactory.scala
+++ b/src/library/scala/collection/generic/MutableSetFactory.scala
@@ -10,7 +10,7 @@ package scala.collection
package generic
import mutable.{ Builder, GrowingBuilder }
-import language.higherKinds
+import scala.language.higherKinds
abstract class MutableSetFactory[CC[X] <: mutable.Set[X] with mutable.SetLike[X, CC[X]]]
extends SetFactory[CC] {
diff --git a/src/library/scala/collection/generic/MutableSortedSetFactory.scala b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
index 0e90ed999c..e5a69779f3 100644
--- a/src/library/scala/collection/generic/MutableSortedSetFactory.scala
+++ b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
@@ -10,7 +10,7 @@ package scala.collection
package generic
import scala.collection.mutable.{ Builder, GrowingBuilder }
-import language.higherKinds
+import scala.language.higherKinds
/**
* @define Coll `mutable.SortedSet`
diff --git a/src/library/scala/collection/generic/OrderedTraversableFactory.scala b/src/library/scala/collection/generic/OrderedTraversableFactory.scala
index 92f166ae08..b3d096ccd2 100644
--- a/src/library/scala/collection/generic/OrderedTraversableFactory.scala
+++ b/src/library/scala/collection/generic/OrderedTraversableFactory.scala
@@ -10,7 +10,7 @@
package scala.collection
package generic
-import language.higherKinds
+import scala.language.higherKinds
abstract class OrderedTraversableFactory[CC[X] <: Traversable[X] with GenericOrderedTraversableTemplate[X, CC]]
extends GenericOrderedCompanion[CC] {
diff --git a/src/library/scala/collection/generic/ParFactory.scala b/src/library/scala/collection/generic/ParFactory.scala
index 41dca8fbe9..6b59b6671c 100644
--- a/src/library/scala/collection/generic/ParFactory.scala
+++ b/src/library/scala/collection/generic/ParFactory.scala
@@ -10,7 +10,7 @@ package scala.collection.generic
import scala.collection.parallel.ParIterable
import scala.collection.parallel.Combiner
-import language.higherKinds
+import scala.language.higherKinds
/** A template class for companion objects of `ParIterable` and subclasses
* thereof. This class extends `TraversableFactory` and provides a set of
@@ -24,7 +24,7 @@ abstract class ParFactory[CC[X] <: ParIterable[X] with GenericParTemplate[X, CC]
extends GenTraversableFactory[CC]
with GenericParCompanion[CC] {
- //type EPC[T, C] = collection.parallel.EnvironmentPassingCombiner[T, C]
+ //type EPC[T, C] = scala.collection.parallel.EnvironmentPassingCombiner[T, C]
/** A generic implementation of the `CanCombineFrom` trait, which forwards
* all calls to `apply(from)` to the `genericParBuilder` method of the $coll
diff --git a/src/library/scala/collection/generic/ParMapFactory.scala b/src/library/scala/collection/generic/ParMapFactory.scala
index 5aedf67924..fdf23581f7 100644
--- a/src/library/scala/collection/generic/ParMapFactory.scala
+++ b/src/library/scala/collection/generic/ParMapFactory.scala
@@ -12,7 +12,7 @@ import scala.collection.parallel.ParMap
import scala.collection.parallel.ParMapLike
import scala.collection.parallel.Combiner
import scala.collection.mutable.Builder
-import language.higherKinds
+import scala.language.higherKinds
/** A template class for companion objects of `ParMap` and subclasses thereof.
* This class extends `TraversableFactory` and provides a set of operations
diff --git a/src/library/scala/collection/generic/ParSetFactory.scala b/src/library/scala/collection/generic/ParSetFactory.scala
index 30a36a734a..e6db6f4721 100644
--- a/src/library/scala/collection/generic/ParSetFactory.scala
+++ b/src/library/scala/collection/generic/ParSetFactory.scala
@@ -8,11 +8,11 @@
package scala.collection.generic
-import collection.mutable.Builder
-import collection.parallel.Combiner
-import collection.parallel.ParSet
-import collection.parallel.ParSetLike
-import language.higherKinds
+import scala.collection.mutable.Builder
+import scala.collection.parallel.Combiner
+import scala.collection.parallel.ParSet
+import scala.collection.parallel.ParSetLike
+import scala.language.higherKinds
/**
* @author Aleksandar Prokopec
diff --git a/src/library/scala/collection/generic/SeqFactory.scala b/src/library/scala/collection/generic/SeqFactory.scala
index 3f61de6ceb..e943b93ef0 100644
--- a/src/library/scala/collection/generic/SeqFactory.scala
+++ b/src/library/scala/collection/generic/SeqFactory.scala
@@ -10,7 +10,7 @@
package scala.collection
package generic
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of Seq and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/SetFactory.scala b/src/library/scala/collection/generic/SetFactory.scala
index 646e99dd1e..f386596c26 100644
--- a/src/library/scala/collection/generic/SetFactory.scala
+++ b/src/library/scala/collection/generic/SetFactory.scala
@@ -12,7 +12,7 @@ package scala.collection
package generic
import mutable.Builder
-import language.higherKinds
+import scala.language.higherKinds
abstract class SetFactory[CC[X] <: Set[X] with SetLike[X, CC[X]]]
- extends GenSetFactory[CC] with GenericSeqCompanion[CC] \ No newline at end of file
+ extends GenSetFactory[CC] with GenericSeqCompanion[CC]
diff --git a/src/library/scala/collection/generic/SliceInterval.scala b/src/library/scala/collection/generic/SliceInterval.scala
index 56033ca8d8..af56d06d60 100644
--- a/src/library/scala/collection/generic/SliceInterval.scala
+++ b/src/library/scala/collection/generic/SliceInterval.scala
@@ -32,7 +32,7 @@ private[collection] class SliceInterval private (val from: Int, val until: Int)
*/
def recalculate(_from: Int, _until: Int): SliceInterval = {
val lo = _from max 0
- val elems = math.min(_until - lo, width)
+ val elems = scala.math.min(_until - lo, width)
val start = from + lo
if (elems <= 0) new SliceInterval(from, from)
diff --git a/src/library/scala/collection/generic/SortedMapFactory.scala b/src/library/scala/collection/generic/SortedMapFactory.scala
index f038c8b09b..2781cbcc15 100644
--- a/src/library/scala/collection/generic/SortedMapFactory.scala
+++ b/src/library/scala/collection/generic/SortedMapFactory.scala
@@ -12,7 +12,7 @@ package scala.collection
package generic
import mutable.{Builder, MapBuilder}
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of mutable.Map and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/SortedSetFactory.scala b/src/library/scala/collection/generic/SortedSetFactory.scala
index bb261803a9..4abccd3827 100644
--- a/src/library/scala/collection/generic/SortedSetFactory.scala
+++ b/src/library/scala/collection/generic/SortedSetFactory.scala
@@ -12,7 +12,7 @@ package scala.collection
package generic
import mutable.{Builder, SetBuilder}
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of Set and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/TraversableFactory.scala b/src/library/scala/collection/generic/TraversableFactory.scala
index 254a6a224f..a09b92a75b 100644
--- a/src/library/scala/collection/generic/TraversableFactory.scala
+++ b/src/library/scala/collection/generic/TraversableFactory.scala
@@ -10,7 +10,7 @@
package scala.collection
package generic
-import language.higherKinds
+import scala.language.higherKinds
/** A template for companion objects of `Traversable` and subclasses thereof.
* This class provides a set of operations to create `$Coll` objects.
diff --git a/src/library/scala/collection/generic/TraversableForwarder.scala b/src/library/scala/collection/generic/TraversableForwarder.scala
index 5c55c27983..62c1dc095b 100644
--- a/src/library/scala/collection/generic/TraversableForwarder.scala
+++ b/src/library/scala/collection/generic/TraversableForwarder.scala
@@ -11,7 +11,7 @@ package scala.collection.generic
import scala.collection._
import mutable.{ Buffer, StringBuilder }
import immutable.{ List, Stream }
-import reflect.ClassTag
+import scala.reflect.ClassTag
/** This trait implements a forwarder for traversable objects. It forwards
* all calls to a different traversable, except for:
diff --git a/src/library/scala/collection/generic/package.scala b/src/library/scala/collection/generic/package.scala
index 6eecb5e3ff..dd47b7ace6 100644
--- a/src/library/scala/collection/generic/package.scala
+++ b/src/library/scala/collection/generic/package.scala
@@ -1,7 +1,7 @@
package scala.collection
import generic.CanBuildFrom
-import language.higherKinds
+import scala.language.higherKinds
package object generic {
type CanBuild[-Elem, +To] = CanBuildFrom[Nothing, Elem, To]
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index 2cf9985523..ee41e2aa3c 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -6,11 +6,12 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import generic._
-import annotation.unchecked.{ uncheckedVariance=> uV }
+import scala.annotation.unchecked.{ uncheckedVariance=> uV }
import parallel.immutable.ParHashMap
/** This class implements immutable maps using a hash trie.
@@ -73,18 +74,7 @@ class HashMap[A, +B] extends AbstractMap[A, B]
private[collection] def computeHash(key: A) = improve(elemHashCode(key))
- protected type MergeFunction[A1, B1] = ((A1, B1), (A1, B1)) => (A1, B1);
-
- import HashMap.Merger
-
- protected def liftMerger[A1, B1](mergef: MergeFunction[A1, B1]): Merger[A1, B1] = if (mergef == null) null else new Merger[A1, B1] {
- self =>
- def apply(kv1: (A1, B1), kv2: (A1, B1)): (A1, B1) = mergef(kv1, kv2)
- val invert: Merger[A1, B1] = new Merger[A1, B1] {
- def apply(kv1: (A1, B1), kv2: (A1, B1)): (A1, B1) = mergef(kv2, kv1)
- def invert: Merger[A1, B1] = self
- }
- }
+ import HashMap.{Merger, MergeFunction, liftMerger}
private[collection] def get0(key: A, hash: Int, level: Int): Option[B] = None
@@ -99,22 +89,22 @@ class HashMap[A, +B] extends AbstractMap[A, B]
@deprecated("Use the `merged` method instead.", "2.10.0")
def merge[B1 >: B](that: HashMap[A, B1], mergef: MergeFunction[A, B1] = null): HashMap[A, B1] = merge0(that, 0, liftMerger(mergef))
-
+
/** Creates a new map which is the merge of this and the argument hash map.
- *
+ *
* Uses the specified collision resolution function if two keys are the same.
* The collision resolution function will always take the first argument from
* `this` hash map and the second from `that`.
- *
+ *
* The `merged` method is on average more performant than doing a traversal and reconstructing a
* new immutable hash map from scratch, or `++`.
- *
+ *
* @tparam B1 the value type of the other hash map
* @param that the other hash map
* @param mergef the merge function or null if the first key-value pair is to be picked
*/
def merged[B1 >: B](that: HashMap[A, B1])(mergef: MergeFunction[A, B1]): HashMap[A, B1] = merge0(that, 0, liftMerger(mergef))
-
+
protected def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[A, B1]): HashMap[A, B1] = that
override def par = ParHashMap.fromTrie(this)
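A usage sketch for `merged`, relying only on the signature and contract documented above:

    import scala.collection.immutable.HashMap

    val m1 = HashMap("a" -> 1, "b" -> 2)
    val m2 = HashMap("b" -> 10, "c" -> 3)
    // the first key/value pair always comes from m1, the second from m2
    val m3 = m1.merged(m2) { case ((k, v1), (_, v2)) => (k, v1 max v2) }
    // m3 == Map("a" -> 1, "b" -> 10, "c" -> 3)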
@@ -129,19 +119,57 @@ class HashMap[A, +B] extends AbstractMap[A, B]
* @since 2.3
*/
object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
-
- private[immutable] abstract class Merger[A, B] {
+
+ private[collection] abstract class Merger[A, B] {
def apply(kv1: (A, B), kv2: (A, B)): (A, B)
def invert: Merger[A, B]
}
-
-
+
+ private type MergeFunction[A1, B1] = ((A1, B1), (A1, B1)) => (A1, B1)
+
+ private def liftMerger[A1, B1](mergef: MergeFunction[A1, B1]): Merger[A1, B1] =
+ if (mergef == null) defaultMerger.asInstanceOf[Merger[A1, B1]] else liftMerger0(mergef)
+
+ private[this] val defaultMerger : Merger[Any, Any] = liftMerger0((a,b) => a)
+
+ private[this] def liftMerger0[A1, B1](mergef: MergeFunction[A1, B1]): Merger[A1, B1] = new Merger[A1, B1] {
+ self =>
+ def apply(kv1: (A1, B1), kv2: (A1, B1)): (A1, B1) = mergef(kv1, kv2)
+ val invert: Merger[A1, B1] = new Merger[A1, B1] {
+ def apply(kv1: (A1, B1), kv2: (A1, B1)): (A1, B1) = mergef(kv2, kv1)
+ def invert: Merger[A1, B1] = self
+ }
+ }
+
/** $mapCanBuildFromInfo */
implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), HashMap[A, B]] = new MapCanBuildFrom[A, B]
def empty[A, B]: HashMap[A, B] = EmptyHashMap.asInstanceOf[HashMap[A, B]]
private object EmptyHashMap extends HashMap[Any, Nothing] { }
+ // utility method to create a HashTrieMap from two leaf HashMaps (HashMap1 or HashMapCollision1) with non-colliding hash codes
+ private def makeHashTrieMap[A, B](hash0:Int, elem0:HashMap[A, B], hash1:Int, elem1:HashMap[A, B], level:Int, size:Int) : HashTrieMap[A, B] = {
+ val index0 = (hash0 >>> level) & 0x1f
+ val index1 = (hash1 >>> level) & 0x1f
+ if(index0 != index1) {
+ val bitmap = (1 << index0) | (1 << index1)
+ val elems = new Array[HashMap[A,B]](2)
+ if(index0 < index1) {
+ elems(0) = elem0
+ elems(1) = elem1
+ } else {
+ elems(0) = elem1
+ elems(1) = elem0
+ }
+ new HashTrieMap[A, B](bitmap, elems, size)
+ } else {
+ val elems = new Array[HashMap[A,B]](1)
+ val bitmap = (1 << index0)
+ elems(0) = makeHashTrieMap(hash0, elem0, hash1, elem1, level + 5, size)
+ new HashTrieMap[A, B](bitmap, elems, size)
+ }
+ }
+
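The helper above resolves collisions by walking 5-bit slices of the hash; a worked example of the index arithmetic, with hash values chosen purely for illustration:

    val hash0 = 0x01              // low 5 bits: 00001
    val hash1 = 0x21              // low 5 bits: 00001 as well
    (hash0 >>> 0) & 0x1f          // 1
    (hash1 >>> 0) & 0x1f          // 1  -> same slot at level 0, so recurse with level + 5
    (hash0 >>> 5) & 0x1f          // 0
    (hash1 >>> 5) & 0x1f          // 1  -> slots differ, a two-element HashTrieMap is built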
// TODO: add HashMap2, HashMap3, ...
class HashMap1[A,+B](private[collection] val key: A, private[collection] val hash: Int, private[collection] val value: (B @uV), private[collection] var kv: (A,B @uV)) extends HashMap[A,B] {
@@ -169,7 +197,7 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
// }
// }
- override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] =
+ private[collection] override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] =
if (hash == this.hash && key == this.key ) {
if (merger eq null) {
if (this.value.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this
@@ -179,30 +207,10 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
new HashMap1(nkv._1, hash, nkv._2, nkv)
}
} else {
- var thatindex = (hash >>> level) & 0x1f
- var thisindex = (this.hash >>> level) & 0x1f
if (hash != this.hash) {
// they have different hashes, but may collide at this level - find a level at which they don't
- var lvl = level
- var top: HashTrieMap[A, B1] = null
- var prev: HashTrieMap[A, B1] = null
- while (thisindex == thatindex) {
- val newlevel = new HashTrieMap[A, B1](1 << thisindex, new Array[HashMap[A, B1]](1), 2)
- if (prev ne null) prev.elems(0) = newlevel else top = newlevel
- prev = newlevel
- lvl += 5
- thatindex = (hash >>> lvl) & 0x1f
- thisindex = (this.hash >>> lvl) & 0x1f
- }
- val bottelems = new Array[HashMap[A,B1]](2)
- val ind = if (thisindex < thatindex) 1 else 0
- bottelems(1 - ind) = this
- bottelems(ind) = new HashMap1[A, B1](key, hash, value, kv)
- val bottom = new HashTrieMap[A,B1]((1 << thisindex) | (1 << thatindex), bottelems, 2)
- if (prev ne null) {
- prev.elems(0) = bottom
- top
- } else bottom
+ val that = new HashMap1[A, B1](key, hash, value, kv)
+ makeHashTrieMap[A,B1](this.hash, this, hash, that, level, 2)
} else {
// 32-bit hash collision (rare, but not impossible)
new HashMapCollision1(hash, ListMap.empty.updated(this.key,this.value).updated(key,value))
@@ -217,37 +225,38 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
// this method may be called multiple times in a multithreaded environment, but that's ok
private[HashMap] def ensurePair: (A,B) = if (kv ne null) kv else { kv = (key, value); kv }
protected override def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[A, B1]): HashMap[A, B1] = {
- that.updated0(key, hash, level, value, kv, if (merger ne null) merger.invert else null)
+ that.updated0(key, hash, level, value, kv, merger.invert)
}
}
private[collection] class HashMapCollision1[A, +B](private[collection] val hash: Int, val kvs: ListMap[A, B @uV])
extends HashMap[A, B @uV] {
+ // assert(kvs.size > 1)
override def size = kvs.size
override def get0(key: A, hash: Int, level: Int): Option[B] =
if (hash == this.hash) kvs.get(key) else None
- override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] =
+ private[collection] override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] =
if (hash == this.hash) {
if ((merger eq null) || !kvs.contains(key)) new HashMapCollision1(hash, kvs.updated(key, value))
else new HashMapCollision1(hash, kvs + merger((key, kvs(key)), kv))
} else {
- var m: HashMap[A,B1] = new HashTrieMap[A,B1](0,new Array[HashMap[A,B1]](0),0)
- // might be able to save some ops here, but it doesn't seem to be worth it
- for ((k,v) <- kvs)
- m = m.updated0(k, this.hash, level, v, null, merger)
- m.updated0(key, hash, level, value, kv, merger)
+ val that = new HashMap1(key, hash, value, kv)
+ makeHashTrieMap(this.hash, this, hash, that, level, size + 1)
}
override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] =
if (hash == this.hash) {
val kvs1 = kvs - key
- if (!kvs1.isEmpty)
- new HashMapCollision1(hash, kvs1)
- else
+ if (kvs1.isEmpty)
HashMap.empty[A,B]
+ else if(kvs1.tail.isEmpty) {
+ val kv = kvs1.head
+ new HashMap1[A,B](kv._1,hash,kv._2,kv)
+ } else
+ new HashMapCollision1(hash, kvs1)
} else this
override def iterator: Iterator[(A,B)] = kvs.iterator
@@ -271,6 +280,9 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
private[collection] val size0: Int
) extends HashMap[A, B @uV] {
+ // assert(Integer.bitCount(bitmap) == elems.length)
+ // assert(elems.length > 1 || (elems.length == 1 && elems(0).isInstanceOf[HashTrieMap[_,_]]))
+
/*
def this (level: Int, m1: HashMap1[A,B], m2: HashMap1[A,B]) = {
this(((m1.hash >>> level) & 0x1f) | ((m2.hash >>> level) & 0x1f), {
@@ -304,7 +316,7 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
None
}
- override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] = {
+ private[collection] override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] = {
val index = (hash >>> level) & 0x1f
val mask = (1 << index)
val offset = Integer.bitCount(bitmap & (mask-1))
@@ -343,9 +355,14 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
Array.copy(elems, 0, elemsNew, 0, offset)
Array.copy(elems, offset + 1, elemsNew, offset, elems.length - offset - 1)
val sizeNew = size - sub.size
- new HashTrieMap(bitmapNew, elemsNew, sizeNew)
+ if (elemsNew.length == 1 && !elemsNew(0).isInstanceOf[HashTrieMap[_,_]])
+ elemsNew(0)
+ else
+ new HashTrieMap(bitmapNew, elemsNew, sizeNew)
} else
HashMap.empty[A,B]
+ } else if(elems.length == 1 && !subNew.isInstanceOf[HashTrieMap[_,_]]) {
+ subNew
} else {
val elemsNew = new Array[HashMap[A,B]](elems.length)
Array.copy(elems, 0, elemsNew, 0, elems.length)
@@ -476,7 +493,7 @@ time { mNew.iterator.foreach( p => ()) }
}
new HashTrieMap[A, B1](this.bitmap | that.bitmap, merged, totalelems)
- case hm: HashMapCollision1[_, _] => that.merge0(this, level, if (merger ne null) merger.invert else null)
+ case hm: HashMapCollision1[_, _] => that.merge0(this, level, merger.invert)
case hm: HashMap[_, _] => this
case _ => sys.error("section supposed to be unreachable.")
}
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index c60fdc3bf1..2ebeb044fc 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -8,12 +8,13 @@
-package scala.collection
+package scala
+package collection
package immutable
-import annotation.unchecked.{ uncheckedVariance => uV }
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
import generic._
-import collection.parallel.immutable.ParHashSet
+import scala.collection.parallel.immutable.ParHashSet
/** This class implements immutable sets using a hash trie.
*
@@ -102,6 +103,30 @@ object HashSet extends ImmutableSetFactory[HashSet] {
private object EmptyHashSet extends HashSet[Any] { }
+ // utility method to create a HashTrieSet from two leaf HashSets (HashSet1 or HashSetCollision1) with non-colliding hash codes
+ private def makeHashTrieSet[A](hash0:Int, elem0:HashSet[A], hash1:Int, elem1:HashSet[A], level:Int) : HashTrieSet[A] = {
+ val index0 = (hash0 >>> level) & 0x1f
+ val index1 = (hash1 >>> level) & 0x1f
+ if(index0 != index1) {
+ val bitmap = (1 << index0) | (1 << index1)
+ val elems = new Array[HashSet[A]](2)
+ if(index0 < index1) {
+ elems(0) = elem0
+ elems(1) = elem1
+ } else {
+ elems(0) = elem1
+ elems(1) = elem0
+ }
+ new HashTrieSet[A](bitmap, elems, elem0.size + elem1.size)
+ } else {
+ val elems = new Array[HashSet[A]](1)
+ val bitmap = (1 << index0)
+ val child = makeHashTrieSet(hash0, elem0, hash1, elem1, level + 5)
+ elems(0) = child
+ new HashTrieSet[A](bitmap, elems, child.size)
+ }
+ }
+
// TODO: add HashSet2, HashSet3, ...
class HashSet1[A](private[HashSet] val key: A, private[HashSet] val hash: Int) extends HashSet[A] {
@@ -114,9 +139,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
if (hash == this.hash && key == this.key) this
else {
if (hash != this.hash) {
- //new HashTrieSet[A](level+5, this, new HashSet1(key, hash))
- val m = new HashTrieSet[A](0,new Array[HashSet[A]](0),0) // TODO: could save array alloc
- m.updated0(this.key, this.hash, level).updated0(key, hash, level)
+ makeHashTrieSet(this.hash, this, hash, new HashSet1(key, hash), level)
} else {
// 32-bit hash collision (rare, but not impossible)
new HashSetCollision1(hash, ListSet.empty + this.key + key)
@@ -140,21 +163,17 @@ object HashSet extends ImmutableSetFactory[HashSet] {
override def updated0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash) new HashSetCollision1(hash, ks + key)
- else {
- var m: HashSet[A] = new HashTrieSet[A](0,new Array[HashSet[A]](0),0)
- // might be able to save some ops here, but it doesn't seem to be worth it
- for (k <- ks)
- m = m.updated0(k, this.hash, level)
- m.updated0(key, hash, level)
- }
+ else makeHashTrieSet(this.hash, this, hash, new HashSet1(key, hash), level)
override def removed0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash) {
val ks1 = ks - key
- if (!ks1.isEmpty)
- new HashSetCollision1(hash, ks1)
- else
+ if(ks1.isEmpty)
HashSet.empty[A]
+ else if(ks1.tail.isEmpty)
+ new HashSet1(ks1.head, hash)
+ else
+ new HashSetCollision1(hash, ks1)
} else this
override def iterator: Iterator[A] = ks.iterator
@@ -179,6 +198,9 @@ object HashSet extends ImmutableSetFactory[HashSet] {
class HashTrieSet[A](private val bitmap: Int, private[collection] val elems: Array[HashSet[A]], private val size0: Int)
extends HashSet[A] {
+ assert(Integer.bitCount(bitmap) == elems.length)
+ // assertion has to remain disabled until SI-6197 is solved
+ // assert(elems.length > 1 || (elems.length == 1 && elems(0).isInstanceOf[HashTrieSet[_]]))
override def size = size0
@@ -236,7 +258,12 @@ object HashSet extends ImmutableSetFactory[HashSet] {
Array.copy(elems, 0, elemsNew, 0, offset)
Array.copy(elems, offset + 1, elemsNew, offset, elems.length - offset - 1)
val sizeNew = size - sub.size
- new HashTrieSet(bitmapNew, elemsNew, sizeNew)
+ // if we have only one child, which is not a HashTrieSet but a self-contained set like
+ // HashSet1 or HashSetCollision1, return the child instead
+ if (elemsNew.length == 1 && !elemsNew(0).isInstanceOf[HashTrieSet[_]])
+ elemsNew(0)
+ else
+ new HashTrieSet(bitmapNew, elemsNew, sizeNew)
} else
HashSet.empty[A]
} else {
diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala
index e895c94599..d0f6b4b3ac 100644
--- a/src/library/scala/collection/immutable/IntMap.scala
+++ b/src/library/scala/collection/immutable/IntMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import scala.collection.generic.{ CanBuildFrom, BitOperations }
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 74dc385f99..7a489bb100 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -13,7 +13,7 @@ package immutable
import generic._
import mutable.{Builder, ListBuffer}
-import annotation.tailrec
+import scala.annotation.tailrec
import java.io._
/** A class for immutable linked lists representing ordered collections
@@ -229,7 +229,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
* }}}
*/
override def slice(from: Int, until: Int): List[A] = {
- val lo = math.max(from, 0)
+ val lo = scala.math.max(from, 0)
if (until <= lo || isEmpty) Nil
else this drop lo take (until - lo)
}
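For reference, the clamping computed above gives `slice` these edge-case results (a quick sketch):

    List(1, 2, 3, 4, 5).slice(1, 3)    // List(2, 3)
    List(1, 2, 3, 4, 5).slice(-2, 2)   // List(1, 2): lo is clamped to 0
    List(1, 2, 3).slice(2, 1)          // Nil: until <= lo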
@@ -302,6 +302,15 @@ sealed abstract class List[+A] extends AbstractSeq[A]
if (isEmpty) Stream.Empty
else new Stream.Cons(head, tail.toStream)
+ @inline override final
+ def foreach[B](f: A => B) {
+ var these = this
+ while (!these.isEmpty) {
+ f(these.head)
+ these = these.tail
+ }
+ }
+
@deprecated("use `distinct` instead", "2.8.0")
def removeDuplicates: List[A] = distinct
}
@@ -321,7 +330,7 @@ case object Nil extends List[Nothing] {
throw new UnsupportedOperationException("tail of empty list")
// Removal of equals method here might lead to an infinite recursion similar to IntMap.equals.
override def equals(that: Any) = that match {
- case that1: collection.GenSeq[_] => that1.isEmpty
+ case that1: scala.collection.GenSeq[_] => that1.isEmpty
case _ => false
}
}
@@ -378,7 +387,6 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail }
out.writeObject(ListSerializeEnd)
}
-
}
/** $factoryInfo
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index 091443f909..13282101b3 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -12,7 +12,7 @@ package scala.collection
package immutable
import generic._
-import annotation.{tailrec, bridge}
+import scala.annotation.{tailrec, bridge}
/** $factoryInfo
* @since 1
@@ -121,12 +121,12 @@ extends AbstractMap[A, B]
def hasNext = !self.isEmpty
def next(): (A,B) =
if (!hasNext) throw new NoSuchElementException("next on empty iterator")
- else { val res = (self.key, self.value); self = self.next; res }
+ else { val res = (self.key, self.value); self = self.tail; res }
}.toList.reverseIterator
protected def key: A = throw new NoSuchElementException("empty map")
protected def value: B = throw new NoSuchElementException("empty map")
- protected def next: ListMap[A, B] = throw new NoSuchElementException("empty map")
+ override def tail: ListMap[A, B] = throw new NoSuchElementException("empty map")
/** This class represents an entry in the `ListMap`.
*/
@@ -140,7 +140,7 @@ extends AbstractMap[A, B]
override def size: Int = size0(this, 0)
// to allow tail recursion and prevent stack overflows
- @tailrec private def size0(cur: ListMap[A, B1], acc: Int): Int = if (cur.isEmpty) acc else size0(cur.next, acc + 1)
+ @tailrec private def size0(cur: ListMap[A, B1], acc: Int): Int = if (cur.isEmpty) acc else size0(cur.tail, acc + 1)
/** Is this an empty map?
*
@@ -157,7 +157,7 @@ extends AbstractMap[A, B]
*/
override def apply(k: A): B1 = apply0(this, k)
- @tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 = if (k == cur.key) cur.value else apply0(cur.next, k)
+ @tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 = if (k == cur.key) cur.value else apply0(cur.tail, k)
/** Checks if this map maps `key` to a value and return the
* value if it exists.
@@ -169,7 +169,7 @@ extends AbstractMap[A, B]
@tailrec private def get0(cur: ListMap[A, B1], k: A): Option[B1] =
if (k == cur.key) Some(cur.value)
- else if (cur.next.nonEmpty) get0(cur.next, k) else None
+ else if (cur.tail.nonEmpty) get0(cur.tail, k) else None
/** This method allows one to create a new map with an additional mapping
* from `key` to `value`. If the map contains already a mapping for `key`,
@@ -198,7 +198,7 @@ extends AbstractMap[A, B]
var lst: List[(A, B1)] = Nil
while (cur.nonEmpty) {
if (k != cur.key) lst ::= ((cur.key, cur.value))
- cur = cur.next
+ cur = cur.tail
}
var acc = ListMap[A, B1]()
while (lst != Nil) {
@@ -211,6 +211,6 @@ extends AbstractMap[A, B]
}
- override protected def next: ListMap[A, B1] = ListMap.this
+ override def tail: ListMap[A, B1] = ListMap.this
}
}
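The `ListMap` hunks above expose the chain link as an overridden public `tail` (replacing the `protected def next`) so that the `@tailrec`-annotated helpers `size0`, `apply0` and `get0` walk the map without growing the stack. The same idiom on a plain `List`, as an illustrative sketch (the name `length0` is not part of the patch):

import scala.annotation.tailrec

// Tail-recursive length in the style of ListMap.size0: an accumulator plus a
// self-call in tail position, which the compiler turns into a loop.
@tailrec
def length0[A](xs: List[A], acc: Int = 0): Int =
  if (xs.isEmpty) acc else length0(xs.tail, acc + 1)

length0(List("a", "b", "c"))  // == 3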
diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala
index ce3abaacb7..6dcdee8938 100644
--- a/src/library/scala/collection/immutable/ListSet.scala
+++ b/src/library/scala/collection/immutable/ListSet.scala
@@ -10,7 +10,7 @@ package scala.collection
package immutable
import generic._
-import annotation.{tailrec, bridge}
+import scala.annotation.{tailrec, bridge}
import mutable.{ ListBuffer, Builder }
/** $factoryInfo
@@ -116,8 +116,8 @@ class ListSet[A] extends AbstractSet[A]
def hasNext = that.nonEmpty
def next: A =
if (hasNext) {
- val res = that.elem
- that = that.next
+ val res = that.head
+ that = that.tail
res
}
else Iterator.empty.next
@@ -126,18 +126,18 @@ class ListSet[A] extends AbstractSet[A]
/**
* @throws Predef.NoSuchElementException
*/
- protected def elem: A = throw new NoSuchElementException("Set has no elements");
+ override def head: A = throw new NoSuchElementException("Set has no elements");
/**
* @throws Predef.NoSuchElementException
*/
- protected def next: ListSet[A] = throw new NoSuchElementException("Next of an empty set");
+ override def tail: ListSet[A] = throw new NoSuchElementException("Next of an empty set");
override def stringPrefix = "ListSet"
/** Represents an entry in the `ListSet`.
*/
- protected class Node(override protected val elem: A) extends ListSet[A] with Serializable {
+ protected class Node(override val head: A) extends ListSet[A] with Serializable {
override private[ListSet] def unchecked_outer = self
/** Returns the number of elements in this set.
@@ -162,7 +162,7 @@ class ListSet[A] extends AbstractSet[A]
*/
override def contains(e: A) = containsInternal(this, e)
@tailrec private def containsInternal(n: ListSet[A], e: A): Boolean =
- !n.isEmpty && (n.elem == e || containsInternal(n.unchecked_outer, e))
+ !n.isEmpty && (n.head == e || containsInternal(n.unchecked_outer, e))
/** This method creates a new set with an additional element.
*/
@@ -170,10 +170,10 @@ class ListSet[A] extends AbstractSet[A]
/** `-` can be used to remove a single element from a set.
*/
- override def -(e: A): ListSet[A] = if (e == elem) self else {
- val tail = self - e; new tail.Node(elem)
+ override def -(e: A): ListSet[A] = if (e == head) self else {
+ val tail = self - e; new tail.Node(head)
}
- override protected def next: ListSet[A] = self
+ override def tail: ListSet[A] = self
}
}
diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala
index 002027b162..4899b45d5f 100644
--- a/src/library/scala/collection/immutable/LongMap.scala
+++ b/src/library/scala/collection/immutable/LongMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import scala.collection.generic.{ CanBuildFrom, BitOperations }
@@ -298,7 +299,7 @@ extends AbstractMap[Long, T]
if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this)
else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
else LongMap.Bin(prefix, mask, left, right.updateWith(key, value, f))
- case LongMap.Tip(key2, value2) =>
+ case LongMap.Tip(key2, value2) =>
if (key == key2) LongMap.Tip(key, f(value2, value))
else join(key, LongMap.Tip(key, value), key2, this)
case LongMap.Nil => LongMap.Tip(key, value)
diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala
index e73da01ac4..17951e73fd 100644
--- a/src/library/scala/collection/immutable/Map.scala
+++ b/src/library/scala/collection/immutable/Map.scala
@@ -76,7 +76,7 @@ object Map extends ImmutableMapFactory[Map] {
def empty[A, B]: Map[A, B] = EmptyMap.asInstanceOf[Map[A, B]]
- class WithDefault[A, +B](underlying: Map[A, B], d: A => B) extends collection.Map.WithDefault[A, B](underlying, d) with Map[A, B] {
+ class WithDefault[A, +B](underlying: Map[A, B], d: A => B) extends scala.collection.Map.WithDefault[A, B](underlying, d) with Map[A, B] {
override def empty = new WithDefault(underlying.empty, d)
override def updated[B1 >: B](key: A, value: B1): WithDefault[A, B1] = new WithDefault[A, B1](underlying.updated[B1](key, value), d)
override def + [B1 >: B](kv: (A, B1)): WithDefault[A, B1] = updated(kv._1, kv._2)
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index 7d373b7b39..3b4bfdc593 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -13,7 +13,7 @@ package immutable
import java.io._
import scala.util.matching.Regex
-import reflect.ClassTag
+import scala.reflect.ClassTag
/** The `PagedSeq` object defines a lazy implementation of
* a random access sequence.
@@ -99,7 +99,7 @@ object PagedSeq {
/** Constructs a paged character sequence from a scala.io.Source value
*/
- def fromSource(source: io.Source) =
+ def fromSource(source: scala.io.Source) =
fromLines(source.getLines())
}
diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala
index e980dda847..8d82f4932f 100644
--- a/src/library/scala/collection/immutable/Queue.scala
+++ b/src/library/scala/collection/immutable/Queue.scala
@@ -11,7 +11,7 @@ package immutable
import generic._
import mutable.{ Builder, ListBuffer }
-import annotation.tailrec
+import scala.annotation.tailrec
/** `Queue` objects implement data structures that allow elements to be
* inserted and retrieved in a first-in-first-out (FIFO) manner.
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index 7607837491..92ea5d3f04 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -43,9 +43,9 @@ import scala.collection.parallel.immutable.ParRange
*/
@SerialVersionUID(7618862778670199309L)
class Range(val start: Int, val end: Int, val step: Int)
-extends collection.AbstractSeq[Int]
+extends scala.collection.AbstractSeq[Int]
with IndexedSeq[Int]
- with collection.CustomParallelizable[Int, ParRange]
+ with scala.collection.CustomParallelizable[Int, ParRange]
with Serializable
{
override def par = new ParRange(this)
@@ -78,19 +78,19 @@ extends collection.AbstractSeq[Int]
final val terminalElement = start + numRangeElements * step
override def last = if (isEmpty) Nil.last else lastElement
-
+
override def min[A1 >: Int](implicit ord: Ordering[A1]): Int =
if (ord eq Ordering.Int) {
if (step > 0) start
else last
} else super.min(ord)
-
- override def max[A1 >: Int](implicit ord: Ordering[A1]): Int =
+
+ override def max[A1 >: Int](implicit ord: Ordering[A1]): Int =
if (ord eq Ordering.Int) {
if (step > 0) last
else start
} else super.max(ord)
-
+
protected def copy(start: Int, end: Int, step: Int): Range = new Range(start, end, step)
/** Create a new range with the `start` and `end` values of this range and
@@ -127,7 +127,7 @@ extends collection.AbstractSeq[Int]
}
}
- @inline final def apply(idx: Int): Int = {
+ final def apply(idx: Int): Int = {
validateMaxLength()
if (idx < 0 || idx >= numRangeElements) throw new IndexOutOfBoundsException(idx.toString)
else start + (step * idx)
@@ -346,11 +346,11 @@ object Range {
/** Make an inclusive range from `start` to `end` with given step value.
* @note step != 0
*/
- @inline def inclusive(start: Int, end: Int, step: Int): Range.Inclusive = new Inclusive(start, end, step)
+ def inclusive(start: Int, end: Int, step: Int): Range.Inclusive = new Inclusive(start, end, step)
/** Make an inclusive range from `start` to `end` with step value 1.
*/
- @inline def inclusive(start: Int, end: Int): Range.Inclusive = new Inclusive(start, end, 1)
+ def inclusive(start: Int, end: Int): Range.Inclusive = new Inclusive(start, end, 1)
// BigInt and Long are straightforward generic ranges.
object BigInt {
diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala
index 83eeaa45ee..a3ab27f814 100644
--- a/src/library/scala/collection/immutable/RedBlack.scala
+++ b/src/library/scala/collection/immutable/RedBlack.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
/** Old base class that was used by previous implementations of `TreeMaps` and `TreeSets`.
diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala
index 4b573511d1..bb489dd80a 100644
--- a/src/library/scala/collection/immutable/RedBlackTree.scala
+++ b/src/library/scala/collection/immutable/RedBlackTree.scala
@@ -8,11 +8,12 @@
-package scala.collection
+package scala
+package collection
package immutable
-import annotation.tailrec
-import annotation.meta.getter
+import scala.annotation.tailrec
+import scala.annotation.meta.getter
/** An object containing the RedBlack tree implementation used by `TreeMaps` and `TreeSets`.
*
@@ -131,6 +132,15 @@ object RedBlackTree {
else if (overwrite || k != tree.key) mkTree(isBlackTree(tree), k, v, tree.left, tree.right)
else tree
}
+ private[this] def updNth[A, B, B1 >: B](tree: Tree[A, B], idx: Int, k: A, v: B1, overwrite: Boolean): Tree[A, B1] = if (tree eq null) {
+ RedTree(k, v, null, null)
+ } else {
+ val rank = count(tree.left) + 1
+ if (idx < rank) balanceLeft(isBlackTree(tree), tree.key, tree.value, updNth(tree.left, idx, k, v, overwrite), tree.right)
+ else if (idx > rank) balanceRight(isBlackTree(tree), tree.key, tree.value, tree.left, updNth(tree.right, idx - rank, k, v, overwrite))
+ else if (overwrite) mkTree(isBlackTree(tree), k, v, tree.left, tree.right)
+ else tree
+ }
/* Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees
* http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html */
@@ -248,27 +258,27 @@ object RedBlackTree {
else rebalance(tree, newLeft, newRight)
}
- private[this] def doDrop[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
+ private[this] def doDrop[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
if (n <= 0) return tree
if (n >= this.count(tree)) return null
val count = this.count(tree.left)
if (n > count) return doDrop(tree.right, n - count - 1)
val newLeft = doDrop(tree.left, n)
if (newLeft eq tree.left) tree
- else if (newLeft eq null) upd(tree.right, tree.key, tree.value, false)
+ else if (newLeft eq null) updNth(tree.right, n - count - 1, tree.key, tree.value, false)
else rebalance(tree, newLeft, tree.right)
}
- private[this] def doTake[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
+ private[this] def doTake[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
if (n <= 0) return null
if (n >= this.count(tree)) return tree
val count = this.count(tree.left)
if (n <= count) return doTake(tree.left, n)
val newRight = doTake(tree.right, n - count - 1)
if (newRight eq tree.right) tree
- else if (newRight eq null) upd(tree.left, tree.key, tree.value, false)
+ else if (newRight eq null) updNth(tree.left, n, tree.key, tree.value, false)
else rebalance(tree, tree.left, newRight)
}
- private[this] def doSlice[A: Ordering, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = {
+ private[this] def doSlice[A, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = {
if (tree eq null) return null
val count = this.count(tree.left)
if (from > count) return doSlice(tree.right, from - count - 1, until - count - 1)
@@ -276,8 +286,8 @@ object RedBlackTree {
val newLeft = doDrop(tree.left, from)
val newRight = doTake(tree.right, until - count - 1)
if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
- else if (newLeft eq null) upd(newRight, tree.key, tree.value, false)
- else if (newRight eq null) upd(newLeft, tree.key, tree.value, false)
+ else if (newLeft eq null) updNth(newRight, from - count - 1, tree.key, tree.value, false)
+ else if (newRight eq null) updNth(newLeft, until, tree.key, tree.value, false)
else rebalance(tree, newLeft, newRight)
}
@@ -379,7 +389,7 @@ object RedBlackTree {
@(inline @getter) final val left: Tree[A, B],
@(inline @getter) final val right: Tree[A, B])
extends Serializable {
- final val count: Int = 1 + RedBlackTree.count(left) + RedBlackTree.count(right)
+ @(inline @getter) final val count: Int = 1 + RedBlackTree.count(left) + RedBlackTree.count(right)
def black: Tree[A, B]
def red: Tree[A, B]
}
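The new private `updNth` re-inserts the pivot key at a known position when `doDrop`, `doTake` or `doSlice` hollow out one side of the tree, which is also why those helpers no longer need an `Ordering` context bound. The effect is only observable through the sorted collections built on this tree; for example, slicing a `TreeMap` (a plain usage example, not code from the patch):

import scala.collection.immutable.TreeMap

val m = TreeMap(1 -> "a", 2 -> "b", 3 -> "c", 4 -> "d", 5 -> "e")

// drop/take/slice on TreeMap are backed by doDrop/doTake/doSlice on the red-black tree.
m.slice(1, 4)  // TreeMap(2 -> b, 3 -> c, 4 -> d)
m.drop(2)      // TreeMap(3 -> c, 4 -> d, 5 -> e)
m.take(2)      // TreeMap(1 -> a, 2 -> b)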
diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala
index f147b673f7..c0a1e0fec9 100644
--- a/src/library/scala/collection/immutable/SortedMap.scala
+++ b/src/library/scala/collection/immutable/SortedMap.scala
@@ -13,7 +13,7 @@ package immutable
import generic._
import mutable.Builder
-import annotation.unchecked.uncheckedVariance
+import scala.annotation.unchecked.uncheckedVariance
/** A map whose keys are sorted.
*
@@ -78,17 +78,17 @@ self =>
*/
override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] =
((repr: SortedMap[A, B1]) /: xs.seq) (_ + _)
-
+
override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] {
implicit def ordering: Ordering[A] = self.ordering
override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p)
}
-
+
override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] {
implicit def ordering: Ordering[A] = self.ordering
override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f)
}
-
+
}
/** $factoryInfo
@@ -99,8 +99,8 @@ object SortedMap extends ImmutableSortedMapFactory[SortedMap] {
/** $sortedMapCanBuildFromInfo */
implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] = new SortedMapCanBuildFrom[A, B]
def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = TreeMap.empty[A, B]
-
- private[collection] trait Default[A, +B] extends SortedMap[A, B] with collection.SortedMap.Default[A, B] {
+
+ private[collection] trait Default[A, +B] extends SortedMap[A, B] with scala.collection.SortedMap.Default[A, B] {
self =>
override def +[B1 >: B](kv: (A, B1)): SortedMap[A, B1] = {
val b = SortedMap.newBuilder[A, B1]
@@ -108,7 +108,7 @@ object SortedMap extends ImmutableSortedMapFactory[SortedMap] {
b += ((kv._1, kv._2))
b.result
}
-
+
override def - (key: A): SortedMap[A, B] = {
val b = newBuilder
for (kv <- this; if kv._1 != key) b += kv
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 9f5f98ddf4..461a375317 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -13,7 +13,7 @@ import generic._
import mutable.{Builder, StringBuilder, LazyBuilder, ListBuffer}
import scala.annotation.tailrec
import Stream.cons
-import language.implicitConversions
+import scala.language.implicitConversions
/** The class `Stream` implements lazy lists where elements
* are only evaluated when they are needed. Here is an example:
@@ -422,6 +422,9 @@ self =>
* // produces: 10, 10, 11, 10, 11, 11, 10, 11, 11, 12, 10, 11, 11, 12, 13
* }}}
*
+ * ''Note:'' Currently `flatMap` will evaluate as much of the `Stream` as is needed
+ * to find a non-empty element for the head; that part of the evaluation is not lazy.
+ *
* @tparam B The element type of the returned collection '''That'''.
* @param f the function to apply on each element.
* @return `f(a,,0,,) ::: ... ::: f(a,,n,,)` if
@@ -479,22 +482,40 @@ self =>
final class StreamWithFilter(p: A => Boolean) extends WithFilter(p) {
override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
- def tailMap = asStream[B](tail withFilter p map f)
- if (isStreamBuilder(bf)) asThat(
- if (isEmpty) Stream.Empty
- else if (p(head)) cons(f(head), tailMap)
- else tailMap
- )
+ def tailMap(coll: Stream[A]): Stream[B] = {
+ var head: A = null.asInstanceOf[A]
+ var tail: Stream[A] = coll
+ while (true) {
+ if (tail.isEmpty)
+ return Stream.Empty
+ head = tail.head
+ tail = tail.tail
+ if (p(head))
+ return cons(f(head), tailMap(tail))
+ }
+ throw new RuntimeException()
+ }
+
+ if (isStreamBuilder(bf)) asThat(tailMap(Stream.this))
else super.map(f)(bf)
}
override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
- def tailFlatMap = asStream[B](tail withFilter p flatMap f)
- if (isStreamBuilder(bf)) asThat(
- if (isEmpty) Stream.Empty
- else if (p(head)) f(head).toStream append tailFlatMap
- else tailFlatMap
- )
+ def tailFlatMap(coll: Stream[A]): Stream[B] = {
+ var head: A = null.asInstanceOf[A]
+ var tail: Stream[A] = coll
+ while (true) {
+ if (tail.isEmpty)
+ return Stream.Empty
+ head = tail.head
+ tail = tail.tail
+ if (p(head))
+ return f(head).toStream append tailFlatMap(tail)
+ }
+ throw new RuntimeException()
+ }
+
+ if (isStreamBuilder(bf)) asThat(tailFlatMap(Stream.this))
else super.flatMap(f)(bf)
}
@@ -613,7 +634,7 @@ self =>
* // (5,6)
* }}}
*/
- override final def zip[A1 >: A, B, That](that: collection.GenIterable[B])(implicit bf: CanBuildFrom[Stream[A], (A1, B), That]): That =
+ override final def zip[A1 >: A, B, That](that: scala.collection.GenIterable[B])(implicit bf: CanBuildFrom[Stream[A], (A1, B), That]): That =
// we assume there is no other builder factory on streams and therefore know that That = Stream[(A1, B)]
if (isStreamBuilder(bf)) asThat(
if (this.isEmpty || that.isEmpty) Stream.Empty
@@ -916,6 +937,7 @@ self =>
override def view = new StreamView[A, Stream[A]] {
protected lazy val underlying = self.repr
+ override def isEmpty = self.isEmpty
override def iterator = self.iterator
override def length = self.length
override def apply(idx: Int) = self.apply(idx)
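The rewritten `StreamWithFilter.map` and `flatMap` scan forward with a `while` loop until they find an element satisfying the predicate, instead of recursing once per filtered-out element, so a long run of non-matching elements no longer grows the stack. A small usage sketch against the pre-2.13 `Stream` API (the value names are illustrative):

// withFilter on a Stream stays lazy: only the needed prefix is forced.
val evens = Stream.from(1).withFilter(_ % 2 == 0).map(_ * 10)
evens.take(3).toList  // List(20, 40, 60)

// Skipping a long run of non-matching elements is now an iterative scan; with
// the previous recursive implementation a prefix this long could overflow the stack.
Stream.from(1).withFilter(_ > 100000).map(identity).head  // 100001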
diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala
index 3fd92aaff9..236308da2e 100644
--- a/src/library/scala/collection/immutable/StreamViewLike.scala
+++ b/src/library/scala/collection/immutable/StreamViewLike.scala
@@ -50,20 +50,20 @@ extends SeqView[A, Coll]
trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B]
/** boilerplate */
- protected override def newForced[B](xs: => collection.GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
- protected override def newAppended[B >: A](that: collection.GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected override def newForced[B](xs: => scala.collection.GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
+ protected override def newAppended[B >: A](that: scala.collection.GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
- protected override def newFlatMapped[B](f: A => collection.GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
+ protected override def newFlatMapped[B](f: A => scala.collection.GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with AbstractTransformed[A] with Sliced
protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with DroppedWhile
protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with TakenWhile
- protected override def newZipped[B](that: collection.GenIterable[B]): Transformed[(A, B)] = new { val other = that } with AbstractTransformed[(A, B)] with Zipped[B]
- protected override def newZippedAll[A1 >: A, B](that: collection.GenIterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = {
+ protected override def newZipped[B](that: scala.collection.GenIterable[B]): Transformed[(A, B)] = new { val other = that } with AbstractTransformed[(A, B)] with Zipped[B]
+ protected override def newZippedAll[A1 >: A, B](that: scala.collection.GenIterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = {
new { val other = that; val thisElem = _thisElem; val thatElem = _thatElem } with AbstractTransformed[(A1, B)] with ZippedAll[A1, B]
}
protected override def newReversed: Transformed[A] = new Reversed { }
- protected override def newPatched[B >: A](_from: Int, _patch: collection.GenSeq[B], _replaced: Int): Transformed[B] = {
+ protected override def newPatched[B >: A](_from: Int, _patch: scala.collection.GenSeq[B], _replaced: Int): Transformed[B] = {
new { val from = _from; val patch = _patch; val replaced = _replaced } with AbstractTransformed[B] with Patched[B]
}
protected override def newPrepended[B >: A](elem: B): Transformed[B] = new { protected[this] val fst = elem } with AbstractTransformed[B] with Prepended[B]
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index af7662d2e0..4d28bf9518 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -13,7 +13,7 @@ import generic._
import mutable.Builder
import scala.util.matching.Regex
import scala.math.ScalaNumber
-import reflect.ClassTag
+import scala.reflect.ClassTag
/** A companion object for the `StringLike` containing some constants.
* @since 2.8
@@ -41,7 +41,7 @@ import StringLike._
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-trait StringLike[+Repr] extends Any with collection.IndexedSeqOptimized[Char, Repr] with Ordered[String] {
+trait StringLike[+Repr] extends Any with scala.collection.IndexedSeqOptimized[Char, Repr] with Ordered[String] {
self =>
/** Creates a string builder buffer as builder for this class */
diff --git a/src/library/scala/collection/immutable/TrieIterator.scala b/src/library/scala/collection/immutable/TrieIterator.scala
index ead1a8c744..e8e904f1f9 100644
--- a/src/library/scala/collection/immutable/TrieIterator.scala
+++ b/src/library/scala/collection/immutable/TrieIterator.scala
@@ -11,7 +11,7 @@ package immutable
import HashMap.{ HashTrieMap, HashMapCollision1, HashMap1 }
import HashSet.{ HashTrieSet, HashSetCollision1, HashSet1 }
-import annotation.unchecked.{ uncheckedVariance => uV }
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
import scala.annotation.tailrec
/** Abandons any pretense of type safety for speed. You can't say I
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index 4dfe147a65..a33bf2c9c5 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -6,11 +6,12 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import scala.annotation.unchecked.uncheckedVariance
-import compat.Platform
+import scala.compat.Platform
import scala.collection.generic._
import scala.collection.mutable.Builder
import scala.collection.parallel.immutable.ParVector
@@ -21,14 +22,14 @@ object Vector extends SeqFactory[Vector] {
private[collection] class VectorReusableCBF extends GenericCanBuildFrom[Nothing] {
override def apply() = newBuilder[Nothing]
}
-
+
private val VectorReusableCBF: GenericCanBuildFrom[Nothing] = new VectorReusableCBF
-
- @inline implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Vector[A]] =
+
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Vector[A]] =
VectorReusableCBF.asInstanceOf[CanBuildFrom[Coll, A, Vector[A]]]
def newBuilder[A]: Builder[A, Vector[A]] = new VectorBuilder[A]
private[immutable] val NIL = new Vector[Nothing](0, 0, 0)
- @inline override def empty[A]: Vector[A] = NIL
+ override def empty[A]: Vector[A] = NIL
}
// in principle, most members should be private. however, access privileges must
@@ -93,7 +94,7 @@ override def companion: GenericCompanion[Vector] = Vector
if (s.depth > 1) s.gotoPos(startIndex, startIndex ^ focus)
}
- @inline override def iterator: VectorIterator[A] = {
+ override def iterator: VectorIterator[A] = {
val s = new VectorIterator[A](startIndex, endIndex)
initIterator(s)
s
@@ -119,16 +120,6 @@ override def companion: GenericCompanion[Vector] = Vector
// In principle, escape analysis could even remove the iterator/builder allocations and do it
// with local variables exclusively. But we're not quite there yet ...
- @deprecated("this method is experimental and will be removed in a future release", "2.8.0")
- @inline def foreachFast[U](f: A => U): Unit = iterator.foreachFast(f)
- @deprecated("this method is experimental and will be removed in a future release", "2.8.0")
- @inline def mapFast[B, That](f: A => B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = {
- val b = bf(repr)
- foreachFast(x => b += f(x))
- b.result
- }
-
-
def apply(index: Int): A = {
val idx = checkRangeConvert(index)
//println("get elem: "+index + "/"+idx + "(focus:" +focus+" xor:"+(idx^focus)+" depth:"+depth+")")
@@ -146,17 +137,17 @@ override def companion: GenericCompanion[Vector] = Vector
// SeqLike api
- @inline override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = bf match {
+ override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = bf match {
case _: Vector.VectorReusableCBF => updateAt(index, elem).asInstanceOf[That] // just ignore bf
case _ => super.updated(index, elem)(bf)
}
- @inline override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = bf match {
+ override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = bf match {
case _: Vector.VectorReusableCBF => appendFront(elem).asInstanceOf[That] // just ignore bf
case _ => super.+:(elem)(bf)
}
- @inline override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = bf match {
+ override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = bf match {
case _: Vector.VectorReusableCBF => appendBack(elem).asInstanceOf[That] // just ignore bf
case _ => super.:+(elem)(bf)
}
@@ -695,9 +686,6 @@ extends AbstractIterator[A]
v.initFrom(this)
v
}
-
- @deprecated("this method is experimental and will be removed in a future release", "2.8.0")
- @inline def foreachFast[U](f: A => U) { while (hasNext) f(next()) }
}
diff --git a/src/library/scala/collection/immutable/package.scala b/src/library/scala/collection/immutable/package.scala
index eec5f04fff..647fc04310 100644
--- a/src/library/scala/collection/immutable/package.scala
+++ b/src/library/scala/collection/immutable/package.scala
@@ -69,9 +69,9 @@ package immutable {
private def locationAfterN(n: Int) = (
if (n > 0) {
if (step > 0)
- math.min(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
+ scala.math.min(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
else
- math.max(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
+ scala.math.max(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
}
else start
)
diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala
index 9aea25f330..ad52daaad4 100644
--- a/src/library/scala/collection/mutable/AVLTree.scala
+++ b/src/library/scala/collection/mutable/AVLTree.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala
index 3034fc2bce..90b7ca03de 100644
--- a/src/library/scala/collection/mutable/ArrayBuffer.scala
+++ b/src/library/scala/collection/mutable/ArrayBuffer.scala
@@ -66,7 +66,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
override def sizeHint(len: Int) {
if (len > size && len >= 1) {
val newarray = new Array[AnyRef](len)
- compat.Platform.arraycopy(array, 0, newarray, 0, size0)
+ scala.compat.Platform.arraycopy(array, 0, newarray, 0, size0)
array = newarray
}
}
@@ -93,7 +93,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
* @return the updated buffer.
*/
override def ++=(xs: TraversableOnce[A]): this.type = xs match {
- case v: collection.IndexedSeqLike[_, _] =>
+ case v: scala.collection.IndexedSeqLike[_, _] =>
val n = v.length
ensureSize(size0 + n)
v.copyToArray(array.asInstanceOf[scala.Array[Any]], size0, n)
@@ -169,12 +169,6 @@ class ArrayBuffer[A](override protected val initialSize: Int)
result
}
- /** Return a clone of this buffer.
- *
- * @return an `ArrayBuffer` with the same elements.
- */
- override def clone(): ArrayBuffer[A] = new ArrayBuffer[A] ++= this
-
def result: ArrayBuffer[A] = this
/** Defines the prefix of the string representation.
diff --git a/src/library/scala/collection/mutable/ArrayLike.scala b/src/library/scala/collection/mutable/ArrayLike.scala
index 04601845c4..172993c5c3 100644
--- a/src/library/scala/collection/mutable/ArrayLike.scala
+++ b/src/library/scala/collection/mutable/ArrayLike.scala
@@ -22,7 +22,7 @@ import generic._
* @version 2.8
* @since 2.8
*/
-trait ArrayLike[A, +Repr] extends IndexedSeqOptimized[A, Repr] { self =>
+trait ArrayLike[A, +Repr] extends Any with IndexedSeqOptimized[A, Repr] { self =>
/** Creates a possibly nested `IndexedSeq` which consists of all the elements
* of this array. If the elements are arrays themselves, the `deep` transformation
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 21c2aaaec7..397f5bbefa 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -6,18 +6,15 @@
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
package mutable
-import compat.Platform.arraycopy
+import scala.compat.Platform.arraycopy
import scala.reflect.ClassTag
import scala.runtime.ScalaRunTime._
-
import parallel.mutable.ParArray
-
/** This class serves as a wrapper for `Array`s with all the operations found in
* indexed sequences. Where needed, instances of arrays are implicitly converted
* into this class.
@@ -36,7 +33,7 @@ import parallel.mutable.ParArray
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParallelizable[T, ParArray[T]] {
+trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParallelizable[T, ParArray[T]] {
private def elementClass: Class[_] =
arrayElementClass(repr.getClass)
@@ -64,9 +61,9 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParalleliza
* @param asTrav A function that converts elements of this array to rows - arrays of type `U`.
* @return An array obtained by concatenating rows of this array.
*/
- def flatten[U](implicit asTrav: T => collection.Traversable[U], m: ClassTag[U]): Array[U] = {
+ def flatten[U](implicit asTrav: T => scala.collection.Traversable[U], m: ClassTag[U]): Array[U] = {
val b = Array.newBuilder[U]
- b.sizeHint(map{case is: collection.IndexedSeq[_] => is.size case _ => 0}.sum)
+ b.sizeHint(map{case is: scala.collection.IndexedSeq[_] => is.size case _ => 0}.sum)
for (xs <- this)
b ++= asTrav(xs)
b.result
@@ -105,7 +102,7 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParalleliza
object ArrayOps {
/** A class of `ArrayOps` for arrays containing reference types. */
- class ofRef[T <: AnyRef](override val repr: Array[T]) extends ArrayOps[T] with ArrayLike[T, Array[T]] {
+ final class ofRef[T <: AnyRef](override val repr: Array[T]) extends AnyVal with ArrayOps[T] with ArrayLike[T, Array[T]] {
override protected[this] def thisCollection: WrappedArray[T] = new WrappedArray.ofRef[T](repr)
override protected[this] def toCollection(repr: Array[T]): WrappedArray[T] = new WrappedArray.ofRef[T](repr)
@@ -117,7 +114,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `byte`s. */
- class ofByte(override val repr: Array[Byte]) extends ArrayOps[Byte] with ArrayLike[Byte, Array[Byte]] {
+ final class ofByte(override val repr: Array[Byte]) extends AnyVal with ArrayOps[Byte] with ArrayLike[Byte, Array[Byte]] {
override protected[this] def thisCollection: WrappedArray[Byte] = new WrappedArray.ofByte(repr)
override protected[this] def toCollection(repr: Array[Byte]): WrappedArray[Byte] = new WrappedArray.ofByte(repr)
@@ -129,7 +126,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `short`s. */
- class ofShort(override val repr: Array[Short]) extends ArrayOps[Short] with ArrayLike[Short, Array[Short]] {
+ final class ofShort(override val repr: Array[Short]) extends AnyVal with ArrayOps[Short] with ArrayLike[Short, Array[Short]] {
override protected[this] def thisCollection: WrappedArray[Short] = new WrappedArray.ofShort(repr)
override protected[this] def toCollection(repr: Array[Short]): WrappedArray[Short] = new WrappedArray.ofShort(repr)
@@ -141,7 +138,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `char`s. */
- class ofChar(override val repr: Array[Char]) extends ArrayOps[Char] with ArrayLike[Char, Array[Char]] {
+ final class ofChar(override val repr: Array[Char]) extends AnyVal with ArrayOps[Char] with ArrayLike[Char, Array[Char]] {
override protected[this] def thisCollection: WrappedArray[Char] = new WrappedArray.ofChar(repr)
override protected[this] def toCollection(repr: Array[Char]): WrappedArray[Char] = new WrappedArray.ofChar(repr)
@@ -153,7 +150,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `int`s. */
- class ofInt(override val repr: Array[Int]) extends ArrayOps[Int] with ArrayLike[Int, Array[Int]] {
+ final class ofInt(override val repr: Array[Int]) extends AnyVal with ArrayOps[Int] with ArrayLike[Int, Array[Int]] {
override protected[this] def thisCollection: WrappedArray[Int] = new WrappedArray.ofInt(repr)
override protected[this] def toCollection(repr: Array[Int]): WrappedArray[Int] = new WrappedArray.ofInt(repr)
@@ -165,7 +162,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `long`s. */
- class ofLong(override val repr: Array[Long]) extends ArrayOps[Long] with ArrayLike[Long, Array[Long]] {
+ final class ofLong(override val repr: Array[Long]) extends AnyVal with ArrayOps[Long] with ArrayLike[Long, Array[Long]] {
override protected[this] def thisCollection: WrappedArray[Long] = new WrappedArray.ofLong(repr)
override protected[this] def toCollection(repr: Array[Long]): WrappedArray[Long] = new WrappedArray.ofLong(repr)
@@ -177,7 +174,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `float`s. */
- class ofFloat(override val repr: Array[Float]) extends ArrayOps[Float] with ArrayLike[Float, Array[Float]] {
+ final class ofFloat(override val repr: Array[Float]) extends AnyVal with ArrayOps[Float] with ArrayLike[Float, Array[Float]] {
override protected[this] def thisCollection: WrappedArray[Float] = new WrappedArray.ofFloat(repr)
override protected[this] def toCollection(repr: Array[Float]): WrappedArray[Float] = new WrappedArray.ofFloat(repr)
@@ -189,7 +186,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `double`s. */
- class ofDouble(override val repr: Array[Double]) extends ArrayOps[Double] with ArrayLike[Double, Array[Double]] {
+ final class ofDouble(override val repr: Array[Double]) extends AnyVal with ArrayOps[Double] with ArrayLike[Double, Array[Double]] {
override protected[this] def thisCollection: WrappedArray[Double] = new WrappedArray.ofDouble(repr)
override protected[this] def toCollection(repr: Array[Double]): WrappedArray[Double] = new WrappedArray.ofDouble(repr)
@@ -201,7 +198,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `boolean`s. */
- class ofBoolean(override val repr: Array[Boolean]) extends ArrayOps[Boolean] with ArrayLike[Boolean, Array[Boolean]] {
+ final class ofBoolean(override val repr: Array[Boolean]) extends AnyVal with ArrayOps[Boolean] with ArrayLike[Boolean, Array[Boolean]] {
override protected[this] def thisCollection: WrappedArray[Boolean] = new WrappedArray.ofBoolean(repr)
override protected[this] def toCollection(repr: Array[Boolean]): WrappedArray[Boolean] = new WrappedArray.ofBoolean(repr)
@@ -213,7 +210,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays of `Unit` types. */
- class ofUnit(override val repr: Array[Unit]) extends ArrayOps[Unit] with ArrayLike[Unit, Array[Unit]] {
+ final class ofUnit(override val repr: Array[Unit]) extends AnyVal with ArrayOps[Unit] with ArrayLike[Unit, Array[Unit]] {
override protected[this] def thisCollection: WrappedArray[Unit] = new WrappedArray.ofUnit(repr)
override protected[this] def toCollection(repr: Array[Unit]): WrappedArray[Unit] = new WrappedArray.ofUnit(repr)
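With `ArrayOps` now a universal trait (`extends Any`) and each `ofXxx` wrapper a `final ... extends AnyVal` value class, the compiler can avoid allocating the wrapper when methods are defined directly on the value class. A generic sketch of that enrichment pattern with made-up names (not the library code itself; methods inherited from universal traits may still box):

import scala.language.implicitConversions

// Value-class enrichment: a final wrapper over a single val that extends AnyVal.
final class RichIntArray(val repr: Array[Int]) extends AnyVal {
  def sumPlus(bonus: Int): Int = repr.sum + bonus
}

implicit def enrichIntArray(xs: Array[Int]): RichIntArray = new RichIntArray(xs)

Array(1, 2, 3).sumPlus(10)  // 16; no RichIntArray instance is allocated for this call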
diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala
index d0eaee348b..60baf7b35b 100644
--- a/src/library/scala/collection/mutable/ArraySeq.scala
+++ b/src/library/scala/collection/mutable/ArraySeq.scala
@@ -89,6 +89,13 @@ extends AbstractSeq[A]
Array.copy(array, 0, xs, start, len1)
}
+ override def clone(): ArraySeq[A] = {
+ val cloned = array.clone.asInstanceOf[Array[AnyRef]]
+ new ArraySeq[A](length) {
+ override val array = cloned
+ }
+ }
+
}
/** $factoryInfo
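The `clone()` added to `ArraySeq` above copies the backing array into an anonymous subclass, so the clone and the original no longer share storage. A quick usage check (illustrative only):

import scala.collection.mutable.ArraySeq

val xs = ArraySeq(1, 2, 3)
val ys = xs.clone()
ys(0) = 99

xs(0)  // still 1: the clone owns its own copy of the backing array
ys(0)  // 99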
diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala
index 8f834d265b..277d48c545 100644
--- a/src/library/scala/collection/mutable/ArrayStack.scala
+++ b/src/library/scala/collection/mutable/ArrayStack.scala
@@ -6,11 +6,12 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
-import reflect.ClassTag
+import scala.reflect.ClassTag
/** Factory object for the `ArrayStack` class.
*
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index 3274fe6194..91983ba0d2 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -13,7 +13,7 @@ package mutable
import generic._
import script._
-import annotation.{migration, bridge}
+import scala.annotation.{migration, bridge}
/** A template trait for buffers of type `Buffer[A]`.
*
@@ -93,7 +93,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
* @throws IndexOutOfBoundsException if the index `n` is not in the valid range
* `0 <= n <= length`.
*/
- def insertAll(n: Int, elems: collection.Traversable[A])
+ def insertAll(n: Int, elems: scala.collection.Traversable[A])
/** Removes the element at a given index from this buffer.
*
@@ -252,4 +252,14 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
*/
@migration("`--` creates a new buffer. Use `--=` to remove an element from this buffer and return that buffer itself.", "2.8.0")
override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq
+
+ /** Return a clone of this buffer.
+ *
+ * @return a `Buffer` with the same elements.
+ */
+ override def clone(): This = {
+ val bf = newBuilder
+ bf ++= this
+ bf.result.asInstanceOf[This]
+ }
}
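Moving `clone()` up into `BufferLike` and implementing it via `newBuilder` gives every `Buffer` an element-copying clone, not just `ArrayBuffer` (whose own override is removed earlier in this diff). For example, with a `ListBuffer`:

import scala.collection.mutable.ListBuffer

val buf  = ListBuffer(1, 2, 3)
val copy = buf.clone()
copy += 4

buf   // ListBuffer(1, 2, 3): the original is untouched
copy  // ListBuffer(1, 2, 3, 4)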
diff --git a/src/library/scala/collection/mutable/Builder.scala b/src/library/scala/collection/mutable/Builder.scala
index bbf4f5889d..6dec6b221e 100644
--- a/src/library/scala/collection/mutable/Builder.scala
+++ b/src/library/scala/collection/mutable/Builder.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
@@ -62,9 +63,27 @@ trait Builder[-Elem, +To] extends Growable[Elem] {
* wrong, i.e. a different number of elements is added.
*
* @param coll the collection which serves as a hint for the result's size.
+ */
+ def sizeHint(coll: TraversableLike[_, _]) {
+ if (coll.isInstanceOf[collection.IndexedSeqLike[_,_]]) {
+ sizeHint(coll.size)
+ }
+ }
+
+ /** Gives a hint that one expects the `result` of this builder
+ * to have the same size as the given collection, plus some delta. This will
+ * provide a hint only if the collection is known to have a cheap
+ * `size` method. Currently this is assumed to be the case if and only if
+ * the collection is of type `IndexedSeqLike`.
+ * Some builder classes
+ * will optimize their representation based on the hint. However,
+ * builder implementations are still required to work correctly even if the hint is
+ * wrong, i.e. a different number of elements is added.
+ *
+ * @param coll the collection which serves as a hint for the result's size.
* @param delta a correction to add to the `coll.size` to produce the size hint.
*/
- def sizeHint(coll: TraversableLike[_, _], delta: Int = 0) {
+ def sizeHint(coll: TraversableLike[_, _], delta: Int) {
if (coll.isInstanceOf[collection.IndexedSeqLike[_,_]]) {
sizeHint(coll.size + delta)
}
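Splitting the defaulted `sizeHint(coll, delta = 0)` into two explicit overloads keeps single-argument callers binary compatible while preserving the delta form; either way the hint is only forwarded when the collection is an `IndexedSeqLike` with a cheap `size`. A minimal sketch of a builder that honours the hint, written against the 2.10-era `Builder` API shown in this diff (all names are illustrative):

import scala.collection.mutable

// A toy builder that pre-sizes its internal buffer when hinted.
// Builders must still behave correctly if the hint turns out to be wrong.
class VectorishBuilder[A] extends mutable.Builder[A, Vector[A]] {
  private var buf = new mutable.ArrayBuffer[A]()
  override def sizeHint(size: Int): Unit = buf.sizeHint(size)
  def +=(elem: A): this.type = { buf += elem; this }
  def clear(): Unit = buf = new mutable.ArrayBuffer[A]()
  def result(): Vector[A] = buf.toVector
}

val b = new VectorishBuilder[Int]
b.sizeHint(Vector(1, 2, 3), delta = 2)  // Vector is an IndexedSeqLike, so the hint is forwarded as 5
b ++= Seq(1, 2, 3)
b.result()  // Vector(1, 2, 3)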
diff --git a/src/library/scala/collection/mutable/Cloneable.scala b/src/library/scala/collection/mutable/Cloneable.scala
index 6daac3094a..a3c1b7213b 100644
--- a/src/library/scala/collection/mutable/Cloneable.scala
+++ b/src/library/scala/collection/mutable/Cloneable.scala
@@ -18,5 +18,5 @@ package mutable
* @tparam A Type of the elements contained in the collection, covariant and with reference types as upperbound.
*/
trait Cloneable[+A <: AnyRef] extends scala.Cloneable {
- override def clone: A = super.clone().asInstanceOf[A]
+ override def clone(): A = super.clone().asInstanceOf[A]
}
diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala
index cba4e9725e..b7c5f07502 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedList.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala
@@ -63,6 +63,13 @@ class DoubleLinkedList[A]() extends AbstractSeq[A]
}
override def companion: GenericCompanion[DoubleLinkedList] = DoubleLinkedList
+
+ // Accurately clone this collection. See SI-6296
+ override def clone(): DoubleLinkedList[A] = {
+ val builder = newBuilder
+ builder ++= this
+ builder.result
+ }
}
/** $factoryInfo
diff --git a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
index ebccacf976..feff48cca3 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
@@ -11,7 +11,7 @@
package scala.collection
package mutable
-import annotation.migration
+import scala.annotation.migration
/** This extensible class may be used as a basis for implementing double
* linked lists. Type variable `A` refers to the element type
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index f6d4cc31b6..74f576b0f7 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -44,7 +44,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
*/
@transient protected var sizemap: Array[Int] = null
- @transient var seedvalue: Int = tableSizeSeed
+ @transient protected var seedvalue: Int = tableSizeSeed
import HashTable.powerOfTwo
@@ -109,7 +109,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
}
/** Finds an entry in the hash table if such an element exists. */
- def findEntry(elem: A): Option[A] = {
+ protected def findEntry(elem: A): Option[A] = {
var h = index(elemHashCode(elem))
var entry = table(h)
while (null != entry && entry != elem) {
@@ -120,7 +120,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
}
/** Checks whether an element is contained in the hash table. */
- def containsEntry(elem: A): Boolean = {
+ protected def containsEntry(elem: A): Boolean = {
var h = index(elemHashCode(elem))
var entry = table(h)
while (null != entry && entry != elem) {
@@ -133,7 +133,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
/** Add entry if not yet in table.
* @return Returns `true` if a new entry was added, `false` otherwise.
*/
- def addEntry(elem: A) : Boolean = {
+ protected def addEntry(elem: A) : Boolean = {
var h = index(elemHashCode(elem))
var entry = table(h)
while (null != entry) {
@@ -150,7 +150,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
}
/** Removes an entry from the hash table, returning an option value with the element, or `None` if it didn't exist. */
- def removeEntry(elem: A) : Option[A] = {
+ protected def removeEntry(elem: A) : Option[A] = {
if (tableDebug) checkConsistent()
def precedes(i: Int, j: Int) = {
val d = table.length >> 1
@@ -185,7 +185,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
None
}
- def iterator: Iterator[A] = new AbstractIterator[A] {
+ protected def iterator: Iterator[A] = new AbstractIterator[A] {
private var i = 0
def hasNext: Boolean = {
while (i < table.length && (null == table(i))) i += 1
@@ -356,8 +356,8 @@ private[collection] object FlatHashTable {
*
* See SI-5293.
*/
- final def seedGenerator = new ThreadLocal[util.Random] {
- override def initialValue = new util.Random
+ final def seedGenerator = new ThreadLocal[scala.util.Random] {
+ override def initialValue = new scala.util.Random
}
/** The load factor for the hash table; must be < 500 (0.5)
@@ -365,7 +365,7 @@ private[collection] object FlatHashTable {
def defaultLoadFactor: Int = 450
final def loadFactorDenum = 1000
- def sizeForThreshold(size: Int, _loadFactor: Int) = math.max(32, (size.toLong * loadFactorDenum / _loadFactor).toInt)
+ def sizeForThreshold(size: Int, _loadFactor: Int) = scala.math.max(32, (size.toLong * loadFactorDenum / _loadFactor).toInt)
def newThreshold(_loadFactor: Int, size: Int) = {
val lf = _loadFactor
@@ -397,7 +397,7 @@ private[collection] object FlatHashTable {
//h = h + (h << 4)
//h ^ (h >>> 10)
- val improved = util.hashing.byteswap32(hcode)
+ val improved = scala.util.hashing.byteswap32(hcode)
// for the remainder, see SI-5293
// to ensure that different bits are used for different hash tables, we have to rotate based on the seed
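`scala.util.hashing.byteswap32`, now referenced by its fully qualified name here, is the hash improver shared by the hash tables: it scrambles the bits of an `Int` hash code so that keys whose hash codes differ only in a few bits still spread across a power-of-two-sized table. A quick look at its effect (concrete output values omitted, since they are just well-mixed integers):

import scala.util.hashing.byteswap32

// Consecutive keys map to widely scattered values after improvement ...
(0 to 3).map(byteswap32)
// ... so the low bits used for bucket selection differ as well.
(0 to 3).map(byteswap32(_) & 15)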
diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala
index bf640cdb90..be85df3c28 100644
--- a/src/library/scala/collection/mutable/HashMap.scala
+++ b/src/library/scala/collection/mutable/HashMap.scala
@@ -49,7 +49,7 @@ extends AbstractMap[A, B]
type Entry = DefaultEntry[A, B]
override def empty: HashMap[A, B] = HashMap.empty[A, B]
- override def clear() = clearTable()
+ override def clear() { clearTable() }
override def size: Int = tableSize
def this() = this(null)
@@ -57,22 +57,23 @@ extends AbstractMap[A, B]
override def par = new ParHashMap[A, B](hashTableContents)
// contains and apply overridden to avoid option allocations.
- override def contains(key: A) = findEntry(key) != null
+ override def contains(key: A): Boolean = findEntry(key) != null
+
override def apply(key: A): B = {
val result = findEntry(key)
- if (result == null) default(key)
+ if (result eq null) default(key)
else result.value
}
def get(key: A): Option[B] = {
val e = findEntry(key)
- if (e == null) None
+ if (e eq null) None
else Some(e.value)
}
override def put(key: A, value: B): Option[B] = {
- val e = findEntry(key)
- if (e == null) { addEntry(new Entry(key, value)); None }
+ val e = findOrAddEntry(key, value)
+ if (e eq null) None
else { val v = e.value; e.value = value; Some(v) }
}
@@ -85,9 +86,8 @@ extends AbstractMap[A, B]
}
def += (kv: (A, B)): this.type = {
- val e = findEntry(kv._1)
- if (e == null) addEntry(new Entry(kv._1, kv._2))
- else e.value = kv._2
+ val e = findOrAddEntry(kv._1, kv._2)
+ if (e ne null) e.value = kv._2
this
}
@@ -98,12 +98,12 @@ extends AbstractMap[A, B]
override def foreach[C](f: ((A, B)) => C): Unit = foreachEntry(e => f(e.key, e.value))
/* Override to avoid tuple allocation in foreach */
- override def keySet: collection.Set[A] = new DefaultKeySet {
+ override def keySet: scala.collection.Set[A] = new DefaultKeySet {
override def foreach[C](f: A => C) = foreachEntry(e => f(e.key))
}
/* Override to avoid tuple allocation in foreach */
- override def values: collection.Iterable[B] = new DefaultValuesIterable {
+ override def values: scala.collection.Iterable[B] = new DefaultValuesIterable {
override def foreach[C](f: B => C) = foreachEntry(e => f(e.value))
}
@@ -127,12 +127,19 @@ extends AbstractMap[A, B]
if (!isSizeMapDefined) sizeMapInitAndRebuild
} else sizeMapDisable
+ protected def createNewEntry[B1](key: A, value: B1): Entry = {
+ new Entry(key, value.asInstanceOf[B])
+ }
+
private def writeObject(out: java.io.ObjectOutputStream) {
- serializeTo(out, _.value)
+ serializeTo(out, { entry =>
+ out.writeObject(entry.key)
+ out.writeObject(entry.value)
+ })
}
private def readObject(in: java.io.ObjectInputStream) {
- init[B](in, new Entry(_, _))
+ init(in, createNewEntry(in.readObject().asInstanceOf[A], in.readObject()))
}
}
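`put` and `+=` above now call the new `findOrAddEntry`, which hashes the key once and either returns the existing entry or installs one created by `createNewEntry`, replacing the earlier find-then-add double lookup. The shape of that single-lookup insert, as a simplified standalone sketch (a toy chained table, not the library's classes):

// Minimal chained hash table: compute the bucket index once, search the chain,
// then either update the found entry in place or prepend a new one.
class TinyMap[K, V](buckets: Int = 16) {
  private final case class Entry(key: K, var value: V, var next: Entry)
  private val table = new Array[Entry](buckets)
  private def idx(k: K): Int = math.abs(k.## % buckets)

  def put(k: K, v: V): Option[V] = {
    val h = idx(k)                         // hash/index computed exactly once
    var e = table(h)
    while (e != null && e.key != k) e = e.next
    if (e != null) { val old = e.value; e.value = v; Some(old) }
    else { table(h) = Entry(k, v, table(h)); None }
  }

  def get(k: K): Option[V] = {
    var e = table(idx(k))
    while (e != null && e.key != k) e = e.next
    if (e != null) Some(e.value) else None
  }
}

val tm = new TinyMap[String, Int]()
tm.put("a", 1)  // None
tm.put("a", 2)  // Some(1)
tm.get("a")     // Some(2)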
diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala
index e040d1e421..a5b636c83d 100644
--- a/src/library/scala/collection/mutable/HashSet.scala
+++ b/src/library/scala/collection/mutable/HashSet.scala
@@ -12,7 +12,7 @@ package scala.collection
package mutable
import generic._
-import collection.parallel.mutable.ParHashSet
+import scala.collection.parallel.mutable.ParHashSet
/** This class implements mutable sets using a hashtable.
*
@@ -53,7 +53,7 @@ extends AbstractSet[A]
override def companion: GenericCompanion[HashSet] = HashSet
- override def size = tableSize
+ override def size: Int = tableSize
def contains(elem: A): Boolean = containsEntry(elem)
@@ -67,7 +67,9 @@ extends AbstractSet[A]
override def remove(elem: A): Boolean = removeEntry(elem).isDefined
- override def clear() = clearTable()
+ override def clear() { clearTable() }
+
+ override def iterator: Iterator[A] = super[FlatHashTable].iterator
override def foreach[U](f: A => U) {
var i = 0
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index 97e794f06e..eb6717393b 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -32,6 +32,9 @@ package mutable
* @tparam A type of the elements contained in this hash table.
*/
trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashUtils[A] {
+ // Replacing the Entry type parameter with an abstract type member here would avoid exposing
+ // implementation-specific entry classes such as `DefaultEntry` or `LinkedEntry` in the public API.
+ // However, it is probably too late now for such a breaking change.
import HashTable._
@transient protected var _loadFactor = defaultLoadFactor
@@ -52,7 +55,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
*/
@transient protected var sizemap: Array[Int] = null
- @transient var seedvalue: Int = tableSizeSeed
+ @transient protected var seedvalue: Int = tableSizeSeed
protected def tableSizeSeed = Integer.bitCount(table.length - 1)
@@ -75,11 +78,10 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
}
/**
- * Initializes the collection from the input stream. `f` will be called for each key/value pair
- * read from the input stream in the order determined by the stream. This is useful for
- * structures where iteration order is important (e.g. LinkedHashMap).
+ * Initializes the collection from the input stream. `readEntry` will be called for each
+ * entry to be read from the input stream.
*/
- private[collection] def init[B](in: java.io.ObjectInputStream, f: (A, B) => Entry) {
+ private[collection] def init(in: java.io.ObjectInputStream, readEntry: => Entry) {
in.defaultReadObject
_loadFactor = in.readInt()
@@ -100,35 +102,34 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
var index = 0
while (index < size) {
- addEntry(f(in.readObject().asInstanceOf[A], in.readObject().asInstanceOf[B]))
+ addEntry(readEntry)
index += 1
}
}
/**
* Serializes the collection to the output stream by saving the load factor, collection
- * size, collection keys and collection values. `value` is responsible for providing a value
- * from an entry.
+ * size and collection entries. `writeEntry` is responsible for writing an entry to the stream.
*
- * `foreach` determines the order in which the key/value pairs are saved to the stream. To
+ * `foreachEntry` determines the order in which the key/value pairs are saved to the stream. To
* deserialize, `init` should be used.
*/
- private[collection] def serializeTo[B](out: java.io.ObjectOutputStream, value: Entry => B) {
+ private[collection] def serializeTo(out: java.io.ObjectOutputStream, writeEntry: Entry => Unit) {
out.defaultWriteObject
out.writeInt(_loadFactor)
out.writeInt(tableSize)
out.writeInt(seedvalue)
out.writeBoolean(isSizeMapDefined)
- foreachEntry { entry =>
- out.writeObject(entry.key)
- out.writeObject(value(entry))
- }
+
+ foreachEntry(writeEntry)
}
/** Find entry with given key in table, null if not found.
*/
- protected def findEntry(key: A): Entry = {
- val h = index(elemHashCode(key))
+ protected def findEntry(key: A): Entry =
+ findEntry0(key, index(elemHashCode(key)))
+
+ private[this] def findEntry0(key: A, h: Int): Entry = {
var e = table(h).asInstanceOf[Entry]
while (e != null && !elemEquals(e.key, key)) e = e.next
e
@@ -138,7 +139,10 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
* pre: no entry with same key exists
*/
protected def addEntry(e: Entry) {
- val h = index(elemHashCode(e.key))
+ addEntry0(e, index(elemHashCode(e.key)))
+ }
+
+ private[this] def addEntry0(e: Entry, h: Int) {
e.next = table(h).asInstanceOf[Entry]
table(h) = e
tableSize = tableSize + 1
@@ -147,6 +151,24 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
resize(2 * table.length)
}
+ /** Finds the entry with the given key in the table, or adds a new one if not found.
+ * May be somewhat faster than the `findEntry`/`addEntry` pair, as it
+ * computes the entry's hash index only once.
+ * Returns the entry found in the table, or null if a new entry was added.
+ * New entries are created by calling the `createNewEntry` method.
+ */
+ protected def findOrAddEntry[B](key: A, value: B): Entry = {
+ val h = index(elemHashCode(key))
+ val e = findEntry0(key, h)
+ if (e ne null) e else { addEntry0(createNewEntry(key, value), h); null }
+ }
+
+ /** Creates a new entry to be immediately inserted into the hashtable.
+ * This method is guaranteed to be called at most once, and only when the entry
+ * will actually be added. In other words, an implementation may be side-effecting.
+ */
+ protected def createNewEntry[B](key: A, value: B): Entry
+
/** Remove entry from table if present.
*/
protected def removeEntry(key: A) : Entry = {
@@ -195,7 +217,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
}
/** Avoid iterator for a 2x faster traversal. */
- protected def foreachEntry[C](f: Entry => C) {
+ protected def foreachEntry[U](f: Entry => U) {
val iterTable = table
var idx = lastPopulatedIndex
var es = iterTable(idx)
@@ -401,7 +423,7 @@ private[collection] object HashTable {
*
* For performance reasons, we avoid this improvement.
* */
- val i = util.hashing.byteswap32(hcode)
+ val i = scala.util.hashing.byteswap32(hcode)
/* Jenkins hash
* for range 0-10000, output has the msb set to zero */
@@ -452,7 +474,7 @@ private[collection] object HashTable {
val seedvalue: Int,
val sizemap: Array[Int]
) {
- import collection.DebugUtils._
+ import scala.collection.DebugUtils._
private[collection] def debugInformation = buildString {
append =>
append("Hash table contents")
diff --git a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
index 3232179dbb..1a3b7119a9 100644
--- a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
@@ -11,7 +11,7 @@
package scala.collection
package mutable
-import annotation.migration
+import scala.annotation.migration
/** This class can be used as an adaptor to create mutable maps from
* immutable map implementations. Only method `empty` has
@@ -42,17 +42,17 @@ extends AbstractMap[A, B]
override def isDefinedAt(key: A) = imap.isDefinedAt(key)
- override def keySet: collection.Set[A] = imap.keySet
+ override def keySet: scala.collection.Set[A] = imap.keySet
override def keysIterator: Iterator[A] = imap.keysIterator
@migration("`keys` returns Iterable[A] rather than Iterator[A].", "2.8.0")
- override def keys: collection.Iterable[A] = imap.keys
+ override def keys: scala.collection.Iterable[A] = imap.keys
override def valuesIterator: Iterator[B] = imap.valuesIterator
@migration("`values` returns Iterable[B] rather than Iterator[B].", "2.8.0")
- override def values: collection.Iterable[B] = imap.values
+ override def values: scala.collection.Iterable[B] = imap.values
def iterator: Iterator[(A, B)] = imap.iterator
diff --git a/src/library/scala/collection/mutable/IndexedSeqLike.scala b/src/library/scala/collection/mutable/IndexedSeqLike.scala
index 2ff7ac8272..b3fe95ef27 100644
--- a/src/library/scala/collection/mutable/IndexedSeqLike.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqLike.scala
@@ -36,7 +36,7 @@ import generic._
* @define willNotTerminateInf
* @define mayNotTerminateInf
*/
-trait IndexedSeqLike[A, +Repr] extends scala.collection.IndexedSeqLike[A, Repr] { self =>
+trait IndexedSeqLike[A, +Repr] extends Any with scala.collection.IndexedSeqLike[A, Repr] { self =>
override protected[this] def thisCollection: IndexedSeq[A] = this.asInstanceOf[IndexedSeq[A]]
override protected[this] def toCollection(repr: Repr): IndexedSeq[A] = repr.asInstanceOf[IndexedSeq[A]]
@@ -53,6 +53,7 @@ trait IndexedSeqLike[A, +Repr] extends scala.collection.IndexedSeqLike[A, Repr]
*/
override def view = new IndexedSeqView[A, Repr] {
protected lazy val underlying = self.repr
+ override def isEmpty = self.isEmpty
override def iterator = self.iterator
override def length = self.length
override def apply(idx: Int) = self.apply(idx)
diff --git a/src/library/scala/collection/mutable/IndexedSeqOptimized.scala b/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
index 819d06476a..506d2d6736 100755
--- a/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
@@ -17,4 +17,4 @@ import generic._
*
* @since 2.8
*/
-trait IndexedSeqOptimized[A, +Repr] extends IndexedSeqLike[A, Repr] with scala.collection.IndexedSeqOptimized[A, Repr]
+trait IndexedSeqOptimized[A, +Repr] extends Any with IndexedSeqLike[A, Repr] with scala.collection.IndexedSeqOptimized[A, Repr]
diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala
index a0de2ec8ad..ab3d0ec312 100644
--- a/src/library/scala/collection/mutable/IndexedSeqView.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqView.scala
@@ -14,7 +14,7 @@ package mutable
import generic._
import TraversableView.NoBuilder
-import language.implicitConversions
+import scala.language.implicitConversions
/** A non-strict view of a mutable `IndexedSeq`.
* $viewInfo
diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala
index 5643e070f8..5028884a8e 100644
--- a/src/library/scala/collection/mutable/LinkedHashMap.scala
+++ b/src/library/scala/collection/mutable/LinkedHashMap.scala
@@ -67,23 +67,9 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
}
override def put(key: A, value: B): Option[B] = {
- val e = findEntry(key)
- if (e == null) {
- val e = new Entry(key, value)
- addEntry(e)
- updateLinkedEntries(e)
- None
- } else {
- val v = e.value
- e.value = value
- Some(v)
- }
- }
-
- private def updateLinkedEntries(e: Entry) {
- if (firstEntry == null) firstEntry = e
- else { lastEntry.later = e; e.earlier = lastEntry }
- lastEntry = e
+ val e = findOrAddEntry(key, value)
+ if (e eq null) None
+ else { val v = e.value; e.value = value; Some(v) }
}
override def remove(key: A): Option[B] = {
@@ -143,7 +129,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
else Iterator.empty.next
}
- override def foreach[U](f: ((A, B)) => U) = {
+ override def foreach[U](f: ((A, B)) => U) {
var cur = firstEntry
while (cur ne null) {
f((cur.key, cur.value))
@@ -151,7 +137,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
}
}
- protected override def foreachEntry[C](f: Entry => C) {
+ protected override def foreachEntry[U](f: Entry => U) {
var cur = firstEntry
while (cur ne null) {
f(cur)
@@ -159,22 +145,29 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
}
}
+ protected def createNewEntry[B1](key: A, value: B1): Entry = {
+ val e = new Entry(key, value.asInstanceOf[B])
+ if (firstEntry eq null) firstEntry = e
+ else { lastEntry.later = e; e.earlier = lastEntry }
+ lastEntry = e
+ e
+ }
+
override def clear() {
clearTable()
firstEntry = null
}
private def writeObject(out: java.io.ObjectOutputStream) {
- serializeTo(out, _.value)
+ serializeTo(out, { entry =>
+ out.writeObject(entry.key)
+ out.writeObject(entry.value)
+ })
}
private def readObject(in: java.io.ObjectInputStream) {
firstEntry = null
lastEntry = null
- init[B](in, { (key, value) =>
- val entry = new Entry(key, value)
- updateLinkedEntries(entry)
- entry
- })
+ init(in, createNewEntry(in.readObject().asInstanceOf[A], in.readObject()))
}
}
diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala
index 3f789f9fa2..26a428c259 100644
--- a/src/library/scala/collection/mutable/LinkedHashSet.scala
+++ b/src/library/scala/collection/mutable/LinkedHashSet.scala
@@ -15,10 +15,9 @@ import generic._
/** This class implements mutable sets using a hashtable.
* The iterator and all traversal methods of this class visit elements in the order they were inserted.
*
- * $cannotStoreNull
- *
* @author Matthias Zenger
* @author Martin Odersky
+ * @author Pavel Pavlov
* @version 2.0, 31/12/2006
* @since 1
*
@@ -43,46 +42,82 @@ class LinkedHashSet[A] extends AbstractSet[A]
with Set[A]
with GenericSetTemplate[A, LinkedHashSet]
with SetLike[A, LinkedHashSet[A]]
- with FlatHashTable[A]
+ with HashTable[A, LinkedHashSet.Entry[A]]
with Serializable
{
override def companion: GenericCompanion[LinkedHashSet] = LinkedHashSet
- @transient private[this] var ordered = new ListBuffer[A]
+ type Entry = LinkedHashSet.Entry[A]
+
+ @transient protected var firstEntry: Entry = null
+ @transient protected var lastEntry: Entry = null
- override def size = tableSize
+ override def size: Int = tableSize
- def contains(elem: A): Boolean = containsEntry(elem)
+ def contains(elem: A): Boolean = findEntry(elem) ne null
def += (elem: A): this.type = { add(elem); this }
def -= (elem: A): this.type = { remove(elem); this }
- override def add(elem: A): Boolean =
- if (addEntry(elem)) { ordered += elem; true }
- else false
+ override def add(elem: A): Boolean = findOrAddEntry(elem, null) eq null
+
+ override def remove(elem: A): Boolean = {
+ val e = removeEntry(elem)
+ if (e eq null) false
+ else {
+ if (e.earlier eq null) firstEntry = e.later
+ else e.earlier.later = e.later
+ if (e.later eq null) lastEntry = e.earlier
+ else e.later.earlier = e.earlier
+ true
+ }
+ }
- override def remove(elem: A): Boolean =
- removeEntry(elem) match {
- case None => false
- case _ => ordered -= elem; true
+ def iterator: Iterator[A] = new AbstractIterator[A] {
+ private var cur = firstEntry
+ def hasNext = cur ne null
+ def next =
+ if (hasNext) { val res = cur.key; cur = cur.later; res }
+ else Iterator.empty.next
+ }
+
+ override def foreach[U](f: A => U) {
+ var cur = firstEntry
+ while (cur ne null) {
+ f(cur.key)
+ cur = cur.later
}
+ }
- override def clear() {
- ordered.clear()
- clearTable()
+ protected override def foreachEntry[U](f: Entry => U) {
+ var cur = firstEntry
+ while (cur ne null) {
+ f(cur)
+ cur = cur.later
+ }
}
- override def iterator: Iterator[A] = ordered.iterator
+ protected def createNewEntry[B](key: A, dummy: B): Entry = {
+ val e = new Entry(key)
+ if (firstEntry eq null) firstEntry = e
+ else { lastEntry.later = e; e.earlier = lastEntry }
+ lastEntry = e
+ e
+ }
- override def foreach[U](f: A => U) = ordered foreach f
+ override def clear() {
+ clearTable()
+ firstEntry = null
+ }
- private def writeObject(s: java.io.ObjectOutputStream) {
- serializeTo(s)
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ serializeTo(out, { e => out.writeObject(e.key) })
}
private def readObject(in: java.io.ObjectInputStream) {
- ordered = new ListBuffer[A]
- init(in, ordered += _)
+ firstEntry = null
+ lastEntry = null
+ init(in, createNewEntry(in.readObject().asInstanceOf[A], null))
}
}
@@ -93,5 +128,13 @@ class LinkedHashSet[A] extends AbstractSet[A]
object LinkedHashSet extends MutableSetFactory[LinkedHashSet] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinkedHashSet[A]] = setCanBuildFrom[A]
override def empty[A]: LinkedHashSet[A] = new LinkedHashSet[A]
+
+ /** Class for the linked hash set entry, used internally.
+ * @since 2.10
+ */
+ private[scala] final class Entry[A](val key: A) extends HashEntry[A, Entry[A]] with Serializable {
+ var earlier: Entry[A] = null
+ var later: Entry[A] = null
+ }
}
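The switch from `FlatHashTable` plus a side `ListBuffer` to an entry-linked `HashTable` keeps the user-visible guarantee that traversal follows insertion order; a small sketch of that behaviour (the demo object name is ours):

  import scala.collection.mutable.LinkedHashSet

  object LinkedHashSetOrderDemo extends App {
    val s = LinkedHashSet(3, 1, 2)
    assert(s.toList == List(3, 1, 2))   // insertion order, not hash order
    s -= 1
    s += 1
    assert(s.toList == List(3, 2, 1))   // a re-added element is linked at the end
  }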
diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala
index 07a8501ca4..307836907c 100644
--- a/src/library/scala/collection/mutable/LinkedListLike.scala
+++ b/src/library/scala/collection/mutable/LinkedListLike.scala
@@ -12,7 +12,7 @@ package scala.collection
package mutable
import generic._
-import annotation.tailrec
+import scala.annotation.tailrec
/** This extensible class may be used as a basis for implementing linked
* list. Type variable `A` refers to the element type of the
@@ -180,4 +180,14 @@ trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends Seq
these = these.next
}
}
+
+ /** Return a clone of this list.
+ *
+ * @return a `LinkedList` with the same elements.
+ */
+ override def clone(): This = {
+ val bf = newBuilder
+ bf ++= this
+ bf.result
+ }
}
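The builder-based `clone` added here copies the nodes, so mutating the copy leaves the original list untouched; a brief sketch (the demo object name is ours):

  import scala.collection.mutable.LinkedList

  object LinkedListCloneDemo extends App {
    val xs = LinkedList(1, 2, 3)
    val ys = xs.clone()
    ys(0) = 99                          // update the clone in place
    assert(xs.toList == List(1, 2, 3))  // the original keeps its own nodes
    assert(ys.toList == List(99, 2, 3))
  }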
diff --git a/src/library/scala/collection/mutable/Map.scala b/src/library/scala/collection/mutable/Map.scala
index 207b3f3324..8ae3f20cc8 100644
--- a/src/library/scala/collection/mutable/Map.scala
+++ b/src/library/scala/collection/mutable/Map.scala
@@ -71,7 +71,7 @@ object Map extends MutableMapFactory[Map] {
def empty[A, B]: Map[A, B] = new HashMap[A, B]
- class WithDefault[A, B](underlying: Map[A, B], d: A => B) extends collection.Map.WithDefault(underlying, d) with Map[A, B] {
+ class WithDefault[A, B](underlying: Map[A, B], d: A => B) extends scala.collection.Map.WithDefault(underlying, d) with Map[A, B] {
override def += (kv: (A, B)) = {underlying += kv; this}
def -= (key: A) = {underlying -= key; this}
override def empty = new WithDefault(underlying.empty, d)
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index 3046207533..56be5adcca 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -11,7 +11,7 @@ package scala.collection
package mutable
import generic._
-import annotation.{migration, bridge}
+import scala.annotation.{migration, bridge}
import parallel.mutable.ParMap
/** A template trait for mutable maps.
diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala
index c9e44ac165..6fa1f4872a 100644
--- a/src/library/scala/collection/mutable/MutableList.scala
+++ b/src/library/scala/collection/mutable/MutableList.scala
@@ -140,6 +140,13 @@ extends AbstractSeq[A]
}
def result = this
+
+ override def clone(): MutableList[A] = {
+ val bf = newBuilder
+ bf ++= seq
+ bf.result
+ }
+
}
diff --git a/src/library/scala/collection/mutable/ObservableBuffer.scala b/src/library/scala/collection/mutable/ObservableBuffer.scala
index 6b5079e402..aaf26327b2 100644
--- a/src/library/scala/collection/mutable/ObservableBuffer.scala
+++ b/src/library/scala/collection/mutable/ObservableBuffer.scala
@@ -70,8 +70,8 @@ trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoa
def undo() { throw new UnsupportedOperationException("cannot undo") }
})
}
-
- abstract override def insertAll(n: Int, elems: collection.Traversable[A]) {
+
+ abstract override def insertAll(n: Int, elems: scala.collection.Traversable[A]) {
super.insertAll(n, elems)
var curr = n - 1
val msg = elems.foldLeft(new Script[A]() with Undoable {
@@ -83,5 +83,5 @@ trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoa
}
publish(msg)
}
-
+
}
diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala
index 2634deb819..11055f8986 100644
--- a/src/library/scala/collection/mutable/OpenHashMap.scala
+++ b/src/library/scala/collection/mutable/OpenHashMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/**
@@ -195,7 +196,7 @@ extends AbstractMap[Key, Value]
}
}
- override def clone = {
+ override def clone() = {
val it = new OpenHashMap[Key, Value]
foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get));
it
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index e37cbdc712..1fc3928531 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -43,12 +43,12 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
{
import ord._
- private final class ResizableArrayAccess[A] extends AbstractSeq[A] with ResizableArray[A] {
- @inline def p_size0 = size0
- @inline def p_size0_=(s: Int) = size0 = s
- @inline def p_array = array
- @inline def p_ensureSize(n: Int) = super.ensureSize(n)
- @inline def p_swap(a: Int, b: Int) = super.swap(a, b)
+ private class ResizableArrayAccess[A] extends AbstractSeq[A] with ResizableArray[A] {
+ def p_size0 = size0
+ def p_size0_=(s: Int) = size0 = s
+ def p_array = array
+ def p_ensureSize(n: Int) = super.ensureSize(n)
+ def p_swap(a: Int, b: Int) = super.swap(a, b)
}
protected[this] override def newBuilder = new PriorityQueue[A]
@@ -166,7 +166,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* Note: The order of elements returned is undefined.
* If you want to traverse the elements in priority queue
* order, use `clone().dequeueAll.iterator`.
- *
+ *
* @return an iterator over all the elements.
*/
override def iterator: Iterator[A] = new AbstractIterator[A] {
@@ -193,7 +193,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* @return A reversed priority queue.
*/
def reverse = {
- val revq = new PriorityQueue[A]()(new math.Ordering[A] {
+ val revq = new PriorityQueue[A]()(new scala.math.Ordering[A] {
def compare(x: A, y: A) = ord.compare(y, x)
})
for (i <- 1 until resarr.length) revq += resarr(i)
@@ -204,7 +204,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* than that returned by the method `iterator`.
*
* Note: The order of elements returned is undefined.
- *
+ *
* @return an iterator over all elements sorted in descending order.
*/
def reverseIterator: Iterator[A] = new AbstractIterator[A] {
@@ -236,11 +236,11 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* @return the string representation of this queue.
*/
override def toString() = toList.mkString("PriorityQueue(", ", ", ")")
-
+
/** Converts this $coll to a list.
*
* Note: the order of elements is undefined.
- *
+ *
* @return a list containing all elements of this $coll.
*/
override def toList = this.iterator.toList
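`reverse` builds a new queue under the opposite ordering, which is what makes min-style dequeuing possible on the default max-heap; a short sketch (the demo object name is ours):

  import scala.collection.mutable.PriorityQueue

  object PriorityQueueReverseDemo extends App {
    val pq = PriorityQueue(3, 1, 2)
    assert(pq.dequeue() == 3)              // default Int ordering: largest first
    val rev = PriorityQueue(3, 1, 2).reverse
    assert(rev.dequeue() == 1)             // reversed ordering: smallest first
  }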
diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala
index 2aa19d6cb0..fc7e76125e 100644
--- a/src/library/scala/collection/mutable/Queue.scala
+++ b/src/library/scala/collection/mutable/Queue.scala
@@ -32,6 +32,7 @@ import generic._
*/
class Queue[A]
extends MutableList[A]
+ with LinearSeqOptimized[A, Queue[A]]
with GenericTraversableTemplate[A, Queue]
with Cloneable[Queue[A]]
with Serializable
@@ -165,6 +166,23 @@ extends MutableList[A]
* @return the first element.
*/
def front: A = head
+
+
+ // TODO - Don't override this just so that `new` creates the appropriate type.
+ override def tail: Queue[A] = {
+ require(nonEmpty, "tail of empty list")
+ val tl = new Queue[A]
+ tl.first0 = first0.tail
+ tl.last0 = last0
+ tl.len = len - 1
+ tl
+ }
+
+ override def clone(): Queue[A] = {
+ val bf = newBuilder
+ bf ++= seq
+ bf.result
+ }
}
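The two overrides above exist so that `tail` stays a `Queue` (instead of widening to `MutableList`) and so that `clone` produces an independent queue; a quick sketch (the demo object name is ours):

  import scala.collection.mutable.Queue

  object QueueTailCloneDemo extends App {
    val q = Queue(1, 2, 3)
    val t: Queue[Int] = q.tail          // tail keeps the Queue type thanks to the override above
    assert(t.head == 2)
    val c = q.clone()
    c.dequeue()
    assert(c.toList == List(2, 3))
    assert(q.toList == List(1, 2, 3))   // the clone does not share mutable state with the original
  }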
diff --git a/src/library/scala/collection/mutable/ResizableArray.scala b/src/library/scala/collection/mutable/ResizableArray.scala
index c7d10573c9..d29ee67580 100644
--- a/src/library/scala/collection/mutable/ResizableArray.scala
+++ b/src/library/scala/collection/mutable/ResizableArray.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
@@ -96,7 +97,7 @@ trait ResizableArray[A] extends IndexedSeq[A]
newsize = newsize * 2
val newar: Array[AnyRef] = new Array(newsize)
- compat.Platform.arraycopy(array, 0, newar, 0, size0)
+ scala.compat.Platform.arraycopy(array, 0, newar, 0, size0)
array = newar
}
}
@@ -112,7 +113,7 @@ trait ResizableArray[A] extends IndexedSeq[A]
/** Move parts of the array.
*/
protected def copy(m: Int, n: Int, len: Int) {
- compat.Platform.arraycopy(array, m, array, n, len)
+ scala.compat.Platform.arraycopy(array, m, array, n, len)
}
}
diff --git a/src/library/scala/collection/mutable/SetBuilder.scala b/src/library/scala/collection/mutable/SetBuilder.scala
index 582ca898c7..0a95a18392 100644
--- a/src/library/scala/collection/mutable/SetBuilder.scala
+++ b/src/library/scala/collection/mutable/SetBuilder.scala
@@ -19,7 +19,7 @@ import generic._
* @param empty The empty element of the collection.
* @since 2.8
*/
-class SetBuilder[A, Coll <: collection.Set[A] with collection.SetLike[A, Coll]](empty: Coll) extends Builder[A, Coll] {
+class SetBuilder[A, Coll <: scala.collection.Set[A] with scala.collection.SetLike[A, Coll]](empty: Coll) extends Builder[A, Coll] {
protected var elems: Coll = empty
def +=(x: A): this.type = { elems = elems + x; this }
def clear() { elems = empty }
diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala
index 37313c8ca3..38342d4454 100644
--- a/src/library/scala/collection/mutable/SetLike.scala
+++ b/src/library/scala/collection/mutable/SetLike.scala
@@ -11,7 +11,7 @@ package mutable
import generic._
import script._
-import annotation.{ migration, bridge }
+import scala.annotation.{ migration, bridge }
import parallel.mutable.ParSet
/** A template trait for mutable sets of type `mutable.Set[A]`.
diff --git a/src/library/scala/collection/mutable/SortedSet.scala b/src/library/scala/collection/mutable/SortedSet.scala
index 78d12f3d64..809f584f4d 100644
--- a/src/library/scala/collection/mutable/SortedSet.scala
+++ b/src/library/scala/collection/mutable/SortedSet.scala
@@ -20,7 +20,7 @@ import generic._
* @author Lucien Pereira
*
*/
-trait SortedSet[A] extends collection.SortedSet[A] with collection.SortedSetLike[A,SortedSet[A]]
+trait SortedSet[A] extends scala.collection.SortedSet[A] with scala.collection.SortedSetLike[A,SortedSet[A]]
with mutable.Set[A] with mutable.SetLike[A, SortedSet[A]] {
/** Needs to be overridden in subclasses. */
diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala
index db9e48d1cf..1ba531ac82 100644
--- a/src/library/scala/collection/mutable/Stack.scala
+++ b/src/library/scala/collection/mutable/Stack.scala
@@ -12,9 +12,9 @@ package scala.collection
package mutable
import generic._
-import collection.immutable.{List, Nil}
-import collection.Iterator
-import annotation.migration
+import scala.collection.immutable.{List, Nil}
+import scala.collection.Iterator
+import scala.annotation.migration
/** Factory object for the `mutable.Stack` class.
*
diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala
index 08c881dbb8..92506548e9 100644
--- a/src/library/scala/collection/mutable/StringBuilder.scala
+++ b/src/library/scala/collection/mutable/StringBuilder.scala
@@ -10,7 +10,7 @@ package scala.collection
package mutable
import java.lang.{ StringBuilder => JavaStringBuilder }
-import annotation.migration
+import scala.annotation.migration
import immutable.StringLike
/** A builder for mutable sequence of characters. This class provides an API
diff --git a/src/library/scala/collection/mutable/Subscriber.scala b/src/library/scala/collection/mutable/Subscriber.scala
index c9ae87d558..83192124af 100644
--- a/src/library/scala/collection/mutable/Subscriber.scala
+++ b/src/library/scala/collection/mutable/Subscriber.scala
@@ -11,7 +11,7 @@ package mutable
/** `Subscriber[A, B]` objects may subscribe to events of type `A`
* published by an object of type `B`. `B` is typically a subtype of
- * [[scala.collection.immutable.Publisher]].
+ * [[scala.collection.mutable.Publisher]].
*
* @author Matthias Zenger
* @author Martin Odersky
diff --git a/src/library/scala/collection/mutable/SynchronizedMap.scala b/src/library/scala/collection/mutable/SynchronizedMap.scala
index 037b8ec5f5..6b3264a66d 100644
--- a/src/library/scala/collection/mutable/SynchronizedMap.scala
+++ b/src/library/scala/collection/mutable/SynchronizedMap.scala
@@ -11,7 +11,7 @@
package scala.collection
package mutable
-import annotation.migration
+import scala.annotation.migration
/** This class should be used as a mixin. It synchronizes the `Map`
* functions of the class into which it is mixed in.
@@ -41,14 +41,14 @@ trait SynchronizedMap[A, B] extends Map[A, B] {
override def transform(f: (A, B) => B): this.type = synchronized[this.type] { super.transform(f) }
override def retain(p: (A, B) => Boolean): this.type = synchronized[this.type] { super.retain(p) }
@migration("`values` returns `Iterable[B]` rather than `Iterator[B]`.", "2.8.0")
- override def values: collection.Iterable[B] = synchronized { super.values }
+ override def values: scala.collection.Iterable[B] = synchronized { super.values }
override def valuesIterator: Iterator[B] = synchronized { super.valuesIterator }
override def clone(): Self = synchronized { super.clone() }
override def foreach[U](f: ((A, B)) => U) = synchronized { super.foreach(f) }
override def apply(key: A): B = synchronized { super.apply(key) }
- override def keySet: collection.Set[A] = synchronized { super.keySet }
+ override def keySet: scala.collection.Set[A] = synchronized { super.keySet }
@migration("`keys` returns `Iterable[A]` rather than `Iterator[A]`.", "2.8.0")
- override def keys: collection.Iterable[A] = synchronized { super.keys }
+ override def keys: scala.collection.Iterable[A] = synchronized { super.keys }
override def keysIterator: Iterator[A] = synchronized { super.keysIterator }
override def isEmpty: Boolean = synchronized { super.isEmpty }
override def contains(key: A): Boolean = synchronized {super.contains(key) }
diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala
index 00675b9119..53b0c25a8f 100644
--- a/src/library/scala/collection/mutable/TreeSet.scala
+++ b/src/library/scala/collection/mutable/TreeSet.scala
@@ -103,7 +103,7 @@ class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with S
* the clone. So clone complexity in time is O(1).
*
*/
- override def clone: TreeSet[A] = {
+ override def clone(): TreeSet[A] = {
val clone = new TreeSet[A](base, from, until)
clone.avl = resolve.avl
clone.cardinality = resolve.cardinality
diff --git a/src/library/scala/collection/mutable/UnrolledBuffer.scala b/src/library/scala/collection/mutable/UnrolledBuffer.scala
index 6785aba10d..1c913c7ce7 100644
--- a/src/library/scala/collection/mutable/UnrolledBuffer.scala
+++ b/src/library/scala/collection/mutable/UnrolledBuffer.scala
@@ -8,11 +8,11 @@
package scala.collection.mutable
-import collection.AbstractIterator
-import collection.Iterator
-import collection.generic._
-import annotation.tailrec
-import reflect.ClassTag
+import scala.collection.AbstractIterator
+import scala.collection.Iterator
+import scala.collection.generic._
+import scala.annotation.tailrec
+import scala.reflect.ClassTag
/** A buffer that stores elements in an unrolled linked list.
*
@@ -43,11 +43,11 @@ import reflect.ClassTag
*/
@SerialVersionUID(1L)
class UnrolledBuffer[T](implicit val tag: ClassTag[T])
-extends collection.mutable.AbstractBuffer[T]
- with collection.mutable.Buffer[T]
- with collection.mutable.BufferLike[T, UnrolledBuffer[T]]
+extends scala.collection.mutable.AbstractBuffer[T]
+ with scala.collection.mutable.Buffer[T]
+ with scala.collection.mutable.BufferLike[T, UnrolledBuffer[T]]
with GenericClassTagTraversableTemplate[T, UnrolledBuffer]
- with collection.mutable.Builder[T, UnrolledBuffer[T]]
+ with scala.collection.mutable.Builder[T, UnrolledBuffer[T]]
with Serializable
{
import UnrolledBuffer.Unrolled
@@ -153,7 +153,7 @@ extends collection.mutable.AbstractBuffer[T]
this
}
- def insertAll(idx: Int, elems: collection.Traversable[T]) =
+ def insertAll(idx: Int, elems: scala.collection.Traversable[T]) =
if (idx >= 0 && idx <= sz) {
headptr.insertAll(idx, elems, this)
sz += elems.size
@@ -181,7 +181,7 @@ extends collection.mutable.AbstractBuffer[T]
}
override def clone(): UnrolledBuffer[T] = new UnrolledBuffer[T] ++= this
-
+
override def stringPrefix = "UnrolledBuffer"
}
@@ -285,7 +285,7 @@ object UnrolledBuffer extends ClassTagTraversableFactory[UnrolledBuffer] {
if (next eq null) true else false // checks if last node was thrown out
} else false
- @tailrec final def insertAll(idx: Int, t: collection.Traversable[T], buffer: UnrolledBuffer[T]): Unit = if (idx < size) {
+ @tailrec final def insertAll(idx: Int, t: scala.collection.Traversable[T], buffer: UnrolledBuffer[T]): Unit = if (idx < size) {
// divide this node at the appropriate position and insert all into head
// update new next
val newnextnode = new Unrolled[T](0, new Array(array.length), null, buff)
diff --git a/src/library/scala/collection/mutable/WrappedArray.scala b/src/library/scala/collection/mutable/WrappedArray.scala
index fec0fbaa3c..4d9b510e57 100644
--- a/src/library/scala/collection/mutable/WrappedArray.scala
+++ b/src/library/scala/collection/mutable/WrappedArray.scala
@@ -75,7 +75,7 @@ extends AbstractSeq[T]
override def stringPrefix = "WrappedArray"
/** Clones this object, including the underlying Array. */
- override def clone: WrappedArray[T] = WrappedArray make array.clone()
+ override def clone(): WrappedArray[T] = WrappedArray make array.clone()
/** Creates new builder for this collection ==> move to subclasses
*/
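As the Scaladoc above says, `clone()` copies the underlying array as well, so the wrapper and its clone do not alias; a tiny sketch (the demo object name is ours):

  import scala.collection.mutable.WrappedArray

  object WrappedArrayCloneDemo extends App {
    val wa: WrappedArray[Int] = Array(1, 2, 3)   // implicit wrapping from Predef
    val copy = wa.clone()
    copy(0) = 99
    assert(wa(0) == 1)                           // the backing array was cloned too
    assert(copy(0) == 99)
  }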
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 85758b29bc..f6fb32e152 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -24,13 +24,13 @@ import scala.collection.GenIterable
import scala.collection.GenTraversableOnce
import scala.collection.GenTraversable
import immutable.HashMapCombiner
-import reflect.{ClassTag, classTag}
+import scala.reflect.{ClassTag, classTag}
import java.util.concurrent.atomic.AtomicBoolean
-import annotation.unchecked.uncheckedVariance
-import annotation.unchecked.uncheckedStable
-import language.{ higherKinds, implicitConversions }
+import scala.annotation.unchecked.uncheckedVariance
+import scala.annotation.unchecked.uncheckedStable
+import scala.language.{ higherKinds, implicitConversions }
/** A template trait for parallel collections of type `ParIterable[T]`.
@@ -171,9 +171,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
/** The task support object which is responsible for scheduling and
* load-balancing tasks to processors.
- *
+ *
* @see [[scala.collection.parallel.TaskSupport]]
- */
+ */
def tasksupport = {
val ts = _tasksupport
if (ts eq null) {
@@ -188,18 +188,18 @@ self: ParIterableLike[T, Repr, Sequential] =>
* A task support object can be changed in a parallel collection after it
* has been created, but only during a quiescent period, i.e. while there
* are no concurrent invocations to parallel collection methods.
- *
- * Here is a way to change the task support of a parallel collection:
- *
- * {{{
- * import scala.collection.parallel._
- * val pc = mutable.ParArray(1, 2, 3)
- * pc.tasksupport = new ForkJoinTaskSupport(
- * new scala.concurrent.forkjoin.ForkJoinPool(2))
- * }}}
+ *
+ * Here is a way to change the task support of a parallel collection:
+ *
+ * {{{
+ * import scala.collection.parallel._
+ * val pc = mutable.ParArray(1, 2, 3)
+ * pc.tasksupport = new ForkJoinTaskSupport(
+ * new scala.concurrent.forkjoin.ForkJoinPool(2))
+ * }}}
*
* @see [[scala.collection.parallel.TaskSupport]]
- */
+ */
def tasksupport_=(ts: TaskSupport) = _tasksupport = ts
def seq: Sequential
@@ -263,7 +263,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
/** The `newBuilder` operation returns a parallel builder assigned to this collection's fork/join pool.
* This method forwards the call to `newCombiner`.
*/
- //protected[this] def newBuilder: collection.mutable.Builder[T, Repr] = newCombiner
+ //protected[this] def newBuilder: scala.collection.mutable.Builder[T, Repr] = newCombiner
/** Optionally reuses an existing combiner for better performance. By default it doesn't - subclasses may override this behaviour.
* The provided combiner `oldc` that can potentially be reused will be either some combiner from the previous computational task, or `None` if there
@@ -453,7 +453,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def reduceRightOption[U >: T](op: (T, U) => U): Option[U] = seq.reduceRightOption(op)
- /** Applies a function `f` to all the elements of $coll in a sequential order.
+ /** Applies a function `f` to all the elements of $coll in an undefined order.
*
* @tparam U the result type of the function applied to each element, which is always discarded
* @param f function applied to each element
@@ -848,6 +848,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def seq = self.seq.view
def splitter = self.splitter
def size = splitter.remaining
+ override def isEmpty = size == 0
}
override def toArray[U >: T: ClassTag]: Array[U] = {
@@ -858,7 +859,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def toList: List[T] = seq.toList
- override def toIndexedSeq: collection.immutable.IndexedSeq[T] = seq.toIndexedSeq
+ override def toIndexedSeq: scala.collection.immutable.IndexedSeq[T] = seq.toIndexedSeq
override def toStream: Stream[T] = seq.toStream
@@ -866,7 +867,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
// the methods below are overridden
- override def toBuffer[U >: T]: collection.mutable.Buffer[U] = seq.toBuffer // have additional, parallel buffers?
+ override def toBuffer[U >: T]: scala.collection.mutable.Buffer[U] = seq.toBuffer // have additional, parallel buffers?
override def toTraversable: GenTraversable[T] = this.asInstanceOf[GenTraversable[T]]
@@ -877,13 +878,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def toSet[U >: T]: immutable.ParSet[U] = toParCollection[U, immutable.ParSet[U]](() => immutable.ParSet.newCombiner[U])
override def toMap[K, V](implicit ev: T <:< (K, V)): immutable.ParMap[K, V] = toParMap[K, V, immutable.ParMap[K, V]](() => immutable.ParMap.newCombiner[K, V])
-
+
override def toVector: Vector[T] = to[Vector]
override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, T, Col[T @uncheckedVariance]]): Col[T @uncheckedVariance] = if (cbf().isCombiner) {
toParCollection[T, Col[T]](() => cbf().asCombiner)
} else seq.to(cbf)
-
+
/* tasks */
protected trait StrictSplitterCheckTask[R, Tp] extends Task[R, Tp] {
@@ -1367,7 +1368,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
val until = from + len
val blocksize = scanBlockSize
while (i < until) {
- trees += scanBlock(i, math.min(blocksize, pit.remaining))
+ trees += scanBlock(i, scala.math.min(blocksize, pit.remaining))
i += blocksize
}
@@ -1495,7 +1496,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
debugBuffer += s
}
- import collection.DebugUtils._
+ import scala.collection.DebugUtils._
private[parallel] def printDebugBuffer() = println(buildString {
append =>
for (s <- debugBuffer) {
diff --git a/src/library/scala/collection/parallel/ParIterableViewLike.scala b/src/library/scala/collection/parallel/ParIterableViewLike.scala
index 91eefc2aa5..4f6962ff05 100644
--- a/src/library/scala/collection/parallel/ParIterableViewLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableViewLike.scala
@@ -18,7 +18,7 @@ import scala.collection.GenSeq
import scala.collection.generic.{ CanBuildFrom, SliceInterval }
import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.immutable.ParRange
-import language.implicitConversions
+import scala.language.implicitConversions
diff --git a/src/library/scala/collection/parallel/ParMap.scala b/src/library/scala/collection/parallel/ParMap.scala
index 58197ab2c6..2bc5e783e6 100644
--- a/src/library/scala/collection/parallel/ParMap.scala
+++ b/src/library/scala/collection/parallel/ParMap.scala
@@ -6,13 +6,8 @@
** |/ **
\* */
-
package scala.collection.parallel
-
-
-
-
import scala.collection.Map
import scala.collection.GenMap
import scala.collection.mutable.Builder
@@ -21,10 +16,6 @@ import scala.collection.generic.GenericParMapTemplate
import scala.collection.generic.GenericParMapCompanion
import scala.collection.generic.CanCombineFrom
-
-
-
-
/** A template trait for parallel maps.
*
* $sideeffects
@@ -75,31 +66,3 @@ object ParMap extends ParMapFactory[ParMap] {
override def default(key: A): B = d(key)
}
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala
index afd1f30903..8bf7334c5f 100644
--- a/src/library/scala/collection/parallel/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/ParMapLike.scala
@@ -16,7 +16,7 @@ import scala.collection.MapLike
import scala.collection.GenMapLike
import scala.collection.Map
import scala.collection.mutable.Builder
-import annotation.unchecked.uncheckedVariance
+import scala.annotation.unchecked.uncheckedVariance
import scala.collection.generic.IdleSignalling
import scala.collection.generic.Signalling
diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala
index be5ab03ba7..27e8eeb174 100644
--- a/src/library/scala/collection/parallel/ParSeqLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqLike.scala
@@ -44,7 +44,7 @@ trait ParSeqLike[+T, +Repr <: ParSeq[T], +Sequential <: Seq[T] with SeqLike[T, S
extends scala.collection.GenSeqLike[T, Repr]
with ParIterableLike[T, Repr, Sequential] {
self =>
-
+
type SuperParIterator = IterableSplitter[T]
/** A more refined version of the iterator found in the `ParallelIterable` trait,
@@ -330,6 +330,7 @@ self =>
def apply(idx: Int) = self(idx)
override def seq = self.seq.view
def splitter = self.splitter
+ override def isEmpty = size == 0
}
/* tasks */
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index a67a4d8eb7..9bf287cc39 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -190,7 +190,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def slice2combiner[U >: T, This](from: Int, until: Int, cb: Combiner[U, This]): Combiner[U, This] = {
drop(from)
- var left = math.max(until - from, 0)
+ var left = scala.math.max(until - from, 0)
cb.sizeHint(left)
while (left > 0) {
cb += next
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index 7a0116b3b3..2556cd3f68 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -16,7 +16,7 @@ import scala.concurrent.forkjoin._
import scala.concurrent.ExecutionContext
import scala.util.control.Breaks._
-import annotation.unchecked.uncheckedVariance
+import scala.annotation.unchecked.uncheckedVariance
@@ -98,7 +98,7 @@ trait Task[R, +Tp] {
*/
trait Tasks {
- private[parallel] val debugMessages = collection.mutable.ArrayBuffer[String]()
+ private[parallel] val debugMessages = scala.collection.mutable.ArrayBuffer[String]()
private[parallel] def debuglog(s: String) = synchronized {
debugMessages += s
@@ -534,11 +534,11 @@ trait AdaptiveWorkStealingThreadPoolTasks extends ThreadPoolTasks with AdaptiveW
trait ExecutionContextTasks extends Tasks {
-
+
def executionContext = environment
-
+
val environment: ExecutionContext
-
+
// this part is a hack which allows switching
val driver: Tasks = executionContext match {
case eci: scala.concurrent.impl.ExecutionContextImpl => eci.executor match {
@@ -548,13 +548,13 @@ trait ExecutionContextTasks extends Tasks {
}
case _ => ???
}
-
+
def execute[R, Tp](task: Task[R, Tp]): () => R = driver execute task
-
+
def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = driver executeAndWaitResult task
-
+
def parallelismLevel = driver.parallelismLevel
-
+
}
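The `driver` indirection above is what lets a plain `ExecutionContext` back a parallel collection; a sketch of the user-facing side, assuming the public `ExecutionContextTaskSupport` wrapper (the demo object name is ours):

  import scala.collection.parallel._
  import scala.concurrent.ExecutionContext

  object ExecutionContextTasksDemo extends App {
    val pc = mutable.ParArray(1, 2, 3)
    // route the collection's tasks through the global execution context
    pc.tasksupport = new ExecutionContextTaskSupport(ExecutionContext.global)
    println(pc.map(_ * 2))
  }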
diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
index ad882390c8..187e4aaf92 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
@@ -20,8 +20,8 @@ import scala.collection.generic.CanCombineFrom
import scala.collection.generic.GenericParMapTemplate
import scala.collection.generic.GenericParMapCompanion
import scala.collection.immutable.{ HashMap, TrieIterator }
-import annotation.unchecked.uncheckedVariance
-import collection.parallel.Task
+import scala.annotation.unchecked.uncheckedVariance
+import scala.collection.parallel.Task
@@ -118,9 +118,9 @@ self =>
def remaining = sz - i
override def toString = "HashTrieIterator(" + sz + ")"
}
-
+
/* debug */
-
+
private[parallel] def printDebugInfo() {
println("Parallel hash trie")
println("Top level inner trie type: " + trie.getClass)
@@ -159,7 +159,7 @@ object ParHashMap extends ParMapFactory[ParHashMap] {
private[parallel] abstract class HashMapCombiner[K, V]
-extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), HashMapCombiner[K, V]](HashMapCombiner.rootsize) {
+extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), HashMapCombiner[K, V]](HashMapCombiner.rootsize) {
//self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] =>
import HashMapCombiner._
val emptyTrie = HashMap.empty[K, V]
@@ -264,7 +264,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), Has
val fp = howmany / 2
List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
}
class CreateGroupedTrie[Repr](cbf: () => Combiner[V, Repr], bucks: Array[Unrolled[(K, V)]], root: Array[HashMap[K, AnyRef]], offset: Int, howmany: Int)
@@ -306,8 +306,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), Has
unrolled = unrolled.next
}
- evaluateCombiners(trie)
- trie.asInstanceOf[HashMap[K, Repr]]
+ evaluateCombiners(trie).asInstanceOf[HashMap[K, Repr]]
}
private def evaluateCombiners(trie: HashMap[K, Combiner[V, Repr]]): HashMap[K, Repr] = trie match {
case hm1: HashMap.HashMap1[_, _] =>
@@ -329,7 +328,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), Has
val fp = howmany / 2
List(new CreateGroupedTrie(cbf, bucks, root, offset, fp), new CreateGroupedTrie(cbf, bucks, root, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
index d1899601d7..85e2138c56 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
@@ -21,7 +21,7 @@ import scala.collection.generic.GenericParTemplate
import scala.collection.generic.GenericParCompanion
import scala.collection.generic.GenericCompanion
import scala.collection.immutable.{ HashSet, TrieIterator }
-import collection.parallel.Task
+import scala.collection.parallel.Task
@@ -132,7 +132,7 @@ object ParHashSet extends ParSetFactory[ParHashSet] {
private[immutable] abstract class HashSetCombiner[T]
-extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombiner[T]](HashSetCombiner.rootsize) {
+extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombiner[T]](HashSetCombiner.rootsize) {
//self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
import HashSetCombiner._
val emptyTrie = HashSet.empty[T]
@@ -209,7 +209,7 @@ extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombine
val fp = howmany / 2
List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/immutable/ParIterable.scala b/src/library/scala/collection/parallel/immutable/ParIterable.scala
index 349f4fa44c..5854844a8f 100644
--- a/src/library/scala/collection/parallel/immutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/immutable/ParIterable.scala
@@ -30,10 +30,10 @@ import scala.collection.GenIterable
* @since 2.9
*/
trait ParIterable[+T]
-extends collection/*.immutable*/.GenIterable[T]
- with collection.parallel.ParIterable[T]
+extends scala.collection/*.immutable*/.GenIterable[T]
+ with scala.collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
- with ParIterableLike[T, ParIterable[T], collection.immutable.Iterable[T]]
+ with ParIterableLike[T, ParIterable[T], scala.collection.immutable.Iterable[T]]
with Immutable
{
override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
diff --git a/src/library/scala/collection/parallel/immutable/ParMap.scala b/src/library/scala/collection/parallel/immutable/ParMap.scala
index 5060b36e7a..585e6bf541 100644
--- a/src/library/scala/collection/parallel/immutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParMap.scala
@@ -28,11 +28,11 @@ import scala.collection.GenMapLike
* @since 2.9
*/
trait ParMap[K, +V]
-extends collection/*.immutable*/.GenMap[K, V]
+extends scala.collection/*.immutable*/.GenMap[K, V]
with GenericParMapTemplate[K, V, ParMap]
with parallel.ParMap[K, V]
with ParIterable[(K, V)]
- with ParMapLike[K, V, ParMap[K, V], collection.immutable.Map[K, V]]
+ with ParMapLike[K, V, ParMap[K, V], scala.collection.immutable.Map[K, V]]
{
self =>
@@ -56,7 +56,7 @@ self =>
* @param d the function mapping keys to values, used for non-present keys
* @return a wrapper of the map with a default value
*/
- def withDefault[U >: V](d: K => U): collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, d)
+ def withDefault[U >: V](d: K => U): scala.collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, d)
/** The same map with a given default value.
*
@@ -65,7 +65,7 @@ self =>
* @param d the function mapping keys to values, used for non-present keys
* @return a wrapper of the map with a default value
*/
- def withDefaultValue[U >: V](d: U): collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, x => d)
+ def withDefaultValue[U >: V](d: U): scala.collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, x => d)
}
@@ -79,7 +79,7 @@ object ParMap extends ParMapFactory[ParMap] {
implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V]
class WithDefault[K, +V](underlying: ParMap[K, V], d: K => V)
- extends collection.parallel.ParMap.WithDefault[K, V](underlying, d) with ParMap[K, V] {
+ extends scala.collection.parallel.ParMap.WithDefault[K, V](underlying, d) with ParMap[K, V] {
override def empty = new WithDefault(underlying.empty, d)
override def updated[U >: V](key: K, value: U): WithDefault[K, U] = new WithDefault[K, U](underlying.updated[U](key, value), d)
override def + [U >: V](kv: (K, U)): WithDefault[K, U] = updated(kv._1, kv._2)
diff --git a/src/library/scala/collection/parallel/immutable/ParSeq.scala b/src/library/scala/collection/parallel/immutable/ParSeq.scala
index dde6533c82..265121286d 100644
--- a/src/library/scala/collection/parallel/immutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSeq.scala
@@ -28,11 +28,11 @@ import scala.collection.GenSeq
* @define coll mutable parallel sequence
*/
trait ParSeq[+T]
-extends collection/*.immutable*/.GenSeq[T]
- with collection.parallel.ParSeq[T]
+extends scala.collection/*.immutable*/.GenSeq[T]
+ with scala.collection.parallel.ParSeq[T]
with ParIterable[T]
with GenericParTemplate[T, ParSeq]
- with ParSeqLike[T, ParSeq[T], collection.immutable.Seq[T]]
+ with ParSeqLike[T, ParSeq[T], scala.collection.immutable.Seq[T]]
{
override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq
override def toSeq: ParSeq[T] = this
diff --git a/src/library/scala/collection/parallel/immutable/ParSet.scala b/src/library/scala/collection/parallel/immutable/ParSet.scala
index 40429280ac..c8da509ef5 100644
--- a/src/library/scala/collection/parallel/immutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSet.scala
@@ -20,11 +20,11 @@ import scala.collection.parallel.Combiner
* @define coll mutable parallel set
*/
trait ParSet[T]
-extends collection/*.immutable*/.GenSet[T]
+extends scala.collection/*.immutable*/.GenSet[T]
with GenericParTemplate[T, ParSet]
with parallel.ParSet[T]
with ParIterable[T]
- with ParSetLike[T, ParSet[T], collection.immutable.Set[T]]
+ with ParSetLike[T, ParSet[T], scala.collection.immutable.Set[T]]
{
self =>
override def empty: ParSet[T] = ParHashSet[T]()
diff --git a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
index 3694f40477..44ae7e2ce9 100644
--- a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
@@ -22,7 +22,7 @@ import scala.collection.parallel.Combiner
* @tparam Buff the type of the buffers that contain leaf results and this combiner chains together
*/
trait LazyCombiner[Elem, +To, Buff <: Growable[Elem] with Sizing] extends Combiner[Elem, To] {
-//self: collection.parallel.EnvironmentPassingCombiner[Elem, To] =>
+//self: scala.collection.parallel.EnvironmentPassingCombiner[Elem, To] =>
val chain: ArrayBuffer[Buff]
val lastbuff = chain.last
def +=(elem: Elem) = { lastbuff += elem; this }
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index 6889d8b472..56cc06f99e 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection.parallel.mutable
+package scala
+package collection.parallel.mutable
@@ -665,7 +666,7 @@ self =>
val fp = howmany / 2
List(new Map(f, targetarr, offset, fp), new Map(f, targetarr, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(length, tasksupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(length, tasksupport.parallelismLevel)
}
/* serialization */
diff --git a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
index d0c7f6050e..c7f025207c 100644
--- a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
@@ -9,7 +9,7 @@
package scala.collection
package parallel.mutable
-import collection.parallel.IterableSplitter
+import scala.collection.parallel.IterableSplitter
/** Parallel flat hash table.
*
@@ -19,13 +19,13 @@ import collection.parallel.IterableSplitter
*
* @author Aleksandar Prokopec
*/
-trait ParFlatHashTable[T] extends collection.mutable.FlatHashTable[T] {
+trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] {
override def alwaysInitSizeMap = true
abstract class ParFlatHashTableIterator(var idx: Int, val until: Int, val totalsize: Int)
extends IterableSplitter[T] with SizeMapUtils {
- import collection.DebugUtils._
+ import scala.collection.DebugUtils._
private var traversed = 0
private val itertable = table
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index 8d39d6e0de..fad7ddad59 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -12,12 +12,12 @@ package mutable
-import collection.generic._
-import collection.mutable.DefaultEntry
-import collection.mutable.HashEntry
-import collection.mutable.HashTable
-import collection.mutable.UnrolledBuffer
-import collection.parallel.Task
+import scala.collection.generic._
+import scala.collection.mutable.DefaultEntry
+import scala.collection.mutable.HashEntry
+import scala.collection.mutable.HashTable
+import scala.collection.mutable.UnrolledBuffer
+import scala.collection.parallel.Task
@@ -40,14 +40,14 @@ import collection.parallel.Task
class ParHashMap[K, V] private[collection] (contents: HashTable.Contents[K, DefaultEntry[K, V]])
extends ParMap[K, V]
with GenericParMapTemplate[K, V, ParHashMap]
- with ParMapLike[K, V, ParHashMap[K, V], collection.mutable.HashMap[K, V]]
+ with ParMapLike[K, V, ParHashMap[K, V], scala.collection.mutable.HashMap[K, V]]
with ParHashTable[K, DefaultEntry[K, V]]
with Serializable
{
self =>
initWithContents(contents)
- type Entry = collection.mutable.DefaultEntry[K, V]
+ type Entry = scala.collection.mutable.DefaultEntry[K, V]
def this() = this(null)
@@ -57,7 +57,7 @@ self =>
protected[this] override def newCombiner = ParHashMapCombiner[K, V]
- override def seq = new collection.mutable.HashMap[K, V](hashTableContents)
+ override def seq = new scala.collection.mutable.HashMap[K, V](hashTableContents)
def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]])
@@ -67,13 +67,13 @@ self =>
def get(key: K): Option[V] = {
val e = findEntry(key)
- if (e == null) None
+ if (e eq null) None
else Some(e.value)
}
def put(key: K, value: V): Option[V] = {
- val e = findEntry(key)
- if (e == null) { addEntry(new Entry(key, value)); None }
+ val e = findOrAddEntry(key, value)
+ if (e eq null) None
else { val v = e.value; e.value = value; Some(v) }
}
@@ -86,9 +86,8 @@ self =>
}
def += (kv: (K, V)): this.type = {
- val e = findEntry(kv._1)
- if (e == null) addEntry(new Entry(kv._1, kv._2))
- else e.value = kv._2
+ val e = findOrAddEntry(kv._1, kv._2)
+ if (e ne null) e.value = kv._2
this
}
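The `put` and `+=` rewrites give the parallel map the same single-lookup behaviour as its sequential counterpart; a small sketch of the observable semantics (the demo object name is ours):

  import scala.collection.parallel.mutable.ParHashMap

  object ParHashMapPutDemo extends App {
    val m = ParHashMap[String, Int]()
    assert(m.put("k", 1) == None)       // entry created via createNewEntry
    assert(m.put("k", 2) == Some(1))    // existing entry found and updated in place
    m += (("j", 3))
    assert(m("j") == 3)
  }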
@@ -103,12 +102,19 @@ self =>
new ParHashMapIterator(idxFrom, idxUntil, totalSz, es)
}
+ protected def createNewEntry[V1](key: K, value: V1): Entry = {
+ new Entry(key, value.asInstanceOf[V])
+ }
+
private def writeObject(out: java.io.ObjectOutputStream) {
- serializeTo(out, _.value)
+ serializeTo(out, { entry =>
+ out.writeObject(entry.key)
+ out.writeObject(entry.value)
+ })
}
private def readObject(in: java.io.ObjectInputStream) {
- init[V](in, new Entry(_, _))
+ init(in, createNewEntry(in.readObject().asInstanceOf[K], in.readObject()))
}
private[parallel] override def brokenInvariants = {
@@ -157,8 +163,8 @@ object ParHashMap extends ParMapFactory[ParHashMap] {
private[mutable] abstract class ParHashMapCombiner[K, V](private val tableLoadFactor: Int)
-extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntry[K, V], ParHashMapCombiner[K, V]](ParHashMapCombiner.numblocks)
- with collection.mutable.HashTable.HashUtils[K]
+extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntry[K, V], ParHashMapCombiner[K, V]](ParHashMapCombiner.numblocks)
+ with scala.collection.mutable.HashTable.HashUtils[K]
{
private var mask = ParHashMapCombiner.discriminantmask
private var nonmasklen = ParHashMapCombiner.nonmasklength
@@ -190,7 +196,9 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
// construct a normal table and fill it sequentially
// TODO parallelize by keeping separate sizemaps and merging them
object table extends HashTable[K, DefaultEntry[K, V]] {
- def insertEntry(e: DefaultEntry[K, V]) = if (super.findEntry(e.key) eq null) super.addEntry(e)
+ type Entry = DefaultEntry[K, V]
+ def insertEntry(e: Entry) { super.findOrAddEntry(e.key, e) }
+ def createNewEntry[E](key: K, entry: E): Entry = entry.asInstanceOf[Entry]
sizeMapInit(table.length)
}
var i = 0
@@ -251,6 +259,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
assert(h >= block * blocksize && h < (block + 1) * blocksize)
}
}
+ protected def createNewEntry[X](key: K, x: X) = ???
}
/* tasks */
@@ -302,7 +311,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
override def merge(that: FillBlocks) {
this.result += that.result
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 783f8dce77..aef9f6856b 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -10,11 +10,11 @@ package scala.collection.parallel.mutable
-import collection.generic._
-import collection.mutable.FlatHashTable
-import collection.parallel.Combiner
-import collection.mutable.UnrolledBuffer
-import collection.parallel.Task
+import scala.collection.generic._
+import scala.collection.mutable.FlatHashTable
+import scala.collection.parallel.Combiner
+import scala.collection.mutable.UnrolledBuffer
+import scala.collection.parallel.Task
@@ -36,7 +36,7 @@ import collection.parallel.Task
class ParHashSet[T] private[collection] (contents: FlatHashTable.Contents[T])
extends ParSet[T]
with GenericParTemplate[T, ParHashSet]
- with ParSetLike[T, ParHashSet[T], collection.mutable.HashSet[T]]
+ with ParSetLike[T, ParHashSet[T], scala.collection.mutable.HashSet[T]]
with ParFlatHashTable[T]
with Serializable
{
@@ -57,7 +57,7 @@ extends ParSet[T]
def clear() = clearTable()
- override def seq = new collection.mutable.HashSet(hashTableContents)
+ override def seq = new scala.collection.mutable.HashSet(hashTableContents)
def +=(elem: T) = {
addEntry(elem)
@@ -88,7 +88,7 @@ extends ParSet[T]
init(in, x => x)
}
- import collection.DebugUtils._
+ import scala.collection.DebugUtils._
override def debugInformation = buildString {
append =>
append("Parallel flat hash table set")
@@ -117,8 +117,8 @@ object ParHashSet extends ParSetFactory[ParHashSet] {
private[mutable] abstract class ParHashSetCombiner[T](private val tableLoadFactor: Int)
-extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, ParHashSetCombiner[T]](ParHashSetCombiner.numblocks)
-with collection.mutable.FlatHashTable.HashUtils[T] {
+extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, ParHashSetCombiner[T]](ParHashSetCombiner.numblocks)
+with scala.collection.mutable.FlatHashTable.HashUtils[T] {
//self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
private var mask = ParHashSetCombiner.discriminantmask
private var nonmasklen = ParHashSetCombiner.nonmasklength
@@ -158,12 +158,12 @@ with collection.mutable.FlatHashTable.HashUtils[T] {
val tbl = new FlatHashTable[T] {
sizeMapInit(table.length)
seedvalue = ParHashSetCombiner.this.seedvalue
+ for {
+ buffer <- buckets;
+ if buffer ne null;
+ elem <- buffer
+ } addEntry(elem.asInstanceOf[T])
}
- for {
- buffer <- buckets;
- if buffer ne null;
- elem <- buffer
- } tbl.addEntry(elem.asInstanceOf[T])
tbl.hashTableContents
}
@@ -310,7 +310,7 @@ with collection.mutable.FlatHashTable.HashUtils[T] {
// the total number of successfully inserted elements is adjusted accordingly
result = (this.result._1 + that.result._1 + inserted, remainingLeftovers concat that.result._2)
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
index 8c93732427..bb9a7b7823 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
@@ -13,8 +13,8 @@ package parallel.mutable
-import collection.mutable.HashEntry
-import collection.parallel.IterableSplitter
+import scala.collection.mutable.HashEntry
+import scala.collection.parallel.IterableSplitter
@@ -22,7 +22,7 @@ import collection.parallel.IterableSplitter
* enriching the data structure by fulfilling certain requirements
* for their parallel construction and iteration.
*/
-trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends collection.mutable.HashTable[K, Entry] {
+trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collection.mutable.HashTable[K, Entry] {
override def alwaysInitSizeMap = true
@@ -104,7 +104,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends collection.m
// otherwise, this is the last entry in the table - all what remains is the chain
// so split the rest of the chain
val arr = convertToArrayBuffer(es)
- val arrpit = new collection.parallel.BufferSplitter[T](arr, 0, arr.length, signalDelegate)
+ val arrpit = new scala.collection.parallel.BufferSplitter[T](arr, 0, arr.length, signalDelegate)
arrpit.split
}
} else Seq(this.asInstanceOf[IterRepr])
diff --git a/src/library/scala/collection/parallel/mutable/ParIterable.scala b/src/library/scala/collection/parallel/mutable/ParIterable.scala
index b5747a31cf..9281e84c03 100644
--- a/src/library/scala/collection/parallel/mutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParIterable.scala
@@ -26,8 +26,8 @@ import scala.collection.GenIterable
* @author Aleksandar Prokopec
* @since 2.9
*/
-trait ParIterable[T] extends collection/*.mutable*/.GenIterable[T]
- with collection.parallel.ParIterable[T]
+trait ParIterable[T] extends scala.collection/*.mutable*/.GenIterable[T]
+ with scala.collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
with ParIterableLike[T, ParIterable[T], Iterable[T]]
with Mutable {
@@ -39,7 +39,7 @@ trait ParIterable[T] extends collection/*.mutable*/.GenIterable[T]
override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T])
- def seq: collection.mutable.Iterable[T]
+ def seq: scala.collection.mutable.Iterable[T]
}
/** $factoryInfo
diff --git a/src/library/scala/collection/parallel/mutable/ParMap.scala b/src/library/scala/collection/parallel/mutable/ParMap.scala
index 4b3eae4ad1..34b3d465d2 100644
--- a/src/library/scala/collection/parallel/mutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMap.scala
@@ -12,8 +12,8 @@ package scala.collection.parallel.mutable
-import collection.generic._
-import collection.parallel.Combiner
+import scala.collection.generic._
+import scala.collection.parallel.Combiner
@@ -28,11 +28,11 @@ import collection.parallel.Combiner
* @since 2.9
*/
trait ParMap[K, V]
-extends collection/*.mutable*/.GenMap[K, V]
- with collection.parallel.ParMap[K, V]
+extends scala.collection/*.mutable*/.GenMap[K, V]
+ with scala.collection.parallel.ParMap[K, V]
with /* mutable */ ParIterable[(K, V)]
with GenericParMapTemplate[K, V, ParMap]
- with /* mutable */ ParMapLike[K, V, ParMap[K, V], collection.mutable.Map[K, V]]
+ with /* mutable */ ParMapLike[K, V, ParMap[K, V], scala.collection.mutable.Map[K, V]]
{
protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V]
@@ -41,7 +41,7 @@ extends collection/*.mutable*/.GenMap[K, V]
override def empty: ParMap[K, V] = new ParHashMap[K, V]
- def seq: collection.mutable.Map[K, V]
+ def seq: scala.collection.mutable.Map[K, V]
override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value))
@@ -53,7 +53,7 @@ extends collection/*.mutable*/.GenMap[K, V]
* @param d the function mapping keys to values, used for non-present keys
* @return a wrapper of the map with a default value
*/
- def withDefault(d: K => V): collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, d)
+ def withDefault(d: K => V): scala.collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, d)
/** The same map with a given default value.
*
@@ -62,7 +62,7 @@ extends collection/*.mutable*/.GenMap[K, V]
* @param d the function mapping keys to values, used for non-present keys
* @return a wrapper of the map with a default value
*/
- def withDefaultValue(d: V): collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, x => d)
+ def withDefaultValue(d: V): scala.collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, x => d)
}
@@ -76,7 +76,7 @@ object ParMap extends ParMapFactory[ParMap] {
implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V]
class WithDefault[K, V](underlying: ParMap[K, V], d: K => V)
- extends collection.parallel.ParMap.WithDefault(underlying, d) with ParMap[K, V] {
+ extends scala.collection.parallel.ParMap.WithDefault(underlying, d) with ParMap[K, V] {
override def += (kv: (K, V)) = {underlying += kv; this}
def -= (key: K) = {underlying -= key; this}
override def empty = new WithDefault(underlying.empty, d)
diff --git a/src/library/scala/collection/parallel/mutable/ParMapLike.scala b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
index 5c69c2e045..675b20949f 100644
--- a/src/library/scala/collection/parallel/mutable/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
@@ -11,11 +11,11 @@ package mutable
-import collection.generic._
-import collection.mutable.Builder
-import collection.mutable.Cloneable
-import collection.generic.Growable
-import collection.generic.Shrinkable
+import scala.collection.generic._
+import scala.collection.mutable.Builder
+import scala.collection.mutable.Cloneable
+import scala.collection.generic.Growable
+import scala.collection.generic.Shrinkable
@@ -33,9 +33,9 @@ import collection.generic.Shrinkable
trait ParMapLike[K,
V,
+Repr <: ParMapLike[K, V, Repr, Sequential] with ParMap[K, V],
- +Sequential <: collection.mutable.Map[K, V] with collection.mutable.MapLike[K, V, Sequential]]
-extends collection.GenMapLike[K, V, Repr]
- with collection.parallel.ParMapLike[K, V, Repr, Sequential]
+ +Sequential <: scala.collection.mutable.Map[K, V] with scala.collection.mutable.MapLike[K, V, Sequential]]
+extends scala.collection.GenMapLike[K, V, Repr]
+ with scala.collection.parallel.ParMapLike[K, V, Repr, Sequential]
with Growable[(K, V)]
with Shrinkable[K]
with Cloneable[Repr]
diff --git a/src/library/scala/collection/parallel/mutable/ParSeq.scala b/src/library/scala/collection/parallel/mutable/ParSeq.scala
index f46b369494..7322d5236f 100644
--- a/src/library/scala/collection/parallel/mutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSeq.scala
@@ -29,18 +29,18 @@ import scala.collection.GenSeq
* @define Coll `mutable.ParSeq`
* @define coll mutable parallel sequence
*/
-trait ParSeq[T] extends collection/*.mutable*/.GenSeq[T] // was: collection.mutable.Seq[T]
+trait ParSeq[T] extends scala.collection/*.mutable*/.GenSeq[T] // was: scala.collection.mutable.Seq[T]
with ParIterable[T]
- with collection.parallel.ParSeq[T]
+ with scala.collection.parallel.ParSeq[T]
with GenericParTemplate[T, ParSeq]
- with ParSeqLike[T, ParSeq[T], collection.mutable.Seq[T]] {
+ with ParSeqLike[T, ParSeq[T], scala.collection.mutable.Seq[T]] {
self =>
override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq
//protected[this] override def newBuilder = ParSeq.newBuilder[T]
def update(i: Int, elem: T): Unit
- def seq: collection.mutable.Seq[T]
+ def seq: scala.collection.mutable.Seq[T]
override def toSeq: ParSeq[T] = this
}
diff --git a/src/library/scala/collection/parallel/mutable/ParSet.scala b/src/library/scala/collection/parallel/mutable/ParSet.scala
index 6da4c8a7bc..540ecb8022 100644
--- a/src/library/scala/collection/parallel/mutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSet.scala
@@ -27,16 +27,16 @@ import scala.collection.GenSet
* @author Aleksandar Prokopec
*/
trait ParSet[T]
-extends collection/*.mutable*/.GenSet[T]
+extends scala.collection/*.mutable*/.GenSet[T]
with ParIterable[T]
- with collection.parallel.ParSet[T]
+ with scala.collection.parallel.ParSet[T]
with GenericParTemplate[T, ParSet]
- with ParSetLike[T, ParSet[T], collection.mutable.Set[T]]
+ with ParSetLike[T, ParSet[T], scala.collection.mutable.Set[T]]
{
self =>
override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet
override def empty: ParSet[T] = ParHashSet()
- def seq: collection.mutable.Set[T]
+ def seq: scala.collection.mutable.Set[T]
}
diff --git a/src/library/scala/collection/parallel/mutable/ParSetLike.scala b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
index 969fc2a405..e41d779a4d 100644
--- a/src/library/scala/collection/parallel/mutable/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
@@ -35,8 +35,8 @@ trait ParSetLike[T,
+Repr <: ParSetLike[T, Repr, Sequential] with ParSet[T],
+Sequential <: mutable.Set[T] with mutable.SetLike[T, Sequential]]
extends GenSetLike[T, Repr]
- with collection.parallel.ParIterableLike[T, Repr, Sequential]
- with collection.parallel.ParSetLike[T, Repr, Sequential]
+ with scala.collection.parallel.ParIterableLike[T, Repr, Sequential]
+ with scala.collection.parallel.ParSetLike[T, Repr, Sequential]
with Growable[T]
with Shrinkable[T]
with Cloneable[Repr]
diff --git a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
index 359c35f1dd..5c452f628c 100644
--- a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
@@ -34,7 +34,7 @@ import scala.collection.concurrent.TrieMapIterator
* @author Aleksandar Prokopec
* @since 2.10
* @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_concurrent_tries Scala's Parallel Collections Library overview]]
- * section on `ParTrieMap` for more information.
+ * section on `ParTrieMap` for more information.
*/
final class ParTrieMap[K, V] private[collection] (private val ctrie: TrieMap[K, V])
extends ParMap[K, V]
@@ -130,7 +130,7 @@ extends TrieMapIterator[K, V](lev, ct, mustInit)
protected override def newIterator(_lev: Int, _ct: TrieMap[K, V], _mustInit: Boolean) = new ParTrieMapSplitter[K, V](_lev, _ct, _mustInit)
- override def shouldSplitFurther[S](coll: collection.parallel.ParIterable[S], parallelismLevel: Int) = {
+ override def shouldSplitFurther[S](coll: scala.collection.parallel.ParIterable[S], parallelismLevel: Int) = {
val maxsplits = 3 + Integer.highestOneBit(parallelismLevel)
level < maxsplits
}
diff --git a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
index 01eb17024e..68f37137f8 100644
--- a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
@@ -81,7 +81,7 @@ trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedA
val fp = howmany / 2
List(new CopyChainToArray(array, offset, fp), new CopyChainToArray(array, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
index 9648791502..5600d0f68c 100644
--- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
@@ -8,10 +8,6 @@
package scala.collection.parallel.mutable
-
-
-
-
import scala.collection.generic.Sizing
import scala.collection.mutable.ArraySeq
import scala.collection.mutable.ArrayBuffer
@@ -23,16 +19,12 @@ import scala.collection.parallel.Combiner
import scala.collection.parallel.Task
import scala.reflect.ClassTag
-
-
-
private[mutable] class DoublingUnrolledBuffer[T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) {
override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz
protected override def newUnrolled = new Unrolled[T](0, new Array[T](4), null, this)
}
-
/** An array combiner that uses doubling unrolled buffers to store elements. */
trait UnrolledParArrayCombiner[T]
extends Combiner[T, ParArray[T]] {
@@ -85,7 +77,7 @@ extends Combiner[T, ParArray[T]] {
var pos = startpos
var arroffset = offset
while (totalleft > 0) {
- val lefthere = math.min(totalleft, curr.size - pos)
+ val lefthere = scala.math.min(totalleft, curr.size - pos)
Array.copy(curr.array, pos, array, arroffset, lefthere)
// println("from: " + arroffset + " elems " + lefthere + " - " + pos + ", " + curr + " -> " + array.toList + " by " + this + " !! " + buff.headPtr)
totalleft -= lefthere
@@ -107,13 +99,11 @@ extends Combiner[T, ParArray[T]] {
val fp = howmany / 2
List(new CopyUnrolledToArray(array, offset, fp), new CopyUnrolledToArray(array, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel)
override def toString = "CopyUnrolledToArray(" + offset + ", " + howmany + ")"
}
}
-
-
object UnrolledParArrayCombiner {
def apply[T](): UnrolledParArrayCombiner[T] = new UnrolledParArrayCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParArray[T]]
}
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index e3124af12e..a95090c15b 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -6,14 +6,15 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import scala.collection.generic.CanBuildFrom
import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.mutable.ParArray
import scala.collection.mutable.UnrolledBuffer
-import annotation.unchecked.uncheckedVariance
-import language.implicitConversions
+import scala.annotation.unchecked.uncheckedVariance
+import scala.language.implicitConversions
/** Package object for parallel collections.
*/
@@ -41,14 +42,14 @@ package object parallel {
private[parallel] def outofbounds(idx: Int) = throw new IndexOutOfBoundsException(idx.toString)
private[parallel] def getTaskSupport: TaskSupport =
- if (util.Properties.isJavaAtLeast("1.6")) {
- val vendor = util.Properties.javaVmVendor
+ if (scala.util.Properties.isJavaAtLeast("1.6")) {
+ val vendor = scala.util.Properties.javaVmVendor
if ((vendor contains "Oracle") || (vendor contains "Sun") || (vendor contains "Apple")) new ForkJoinTaskSupport
else new ThreadPoolTaskSupport
} else new ThreadPoolTaskSupport
val defaultTaskSupport: TaskSupport = getTaskSupport
-
+
def setTaskSupport[Coll](c: Coll, t: TaskSupport): Coll = {
c match {
case pc: ParIterableLike[_, _, _] => pc.tasksupport = t
@@ -56,7 +57,7 @@ package object parallel {
}
c
}
-
+
/* implicit conversions */
implicit def factory2ops[From, Elem, To](bf: CanBuildFrom[From, Elem, To]) = new FactoryOps[From, Elem, To] {
@@ -66,7 +67,7 @@ package object parallel {
def otherwise(notbody: => R) = if (isParallel) isbody(asParallel) else notbody
}
}
- implicit def traversable2ops[T](t: collection.GenTraversableOnce[T]) = new TraversableOps[T] {
+ implicit def traversable2ops[T](t: scala.collection.GenTraversableOnce[T]) = new TraversableOps[T] {
def isParallel = t.isInstanceOf[Parallel]
def isParIterable = t.isInstanceOf[ParIterable[_]]
def asParIterable = t.asInstanceOf[ParIterable[T]]
@@ -149,7 +150,7 @@ package parallel {
* Automatically forwards the signal delegate when splitting.
*/
private[parallel] class BufferSplitter[T]
- (private val buffer: collection.mutable.ArrayBuffer[T], private var index: Int, private val until: Int, _sigdel: collection.generic.Signalling)
+ (private val buffer: scala.collection.mutable.ArrayBuffer[T], private var index: Int, private val until: Int, _sigdel: scala.collection.generic.Signalling)
extends IterableSplitter[T] {
signalDelegate = _sigdel
def hasNext = index < until
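The task-support plumbing touched in this hunk (`getTaskSupport`, `defaultTaskSupport`, `setTaskSupport`) decides which scheduler backs a parallel collection. As a minimal sketch of how it is exercised (not part of this commit; it assumes the no-argument `ForkJoinTaskSupport` constructor with its default fork/join pool):
{{{
import scala.collection.parallel._

val pv = (1 to 10000).toVector.par       // a parallel collection, initially on defaultTaskSupport
// Swapping the scheduler by hand; setTaskSupport in the hunk above performs the same
// assignment for any ParIterableLike and simply returns the collection.
pv.tasksupport = new ForkJoinTaskSupport()
pv.map(_ * 2)                            // now scheduled on that task support's pool
}}}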
diff --git a/src/library/scala/compat/Platform.scala b/src/library/scala/compat/Platform.scala
index f18ce12e6c..77c12a8e58 100644
--- a/src/library/scala/compat/Platform.scala
+++ b/src/library/scala/compat/Platform.scala
@@ -109,7 +109,7 @@ object Platform {
* `System.getProperty("line.separator")`
* with a default value of "\n".
*/
- val EOL = util.Properties.lineSeparator
+ val EOL = scala.util.Properties.lineSeparator
/** The current time in milliseconds. The time is counted since 1 January 1970
* UTC.
diff --git a/src/library/scala/concurrent/Awaitable.scala b/src/library/scala/concurrent/Awaitable.scala
index 99bdfbc5a9..3bd7617bce 100644
--- a/src/library/scala/concurrent/Awaitable.scala
+++ b/src/library/scala/concurrent/Awaitable.scala
@@ -10,21 +10,40 @@ package scala.concurrent
-import scala.concurrent.util.Duration
+import scala.concurrent.duration.Duration
trait Awaitable[+T] {
/**
- * Should throw [[scala.concurrent.TimeoutException]] if it times out
+ * Await the "resolved" state of this Awaitable.
* This method should not be called directly.
+ *
+ * @param atMost
+ * maximum wait time, which may be negative (no waiting is done),
+ * [[Duration.Inf]] for unbounded waiting, or a finite positive
+ * duration
+ * @return the Awaitable itself
+ * @throws InterruptedException if the wait call was interrupted
+ * @throws TimeoutException if after waiting for the specified time this Awaitable is still not ready
+ * @throws IllegalArgumentException if `atMost` is [[Duration.Undefined]]
*/
@throws(classOf[TimeoutException])
+ @throws(classOf[InterruptedException])
def ready(atMost: Duration)(implicit permit: CanAwait): this.type
/**
- * Throws exceptions if it cannot produce a T within the specified time.
+ * Await and return the result of this Awaitable, which is either of type T or a thrown exception (any Throwable).
* This method should not be called directly.
+ *
+ * @param atMost
+ * maximum wait time, which may be negative (no waiting is done),
+ * [[Duration.Inf]] for unbounded waiting, or a finite positive
+ * duration
+ * @return the value if the Awaitable was successful within the specific maximum wait time
+ * @throws InterruptedException if the wait call was interrupted
+ * @throws TimeoutException if after waiting for the specified time this Awaitable is still not ready
+ * @throws IllegalArgumentException if `atMost` is [[Duration.Undefined]]
*/
@throws(classOf[Exception])
def result(atMost: Duration)(implicit permit: CanAwait): T
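The expanded `ready`/`result` documentation above describes methods that are normally reached through `scala.concurrent.Await`, which supplies the `CanAwait` permit. A minimal blocking sketch (not part of this commit; it assumes the standard `Await` and `Future` entry points together with the duration DSL added later in this diff):
{{{
import scala.concurrent.{ Await, Future }
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

val f: Future[Int] = Future { 21 * 2 }   // some asynchronous computation
// Await.result forwards to Awaitable.result under a CanAwait permit and
// throws TimeoutException if `f` is still not completed after 5 seconds.
val answer: Int = Await.result(f, 5.seconds)
}}}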
diff --git a/src/library/scala/concurrent/BlockContext.scala b/src/library/scala/concurrent/BlockContext.scala
index 640560a174..83333a9e94 100644
--- a/src/library/scala/concurrent/BlockContext.scala
+++ b/src/library/scala/concurrent/BlockContext.scala
@@ -8,9 +8,6 @@
package scala.concurrent
-import java.lang.Thread
-import scala.concurrent.util.Duration
-
/**
* A context to be notified by `scala.concurrent.blocking` when
* a thread is about to block. In effect this trait provides
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index 1be6050303..844ec14241 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -10,7 +10,6 @@ package scala.concurrent
import java.util.concurrent.{ ExecutorService, Executor }
-import scala.concurrent.util.Duration
import scala.annotation.implicitNotFound
import scala.util.Try
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index bc0b437a33..111900e7bc 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -8,7 +8,7 @@
package scala.concurrent
-import language.higherKinds
+import scala.language.higherKinds
import java.util.concurrent.{ ConcurrentLinkedQueue, TimeUnit, Callable }
import java.util.concurrent.TimeUnit.{ NANOSECONDS => NANOS, MILLISECONDS ⇒ MILLIS }
@@ -16,7 +16,6 @@ import java.lang.{ Iterable => JIterable }
import java.util.{ LinkedList => JLinkedList }
import java.util.concurrent.atomic.{ AtomicReferenceFieldUpdater, AtomicInteger, AtomicBoolean }
-import scala.concurrent.util.Duration
import scala.util.control.NonFatal
import scala.Option
import scala.util.{Try, Success, Failure}
@@ -213,7 +212,7 @@ trait Future[+T] extends Awaitable[T] {
* this future, or the 'f' function to the failed result. If there is any non-fatal
* exception thrown when 's' or 'f' is applied, that exception will be propagated
* to the resulting future.
- *
+ *
* @param s function that transforms a successful result of the receiver into a
* successful result of the returned future
* @param f function that transforms a failure of the receiver into a failure of
@@ -429,7 +428,7 @@ trait Future[+T] extends Awaitable[T] {
*/
def zip[U](that: Future[U]): Future[(T, U)] = {
val p = Promise[(T, U)]()
-
+
this onComplete {
case f: Failure[_] => p complete f.asInstanceOf[Failure[(T, U)]]
case Success(r) =>
@@ -440,7 +439,7 @@ trait Future[+T] extends Awaitable[T] {
case f => p failure f
}
}
-
+
p.future
}
@@ -556,7 +555,7 @@ trait Future[+T] extends Awaitable[T] {
* Note: using this method yields nondeterministic dataflow programs.
*/
object Future {
-
+
private[concurrent] val toBoxed = Map[Class[_], Class[_]](
classOf[Boolean] -> classOf[java.lang.Boolean],
classOf[Byte] -> classOf[java.lang.Byte],
@@ -570,19 +569,19 @@ object Future {
)
/** Creates an already completed Future with the specified exception.
- *
+ *
* @tparam T the type of the value in the future
* @return the newly created `Future` object
*/
def failed[T](exception: Throwable): Future[T] = Promise.failed(exception).future
/** Creates an already completed Future with the specified result.
- *
+ *
* @tparam T the type of the value in the future
* @return the newly created `Future` object
*/
def successful[T](result: T): Future[T] = Promise.successful(result).future
-
+
/** Starts an asynchronous computation and returns a `Future` object with the result of that computation.
*
* The result becomes available once the asynchronous computation is completed.
diff --git a/src/library/scala/concurrent/FutureTaskRunner.scala b/src/library/scala/concurrent/FutureTaskRunner.scala
index d7f1e1c2f9..eeadaddb5e 100644
--- a/src/library/scala/concurrent/FutureTaskRunner.scala
+++ b/src/library/scala/concurrent/FutureTaskRunner.scala
@@ -8,7 +8,7 @@
package scala.concurrent
-import language.{implicitConversions, higherKinds}
+import scala.language.{implicitConversions, higherKinds}
/** The `FutureTaskRunner` trait is a base trait of task runners
* that provide some sort of future abstraction.
diff --git a/src/library/scala/concurrent/JavaConversions.scala b/src/library/scala/concurrent/JavaConversions.scala
index ffb9926fef..f66d64bc3b 100644
--- a/src/library/scala/concurrent/JavaConversions.scala
+++ b/src/library/scala/concurrent/JavaConversions.scala
@@ -9,7 +9,7 @@
package scala.concurrent
import java.util.concurrent.{ExecutorService, Executor}
-import language.implicitConversions
+import scala.language.implicitConversions
/** The `JavaConversions` object provides implicit conversions supporting
* interoperability between Scala and Java concurrency classes.
diff --git a/src/library/scala/concurrent/TaskRunner.scala b/src/library/scala/concurrent/TaskRunner.scala
index 2e11ac42b0..2037c43cf8 100644
--- a/src/library/scala/concurrent/TaskRunner.scala
+++ b/src/library/scala/concurrent/TaskRunner.scala
@@ -8,7 +8,7 @@
package scala.concurrent
-import language.{higherKinds, implicitConversions}
+import scala.language.{higherKinds, implicitConversions}
/** The `TaskRunner` trait...
*
diff --git a/src/library/scala/concurrent/ThreadPoolRunner.scala b/src/library/scala/concurrent/ThreadPoolRunner.scala
index 594555d49b..4b777ba069 100644
--- a/src/library/scala/concurrent/ThreadPoolRunner.scala
+++ b/src/library/scala/concurrent/ThreadPoolRunner.scala
@@ -9,7 +9,7 @@
package scala.concurrent
import java.util.concurrent.{ExecutorService, Callable, TimeUnit}
-import language.implicitConversions
+import scala.language.implicitConversions
/** The `ThreadPoolRunner` trait uses a `java.util.concurrent.ExecutorService`
* to run submitted tasks.
diff --git a/src/library/scala/concurrent/ThreadRunner.scala b/src/library/scala/concurrent/ThreadRunner.scala
index ab709e0210..067269a911 100644
--- a/src/library/scala/concurrent/ThreadRunner.scala
+++ b/src/library/scala/concurrent/ThreadRunner.scala
@@ -9,7 +9,7 @@
package scala.concurrent
import java.lang.Thread
-import language.implicitConversions
+import scala.language.implicitConversions
/** The `ThreadRunner` trait...
*
diff --git a/src/library/scala/concurrent/duration/Deadline.scala b/src/library/scala/concurrent/duration/Deadline.scala
new file mode 100644
index 0000000000..50e9a75ff7
--- /dev/null
+++ b/src/library/scala/concurrent/duration/Deadline.scala
@@ -0,0 +1,81 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent.duration
+
+/**
+ * This class stores a deadline, as obtained via `Deadline.now` or the
+ * duration DSL:
+ *
+ * {{{
+ * import scala.concurrent.duration._
+ * 3.seconds.fromNow
+ * }}}
+ *
+ * Its main purpose is to manage repeated attempts to achieve something (like
+ * awaiting a condition) by offering the methods `hasTimeLeft` and `timeLeft`. All
+ * durations are measured according to `System.nanoTime` aka wall-time; this
+ * does not take into account changes to the system clock (such as leap
+ * seconds).
+ */
+case class Deadline private (time: FiniteDuration) extends Ordered[Deadline] {
+ /**
+ * Return a deadline advanced (i.e. moved into the future) by the given duration.
+ */
+ def +(other: FiniteDuration): Deadline = copy(time = time + other)
+ /**
+ * Return a deadline moved backwards (i.e. towards the past) by the given duration.
+ */
+ def -(other: FiniteDuration): Deadline = copy(time = time - other)
+ /**
+ * Calculate time difference between this and the other deadline, where the result is directed (i.e. may be negative).
+ */
+ def -(other: Deadline): FiniteDuration = time - other.time
+ /**
+ * Calculate time difference between this duration and now; the result is negative if the deadline has passed.
+ *
+ * '''''Note that on some systems this operation is costly because it entails a system call.'''''
+ * Check `System.nanoTime` for your platform.
+ */
+ def timeLeft: FiniteDuration = this - Deadline.now
+ /**
+ * Determine whether the deadline still lies in the future at the point where this method is called.
+ *
+ * '''''Note that on some systems this operation is costly because it entails a system call.'''''
+ * Check `System.nanoTime` for your platform.
+ */
+ def hasTimeLeft(): Boolean = !isOverdue()
+ /**
+ * Determine whether the deadline lies in the past at the point where this method is called.
+ *
+ * '''''Note that on some systems this operation is costly because it entails a system call.'''''
+ * Check `System.nanoTime` for your platform.
+ */
+ def isOverdue(): Boolean = (time.toNanos - System.nanoTime()) < 0
+ /**
+ * The natural ordering for deadline is determined by the natural order of the underlying (finite) duration.
+ */
+ def compare(other: Deadline) = time compare other.time
+}
+
+object Deadline {
+ /**
+ * Construct a deadline due exactly at the point where this method is called. Useful for then
+ * advancing it to obtain a future deadline, or for sampling the current time exactly once and
+ * then comparing it to multiple deadlines (using subtraction).
+ */
+ def now: Deadline = Deadline(Duration(System.nanoTime, NANOSECONDS))
+
+ /**
+ * The natural ordering for deadline is determined by the natural order of the underlying (finite) duration.
+ */
+ implicit object DeadlineIsOrdered extends Ordering[Deadline] {
+ def compare(a: Deadline, b: Deadline) = a compare b
+ }
+
+}
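As a usage sketch for the new `Deadline` class (not part of this commit; the `ready()` predicate is a hypothetical placeholder), the intended pattern is to sample the deadline once and then poll against it:
{{{
import scala.concurrent.duration._

val deadline = 3.seconds.fromNow         // obtained via the duration DSL

def ready(): Boolean = false             // hypothetical condition to wait for
while (deadline.hasTimeLeft() && !ready()) {
  // sleep for the remaining time, but at most 100 ms per iteration,
  // clamped at zero in case the deadline passed since the check above
  Thread.sleep(((deadline.timeLeft min 100.millis) max Duration.Zero).toMillis)
}
}}}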
diff --git a/src/library/scala/concurrent/duration/Duration.scala b/src/library/scala/concurrent/duration/Duration.scala
new file mode 100644
index 0000000000..79f9b4db86
--- /dev/null
+++ b/src/library/scala/concurrent/duration/Duration.scala
@@ -0,0 +1,698 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent.duration
+
+import java.lang.{ Double => JDouble, Long => JLong }
+import scala.language.implicitConversions
+
+object Duration {
+
+ /**
+ * Construct a Duration from the given length and unit. Observe that nanosecond precision may be lost if
+ *
+ * - the unit is NANOSECONDS
+ * - and the length has an absolute value greater than 2^53
+ *
+ * Infinite inputs (and NaN) are converted into [[Duration.Inf]], [[Duration.MinusInf]] and [[Duration.Undefined]], respectively.
+ *
+ * @throws IllegalArgumentException if the length was finite but the resulting duration cannot be expressed as a [[FiniteDuration]]
+ */
+ def apply(length: Double, unit: TimeUnit): Duration = fromNanos(unit.toNanos(1) * length)
+
+ /**
+ * Construct a finite duration from the given length and time unit. The unit given is retained
+ * throughout calculations as long as possible, so that it can be retrieved later.
+ */
+ def apply(length: Long, unit: TimeUnit): FiniteDuration = new FiniteDuration(length, unit)
+
+ /**
+ * Construct a finite duration from the given length and time unit, where the latter is
+ * looked up in a list of string representation. Valid choices are:
+ *
+ * `d, day, h, hour, min, minute, s, sec, second, ms, milli, millisecond, µs, micro, microsecond, ns, nano, nanosecond`
+ * and their pluralized forms (for every but the first mentioned form of each unit, i.e. no "ds", but "days").
+ */
+ def apply(length: Long, unit: String): FiniteDuration = new FiniteDuration(length, Duration.timeUnit(unit))
+
+ // Double stores 52 bits mantissa, but there is an implied '1' in front, making the limit 2^53
+ private[this] final val maxPreciseDouble = 9007199254740992d
+
+ /**
+ * Parse String into Duration. Format is `"<length><unit>"`, where
+ * whitespace is allowed before, between and after the parts. Infinities are
+ * designated by `"Inf"`, `"PlusInf"`, `"+Inf"` and `"-Inf"` or `"MinusInf"`.
+ *
+ * @throws NumberFormatException if format is not parseable
+ */
+ def apply(s: String): Duration = {
+ val s1: String = s filterNot (_.isWhitespace)
+ s1 match {
+ case "Inf" | "PlusInf" | "+Inf" => Inf
+ case "MinusInf" | "-Inf" => MinusInf
+ case _ =>
+ val unitName = s1.reverse takeWhile (_.isLetter) reverse;
+ timeUnit get unitName match {
+ case Some(unit) =>
+ val valueStr = s1 dropRight unitName.length
+ val valueD = JDouble.parseDouble(valueStr)
+ if (valueD >= -maxPreciseDouble && valueD <= maxPreciseDouble) Duration(valueD, unit)
+ else Duration(JLong.parseLong(valueStr), unit)
+ case _ => throw new NumberFormatException("format error " + s)
+ }
+ }
+ }
+
+ // "ms milli millisecond" -> List("ms", "milli", "millis", "millisecond", "milliseconds")
+ private[this] def words(s: String) = (s.trim split "\\s+").toList
+ private[this] def expandLabels(labels: String): List[String] = {
+ val hd :: rest = words(labels)
+ hd :: rest.flatMap(s => List(s, s + "s"))
+ }
+ private[this] val timeUnitLabels = List(
+ DAYS -> "d day",
+ HOURS -> "h hour",
+ MINUTES -> "min minute",
+ SECONDS -> "s sec second",
+ MILLISECONDS -> "ms milli millisecond",
+ MICROSECONDS -> "µs micro microsecond",
+ NANOSECONDS -> "ns nano nanosecond"
+ )
+
+ // TimeUnit => standard label
+ protected[duration] val timeUnitName: Map[TimeUnit, String] =
+ timeUnitLabels.toMap mapValues (s => words(s).last) toMap
+
+ // Label => TimeUnit
+ protected[duration] val timeUnit: Map[String, TimeUnit] =
+ timeUnitLabels flatMap { case (unit, names) => expandLabels(names) map (_ -> unit) } toMap
+
+ /**
+ * Extract length and time unit out of a string, where the format must match the description for [[Duration$.apply(String):Duration apply(String)]].
+ * The extractor will not match for malformed strings or non-finite durations.
+ */
+ def unapply(s: String): Option[(Long, TimeUnit)] =
+ ( try Some(apply(s)) catch { case _: RuntimeException => None } ) flatMap unapply
+
+ /**
+ * Extract length and time unit out of a duration, if it is finite.
+ */
+ def unapply(d: Duration): Option[(Long, TimeUnit)] =
+ if (d.isFinite) Some((d.length, d.unit)) else None
+
+ /**
+ * Construct a possibly infinite or undefined Duration from the given number of nanoseconds.
+ *
+ * - `Double.PositiveInfinity` is mapped to [[Duration.Inf]]
+ * - `Double.NegativeInfinity` is mapped to [[Duration.MinusInf]]
+ * - `Double.NaN` is mapped to [[Duration.Undefined]]
+ * - `-0d` is mapped to [[Duration.Zero]] (exactly like `0d`)
+ *
+ * The semantics of the resulting Duration objects matches the semantics of their Double
+ * counterparts with respect to arithmetic operations.
+ *
+ * @throws IllegalArgumentException if the length was finite but the resulting duration cannot be expressed as a [[FiniteDuration]]
+ */
+ def fromNanos(nanos: Double): Duration = {
+ if (nanos.isInfinite)
+ if (nanos > 0) Inf else MinusInf
+ else if (nanos.isNaN)
+ Undefined
+ else if (nanos > Long.MaxValue || nanos < Long.MinValue)
+ throw new IllegalArgumentException("trying to construct too large duration with " + nanos + "ns")
+ else
+ fromNanos((nanos + 0.5).toLong)
+ }
+
+ private[this] final val µs_per_ns = 1000L
+ private[this] final val ms_per_ns = µs_per_ns * 1000
+ private[this] final val s_per_ns = ms_per_ns * 1000
+ private[this] final val min_per_ns = s_per_ns * 60
+ private[this] final val h_per_ns = min_per_ns * 60
+ private[this] final val d_per_ns = h_per_ns * 24
+
+ /**
+ * Construct a finite duration from the given number of nanoseconds. The
+ * result will have the coarsest possible time unit which can exactly express
+ * this duration.
+ *
+ * @throws IllegalArgumentException for `Long.MinValue` since that would lead to inconsistent behavior afterwards (cannot be negated)
+ */
+ def fromNanos(nanos: Long): FiniteDuration = {
+ if (nanos % d_per_ns == 0) Duration(nanos / d_per_ns, DAYS)
+ else if (nanos % h_per_ns == 0) Duration(nanos / h_per_ns, HOURS)
+ else if (nanos % min_per_ns == 0) Duration(nanos / min_per_ns, MINUTES)
+ else if (nanos % s_per_ns == 0) Duration(nanos / s_per_ns, SECONDS)
+ else if (nanos % ms_per_ns == 0) Duration(nanos / ms_per_ns, MILLISECONDS)
+ else if (nanos % µs_per_ns == 0) Duration(nanos / µs_per_ns, MICROSECONDS)
+ else Duration(nanos, NANOSECONDS)
+ }
+
+ /**
+ * Preconstructed value of `0.days`.
+ */
+ // unit as coarse as possible to keep (_ + Zero) sane unit-wise
+ val Zero: FiniteDuration = new FiniteDuration(0, DAYS)
+
+ /**
+ * The Undefined value corresponds closely to Double.NaN:
+ *
+ * - it is the result of otherwise invalid operations
+ * - it does not equal itself (according to `equals()`)
+ * - it compares greater than any other Duration apart from itself (for which `compare` returns 0)
+ *
+ * The particular comparison semantics mirror those of Double.NaN.
+ *
+ * '''''Use `eq` when checking an input of a method against this value.'''''
+ */
+ val Undefined: Infinite = new Infinite {
+ override def toString = "Duration.Undefined"
+ override def equals(other: Any) = false
+ override def +(other: Duration): Duration = this
+ override def -(other: Duration): Duration = this
+ override def *(factor: Double): Duration = this
+ override def /(factor: Double): Duration = this
+ override def /(other: Duration): Double = Double.NaN
+ def compare(other: Duration) = if (other eq this) 0 else 1
+ def unary_- : Duration = this
+ def toUnit(unit: TimeUnit): Double = Double.NaN
+ }
+
+ sealed abstract class Infinite extends Duration {
+ def +(other: Duration): Duration = other match {
+ case x if x eq Undefined => Undefined
+ case x: Infinite if x ne this => Undefined
+ case _ => this
+ }
+ def -(other: Duration): Duration = other match {
+ case x if x eq Undefined => Undefined
+ case x: Infinite if x eq this => Undefined
+ case _ => this
+ }
+
+ def *(factor: Double): Duration =
+ if (factor == 0d || factor.isNaN) Undefined
+ else if (factor < 0d) -this
+ else this
+ def /(divisor: Double): Duration =
+ if (divisor.isNaN || divisor.isInfinite) Undefined
+ else if ((divisor compare 0d) < 0) -this
+ else this
+ def /(divisor: Duration): Double = divisor match {
+ case _: Infinite => Double.NaN
+ case x => Double.PositiveInfinity * (if ((this > Zero) ^ (divisor >= Zero)) -1 else 1)
+ }
+
+ final def isFinite() = false
+
+ private[this] def fail(what: String) = throw new IllegalArgumentException(s"$what not allowed on infinite Durations")
+ final def length: Long = fail("length")
+ final def unit: TimeUnit = fail("unit")
+ final def toNanos: Long = fail("toNanos")
+ final def toMicros: Long = fail("toMicros")
+ final def toMillis: Long = fail("toMillis")
+ final def toSeconds: Long = fail("toSeconds")
+ final def toMinutes: Long = fail("toMinutes")
+ final def toHours: Long = fail("toHours")
+ final def toDays: Long = fail("toDays")
+ }
+
+ /**
+ * Infinite duration: greater than any other (apart from Undefined) and not equal to any other
+ * but itself. This value closely corresponds to Double.PositiveInfinity,
+ * matching its semantics in arithmetic operations.
+ */
+ val Inf: Infinite = new Infinite {
+ override def toString = "Duration.Inf"
+ def compare(other: Duration) = other match {
+ case x if x eq Undefined => -1 // Undefined != Undefined
+ case x if x eq this => 0 // `case Inf` will include null checks in the byte code
+ case _ => 1
+ }
+ def unary_- : Duration = MinusInf
+ def toUnit(unit: TimeUnit): Double = Double.PositiveInfinity
+ }
+
+ /**
+ * Infinite duration: less than any other and not equal to any other
+ * but itself. This value closely corresponds to Double.NegativeInfinity,
+ * matching its semantics in arithmetic operations.
+ */
+ val MinusInf: Infinite = new Infinite {
+ override def toString = "Duration.MinusInf"
+ def compare(other: Duration) = if (other eq this) 0 else -1
+ def unary_- : Duration = Inf
+ def toUnit(unit: TimeUnit): Double = Double.NegativeInfinity
+ }
+
+ // Java Factories
+
+ /**
+ * Construct a finite duration from the given length and time unit. The unit given is retained
+ * throughout calculations as long as possible, so that it can be retrieved later.
+ */
+ def create(length: Long, unit: TimeUnit): FiniteDuration = apply(length, unit)
+ /**
+ * Construct a Duration from the given length and unit. Observe that nanosecond precision may be lost if
+ *
+ * - the unit is NANOSECONDS
+ * - and the length has an absolute value greater than 2^53
+ *
+ * Infinite inputs (and NaN) are converted into [[Duration.Inf]], [[Duration.MinusInf]] and [[Duration.Undefined]], respectively.
+ *
+ * @throws IllegalArgumentException if the length was finite but the resulting duration cannot be expressed as a [[FiniteDuration]]
+ */
+ def create(length: Double, unit: TimeUnit): Duration = apply(length, unit)
+ /**
+ * Construct a finite duration from the given length and time unit, where the latter is
+ * looked up in a list of string representation. Valid choices are:
+ *
+ * `d, day, h, hour, min, minute, s, sec, second, ms, milli, millisecond, µs, micro, microsecond, ns, nano, nanosecond`
+ * and their pluralized forms (for every but the first mentioned form of each unit, i.e. no "ds", but "days").
+ */
+ def create(length: Long, unit: String): FiniteDuration = apply(length, unit)
+ /**
+ * Parse String into Duration. Format is `"<length><unit>"`, where
+ * whitespace is allowed before, between and after the parts. Infinities are
+ * designated by `"Inf"`, `"PlusInf"`, `"+Inf"` and `"-Inf"` or `"MinusInf"`.
+ *
+ * @throws NumberFormatException if format is not parseable
+ */
+ def create(s: String): Duration = apply(s)
+
+ /**
+ * The natural ordering of durations matches the natural ordering for Double, including non-finite values.
+ */
+ implicit object DurationIsOrdered extends Ordering[Duration] {
+ def compare(a: Duration, b: Duration) = a compare b
+ }
+}
+
+/**
+ * <h2>Utility for working with java.util.concurrent.TimeUnit durations.</h2>
+ *
+ * '''''This class is not meant as a general purpose representation of time, it is
+ * optimized for the needs of `scala.concurrent`.'''''
+ *
+ * <h2>Basic Usage</h2>
+ *
+ * <p/>
+ * Examples:
+ * {{{
+ * import scala.concurrent.duration._
+ *
+ * val duration = Duration(100, MILLISECONDS)
+ * val duration = Duration(100, "millis")
+ *
+ * duration.toNanos
+ * duration < 1.second
+ * duration <= Duration.Inf
+ * }}}
+ *
+ * '''''Invoking inexpressible conversions (like calling `toSeconds` on an infinite duration) will throw an IllegalArgumentException.'''''
+ *
+ * <p/>
+ * Implicits are also provided for Int, Long and Double. Example usage:
+ * {{{
+ * import scala.concurrent.duration._
+ *
+ * val duration = 100 millis
+ * }}}
+ *
+ * '''''The DSL provided by the implicit conversions always allows construction of finite durations, even for infinite Double inputs; use Duration.Inf instead.'''''
+ *
+ * Extractors, parsing and arithmetic are also included:
+ * {{{
+ * val d = Duration("1.2 µs")
+ * val Duration(length, unit) = 5 millis
+ * val d2 = d * 2.5
+ * val d3 = d2 + 1.millisecond
+ * }}}
+ *
+ * <h2>Handling of Time Units</h2>
+ *
+ * Calculations performed on finite durations always retain the more precise unit of either operand, no matter
+ * whether a coarser unit would be able to exactly express the same duration. This means that Duration can be
+ * used as a lossless container for a (length, unit) pair if it is constructed using the corresponding methods
+ * and no arithmetic is performed on it; adding/subtracting durations should in that case be done with care.
+ *
+ * <h2>Correspondence to Double Semantics</h2>
+ *
+ * The semantics of arithmetic operations on Duration are two-fold:
+ *
+ * - exact addition/subtraction with nanosecond resolution for finite durations, independent of the summands' magnitude
+ * - isomorphic to `java.lang.Double` when it comes to infinite or undefined values
+ *
+ * The conversion between Duration and Double is done using [[Duration.toUnit]] (with unit NANOSECONDS)
+ * and [[Duration$.fromNanos(Double):Duration Duration.fromNanos(Double)]].
+ *
+ * <h2>Ordering</h2>
+ *
+ * The default ordering is consistent with the ordering of Double numbers, which means that Undefined is
+ * considered greater than all other durations, including [[Duration.Inf]].
+ *
+ * @define exc @throws IllegalArgumentException when invoked on a non-finite duration
+ *
+ * @define ovf @throws IllegalArgumentException in case of a finite overflow: the range of a finite duration is +-(2^63-1)ns, and no conversion to infinite durations takes place.
+ */
+sealed abstract class Duration extends Serializable with Ordered[Duration] {
+ /**
+ * Obtain the length of this Duration measured in the unit obtained by the `unit` method.
+ *
+ * $exc
+ */
+ def length: Long
+ /**
+ * Obtain the time unit in which the length of this duration is measured.
+ *
+ * $exc
+ */
+ def unit: TimeUnit
+ /**
+ * Return the length of this duration measured in whole nanoseconds, rounding towards zero.
+ *
+ * $exc
+ */
+ def toNanos: Long
+ /**
+ * Return the length of this duration measured in whole microseconds, rounding towards zero.
+ *
+ * $exc
+ */
+ def toMicros: Long
+ /**
+ * Return the length of this duration measured in whole milliseconds, rounding towards zero.
+ *
+ * $exc
+ */
+ def toMillis: Long
+ /**
+ * Return the length of this duration measured in whole seconds, rounding towards zero.
+ *
+ * $exc
+ */
+ def toSeconds: Long
+ /**
+ * Return the length of this duration measured in whole minutes, rounding towards zero.
+ *
+ * $exc
+ */
+ def toMinutes: Long
+ /**
+ * Return the length of this duration measured in whole hours, rounding towards zero.
+ *
+ * $exc
+ */
+ def toHours: Long
+ /**
+ * Return the length of this duration measured in whole days, rounding towards zero.
+ *
+ * $exc
+ */
+ def toDays: Long
+ /**
+ * Return the number of nanoseconds as floating point number, scaled down to the given unit.
+ * The result may not precisely represent this duration due to the Double datatype's inherent
+ * limitations (mantissa size effectively 53 bits). Non-finite durations are represented as
+ * - [[Duration.Undefined]] is mapped to Double.NaN
+ * - [[Duration.Inf]] is mapped to Double.PositiveInfinity
+ * - [[Duration.MinusInf]] is mapped to Double.NegativeInfinity
+ */
+ def toUnit(unit: TimeUnit): Double
+
+ /**
+ * Return the sum of that duration and this. When involving non-finite summands the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
+ def +(other: Duration): Duration
+ /**
+ * Return the difference of that duration and this. When involving non-finite summands the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
+ def -(other: Duration): Duration
+ /**
+ * Return this duration multiplied by the scalar factor. When involving non-finite factors the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
+ def *(factor: Double): Duration
+ /**
+ * Return this duration divided by the scalar factor. When involving non-finite factors the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
+ def /(divisor: Double): Duration
+ /**
+ * Return the quotient of this and that duration as floating-point number. The semantics are
+ * determined by Double as if calculating the quotient of the nanosecond lengths of both factors.
+ */
+ def /(divisor: Duration): Double
+ /**
+ * Negate this duration. The only two values which are mapped to themselves are [[Duration.Zero]] and [[Duration.Undefined]].
+ */
+ def unary_- : Duration
+ /**
+ * This method returns whether this duration is finite, which is not the same as
+ * `!isInfinite` for Double because this method also returns `false` for [[Duration.Undefined]].
+ */
+ def isFinite(): Boolean
+ /**
+ * Return the smaller of this and that duration as determined by the natural ordering.
+ */
+ def min(other: Duration): Duration = if (this < other) this else other
+ /**
+ * Return the larger of this and that duration as determined by the natural ordering.
+ */
+ def max(other: Duration): Duration = if (this > other) this else other
+
+ // Java API
+
+ /**
+ * Return this duration divided by the scalar factor. When involving non-finite factors the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
+ def div(divisor: Double) = this / divisor
+ /**
+ * Return the quotient of this and that duration as floating-point number. The semantics are
+ * determined by Double as if calculating the quotient of the nanosecond lengths of both factors.
+ */
+ def div(other: Duration) = this / other
+ def gt(other: Duration) = this > other
+ def gteq(other: Duration) = this >= other
+ def lt(other: Duration) = this < other
+ def lteq(other: Duration) = this <= other
+ /**
+ * Return the difference of that duration and this. When involving non-finite summands the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
+ def minus(other: Duration) = this - other
+ /**
+ * Return this duration multiplied by the scalar factor. When involving non-finite factors the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
+ def mul(factor: Double) = this * factor
+ /**
+ * Negate this duration. The only two values which are mapped to themselves are [[Duration.Zero]] and [[Duration.Undefined]].
+ */
+ def neg() = -this
+ /**
+ * Return the sum of that duration and this. When involving non-finite summands the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
+ def plus(other: Duration) = this + other
+}
+
+object FiniteDuration {
+
+ implicit object FiniteDurationIsOrdered extends Ordering[FiniteDuration] {
+ def compare(a: FiniteDuration, b: FiniteDuration) = a compare b
+ }
+
+ def apply(length: Long, unit: TimeUnit) = new FiniteDuration(length, unit)
+ def apply(length: Long, unit: String) = new FiniteDuration(length, Duration.timeUnit(unit))
+
+ // limit on abs. value of durations in their units
+ private final val max_ns = Long.MaxValue
+ private final val max_µs = max_ns / 1000
+ private final val max_ms = max_µs / 1000
+ private final val max_s = max_ms / 1000
+ private final val max_min= max_s / 60
+ private final val max_h = max_min / 60
+ private final val max_d = max_h / 24
+}
+
+/**
+ * This class represents a finite duration. Its addition and subtraction operators are overloaded to retain
+ * this guarantee statically. The range of this class is limited to +-(2^63-1)ns, which is roughly 292 years.
+ */
+final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration {
+ import FiniteDuration._
+ import Duration._
+
+ private[this] def bounded(max: Long) = -max <= length && length <= max
+
+ require(unit match {
+ /*
+ * enforce the 2^63-1 ns limit, must be pos/neg symmetrical because of unary_-
+ */
+ case NANOSECONDS ⇒ bounded(max_ns)
+ case MICROSECONDS ⇒ bounded(max_µs)
+ case MILLISECONDS ⇒ bounded(max_ms)
+ case SECONDS ⇒ bounded(max_s)
+ case MINUTES ⇒ bounded(max_min)
+ case HOURS ⇒ bounded(max_h)
+ case DAYS ⇒ bounded(max_d)
+ case _ ⇒
+ val v = DAYS.convert(length, unit)
+ -max_d <= v && v <= max_d
+ }, "Duration is limited to +-(2^63-1)ns (ca. 292 years)")
+
+ def toNanos = unit.toNanos(length)
+ def toMicros = unit.toMicros(length)
+ def toMillis = unit.toMillis(length)
+ def toSeconds = unit.toSeconds(length)
+ def toMinutes = unit.toMinutes(length)
+ def toHours = unit.toHours(length)
+ def toDays = unit.toDays(length)
+ def toUnit(u: TimeUnit) = toNanos.toDouble / NANOSECONDS.convert(1, u)
+
+ /**
+ * Construct a [[Deadline]] from this duration by adding it to the current instant `Deadline.now`.
+ */
+ def fromNow: Deadline = Deadline.now + this
+
+ private[this] def unitString = timeUnitName(unit) + ( if (length == 1) "" else "s" )
+ override def toString = "" + length + " " + unitString
+
+ def compare(other: Duration) = other match {
+ case x: FiniteDuration => toNanos compare x.toNanos
+ case _ => -(other compare this)
+ }
+
+ // see https://www.securecoding.cert.org/confluence/display/java/NUM00-J.+Detect+or+prevent+integer+overflow
+ private[this] def safeAdd(a: Long, b: Long): Long = {
+ if ((b > 0) && (a > Long.MaxValue - b) ||
+ (b < 0) && (a < Long.MinValue - b)) throw new IllegalArgumentException("integer overflow")
+ a + b
+ }
+ private[this] def add(otherLength: Long, otherUnit: TimeUnit): FiniteDuration = {
+ val commonUnit = if (otherUnit.convert(1, unit) == 0) unit else otherUnit
+ val totalLength = safeAdd(commonUnit.convert(length, unit), commonUnit.convert(otherLength, otherUnit))
+ new FiniteDuration(totalLength, commonUnit)
+ }
+
+ def +(other: Duration) = other match {
+ case x: FiniteDuration => add(x.length, x.unit)
+ case _ => other
+ }
+ def -(other: Duration) = other match {
+ case x: FiniteDuration => add(-x.length, x.unit)
+ case _ => other
+ }
+
+ def *(factor: Double) =
+ if (!factor.isInfinite) fromNanos(toNanos * factor)
+ else if (factor.isNaN) Undefined
+ else if ((factor > 0) ^ (this < Zero)) Inf
+ else MinusInf
+
+ def /(divisor: Double) =
+ if (!divisor.isInfinite) fromNanos(toNanos / divisor)
+ else if (divisor.isNaN) Undefined
+ else Zero
+
+ // if this is made a constant, then scalac will elide the conditional and always return +0.0, SI-6331
+ private[this] def minusZero = -0d
+ def /(divisor: Duration): Double =
+ if (divisor.isFinite) toNanos.toDouble / divisor.toNanos
+ else if (divisor eq Undefined) Double.NaN
+ else if ((length < 0) ^ (divisor > Zero)) 0d
+ else minusZero
+
+ // overloaded methods taking FiniteDurations, so that you can calculate while statically staying finite
+ def +(other: FiniteDuration) = add(other.length, other.unit)
+ def -(other: FiniteDuration) = add(-other.length, other.unit)
+ def plus(other: FiniteDuration) = this + other
+ def minus(other: FiniteDuration) = this - other
+ def min(other: FiniteDuration) = if (this < other) this else other
+ def max(other: FiniteDuration) = if (this > other) this else other
+
+ // overloaded methods taking Long so that you can calculate while statically staying finite
+
+ /**
+ * Return the quotient of this duration and the given integer factor.
+ *
+ * @throws ArithmeticException if the factor is 0
+ */
+ def /(divisor: Long) = fromNanos(toNanos / divisor)
+
+ /**
+ * Return the product of this duration and the given integer factor.
+ *
+ * @throws IllegalArgumentException if the result would overflow the range of FiniteDuration
+ */
+ def *(factor: Long) = new FiniteDuration(safeMul(length, factor), unit)
+
+ /*
+ * This method avoids the use of Long division, which saves 95% of the time spent,
+ * by checking that there are enough leading zeros so that the result has a chance
+ * to fit into a Long again; the remaining edge cases are caught by using the sign
+ * of the product for overflow detection.
+ *
+ * This method is not general purpose because it disallows the (otherwise legal)
+ * case of Long.MinValue * 1, but that is okay for use in FiniteDuration, since
+ * Long.MinValue is not a legal `length` anyway.
+ */
+ private def safeMul(_a: Long, _b: Long): Long = {
+ val a = math.abs(_a)
+ val b = math.abs(_b)
+ import java.lang.Long.{ numberOfLeadingZeros => leading }
+ if (leading(a) + leading(b) < 64) throw new IllegalArgumentException("multiplication overflow")
+ val product = a * b
+ if (product < 0) throw new IllegalArgumentException("multiplication overflow")
+ if (a == _a ^ b == _b) -product else product
+ }
+
+ /**
+ * Return the quotient of this duration and the given integer factor.
+ *
+ * @throws ArithmeticException if the factor is 0
+ */
+ def div(divisor: Long) = this / divisor
+
+ /**
+ * Return the product of this duration and the given integer factor.
+ *
+ * @throws IllegalArgumentException if the result would overflow the range of FiniteDuration
+ */
+ def mul(factor: Long) = this * factor
+
+ def unary_- = Duration(-length, unit)
+
+ final def isFinite() = true
+
+ override def equals(other: Any) = other match {
+ case x: FiniteDuration => toNanos == x.toNanos
+ case _ => super.equals(other)
+ }
+ override def hashCode = toNanos.toInt
+}
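As a rough illustration of the overflow-checked arithmetic above (names such as FiniteDurationArithmeticSketch are made up; the snippet assumes the scala.concurrent.duration package introduced by this same patch):

import scala.concurrent.duration._

object FiniteDurationArithmeticSketch extends App {
  // Ordinary finite arithmetic goes through add/safeAdd and safeMul.
  assert(Duration(90, MINUTES) + Duration(30, MINUTES) == Duration(2, HOURS))
  assert(Duration(3, SECONDS) * 4L == Duration(12, SECONDS))

  // safeMul rejects products that cannot fit into a Long.
  try { Duration(2, DAYS) * Long.MaxValue; sys.error("expected overflow") }
  catch { case _: IllegalArgumentException => println("multiplication overflow detected") }

  // Division by an infinite duration yields a (signed) zero, see the SI-6331 note above.
  assert(Duration(3, SECONDS) / Duration.Inf == 0d)
}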
diff --git a/src/library/scala/concurrent/duration/DurationConversions.scala b/src/library/scala/concurrent/duration/DurationConversions.scala
new file mode 100644
index 0000000000..2c7e192a0e
--- /dev/null
+++ b/src/library/scala/concurrent/duration/DurationConversions.scala
@@ -0,0 +1,92 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent.duration
+
+import DurationConversions._
+
+// Would be nice to limit the visibility of this trait a little bit,
+// but it crashes scalac to do so.
+trait DurationConversions extends Any {
+ protected def durationIn(unit: TimeUnit): FiniteDuration
+
+ def nanoseconds = durationIn(NANOSECONDS)
+ def nanos = nanoseconds
+ def nanosecond = nanoseconds
+ def nano = nanoseconds
+
+ def microseconds = durationIn(MICROSECONDS)
+ def micros = microseconds
+ def microsecond = microseconds
+ def micro = microseconds
+
+ def milliseconds = durationIn(MILLISECONDS)
+ def millis = milliseconds
+ def millisecond = milliseconds
+ def milli = milliseconds
+
+ def seconds = durationIn(SECONDS)
+ def second = seconds
+
+ def minutes = durationIn(MINUTES)
+ def minute = minutes
+
+ def hours = durationIn(HOURS)
+ def hour = hours
+
+ def days = durationIn(DAYS)
+ def day = days
+
+ def nanoseconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(nanoseconds)
+ def nanos[C](c: C)(implicit ev: Classifier[C]): ev.R = nanoseconds(c)
+ def nanosecond[C](c: C)(implicit ev: Classifier[C]): ev.R = nanoseconds(c)
+ def nano[C](c: C)(implicit ev: Classifier[C]): ev.R = nanoseconds(c)
+
+ def microseconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(microseconds)
+ def micros[C](c: C)(implicit ev: Classifier[C]): ev.R = microseconds(c)
+ def microsecond[C](c: C)(implicit ev: Classifier[C]): ev.R = microseconds(c)
+ def micro[C](c: C)(implicit ev: Classifier[C]): ev.R = microseconds(c)
+
+ def milliseconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(milliseconds)
+ def millis[C](c: C)(implicit ev: Classifier[C]): ev.R = milliseconds(c)
+ def millisecond[C](c: C)(implicit ev: Classifier[C]): ev.R = milliseconds(c)
+ def milli[C](c: C)(implicit ev: Classifier[C]): ev.R = milliseconds(c)
+
+ def seconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(seconds)
+ def second[C](c: C)(implicit ev: Classifier[C]): ev.R = seconds(c)
+
+ def minutes[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(minutes)
+ def minute[C](c: C)(implicit ev: Classifier[C]): ev.R = minutes(c)
+
+ def hours[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(hours)
+ def hour[C](c: C)(implicit ev: Classifier[C]): ev.R = hours(c)
+
+ def days[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(days)
+ def day[C](c: C)(implicit ev: Classifier[C]): ev.R = days(c)
+}
+
+/**
+ * This object holds the machinery that makes the duration DSL work; it is not meant for direct use.
+ */
+object DurationConversions {
+ trait Classifier[C] {
+ type R
+ def convert(d: FiniteDuration): R
+ }
+
+ implicit object spanConvert extends Classifier[span.type] {
+ type R = FiniteDuration
+ def convert(d: FiniteDuration) = d
+ }
+
+ implicit object fromNowConvert extends Classifier[fromNow.type] {
+ type R = Deadline
+ def convert(d: FiniteDuration) = Deadline.now + d
+ }
+
+}
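The two Classifier instances above are what give the DSL's closing tokens their result types; a brief sketch (relying on the duration package object added further down in this patch; ClassifierSketch is a made-up name):

import scala.concurrent.duration._

object ClassifierSketch extends App {
  // spanConvert keeps the FiniteDuration; fromNowConvert turns it into a Deadline.
  val pause: FiniteDuration = 2 seconds span
  val cutoff: Deadline = 3 seconds fromNow
  println(s"$pause, ${cutoff.timeLeft} left")
}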
diff --git a/src/library/scala/concurrent/duration/package.scala b/src/library/scala/concurrent/duration/package.scala
new file mode 100644
index 0000000000..2fd735f19e
--- /dev/null
+++ b/src/library/scala/concurrent/duration/package.scala
@@ -0,0 +1,75 @@
+package scala.concurrent
+
+import scala.language.implicitConversions
+
+package object duration {
+ /**
+ * This object can be used as a closing token if you prefer the dot-less style but do not
+ * want to enable language.postfixOps:
+ *
+ * {{{
+ * import scala.concurrent.duration._
+ *
+ * val duration = 2 seconds span
+ * }}}
+ */
+ object span
+
+ /**
+ * This object can be used as a closing token for declaring a deadline at some future point
+ * in time:
+ *
+ * {{{
+ * import scala.concurrent.duration._
+ *
+ * val deadline = 3 seconds fromNow
+ * }}}
+ */
+ object fromNow
+
+ type TimeUnit = java.util.concurrent.TimeUnit
+ final val DAYS = java.util.concurrent.TimeUnit.DAYS
+ final val HOURS = java.util.concurrent.TimeUnit.HOURS
+ final val MICROSECONDS = java.util.concurrent.TimeUnit.MICROSECONDS
+ final val MILLISECONDS = java.util.concurrent.TimeUnit.MILLISECONDS
+ final val MINUTES = java.util.concurrent.TimeUnit.MINUTES
+ final val NANOSECONDS = java.util.concurrent.TimeUnit.NANOSECONDS
+ final val SECONDS = java.util.concurrent.TimeUnit.SECONDS
+
+ implicit def pairIntToDuration(p: (Int, TimeUnit)): Duration = Duration(p._1, p._2)
+ implicit def pairLongToDuration(p: (Long, TimeUnit)): FiniteDuration = Duration(p._1, p._2)
+ implicit def durationToPair(d: Duration): (Long, TimeUnit) = (d.length, d.unit)
+
+ implicit final class DurationInt(val n: Int) extends AnyVal with DurationConversions {
+ override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n, unit)
+ }
+
+ implicit final class DurationLong(val n: Long) extends AnyVal with DurationConversions {
+ override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n, unit)
+ }
+
+ implicit final class DurationDouble(val d: Double) extends AnyVal with DurationConversions {
+ override protected def durationIn(unit: TimeUnit): FiniteDuration =
+ Duration(d, unit) match {
+ case f: FiniteDuration => f
+ case _ => throw new IllegalArgumentException("Duration DSL not applicable to " + d)
+ }
+ }
+
+ /*
+ * Avoid reflection-based invocation by using concrete (non-duck-typed) wrapper classes
+ */
+ implicit final class IntMult(val i: Int) extends AnyVal {
+ def *(d: Duration) = d * i
+ def *(d: FiniteDuration) = d * i
+ }
+
+ implicit final class LongMult(val i: Long) extends AnyVal {
+ def *(d: Duration) = d * i
+ def *(d: FiniteDuration) = d * i
+ }
+
+ implicit final class DoubleMult(val f: Double) extends AnyVal {
+ def *(d: Duration) = d * f
+ }
+}
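A short, illustrative tour of the enrichments defined above (DurationDslSketch is a made-up name):

import scala.concurrent.duration._

object DurationDslSketch extends App {
  val d1 = 500.millis              // DurationInt
  val d2 = 1.5.minutes             // DurationDouble, must stay finite
  val d3: Duration = (10, SECONDS) // pairIntToDuration
  val d4 = 2 * 30.seconds          // IntMult, avoiding structural-type reflection

  println(Seq(d1, d2, d3, d4).mkString(", "))
}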
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
index 875a558887..c517a05a81 100644
--- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -14,7 +14,6 @@ import java.util.concurrent.{ LinkedBlockingQueue, Callable, Executor, ExecutorS
import java.util.Collection
import scala.concurrent.forkjoin._
import scala.concurrent.{ BlockContext, ExecutionContext, Awaitable, CanAwait, ExecutionContextExecutor, ExecutionContextExecutorService }
-import scala.concurrent.util.Duration
import scala.util.control.NonFatal
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index b19bed004b..ff268d850c 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -8,16 +8,12 @@
package scala.concurrent.impl
-
-
-import java.util.concurrent.TimeUnit.NANOSECONDS
import scala.concurrent.{ ExecutionContext, CanAwait, OnCompleteRunnable, TimeoutException, ExecutionException }
-import scala.concurrent.util.Duration
+import scala.concurrent.duration.{ Duration, Deadline, FiniteDuration, NANOSECONDS }
import scala.annotation.tailrec
import scala.util.control.NonFatal
import scala.util.{ Try, Success, Failure }
-
private[concurrent] trait Promise[T] extends scala.concurrent.Promise[T] with scala.concurrent.Future[T] {
def future: this.type = this
}
@@ -48,7 +44,7 @@ private[concurrent] object Promise {
case Failure(t) => resolver(t)
case _ => source
}
-
+
private def resolver[T](throwable: Throwable): Try[T] = throwable match {
case t: scala.runtime.NonLocalReturnControl[_] => Success(t.value.asInstanceOf[T])
case t: scala.util.control.ControlThrowable => Failure(new ExecutionException("Boxed ControlThrowable", t))
@@ -56,38 +52,48 @@ private[concurrent] object Promise {
case e: Error => Failure(new ExecutionException("Boxed Error", e))
case t => Failure(t)
}
-
+
/** Default promise implementation.
*/
class DefaultPromise[T] extends AbstractPromise with Promise[T] { self =>
updateState(null, Nil) // Start at "No callbacks"
-
+
protected final def tryAwait(atMost: Duration): Boolean = {
@tailrec
- def awaitUnsafe(waitTimeNanos: Long): Boolean = {
- if (value.isEmpty && waitTimeNanos > 0) {
- val ms = NANOSECONDS.toMillis(waitTimeNanos)
- val ns = (waitTimeNanos % 1000000l).toInt // as per object.wait spec
- val start = System.nanoTime()
- try {
- synchronized {
- if (!isCompleted) wait(ms, ns) // previously - this was a `while`, ending up in an infinite loop
- }
- } catch {
- case e: InterruptedException =>
- }
+ def awaitUnsafe(deadline: Deadline, nextWait: FiniteDuration): Boolean = {
+ if (!isCompleted && nextWait > Duration.Zero) {
+ val ms = nextWait.toMillis
+ val ns = (nextWait.toNanos % 1000000L).toInt // as per object.wait spec
- awaitUnsafe(waitTimeNanos - (System.nanoTime() - start))
+ synchronized { if (!isCompleted) wait(ms, ns) }
+
+ awaitUnsafe(deadline, deadline.timeLeft)
} else
isCompleted
}
- awaitUnsafe(if (atMost.isFinite) atMost.toNanos else Long.MaxValue)
+ @tailrec
+ def awaitUnbounded(): Boolean = {
+ if (isCompleted) true
+ else {
+ synchronized { if (!isCompleted) wait() }
+ awaitUnbounded()
+ }
+ }
+
+ import Duration.Undefined
+ atMost match {
+ case u if u eq Undefined => throw new IllegalArgumentException("cannot wait for Undefined period")
+ case Duration.Inf => awaitUnbounded
+ case Duration.MinusInf => isCompleted
+ case f: FiniteDuration => if (f > Duration.Zero) awaitUnsafe(f.fromNow, f) else isCompleted
+ }
}
@throws(classOf[TimeoutException])
+ @throws(classOf[InterruptedException])
def ready(atMost: Duration)(implicit permit: CanAwait): this.type =
if (isCompleted || tryAwait(atMost)) this
- else throw new TimeoutException("Futures timed out after [" + atMost.toMillis + "] milliseconds")
+ else throw new TimeoutException("Futures timed out after [" + atMost + "]")
@throws(classOf[Exception])
def result(atMost: Duration)(implicit permit: CanAwait): T =
@@ -101,7 +107,7 @@ private[concurrent] object Promise {
case _ => None
}
- override def isCompleted(): Boolean = getState match { // Cheaper than boxing result into Option due to "def value"
+ override def isCompleted: Boolean = getState match { // Cheaper than boxing result into Option due to "def value"
case _: Try[_] => true
case _ => false
}
@@ -150,7 +156,7 @@ private[concurrent] object Promise {
val value = Some(resolveTry(suppliedValue))
- override def isCompleted(): Boolean = true
+ override def isCompleted: Boolean = true
def tryComplete(value: Try[T]): Boolean = false
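The rewritten tryAwait above drives its re-checks off a Deadline instead of hand-rolled nanosecond bookkeeping; the same pattern can be sketched in user code (assuming the new Deadline keeps the fromNow/isOverdue() helpers of the removed scala.concurrent.util version; pollUntil is a made-up example):

import scala.concurrent.duration._

object DeadlineLoopSketch {
  /** Re-check a condition until it holds or the time budget is spent. */
  def pollUntil(cond: => Boolean, atMost: FiniteDuration): Boolean = {
    val deadline = atMost.fromNow
    @scala.annotation.tailrec
    def loop(): Boolean =
      if (cond) true
      else if (deadline.isOverdue()) false
      else { Thread.sleep(10); loop() }
    loop()
  }
}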
diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala
index a2ef42fac8..e683732e41 100644
--- a/src/library/scala/concurrent/package.scala
+++ b/src/library/scala/concurrent/package.scala
@@ -8,7 +8,7 @@
package scala
-import scala.concurrent.util.Duration
+import scala.concurrent.duration.Duration
import scala.annotation.implicitNotFound
/** This package object contains primitives for concurrent and parallel programming.
@@ -67,26 +67,39 @@ package concurrent {
*/
object Await {
/**
+ * Await the "resolved" state of this Awaitable.
* Invokes ready() on the awaitable, properly wrapped by a call to `scala.concurrent.blocking`.
- * ready() blocks until the awaitable has completed or the timeout expires.
*
- * Throws a TimeoutException if the timeout expires, as that is in the contract of `Awaitable.ready`.
- * @param awaitable the `Awaitable` on which `ready` is to be called
- * @param atMost the maximum timeout for which to wait
- * @return the result of `awaitable.ready` which is defined to be the awaitable itself.
+ * @param awaitable
+ * the `Awaitable` on which `ready` is to be called
+ * @param atMost
+ * maximum wait time, which may be negative (no waiting is done),
+ * [[Duration.Inf]] for unbounded waiting, or a finite positive
+ * duration
+ * @return the awaitable itself
+ * @throws InterruptedException if the wait call was interrupted
+ * @throws TimeoutException if after waiting for the specified time this Awaitable is still not ready
+ * @throws IllegalArgumentException if `atMost` is [[Duration.Undefined]]
*/
@throws(classOf[TimeoutException])
+ @throws(classOf[InterruptedException])
def ready[T](awaitable: Awaitable[T], atMost: Duration): awaitable.type =
blocking(awaitable.ready(atMost)(AwaitPermission))
/**
+ * Await and return the result of this Awaitable, which is either a value of type T or a thrown exception (any Throwable).
* Invokes result() on the awaitable, properly wrapped by a call to `scala.concurrent.blocking`.
- * result() blocks until the awaitable has completed or the timeout expires.
*
- * Throws a TimeoutException if the timeout expires, or any exception thrown by `Awaitable.result`.
- * @param awaitable the `Awaitable` on which `result` is to be called
- * @param atMost the maximum timeout for which to wait
- * @return the result of `awaitable.result`
+ * @param awaitable
+ * the `Awaitable` on which `result` is to be called
+ * @param atMost
+ * maximum wait time, which may be negative (no waiting is done),
+ * [[Duration.Inf]] for unbounded waiting, or a finite positive
+ * duration
+ * @return the value if the Awaitable was successful within the specified maximum wait time
+ * @throws InterruptedException if the wait call was interrupted
+ * @throws TimeoutException if after waiting for the specified time this Awaitable is still not ready
+ * @throws IllegalArgumentException if `atMost` is [[Duration.Undefined]]
*/
@throws(classOf[Exception])
def result[T](awaitable: Awaitable[T], atMost: Duration): T =
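A minimal sketch of the contract documented above (it assumes the global execution context; AwaitSketch is a made-up name):

import scala.concurrent.{ Await, Future }
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

object AwaitSketch extends App {
  val f = Future { 21 * 2 }

  // Bounded wait: a TimeoutException if the future is not completed within 2 seconds.
  val answer = Await.result(f, 2.seconds)

  // Unbounded waiting must be requested explicitly; Duration.Undefined is rejected
  // with an IllegalArgumentException.
  Await.ready(f, Duration.Inf)

  println(answer)
}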
diff --git a/src/library/scala/concurrent/util/Duration.scala b/src/library/scala/concurrent/util/Duration.scala
deleted file mode 100644
index bab664727e..0000000000
--- a/src/library/scala/concurrent/util/Duration.scala
+++ /dev/null
@@ -1,537 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent.util
-
-import java.util.concurrent.TimeUnit
-import TimeUnit._
-import java.lang.{ Double => JDouble }
-import language.implicitConversions
-
-case class Deadline private (time: Duration) {
- def +(other: Duration): Deadline = copy(time = time + other)
- def -(other: Duration): Deadline = copy(time = time - other)
- def -(other: Deadline): Duration = time - other.time
- def timeLeft: Duration = this - Deadline.now
- def hasTimeLeft(): Boolean = !isOverdue()
- def isOverdue(): Boolean = (time.toNanos - System.nanoTime()) < 0
-}
-
-object Deadline {
- def now: Deadline = Deadline(Duration(System.nanoTime, NANOSECONDS))
-}
-
-object Duration {
- implicit def timeLeft(implicit d: Deadline): Duration = d.timeLeft
-
- def apply(length: Long, unit: TimeUnit): FiniteDuration = new FiniteDuration(length, unit)
- def apply(length: Double, unit: TimeUnit): FiniteDuration = fromNanos(unit.toNanos(1) * length)
- def apply(length: Long, unit: String): FiniteDuration = new FiniteDuration(length, Duration.timeUnit(unit))
-
- /**
- * Construct a Duration by parsing a String. In case of a format error, a
- * RuntimeException is thrown. See `unapply(String)` for more information.
- */
- def apply(s: String): Duration = unapply(s) getOrElse sys.error("format error " + s)
-
- private val RE = ("""^\s*([\+|-]?\d+(?:\.\d+)?)\s*""" + // length part
- "(?:" + // units are distinguished in separate match groups
- "(d|day|days)|" +
- "(h|hour|hours)|" +
- "(min|minute|minutes)|" +
- "(s|sec|second|seconds)|" +
- "(ms|milli|millis|millisecond|milliseconds)|" +
- "(µs|micro|micros|microsecond|microseconds)|" +
- "(ns|nano|nanos|nanosecond|nanoseconds)" +
- """)\s*$""").r // close the non-capturing group
- private val REinf = """^\s*(?:\+|Plus)?Inf\s*$""".r
- private val REminf = """^\s*(?:-|Minus)Inf\s*""".r
-
- /**
- * Deconstruct a Duration into `Long` length and [[java.util.concurrent.TimeUnit]] if it is a
- * [[scala.util.concurrent.FiniteDuration]].
- *
- * @param d Duration to be deconstructed.
- */
- def unapply(d: Duration): Option[(Long, TimeUnit)] = {
- if (d.finite_?) {
- Some((d.length, d.unit))
- } else {
- None
- }
- }
-
- /**
- * Parse String, return None if no match. Format is `"<length><unit>"`, where
- * whitespace is allowed before, between and after the parts. Infinities are
- * designated by `"Inf"`, `"PlusInf"`, `"+Inf"` and `"-Inf"` or `"MinusInf"`.
- */
- def unapply(s: String): Option[Duration] = s match {
- case RE(length, d, h, m, s, ms, mus, ns) ⇒
- if (d ne null)
- Some(Duration(JDouble.parseDouble(length), DAYS))
- else if (h ne null)
- Some(Duration(JDouble.parseDouble(length), HOURS))
- else if (m ne null)
- Some(Duration(JDouble.parseDouble(length), MINUTES))
- else if (s ne null)
- Some(Duration(JDouble.parseDouble(length), SECONDS))
- else if (ms ne null)
- Some(Duration(JDouble.parseDouble(length), MILLISECONDS))
- else if (mus ne null)
- Some(Duration(JDouble.parseDouble(length), MICROSECONDS))
- else if (ns ne null)
- Some(Duration(JDouble.parseDouble(length), NANOSECONDS))
- else
- sys.error("made some error in regex (should not be possible)")
- case REinf() ⇒ Some(Inf)
- case REminf() ⇒ Some(MinusInf)
- case _ ⇒ None
- }
-
- def fromNanos(nanos: Double): FiniteDuration =
- fromNanos((nanos + 0.5).asInstanceOf[Long])
-
- def fromNanos(nanos: Long): FiniteDuration = {
- if (nanos % 86400000000000L == 0) {
- Duration(nanos / 86400000000000L, DAYS)
- } else if (nanos % 3600000000000L == 0) {
- Duration(nanos / 3600000000000L, HOURS)
- } else if (nanos % 60000000000L == 0) {
- Duration(nanos / 60000000000L, MINUTES)
- } else if (nanos % 1000000000L == 0) {
- Duration(nanos / 1000000000L, SECONDS)
- } else if (nanos % 1000000L == 0) {
- Duration(nanos / 1000000L, MILLISECONDS)
- } else if (nanos % 1000L == 0) {
- Duration(nanos / 1000L, MICROSECONDS)
- } else {
- Duration(nanos, NANOSECONDS)
- }
- }
-
- /**
- * Parse TimeUnit from string representation.
- */
- protected[util] def timeUnit(unit: String): TimeUnit = unit.toLowerCase match {
- case "d" | "day" | "days" => DAYS
- case "h" | "hour" | "hours" => HOURS
- case "min" | "minute" | "minutes" => MINUTES
- case "s" | "sec" | "second" | "seconds" => SECONDS
- case "ms" | "milli" | "millis" | "millisecond" | "milliseconds" => MILLISECONDS
- case "µs" | "micro" | "micros" | "microsecond" | "microseconds" => MICROSECONDS
- case "ns" | "nano" | "nanos" | "nanosecond" | "nanoseconds" => NANOSECONDS
- }
-
- val Zero: FiniteDuration = new FiniteDuration(0, NANOSECONDS)
- val Undefined: Duration = new Duration with Infinite {
- override def toString = "Duration.Undefined"
- override def equals(other: Any) = other.asInstanceOf[AnyRef] eq this
- override def +(other: Duration): Duration = throw new IllegalArgumentException("cannot add Undefined duration")
- override def -(other: Duration): Duration = throw new IllegalArgumentException("cannot subtract Undefined duration")
- override def *(factor: Double): Duration = throw new IllegalArgumentException("cannot multiply Undefined duration")
- override def /(factor: Double): Duration = throw new IllegalArgumentException("cannot divide Undefined duration")
- override def /(other: Duration): Double = throw new IllegalArgumentException("cannot divide Undefined duration")
- def compare(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration")
- def unary_- : Duration = throw new IllegalArgumentException("cannot negate Undefined duration")
- }
-
- trait Infinite {
- this: Duration =>
-
- def +(other: Duration): Duration =
- other match {
- case _: this.type => this
- case _: Infinite => throw new IllegalArgumentException("illegal addition of infinities")
- case _ => this
- }
- def -(other: Duration): Duration =
- other match {
- case _: this.type => throw new IllegalArgumentException("illegal subtraction of infinities")
- case _ => this
- }
- def *(factor: Double): Duration = this
- def /(factor: Double): Duration = this
- def /(other: Duration): Double =
- other match {
- case _: Infinite => throw new IllegalArgumentException("illegal division of infinities")
- // maybe questionable but pragmatic: Inf / 0 => Inf
- case x => Double.PositiveInfinity * (if ((this > Zero) ^ (other >= Zero)) -1 else 1)
- }
-
- def finite_? = false
-
- def length: Long = throw new IllegalArgumentException("length not allowed on infinite Durations")
- def unit: TimeUnit = throw new IllegalArgumentException("unit not allowed on infinite Durations")
- def toNanos: Long = throw new IllegalArgumentException("toNanos not allowed on infinite Durations")
- def toMicros: Long = throw new IllegalArgumentException("toMicros not allowed on infinite Durations")
- def toMillis: Long = throw new IllegalArgumentException("toMillis not allowed on infinite Durations")
- def toSeconds: Long = throw new IllegalArgumentException("toSeconds not allowed on infinite Durations")
- def toMinutes: Long = throw new IllegalArgumentException("toMinutes not allowed on infinite Durations")
- def toHours: Long = throw new IllegalArgumentException("toHours not allowed on infinite Durations")
- def toDays: Long = throw new IllegalArgumentException("toDays not allowed on infinite Durations")
- def toUnit(unit: TimeUnit): Double = throw new IllegalArgumentException("toUnit not allowed on infinite Durations")
-
- }
-
- /**
- * Infinite duration: greater than any other and not equal to any other,
- * including itself.
- */
- val Inf: Duration = new Duration with Infinite {
- override def toString = "Duration.Inf"
- def compare(other: Duration) = if (other eq this) 0 else 1
- def unary_- : Duration = MinusInf
- }
-
- /**
- * Infinite negative duration: lesser than any other and not equal to any other,
- * including itself.
- */
- val MinusInf: Duration = new Duration with Infinite {
- override def toString = "Duration.MinusInf"
- def compare(other: Duration) = if (other eq this) 0 else -1
- def unary_- : Duration = Inf
- }
-
- // Java Factories
- def create(length: Long, unit: TimeUnit): FiniteDuration = apply(length, unit)
- def create(length: Double, unit: TimeUnit): FiniteDuration = apply(length, unit)
- def create(length: Long, unit: String): FiniteDuration = apply(length, unit)
- def parse(s: String): Duration = unapply(s).get
-
- implicit object DurationIsOrdered extends Ordering[Duration] {
- def compare(a: Duration, b: Duration) = a compare b
- }
-}
-
-/**
- * Utility for working with java.util.concurrent.TimeUnit durations.
- *
- * <p/>
- * Examples:
- * <pre>
- * import scala.concurrent.util.Duration
- * import java.util.concurrent.TimeUnit
- *
- * val duration = Duration(100, MILLISECONDS)
- * val duration = Duration(100, "millis")
- *
- * duration.toNanos
- * duration < 1.second
- * duration <= Duration.Inf
- * </pre>
- *
- * <p/>
- * Implicits are also provided for Int, Long and Double. Example usage:
- * <pre>
- * import scala.concurrent.util.Duration._
- *
- * val duration = 100 millis
- * </pre>
- *
- * Extractors, parsing and arithmetic are also included:
- * <pre>
- * val d = Duration("1.2 µs")
- * val Duration(length, unit) = 5 millis
- * val d2 = d * 2.5
- * val d3 = d2 + 1.millisecond
- * </pre>
- */
-abstract class Duration extends Serializable with Ordered[Duration] {
- def length: Long
- def unit: TimeUnit
- def toNanos: Long
- def toMicros: Long
- def toMillis: Long
- def toSeconds: Long
- def toMinutes: Long
- def toHours: Long
- def toDays: Long
- def toUnit(unit: TimeUnit): Double
-
- def +(other: Duration): Duration
- def -(other: Duration): Duration
- def *(factor: Double): Duration
- def /(factor: Double): Duration
- def /(other: Duration): Double
- def unary_- : Duration
- def finite_? : Boolean
- def min(other: Duration): Duration = if (this < other) this else other
- def max(other: Duration): Duration = if (this > other) this else other
- def fromNow: Deadline = Deadline.now + this
-
- // Java API
- def lt(other: Duration) = this < other
- def lteq(other: Duration) = this <= other
- def gt(other: Duration) = this > other
- def gteq(other: Duration) = this >= other
- def plus(other: Duration) = this + other
- def minus(other: Duration) = this - other
- def mul(factor: Double) = this * factor
- def div(factor: Double) = this / factor
- def div(other: Duration) = this / other
- def neg() = -this
- def isFinite() = finite_?
-}
-
-object FiniteDuration {
- implicit object FiniteDurationIsOrdered extends Ordering[FiniteDuration] {
- def compare(a: FiniteDuration, b: FiniteDuration) = a compare b
- }
-
- def apply(length: Long, unit: TimeUnit) = new FiniteDuration(length, unit)
-
- def apply(length: Long, unit: String) = new FiniteDuration(length, Duration.timeUnit(unit))
-
-}
-
-class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration {
- import Duration._
-
- def toNanos = unit.toNanos(length)
- def toMicros = unit.toMicros(length)
- def toMillis = unit.toMillis(length)
- def toSeconds = unit.toSeconds(length)
- def toMinutes = unit.toMinutes(length)
- def toHours = unit.toHours(length)
- def toDays = unit.toDays(length)
- def toUnit(u: TimeUnit) = toNanos.toDouble / NANOSECONDS.convert(1, u)
-
- override def toString = this match {
- case Duration(1, DAYS) => "1 day"
- case Duration(x, DAYS) => x + " days"
- case Duration(1, HOURS) => "1 hour"
- case Duration(x, HOURS) => x + " hours"
- case Duration(1, MINUTES) => "1 minute"
- case Duration(x, MINUTES) => x + " minutes"
- case Duration(1, SECONDS) => "1 second"
- case Duration(x, SECONDS) => x + " seconds"
- case Duration(1, MILLISECONDS) => "1 millisecond"
- case Duration(x, MILLISECONDS) => x + " milliseconds"
- case Duration(1, MICROSECONDS) => "1 microsecond"
- case Duration(x, MICROSECONDS) => x + " microseconds"
- case Duration(1, NANOSECONDS) => "1 nanosecond"
- case Duration(x, NANOSECONDS) => x + " nanoseconds"
- }
-
- def compare(other: Duration) =
- if (other.finite_?) {
- val me = toNanos
- val o = other.toNanos
- if (me > o) 1 else if (me < o) -1 else 0
- } else -other.compare(this)
-
- def +(other: Duration) = {
- if (!other.finite_?) {
- other
- } else {
- val nanos = toNanos + other.asInstanceOf[FiniteDuration].toNanos
- fromNanos(nanos)
- }
- }
-
- def -(other: Duration) = {
- if (!other.finite_?) {
- other
- } else {
- val nanos = toNanos - other.asInstanceOf[FiniteDuration].toNanos
- fromNanos(nanos)
- }
- }
-
- def *(factor: Double) = fromNanos(toNanos.toDouble * factor)
-
- def /(factor: Double) = fromNanos(toNanos.toDouble / factor)
-
- def /(other: Duration) = if (other.finite_?) toNanos.toDouble / other.toNanos else 0
-
- def unary_- = Duration(-length, unit)
-
- def finite_? = true
-
- override def equals(other: Any) =
- other.isInstanceOf[FiniteDuration] &&
- toNanos == other.asInstanceOf[FiniteDuration].toNanos
-
- override def hashCode = toNanos.asInstanceOf[Int]
-}
-
-class DurationInt(n: Int) {
- import duration.Classifier
-
- def nanoseconds = Duration(n, NANOSECONDS)
- def nanos = Duration(n, NANOSECONDS)
- def nanosecond = Duration(n, NANOSECONDS)
- def nano = Duration(n, NANOSECONDS)
-
- def microseconds = Duration(n, MICROSECONDS)
- def micros = Duration(n, MICROSECONDS)
- def microsecond = Duration(n, MICROSECONDS)
- def micro = Duration(n, MICROSECONDS)
-
- def milliseconds = Duration(n, MILLISECONDS)
- def millis = Duration(n, MILLISECONDS)
- def millisecond = Duration(n, MILLISECONDS)
- def milli = Duration(n, MILLISECONDS)
-
- def seconds = Duration(n, SECONDS)
- def second = Duration(n, SECONDS)
-
- def minutes = Duration(n, MINUTES)
- def minute = Duration(n, MINUTES)
-
- def hours = Duration(n, HOURS)
- def hour = Duration(n, HOURS)
-
- def days = Duration(n, DAYS)
- def day = Duration(n, DAYS)
-
- def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
- def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
- def nanosecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
- def nano[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
-
- def microseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
- def micros[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
- def microsecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
- def micro[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
-
- def milliseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
- def millis[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
- def millisecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
- def milli[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
-
- def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
- def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
-
- def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MINUTES))
- def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MINUTES))
-
- def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, HOURS))
- def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, HOURS))
-
- def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, DAYS))
- def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, DAYS))
-}
-
-class DurationLong(n: Long) {
- import duration.Classifier
-
- def nanoseconds = Duration(n, NANOSECONDS)
- def nanos = Duration(n, NANOSECONDS)
- def nanosecond = Duration(n, NANOSECONDS)
- def nano = Duration(n, NANOSECONDS)
-
- def microseconds = Duration(n, MICROSECONDS)
- def micros = Duration(n, MICROSECONDS)
- def microsecond = Duration(n, MICROSECONDS)
- def micro = Duration(n, MICROSECONDS)
-
- def milliseconds = Duration(n, MILLISECONDS)
- def millis = Duration(n, MILLISECONDS)
- def millisecond = Duration(n, MILLISECONDS)
- def milli = Duration(n, MILLISECONDS)
-
- def seconds = Duration(n, SECONDS)
- def second = Duration(n, SECONDS)
-
- def minutes = Duration(n, MINUTES)
- def minute = Duration(n, MINUTES)
-
- def hours = Duration(n, HOURS)
- def hour = Duration(n, HOURS)
-
- def days = Duration(n, DAYS)
- def day = Duration(n, DAYS)
-
- def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
- def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
- def nanosecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
- def nano[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
-
- def microseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
- def micros[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
- def microsecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
- def micro[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
-
- def milliseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
- def millis[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
- def millisecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
- def milli[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
-
- def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
- def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
-
- def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MINUTES))
- def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MINUTES))
-
- def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, HOURS))
- def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, HOURS))
-
- def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, DAYS))
- def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, DAYS))
-}
-
-class DurationDouble(d: Double) {
- import duration.Classifier
-
- def nanoseconds = Duration(d, NANOSECONDS)
- def nanos = Duration(d, NANOSECONDS)
- def nanosecond = Duration(d, NANOSECONDS)
- def nano = Duration(d, NANOSECONDS)
-
- def microseconds = Duration(d, MICROSECONDS)
- def micros = Duration(d, MICROSECONDS)
- def microsecond = Duration(d, MICROSECONDS)
- def micro = Duration(d, MICROSECONDS)
-
- def milliseconds = Duration(d, MILLISECONDS)
- def millis = Duration(d, MILLISECONDS)
- def millisecond = Duration(d, MILLISECONDS)
- def milli = Duration(d, MILLISECONDS)
-
- def seconds = Duration(d, SECONDS)
- def second = Duration(d, SECONDS)
-
- def minutes = Duration(d, MINUTES)
- def minute = Duration(d, MINUTES)
-
- def hours = Duration(d, HOURS)
- def hour = Duration(d, HOURS)
-
- def days = Duration(d, DAYS)
- def day = Duration(d, DAYS)
-
- def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
- def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
- def nanosecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
- def nano[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
-
- def microseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS))
- def micros[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS))
- def microsecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS))
- def micro[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS))
-
- def milliseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS))
- def millis[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS))
- def millisecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS))
- def milli[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS))
-
- def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, SECONDS))
- def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, SECONDS))
-
- def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MINUTES))
- def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MINUTES))
-
- def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, HOURS))
- def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, HOURS))
-
- def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, DAYS))
- def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, DAYS))
-}
diff --git a/src/library/scala/concurrent/util/duration/Classifier.scala b/src/library/scala/concurrent/util/duration/Classifier.scala
deleted file mode 100644
index 10faf0a5ce..0000000000
--- a/src/library/scala/concurrent/util/duration/Classifier.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-package scala.concurrent.util.duration
-
-import scala.concurrent.util.{ FiniteDuration }
-
-trait Classifier[C] {
- type R
- def convert(d: FiniteDuration): R
-}
-
diff --git a/src/library/scala/concurrent/util/duration/IntMult.scala b/src/library/scala/concurrent/util/duration/IntMult.scala
deleted file mode 100644
index 94c58fb8c2..0000000000
--- a/src/library/scala/concurrent/util/duration/IntMult.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-package scala.concurrent.util.duration
-
-import scala.concurrent.util.{ Duration }
-
-/*
- * Avoid reflection based invocation by using non-duck type
- */
-protected[duration] class IntMult(i: Int) {
- def *(d: Duration) = d * i
-}
-
-protected[duration] class LongMult(i: Long) {
- def *(d: Duration) = d * i
-}
-
-protected[duration] class DoubleMult(f: Double) {
- def *(d: Duration) = d * f
-}
diff --git a/src/library/scala/concurrent/util/duration/package.scala b/src/library/scala/concurrent/util/duration/package.scala
deleted file mode 100644
index e3cf229c61..0000000000
--- a/src/library/scala/concurrent/util/duration/package.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-package scala.concurrent.util
-
-import java.util.concurrent.TimeUnit
-import language.implicitConversions
-
-package object duration {
-
- object span
- implicit object spanConvert extends Classifier[span.type] {
- type R = FiniteDuration
- def convert(d: FiniteDuration) = d
- }
-
- object fromNow
- implicit object fromNowConvert extends Classifier[fromNow.type] {
- type R = Deadline
- def convert(d: FiniteDuration) = Deadline.now + d
- }
-
- implicit def intToDurationInt(n: Int) = new DurationInt(n)
- implicit def longToDurationLong(n: Long) = new DurationLong(n)
- implicit def doubleToDurationDouble(d: Double) = new DurationDouble(d)
-
- implicit def pairIntToDuration(p: (Int, TimeUnit)) = Duration(p._1, p._2)
- implicit def pairLongToDuration(p: (Long, TimeUnit)) = Duration(p._1, p._2)
- implicit def durationToPair(d: Duration) = (d.length, d.unit)
-
- implicit def intMult(i: Int) = new IntMult(i)
- implicit def longMult(l: Long) = new LongMult(l)
- implicit def doubleMult(f: Double) = new DoubleMult(f)
-}
diff --git a/src/library/scala/deprecated.scala b/src/library/scala/deprecated.scala
index 111affc904..5ad61b811a 100644
--- a/src/library/scala/deprecated.scala
+++ b/src/library/scala/deprecated.scala
@@ -8,7 +8,7 @@
package scala
-import annotation.meta._
+import scala.annotation.meta._
/** An annotation that designates that a definition is deprecated.
* Access to the member then generates a deprecated warning.
@@ -18,4 +18,4 @@ import annotation.meta._
* @since 2.3
*/
@getter @setter @beanGetter @beanSetter
-class deprecated(message: String = "", since: String = "") extends annotation.StaticAnnotation
+class deprecated(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/deprecatedInheritance.scala b/src/library/scala/deprecatedInheritance.scala
new file mode 100644
index 0000000000..eb241d0d04
--- /dev/null
+++ b/src/library/scala/deprecatedInheritance.scala
@@ -0,0 +1,22 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/** An annotation that designates that inheriting from a class is deprecated.
+ *
+ * This is usually done to warn about a non-final class being made final in a future version.
+ * Sub-classing such a class then generates a warning.
+ *
+ * @param message the message to print during compilation if the class was sub-classed
+ * @param since a string identifying the first version in which inheritance was deprecated
+ * @since 2.10
+ * @see [[scala.deprecatedOverriding]]
+ */
+private[scala] // for now, this needs to be generalized to communicate other modifier deltas
+class deprecatedInheritance(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/deprecatedName.scala b/src/library/scala/deprecatedName.scala
index cc36be6775..07b35d1a61 100644
--- a/src/library/scala/deprecatedName.scala
+++ b/src/library/scala/deprecatedName.scala
@@ -8,7 +8,7 @@
package scala
-import annotation.meta._
+import scala.annotation.meta._
/**
* An annotation that designates the name of the parameter to which it is
@@ -29,4 +29,4 @@ import annotation.meta._
* @since 2.8.1
*/
@param
-class deprecatedName(name: Symbol) extends annotation.StaticAnnotation
+class deprecatedName(name: Symbol) extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/deprecatedOverriding.scala b/src/library/scala/deprecatedOverriding.scala
new file mode 100644
index 0000000000..c9fd3af91b
--- /dev/null
+++ b/src/library/scala/deprecatedOverriding.scala
@@ -0,0 +1,21 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/** An annotation that designates that overriding a member is deprecated.
+ *
+ * Overriding such a member in a sub-class then generates a warning.
+ *
+ * @param message the message to print during compilation if the member was overridden
+ * @param since a string identifying the first version in which overriding was deprecated
+ * @since 2.10
+ * @see [[scala.deprecatedInheritance]]
+ */
+private[scala] // for the same reasons as deprecatedInheritance
+class deprecatedOverriding(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/inline.scala b/src/library/scala/inline.scala
index a182fdf9ca..42ae28a347 100644
--- a/src/library/scala/inline.scala
+++ b/src/library/scala/inline.scala
@@ -17,4 +17,4 @@ package scala
* @author Lex Spoon
* @version 1.0, 2007-5-21
*/
-class inline extends annotation.StaticAnnotation
+class inline extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/io/BytePickle.scala b/src/library/scala/io/BytePickle.scala
index 3bb5ea9c2b..a199986141 100644
--- a/src/library/scala/io/BytePickle.scala
+++ b/src/library/scala/io/BytePickle.scala
@@ -19,6 +19,7 @@ import scala.collection.mutable
* @author Philipp Haller
* @version 1.1
*/
+@deprecated("This class will be removed.", "2.10.0")
object BytePickle {
abstract class SPU[T] {
def appP(a: T, state: PicklerState): PicklerState
diff --git a/src/library/scala/io/Codec.scala b/src/library/scala/io/Codec.scala
index 84cac88dcc..6522cd0cd8 100644
--- a/src/library/scala/io/Codec.scala
+++ b/src/library/scala/io/Codec.scala
@@ -10,8 +10,8 @@
package scala.io
import java.nio.charset.{ Charset, CharsetDecoder, CharsetEncoder, CharacterCodingException, CodingErrorAction => Action }
-import annotation.migration
-import language.implicitConversions
+import scala.annotation.migration
+import scala.language.implicitConversions
// Some notes about encodings for use in refining this implementation.
//
@@ -91,7 +91,7 @@ object Codec extends LowPriorityCodecImplicits {
* as an accident, with any anomalies considered "not a bug".
*/
def defaultCharsetCodec = apply(Charset.defaultCharset)
- def fileEncodingCodec = apply(util.Properties.encodingString)
+ def fileEncodingCodec = apply(scala.util.Properties.encodingString)
def default = defaultCharsetCodec
def apply(encoding: String): Codec = new Codec(Charset forName encoding)
diff --git a/src/library/scala/io/Position.scala b/src/library/scala/io/Position.scala
index 5d1e695add..dae478f31a 100644
--- a/src/library/scala/io/Position.scala
+++ b/src/library/scala/io/Position.scala
@@ -32,6 +32,7 @@ package scala.io
* }}}
* @author Burak Emir (translated from work by Matthias Zenger and others)
*/
+@deprecated("This class will be removed.", "2.10.0")
abstract class Position {
/** Definable behavior for overflow conditions.
*/
@@ -53,7 +54,7 @@ abstract class Position {
if (line >= LINE_MASK)
LINE_MASK << COLUMN_BITS
else
- (line << COLUMN_BITS) | math.min(COLUMN_MASK, column)
+ (line << COLUMN_BITS) | scala.math.min(COLUMN_MASK, column)
}
/** Returns the line number of the encoded position. */
diff --git a/src/library/scala/io/UTF8Codec.scala b/src/library/scala/io/UTF8Codec.scala
index df0a36ef21..aa6cccf1d1 100644
--- a/src/library/scala/io/UTF8Codec.scala
+++ b/src/library/scala/io/UTF8Codec.scala
@@ -13,6 +13,7 @@ package scala.io
* @author Martin Odersky
* @version 1.0, 04/10/2004
*/
+@deprecated("This class will be removed.", "2.10.0")
object UTF8Codec {
final val UNI_REPLACEMENT_CHAR: Int = 0x0000FFFD
final val UNI_REPLACEMENT_BYTES = Array[Byte](-17, -65, -67)
diff --git a/src/library/scala/language.scala b/src/library/scala/language.scala
index df2eb0b910..297f344f65 100644
--- a/src/library/scala/language.scala
+++ b/src/library/scala/language.scala
@@ -17,7 +17,7 @@ object language {
* of programs. Furthermore, dynamic member selection often relies on reflection,
* which is not available on all platforms.
*/
- implicit lazy val dynamics: dynamics = ???
+ implicit lazy val dynamics: dynamics = languageFeature.dynamics
/** Only where enabled, postfix operator notation `(expr op)` will be allowed.
*
@@ -26,10 +26,10 @@ object language {
* _Why control it?_ Postfix operators interact poorly with semicolon inference.
* Most programmers avoid them for this reason.
*/
- implicit lazy val postfixOps: postfixOps = ???
+ implicit lazy val postfixOps: postfixOps = languageFeature.postfixOps
/** Only where enabled, accesses to members of structural types that need
- * reflection are supported. Reminder: A structural type is a type of the form
+ * reflection are supported. Reminder: A structural type is a type of the form
* `Parents { Decls }` where `Decls` contains declarations of new members that do
* not override any member in `Parents`. To access one of these members, a
* reflective call is needed.
@@ -42,7 +42,7 @@ object language {
* such as ProGuard have problems dealing with it. Even where reflection is available,
* reflective dispatch can lead to surprising performance degradations.
*/
- implicit lazy val reflectiveCalls: reflectiveCalls = ???
+ implicit lazy val reflectiveCalls: reflectiveCalls = languageFeature.reflectiveCalls
/** Only where enabled, definitions of implicit conversions are allowed. An
* implicit conversion is an implicit value of unary function type `A => B`,
@@ -52,7 +52,7 @@ object language {
* implicit def stringToInt(s: String): Int = s.length
* implicit val conv = (s: String) => s.length
* implicit def listToX(xs: List[T])(implicit f: T => X): X = …
- *
+ *
* implicit values of other types are not affected, and neither are implicit
* classes.
*
@@ -65,7 +65,7 @@ object language {
* most situations using implicit parameters leads to a better design than
* implicit conversions.
*/
- implicit lazy val implicitConversions: implicitConversions = ???
+ implicit lazy val implicitConversions: implicitConversions = languageFeature.implicitConversions
/** Only where this flag is enabled, higher-kinded types can be written.
*
@@ -86,7 +86,7 @@ object language {
* enabling also serves as a warning that code involving higher-kinded types
* might have to be slightly revised in the future.
*/
- implicit lazy val higherKinds: higherKinds = ???
+ implicit lazy val higherKinds: higherKinds = languageFeature.higherKinds
/** Only where enabled, existential types that cannot be expressed as wildcard
* types can be written and are allowed in inferred types of values or return
@@ -95,14 +95,14 @@ object language {
*
* _Why keep the feature?_ Existential types are needed to make sense of Java’s wildcard
* types and raw types and the erased types of run-time values.
- *
+ *
* Why control it? Having complex existential types in a code base usually makes
* application code very brittle, with a tendency to produce type errors with
* obscure error messages. Therefore, going overboard with existential types
* is generally perceived not to be a good idea. Also, complicated existential types
* might be no longer supported in a future simplification of the language.
*/
- implicit lazy val existentials: existentials = ???
+ implicit lazy val existentials: existentials = languageFeature.existentials
object experimental {
@@ -110,7 +110,7 @@ object language {
/** Where enabled, macro definitions are allowed. Macro implementations and
* macro applications are unaffected; they can be used anywhere.
- *
+ *
* _Why introduce the feature?_ Macros promise to make the language more regular,
* replacing ad-hoc language constructs with a general powerful abstraction
* capability that can express them. Macros are also a more disciplined and
@@ -119,6 +119,6 @@ object language {
* _Why control it?_ For their very power, macros can lead to code that is hard
* to debug and understand.
*/
- implicit lazy val macros: macros = ???
+ implicit lazy val macros: macros = languageFeature.experimental.macros
}
}
diff --git a/src/library/scala/languageFeature.scala b/src/library/scala/languageFeature.scala
index c32f1eb724..1f411c412a 100644
--- a/src/library/scala/languageFeature.scala
+++ b/src/library/scala/languageFeature.scala
@@ -1,30 +1,37 @@
package scala
-import annotation.meta
+import scala.annotation.meta
object languageFeature {
@meta.languageFeature("extension of type scala.Dynamic", enableRequired = true)
sealed trait dynamics
+ object dynamics extends dynamics
@meta.languageFeature("postfix operator #", enableRequired = false)
sealed trait postfixOps
+ object postfixOps extends postfixOps
@meta.languageFeature("reflective access of structural type member #", enableRequired = false)
sealed trait reflectiveCalls
+ object reflectiveCalls extends reflectiveCalls
@meta.languageFeature("implicit conversion #", enableRequired = false)
sealed trait implicitConversions
+ object implicitConversions extends implicitConversions
@meta.languageFeature("higher-kinded type", enableRequired = false)
sealed trait higherKinds
+ object higherKinds extends higherKinds
@meta.languageFeature("#, which cannot be expressed by wildcards, ", enableRequired = false)
sealed trait existentials
+ object existentials extends existentials
object experimental {
@meta.languageFeature("macro definition", enableRequired = true)
sealed trait macros
+ object macros extends macros
}
}
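A small check of the effect of the two changes above: the feature values are now real singletons rather than ??? (FeatureValueSketch is a made-up name; the second print reuses the duration DSL from earlier in this patch):

import scala.language.postfixOps

object FeatureValueSketch extends App {
  // Forcing the lazy val no longer throws; it yields the corresponding object.
  val ev: scala.languageFeature.postfixOps = scala.language.postfixOps
  println(ev)

  import scala.concurrent.duration._
  println(5 seconds) // postfix notation, permitted by the import above
}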
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index 74daa510ca..eb73d58d1c 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -12,7 +12,7 @@ package scala.math
import java.{ lang => jl }
import java.math.{ MathContext, BigDecimal => BigDec }
import scala.collection.immutable.NumericRange
-import language.implicitConversions
+import scala.language.implicitConversions
/**
@@ -159,6 +159,7 @@ object BigDecimal {
* @author Stephane Micheloud
* @version 1.0
*/
+@deprecatedInheritance("This class will be made final.", "2.10.0")
class BigDecimal(
val bigDecimal: BigDec,
val mc: MathContext)
@@ -211,7 +212,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
catch { case _: ArithmeticException => false }
}
- protected[math] def isWhole = (this remainder 1) == BigDecimal(0)
+ def isWhole() = (this remainder 1) == BigDecimal(0)
def underlying = bigDecimal
/** Compares this BigDecimal with the specified BigDecimal for equality.
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index 4471e417d9..3eb41053f7 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -9,7 +9,7 @@
package scala.math
import java.math.BigInteger
-import language.implicitConversions
+import scala.language.implicitConversions
/**
* @author Martin Odersky
@@ -114,6 +114,7 @@ object BigInt {
* @author Martin Odersky
* @version 1.0, 15/07/2003
*/
+@deprecatedInheritance("This class will be made final.", "2.10.0")
class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericConversions with Serializable {
/** Returns the hash code for this BigInt. */
override def hashCode(): Int =
@@ -162,7 +163,7 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
}
/** Some implementations of java.math.BigInteger allow huge values with bit length greater than Int.MaxValue .
* The BigInteger.bitLength method returns truncated bit length in this case .
- * This method tests if result of bitLength is valid.
+ * This method tests if result of bitLength is valid.
* This method will become unnecessary if BigInt constructors reject huge BigIntegers.
*/
private def bitLengthOverflow = {
@@ -170,7 +171,7 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
(shifted.signum != 0) && !(shifted equals BigInt.minusOne)
}
- protected[math] def isWhole = true
+ def isWhole() = true
def underlying = bigInteger
/** Compares this BigInt with the specified BigInt for equality.
diff --git a/src/library/scala/math/Fractional.scala b/src/library/scala/math/Fractional.scala
index 0686569c16..98fd325980 100644
--- a/src/library/scala/math/Fractional.scala
+++ b/src/library/scala/math/Fractional.scala
@@ -8,7 +8,7 @@
package scala.math
-import language.implicitConversions
+import scala.language.implicitConversions
/**
* @since 2.8
@@ -28,4 +28,4 @@ object Fractional {
implicit def infixFractionalOps[T](x: T)(implicit num: Fractional[T]): Fractional[T]#FractionalOps = new num.FractionalOps(x)
}
object Implicits extends ExtraImplicits
-} \ No newline at end of file
+}
diff --git a/src/library/scala/math/Integral.scala b/src/library/scala/math/Integral.scala
index 4b4de28228..e5bfc8f687 100644
--- a/src/library/scala/math/Integral.scala
+++ b/src/library/scala/math/Integral.scala
@@ -10,7 +10,7 @@
package scala.math
-import language.implicitConversions
+import scala.language.implicitConversions
/**
* @since 2.8
@@ -36,4 +36,4 @@ object Integral {
implicit def infixIntegralOps[T](x: T)(implicit num: Integral[T]): Integral[T]#IntegralOps = new num.IntegralOps(x)
}
object Implicits extends ExtraImplicits
-} \ No newline at end of file
+}
diff --git a/src/library/scala/math/Numeric.scala b/src/library/scala/math/Numeric.scala
index ee62706e49..4428d9c249 100644
--- a/src/library/scala/math/Numeric.scala
+++ b/src/library/scala/math/Numeric.scala
@@ -8,7 +8,7 @@
package scala.math
-import language.implicitConversions
+import scala.language.implicitConversions
/**
* @since 2.8
diff --git a/src/library/scala/math/Ordered.scala b/src/library/scala/math/Ordered.scala
index a5efa41b80..53b3ae81cb 100644
--- a/src/library/scala/math/Ordered.scala
+++ b/src/library/scala/math/Ordered.scala
@@ -8,7 +8,7 @@
package scala.math
-import language.implicitConversions
+import scala.language.implicitConversions
/** A trait for data that have a single, natural ordering. See
* [[scala.math.Ordering]] before using this trait for
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala
index 9020bb9edd..719f2e12a7 100644
--- a/src/library/scala/math/Ordering.scala
+++ b/src/library/scala/math/Ordering.scala
@@ -6,10 +6,11 @@
** |/ **
\* */
-package scala.math
+package scala
+package math
import java.util.Comparator
-import language.{implicitConversions, higherKinds}
+import scala.language.{implicitConversions, higherKinds}
/** Ordering is a trait whose instances each represent a strategy for sorting
* instances of a type.
@@ -165,7 +166,7 @@ object Ordering extends LowPriorityOrderingImplicits {
/** Not in the standard scope due to the potential for divergence:
* For instance `implicitly[Ordering[Any]]` diverges in its presence.
*/
- implicit def seqDerivedOrdering[CC[X] <: collection.Seq[X], T](implicit ord: Ordering[T]): Ordering[CC[T]] =
+ implicit def seqDerivedOrdering[CC[X] <: scala.collection.Seq[X], T](implicit ord: Ordering[T]): Ordering[CC[T]] =
new Ordering[CC[T]] {
def compare(x: CC[T], y: CC[T]): Int = {
val xe = x.iterator
diff --git a/src/library/scala/math/ScalaNumericConversions.scala b/src/library/scala/math/ScalaNumericConversions.scala
index 2b7ef7405c..edf243e5df 100644
--- a/src/library/scala/math/ScalaNumericConversions.scala
+++ b/src/library/scala/math/ScalaNumericConversions.scala
@@ -13,7 +13,17 @@ import java.{ lang => jl }
/** Conversions which present a consistent conversion interface
* across all the numeric types.
*/
-trait ScalaNumericConversions extends ScalaNumber {
+trait ScalaNumericConversions extends Any {
+ def isWhole(): Boolean
+ def underlying(): Any
+
+ def byteValue(): Byte = intValue().toByte
+ def shortValue(): Short = intValue().toShort
+ def intValue(): Int
+ def longValue(): Long
+ def floatValue(): Float
+ def doubleValue(): Double
+
/** Returns the value of this as a [[scala.Char]]. This may involve
* rounding or truncation.
*/
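Making the trait extend `Any` (with `underlying` loosened to `Any` and the low-level `*Value` accessors pulled in) lets value classes mix it in without inheriting from `ScalaNumber`. A hedged sketch of that use, assuming the trait's remaining conversion members stay concrete as the surrounding file suggests; `Percent` is an invented example type:

    import scala.math.ScalaNumericConversions

    final class Percent(val value: Int) extends AnyVal with ScalaNumericConversions {
      def isWhole()     = true
      def underlying()  = value
      def intValue()    = value
      def longValue()   = value.toLong
      def floatValue()  = value.toFloat
      def doubleValue() = value.toDouble
    }
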
diff --git a/src/library/scala/native.scala b/src/library/scala/native.scala
index 28bb9f70fd..798af3a5da 100644
--- a/src/library/scala/native.scala
+++ b/src/library/scala/native.scala
@@ -20,4 +20,4 @@ package scala
* but it is type checked when present.
*
* @since 2.6 */
-class native extends annotation.StaticAnnotation {}
+class native extends scala.annotation.StaticAnnotation {}
diff --git a/src/library/scala/noinline.scala b/src/library/scala/noinline.scala
index de650ed209..7cb9b3d53c 100644
--- a/src/library/scala/noinline.scala
+++ b/src/library/scala/noinline.scala
@@ -18,4 +18,4 @@ package scala
* @version 1.0, 2007-5-21
* @since 2.5
*/
-class noinline extends annotation.StaticAnnotation
+class noinline extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala
index 7b6df6e31c..5c2067a548 100644
--- a/src/library/scala/reflect/ClassTag.scala
+++ b/src/library/scala/reflect/ClassTag.scala
@@ -1,24 +1,25 @@
-package scala.reflect
+package scala
+package reflect
import java.lang.{ Class => jClass }
-import language.{implicitConversions, existentials}
-import scala.runtime.ScalaRunTime.arrayClass
+import scala.language.{implicitConversions, existentials}
+import scala.runtime.ScalaRunTime.{ arrayClass, arrayElementClass }
-/** A `ClassTag[T]` wraps a runtime class, which can be accessed via the `runtimeClass` method.
+/** A `ClassTag[T]` wraps a runtime class (the erasure) and can create array instances.
*
- * This is useful in itself, but also enables very important use case.
- * Having this knowledge ClassTag can instantiate `Arrays`
- * in those cases where the element type is unknown at compile time.
+ * If an implicit value of type ClassTag[T] is requested, the compiler will create one.
+ * The runtime class (i.e. the erasure, a java.lang.Class on the JVM) of T can be accessed
+ * via the `runtimeClass` field. References to type parameters or abstract type members are
+ * replaced by the concrete types if ClassTags are available for them.
*
- * If an implicit value of type u.ClassTag[T] is required, the compiler will make one up on demand.
- * The implicitly created value contains in its `runtimeClass` field the runtime class that is the result of erasing type T.
- * In that value, any occurrences of type parameters or abstract types U which come themselves with a ClassTag
- * are represented by the type referenced by that tag.
- * If the type T contains unresolved references to type parameters or abstract types, a static error results.
+ * Besides accessing the erasure, a ClassTag knows how to instantiate single- and multi-
+ * dimensional `Arrays` where the element type is unknown at compile time.
*
- * @see [[scala.reflect.base.TypeTags]]
+ * [[scala.reflect.ClassTag]] corresponds to a previous concept of [[scala.reflect.ClassManifest]].
+ *
+ * @see [[scala.reflect.api.TypeTags]]
*/
-@annotation.implicitNotFound(msg = "No ClassTag available for ${T}")
+@scala.annotation.implicitNotFound(msg = "No ClassTag available for ${T}")
trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serializable {
// please, don't add any APIs here, like it was with `newWrappedArray` and `newArrayBuilder`
// class tags, and all tags in general, should be as minimalistic as possible
@@ -54,40 +55,62 @@ trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serial
* `SomeExtractor(...)` is turned into `ct(SomeExtractor(...))` if `T` in `SomeExtractor.unapply(x: T)`
* is uncheckable, but we have an instance of `ClassTag[T]`.
*/
- def unapply(x: Any): Option[T] = if (x != null && runtimeClass.isAssignableFrom(x.getClass)) Some(x.asInstanceOf[T]) else None
+ def unapply(x: Any): Option[T] = unapply_impl(x)
+ def unapply(x: Byte): Option[T] = unapply_impl(x)
+ def unapply(x: Short): Option[T] = unapply_impl(x)
+ def unapply(x: Char): Option[T] = unapply_impl(x)
+ def unapply(x: Int): Option[T] = unapply_impl(x)
+ def unapply(x: Long): Option[T] = unapply_impl(x)
+ def unapply(x: Float): Option[T] = unapply_impl(x)
+ def unapply(x: Double): Option[T] = unapply_impl(x)
+ def unapply(x: Boolean): Option[T] = unapply_impl(x)
+ def unapply(x: Unit): Option[T] = unapply_impl(x)
+
+ private def unapply_impl[U: ClassTag](x: U): Option[T] =
+ if (x == null) None
+ else {
+ val staticClass = classTag[U].runtimeClass
+ val dynamicClass = x.getClass
+ val effectiveClass = if (staticClass.isPrimitive) staticClass else dynamicClass
+ val conforms = runtimeClass.isAssignableFrom(effectiveClass)
+ if (conforms) Some(x.asInstanceOf[T]) else None
+ }
- /** case class accessories */
+ // case class accessories
override def canEqual(x: Any) = x.isInstanceOf[ClassTag[_]]
override def equals(x: Any) = x.isInstanceOf[ClassTag[_]] && this.runtimeClass == x.asInstanceOf[ClassTag[_]].runtimeClass
override def hashCode = scala.runtime.ScalaRunTime.hash(runtimeClass)
- override def toString = "ClassTag[" + runtimeClass + "]"
+ override def toString = {
+ def prettyprint(clazz: jClass[_]): String =
+ if (clazz.isArray) s"Array[${prettyprint(arrayElementClass(clazz))}]" else
+ clazz.getName
+ prettyprint(runtimeClass)
+ }
}
+/**
+ * Class tags corresponding to primitive types and constructor/extractor for ClassTags.
+ */
object ClassTag {
- private val NullTYPE = classOf[scala.runtime.Null$]
private val ObjectTYPE = classOf[java.lang.Object]
+ private val NothingTYPE = classOf[scala.runtime.Nothing$]
+ private val NullTYPE = classOf[scala.runtime.Null$]
- val Byte : ClassTag[scala.Byte] = new ClassTag[scala.Byte]{ def runtimeClass = java.lang.Byte.TYPE; private def readResolve() = ClassTag.Byte }
- val Short : ClassTag[scala.Short] = new ClassTag[scala.Short]{ def runtimeClass = java.lang.Short.TYPE; private def readResolve() = ClassTag.Short }
- val Char : ClassTag[scala.Char] = new ClassTag[scala.Char]{ def runtimeClass = java.lang.Character.TYPE; private def readResolve() = ClassTag.Char }
- val Int : ClassTag[scala.Int] = new ClassTag[scala.Int]{ def runtimeClass = java.lang.Integer.TYPE; private def readResolve() = ClassTag.Int }
- val Long : ClassTag[scala.Long] = new ClassTag[scala.Long]{ def runtimeClass = java.lang.Long.TYPE; private def readResolve() = ClassTag.Long }
- val Float : ClassTag[scala.Float] = new ClassTag[scala.Float]{ def runtimeClass = java.lang.Float.TYPE; private def readResolve() = ClassTag.Float }
- val Double : ClassTag[scala.Double] = new ClassTag[scala.Double]{ def runtimeClass = java.lang.Double.TYPE; private def readResolve() = ClassTag.Double }
- val Boolean : ClassTag[scala.Boolean] = new ClassTag[scala.Boolean]{ def runtimeClass = java.lang.Boolean.TYPE; private def readResolve() = ClassTag.Boolean }
- val Unit : ClassTag[scala.Unit] = new ClassTag[scala.Unit]{ def runtimeClass = java.lang.Void.TYPE; private def readResolve() = ClassTag.Unit }
- val Any : ClassTag[scala.Any] = new ClassTag[scala.Any]{ def runtimeClass = ObjectTYPE; private def readResolve() = ClassTag.Any }
- val Object : ClassTag[java.lang.Object] = new ClassTag[java.lang.Object]{ def runtimeClass = ObjectTYPE; private def readResolve() = ClassTag.Object }
- val AnyVal : ClassTag[scala.AnyVal] = ClassTag.Object.asInstanceOf[ClassTag[scala.AnyVal]]
- val AnyRef : ClassTag[scala.AnyRef] = ClassTag.Object.asInstanceOf[ClassTag[scala.AnyRef]]
- val Nothing : ClassTag[scala.Nothing] = new ClassTag[scala.Nothing]{
- def runtimeClass = throw new Exception("Nothing is a bottom type, therefore its erasure does not return a value")
- private def readResolve() = ClassTag.Nothing
- override def equals(x: Any) = x.isInstanceOf[ClassTag[_]] && (x.asInstanceOf[AnyRef] eq ClassTag.Nothing)
- override def hashCode = System.identityHashCode(this)
- override def toString = "ClassTag[Nothing]"
- }
- val Null : ClassTag[scala.Null] = new ClassTag[scala.Null]{ def runtimeClass = NullTYPE; private def readResolve() = ClassTag.Null }
+ val Byte : ClassTag[scala.Byte] = Manifest.Byte
+ val Short : ClassTag[scala.Short] = Manifest.Short
+ val Char : ClassTag[scala.Char] = Manifest.Char
+ val Int : ClassTag[scala.Int] = Manifest.Int
+ val Long : ClassTag[scala.Long] = Manifest.Long
+ val Float : ClassTag[scala.Float] = Manifest.Float
+ val Double : ClassTag[scala.Double] = Manifest.Double
+ val Boolean : ClassTag[scala.Boolean] = Manifest.Boolean
+ val Unit : ClassTag[scala.Unit] = Manifest.Unit
+ val Any : ClassTag[scala.Any] = Manifest.Any
+ val Object : ClassTag[java.lang.Object] = Manifest.Object
+ val AnyVal : ClassTag[scala.AnyVal] = Manifest.AnyVal
+ val AnyRef : ClassTag[scala.AnyRef] = Manifest.AnyRef
+ val Nothing : ClassTag[scala.Nothing] = Manifest.Nothing
+ val Null : ClassTag[scala.Null] = Manifest.Null
def apply[T](runtimeClass1: jClass[_]): ClassTag[T] =
runtimeClass1 match {
@@ -101,8 +124,10 @@ object ClassTag {
case java.lang.Boolean.TYPE => ClassTag.Boolean.asInstanceOf[ClassTag[T]]
case java.lang.Void.TYPE => ClassTag.Unit.asInstanceOf[ClassTag[T]]
case ObjectTYPE => ClassTag.Object.asInstanceOf[ClassTag[T]]
+ case NothingTYPE => ClassTag.Nothing.asInstanceOf[ClassTag[T]]
+ case NullTYPE => ClassTag.Null.asInstanceOf[ClassTag[T]]
case _ => new ClassTag[T]{ def runtimeClass = runtimeClass1 }
}
def unapply[T](ctag: ClassTag[T]): Option[Class[_]] = Some(ctag.runtimeClass)
-} \ No newline at end of file
+}
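The rewritten scaladoc above describes the two jobs of a ClassTag: carrying the erasure and building arrays, plus the primitive-aware `unapply` overloads that back type tests on abstract types. A small sketch of both under the usual implicit materialization; the helper names are illustrative:

    import scala.reflect.ClassTag

    object ClassTagDemo {
      // The implicit ClassTag[T] lets us allocate an Array[T] despite erasure.
      def pair[T: ClassTag](a: T, b: T): Array[T] = {
        val arr = implicitly[ClassTag[T]].newArray(2)
        arr(0) = a
        arr(1) = b
        arr
      }

      // The ClassTag also backs the `case x: T` type test on the abstract type T.
      def onlyOfType[T: ClassTag](xs: List[Any]): List[T] =
        xs collect { case x: T => x }

      def main(args: Array[String]): Unit = {
        println(pair(1, 2).mkString(","))                    // 1,2
        println(onlyOfType[String](List(1, "a", 2.0, "b")))  // List(a, b)
        println(implicitly[ClassTag[String]])                // java.lang.String (new toString)
      }
    }
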
diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala
index 9347f5b6bb..8b021e0444 100644
--- a/src/library/scala/reflect/Manifest.scala
+++ b/src/library/scala/reflect/Manifest.scala
@@ -38,7 +38,7 @@ import scala.collection.mutable.{ ArrayBuilder, WrappedArray }
}}}
*
*/
-@annotation.implicitNotFound(msg = "No Manifest available for ${T}.")
+@scala.annotation.implicitNotFound(msg = "No Manifest available for ${T}.")
@deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
trait Manifest[T] extends ClassManifest[T] with Equals {
override def typeArguments: List[Manifest[_]] = Nil
@@ -155,28 +155,34 @@ object ManifestFactory {
private def readResolve(): Any = Manifest.Unit
}
- val Any: Manifest[scala.Any] = new PhantomManifest[scala.Any]("Any") {
+ private val ObjectTYPE = classOf[java.lang.Object]
+ private val NothingTYPE = classOf[scala.runtime.Nothing$]
+ private val NullTYPE = classOf[scala.runtime.Null$]
+
+ val Any: Manifest[scala.Any] = new PhantomManifest[scala.Any](ObjectTYPE, "Any") {
override def <:<(that: ClassManifest[_]): Boolean = (that eq this)
private def readResolve(): Any = Manifest.Any
}
- val Object: Manifest[java.lang.Object] = new PhantomManifest[java.lang.Object]("Object") {
+ val Object: Manifest[java.lang.Object] = new PhantomManifest[java.lang.Object](ObjectTYPE, "Object") {
override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any)
private def readResolve(): Any = Manifest.Object
}
- val AnyVal: Manifest[scala.AnyVal] = new PhantomManifest[scala.AnyVal]("AnyVal") {
+ val AnyRef: Manifest[scala.AnyRef] = Object.asInstanceOf[Manifest[scala.AnyRef]]
+
+ val AnyVal: Manifest[scala.AnyVal] = new PhantomManifest[scala.AnyVal](ObjectTYPE, "AnyVal") {
override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any)
private def readResolve(): Any = Manifest.AnyVal
}
- val Null: Manifest[scala.Null] = new PhantomManifest[scala.Null]("Null") {
+ val Null: Manifest[scala.Null] = new PhantomManifest[scala.Null](NullTYPE, "Null") {
override def <:<(that: ClassManifest[_]): Boolean =
(that ne null) && (that ne Nothing) && !(that <:< AnyVal)
private def readResolve(): Any = Manifest.Null
}
- val Nothing: Manifest[scala.Nothing] = new PhantomManifest[scala.Nothing]("Nothing") {
+ val Nothing: Manifest[scala.Nothing] = new PhantomManifest[scala.Nothing](NothingTYPE, "Nothing") {
override def <:<(that: ClassManifest[_]): Boolean = (that ne null)
private def readResolve(): Any = Manifest.Nothing
}
@@ -211,7 +217,8 @@ object ManifestFactory {
def classType[T](prefix: Manifest[_], clazz: Predef.Class[_], args: Manifest[_]*): Manifest[T] =
new ClassTypeManifest[T](Some(prefix), clazz, args.toList)
- private abstract class PhantomManifest[T](override val toString: String) extends ClassTypeManifest[T](None, classOf[java.lang.Object], Nil) {
+ private abstract class PhantomManifest[T](_runtimeClass: Predef.Class[_],
+ override val toString: String) extends ClassTypeManifest[T](None, _runtimeClass, Nil) {
override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
override val hashCode = System.identityHashCode(this)
}
@@ -257,4 +264,4 @@ object ManifestFactory {
def runtimeClass = parents.head.erasure
override def toString = parents.mkString(" with ")
}
-} \ No newline at end of file
+}
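The observable effect of threading `_runtimeClass` through `PhantomManifest` is that the phantom manifests now report a concrete erasure instead of being hard-wired to `java.lang.Object`, which in turn lets the ClassTag diff above reuse them for `Nothing` and `Null`. A quick check, using only names that appear in this patch:

    import scala.reflect.Manifest

    object PhantomManifestDemo {
      def main(args: Array[String]): Unit = {
        println(Manifest.Any.runtimeClass)      // class java.lang.Object
        println(Manifest.AnyVal.runtimeClass)   // class java.lang.Object
        println(Manifest.Null.runtimeClass)     // class scala.runtime.Null$
        println(Manifest.Nothing.runtimeClass)  // class scala.runtime.Nothing$
      }
    }
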
diff --git a/src/library/scala/reflect/NameTransformer.scala b/src/library/scala/reflect/NameTransformer.scala
index ff56e20d52..77cbd20321 100755
--- a/src/library/scala/reflect/NameTransformer.scala
+++ b/src/library/scala/reflect/NameTransformer.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.reflect
+package scala
+package reflect
/** Provides functions to encode and decode Scala symbolic names.
* Also provides some constants.
diff --git a/src/library/scala/reflect/ScalaLongSignature.java b/src/library/scala/reflect/ScalaLongSignature.java
index fce58207f1..5b6d78f446 100644
--- a/src/library/scala/reflect/ScalaLongSignature.java
+++ b/src/library/scala/reflect/ScalaLongSignature.java
@@ -5,9 +5,6 @@ import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
-/**
- * [Martin to Eugene++] Todo: Move to somewhere else?
-*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface ScalaLongSignature {
diff --git a/src/library/scala/reflect/ScalaSignature.java b/src/library/scala/reflect/ScalaSignature.java
index f0df99fe79..a8af554d2b 100644
--- a/src/library/scala/reflect/ScalaSignature.java
+++ b/src/library/scala/reflect/ScalaSignature.java
@@ -5,8 +5,6 @@ import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
-/** * [Martin to Eugene++] Todo: Move to somewhere else?
- */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface ScalaSignature {
diff --git a/src/library/scala/reflect/base/AnnotationInfos.scala b/src/library/scala/reflect/base/AnnotationInfos.scala
deleted file mode 100644
index f03644deef..0000000000
--- a/src/library/scala/reflect/base/AnnotationInfos.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-package scala.reflect
-package base
-
-trait AnnotationInfos { self: Universe =>
-
- type AnnotationInfo >: Null <: AnyRef
- implicit val AnnotationInfoTag: ClassTag[AnnotationInfo]
- val AnnotationInfo: AnnotationInfoExtractor
-
- abstract class AnnotationInfoExtractor {
- def apply(atp: Type, args: List[Tree], assocs: List[(Name, ClassfileAnnotArg)]): AnnotationInfo
- def unapply(info: AnnotationInfo): Option[(Type, List[Tree], List[(Name, ClassfileAnnotArg)])]
- }
-
- type ClassfileAnnotArg >: Null <: AnyRef
- implicit val ClassfileAnnotArgTag: ClassTag[ClassfileAnnotArg]
-
- type LiteralAnnotArg >: Null <: AnyRef with ClassfileAnnotArg
- implicit val LiteralAnnotArgTag: ClassTag[LiteralAnnotArg]
- val LiteralAnnotArg: LiteralAnnotArgExtractor
-
- abstract class LiteralAnnotArgExtractor {
- def apply(const: Constant): LiteralAnnotArg
- def unapply(arg: LiteralAnnotArg): Option[Constant]
- }
-
- type ArrayAnnotArg >: Null <: AnyRef with ClassfileAnnotArg
- implicit val ArrayAnnotArgTag: ClassTag[ArrayAnnotArg]
- val ArrayAnnotArg: ArrayAnnotArgExtractor
-
- abstract class ArrayAnnotArgExtractor {
- def apply(args: Array[ClassfileAnnotArg]): ArrayAnnotArg
- def unapply(arg: ArrayAnnotArg): Option[Array[ClassfileAnnotArg]]
- }
-
- type NestedAnnotArg >: Null <: AnyRef with ClassfileAnnotArg
- implicit val NestedAnnotArgTag: ClassTag[NestedAnnotArg]
- val NestedAnnotArg: NestedAnnotArgExtractor
-
- abstract class NestedAnnotArgExtractor {
- def apply(annInfo: AnnotationInfo): NestedAnnotArg
- def unapply(arg: NestedAnnotArg): Option[AnnotationInfo]
- }
-} \ No newline at end of file
diff --git a/src/library/scala/reflect/base/Attachments.scala b/src/library/scala/reflect/base/Attachments.scala
deleted file mode 100644
index 43e870fc4f..0000000000
--- a/src/library/scala/reflect/base/Attachments.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-package scala.reflect
-package base
-
-/** Attachments is a generalisation of Position.
- * Typically it stores a Position of a tree, but this can be extended to encompass arbitrary payloads.
- *
- * Attachments have to carry positions, because we don't want to introduce even a single additional field in Tree
- * imposing an unnecessary memory tax because of something that will not be used in most cases.
- */
-abstract class Attachments { self =>
-
- type Pos >: Null
-
- /** Gets the underlying position */
- def pos: Pos
-
- /** Creates a copy of this attachment with its position updated */
- def withPos(newPos: Pos): Attachments { type Pos = self.Pos }
-
- /** Gets the underlying payload */
- def all: Set[Any] = Set.empty
-
- def get[T: ClassTag]: Option[T] =
- (all find (_.getClass == classTag[T].runtimeClass)).asInstanceOf[Option[T]]
-
- /** Creates a copy of this attachment with its payload updated */
- def add(attachment: Any): Attachments { type Pos = self.Pos } =
- new NonemptyAttachments(this.pos, all + attachment)
-
- def remove[T: ClassTag]: Attachments { type Pos = self.Pos } = {
- val newAll = all filterNot (_.getClass == classTag[T].runtimeClass)
- if (newAll.isEmpty) pos.asInstanceOf[Attachments { type Pos = self.Pos }]
- else new NonemptyAttachments(this.pos, newAll)
- }
-
- private class NonemptyAttachments(override val pos: Pos, override val all: Set[Any]) extends Attachments {
- type Pos = self.Pos
- def withPos(newPos: Pos) = new NonemptyAttachments(newPos, all)
- }
-}
-
-
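The deleted Attachments class above is essentially an immutable, ClassTag-keyed payload set riding along with a position. A standalone sketch of that pattern, detached from the reflection cake; `Payloads` is an invented name:

    import scala.reflect.{ClassTag, classTag}

    // An immutable payload set queried and pruned by runtime class via ClassTag,
    // mirroring the get/add/remove trio of the removed Attachments.
    final class Payloads private (val all: Set[Any]) {
      def get[T: ClassTag]: Option[T] =
        all.find(_.getClass == classTag[T].runtimeClass).asInstanceOf[Option[T]]
      def add(attachment: Any): Payloads = new Payloads(all + attachment)
      def remove[T: ClassTag]: Payloads =
        new Payloads(all.filterNot(_.getClass == classTag[T].runtimeClass))
    }
    object Payloads { val empty = new Payloads(Set.empty) }
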
diff --git a/src/library/scala/reflect/base/Base.scala b/src/library/scala/reflect/base/Base.scala
deleted file mode 100644
index 714fd365ef..0000000000
--- a/src/library/scala/reflect/base/Base.scala
+++ /dev/null
@@ -1,778 +0,0 @@
-package scala.reflect
-package base
-
-import java.io.PrintWriter
-import scala.annotation.switch
-import scala.ref.WeakReference
-import collection.mutable
-
-class Base extends Universe { self =>
-
- private var nextId = 0
-
- abstract class Symbol(val name: Name, val flags: FlagSet) extends SymbolBase {
- val id = { nextId += 1; nextId }
- def owner: Symbol
- def fullName: String =
- if (isEffectiveRoot || owner.isEffectiveRoot) name.toString else owner.fullName + "." + name
- private def isEffectiveRoot =
- this == NoSymbol || this == rootMirror.RootClass || this == rootMirror.EmptyPackageClass
-
- def newTermSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TermSymbol =
- new TermSymbol(this, name, flags)
-
- def newModuleAndClassSymbol(name: Name, pos: Position = NoPosition, flags: FlagSet = NoFlags): (ModuleSymbol, ClassSymbol) = {
- val c = new ModuleClassSymbol(this, name.toTypeName, flags)
- val m = new ModuleSymbol(this, name.toTermName, flags, c)
- (m, c)
- }
-
- def newMethodSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): MethodSymbol
- = new MethodSymbol(this, name, flags)
-
- def newTypeSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TypeSymbol =
- new TypeSymbol(this, name, flags)
-
- def newClassSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): ClassSymbol =
- new ClassSymbol(this, name, flags)
-
- def newFreeTermSymbol(name: TermName, info: Type, value: => Any, flags: FlagSet = NoFlags, origin: String = null) =
- new FreeTermSymbol(this, name, flags)
-
- def newFreeTypeSymbol(name: TypeName, info: Type, value: => Any, flags: FlagSet = NoFlags, origin: String = null) =
- new FreeTypeSymbol(this, name, flags)
-
- private def kindString: String =
- if (isModule) "module"
- else if (isClass) "class"
- else if (isFreeType) "free type"
- else if (isType) "type"
- else if (isMethod) "method"
- else if (isFreeTerm) "free term"
- else if (isTerm) "value"
- else "symbol"
- override def toString() = s"$kindString $name"
- }
- implicit val SymbolTag = ClassTag[Symbol](classOf[Symbol])
-
- class TermSymbol(val owner: Symbol, override val name: TermName, flags: FlagSet)
- extends Symbol(name, flags) with TermSymbolBase
- implicit val TermSymbolTag = ClassTag[TermSymbol](classOf[TermSymbol])
-
- class TypeSymbol(val owner: Symbol, override val name: TypeName, flags: FlagSet)
- extends Symbol(name, flags) with TypeSymbolBase {
- override def toTypeConstructor = TypeRef(ThisType(owner), this, Nil)
- override def toType = TypeRef(ThisType(owner), this, Nil)
- override def toTypeIn(site: Type) = TypeRef(ThisType(owner), this, Nil)
- }
- implicit val TypeSymbolTag = ClassTag[TypeSymbol](classOf[TypeSymbol])
-
- class MethodSymbol(owner: Symbol, name: TermName, flags: FlagSet)
- extends TermSymbol(owner, name, flags) with MethodSymbolBase
- implicit val MethodSymbolTag = ClassTag[MethodSymbol](classOf[MethodSymbol])
-
- class ModuleSymbol(owner: Symbol, name: TermName, flags: FlagSet, override val moduleClass: Symbol)
- extends TermSymbol(owner, name, flags) with ModuleSymbolBase
- implicit val ModuleSymbolTag = ClassTag[ModuleSymbol](classOf[ModuleSymbol])
-
- class ClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet)
- extends TypeSymbol(owner, name, flags) with ClassSymbolBase
- class ModuleClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet)
- extends ClassSymbol(owner, name, flags) { override def isModuleClass = true }
- implicit val ClassSymbolTag = ClassTag[ClassSymbol](classOf[ClassSymbol])
-
- class FreeTermSymbol(owner: Symbol, name: TermName, flags: FlagSet)
- extends TermSymbol(owner, name, flags) with FreeTermSymbolBase
- implicit val FreeTermSymbolTag = ClassTag[FreeTermSymbol](classOf[FreeTermSymbol])
-
- class FreeTypeSymbol(owner: Symbol, name: TypeName, flags: FlagSet)
- extends TypeSymbol(owner, name, flags) with FreeTypeSymbolBase
- implicit val FreeTypeSymbolTag = ClassTag[FreeTypeSymbol](classOf[FreeTypeSymbol])
-
-
- object NoSymbol extends Symbol(nme.NO_NAME, NoFlags) {
- override def owner = throw new UnsupportedOperationException("NoSymbol.owner")
- }
-
- // todo. write a decent toString that doesn't crash on recursive types
- class Type extends TypeBase {
- def termSymbol: Symbol = NoSymbol
- def typeSymbol: Symbol = NoSymbol
- }
- implicit val TypeTagg = ClassTag[Type](classOf[Type])
-
- val NoType = new Type { override def toString = "NoType" }
- val NoPrefix = new Type { override def toString = "NoPrefix" }
-
- class SingletonType extends Type
- implicit val SingletonTypeTag = ClassTag[SingletonType](classOf[SingletonType])
-
- case class ThisType(sym: Symbol) extends SingletonType { override val typeSymbol = sym }
- object ThisType extends ThisTypeExtractor
- implicit val ThisTypeTag = ClassTag[ThisType](classOf[ThisType])
-
- case class SingleType(pre: Type, sym: Symbol) extends SingletonType { override val termSymbol = sym }
- object SingleType extends SingleTypeExtractor
- implicit val SingleTypeTag = ClassTag[SingleType](classOf[SingleType])
-
- case class SuperType(thistpe: Type, supertpe: Type) extends SingletonType
- object SuperType extends SuperTypeExtractor
- implicit val SuperTypeTag = ClassTag[SuperType](classOf[SuperType])
-
- case class ConstantType(value: Constant) extends SingletonType
- object ConstantType extends ConstantTypeExtractor
- implicit val ConstantTypeTag = ClassTag[ConstantType](classOf[ConstantType])
-
- case class TypeRef(pre: Type, sym: Symbol, args: List[Type]) extends Type { override val typeSymbol = sym }
- object TypeRef extends TypeRefExtractor
- implicit val TypeRefTag = ClassTag[TypeRef](classOf[TypeRef])
-
- abstract class CompoundType extends Type
- implicit val CompoundTypeTag = ClassTag[CompoundType](classOf[CompoundType])
-
- case class RefinedType(parents: List[Type], decls: Scope) extends CompoundType
- object RefinedType extends RefinedTypeExtractor {
- def apply(parents: List[Type], decls: Scope, clazz: Symbol): RefinedType =
- RefinedType(parents, decls)
- }
- implicit val RefinedTypeTag = ClassTag[RefinedType](classOf[RefinedType])
-
- case class ClassInfoType(parents: List[Type], decls: Scope, override val typeSymbol: Symbol) extends CompoundType
- object ClassInfoType extends ClassInfoTypeExtractor
- implicit val ClassInfoTypeTag = ClassTag[ClassInfoType](classOf[ClassInfoType])
-
- case class MethodType(params: List[Symbol], resultType: Type) extends Type
- object MethodType extends MethodTypeExtractor
- implicit val MethodTypeTag = ClassTag[MethodType](classOf[MethodType])
-
- case class NullaryMethodType(resultType: Type) extends Type
- object NullaryMethodType extends NullaryMethodTypeExtractor
- implicit val NullaryMethodTypeTag = ClassTag[NullaryMethodType](classOf[NullaryMethodType])
-
- case class PolyType(typeParams: List[Symbol], resultType: Type) extends Type
- object PolyType extends PolyTypeExtractor
- implicit val PolyTypeTag = ClassTag[PolyType](classOf[PolyType])
-
- case class ExistentialType(quantified: List[Symbol], underlying: Type) extends Type { override def typeSymbol = underlying.typeSymbol }
- object ExistentialType extends ExistentialTypeExtractor
- implicit val ExistentialTypeTag = ClassTag[ExistentialType](classOf[ExistentialType])
-
- case class AnnotatedType(annotations: List[AnnotationInfo], underlying: Type, selfsym: Symbol) extends Type { override def typeSymbol = underlying.typeSymbol }
- object AnnotatedType extends AnnotatedTypeExtractor
- implicit val AnnotatedTypeTag = ClassTag[AnnotatedType](classOf[AnnotatedType])
-
- case class TypeBounds(lo: Type, hi: Type) extends Type
- object TypeBounds extends TypeBoundsExtractor
- implicit val TypeBoundsTag = ClassTag[TypeBounds](classOf[TypeBounds])
-
- val WildcardType = new Type
-
- case class BoundedWildcardType(bounds: TypeBounds) extends Type
- object BoundedWildcardType extends BoundedWildcardTypeExtractor
- implicit val BoundedWildcardTypeTag = ClassTag[BoundedWildcardType](classOf[BoundedWildcardType])
-
- class Scope(elems: Iterable[Symbol]) extends ScopeBase with MemberScopeBase {
- def iterator = elems.iterator
- def sorted = elems.toList
- }
- type MemberScope = Scope
- implicit val ScopeTag = ClassTag[Scope](classOf[Scope])
- implicit val MemberScopeTag = ClassTag[MemberScope](classOf[MemberScope])
-
- def newScope: Scope = newScopeWith()
- def newNestedScope(outer: Scope): Scope = newScope
- def newScopeWith(elems: Symbol*): Scope = new Scope(elems)
-
- abstract class Name(str: String) extends NameBase {
- override def toString = str
- }
- implicit val NameTag = ClassTag[Name](classOf[Name])
-
- class TermName(str: String) extends Name(str) {
- def isTermName = true
- def isTypeName = false
- def toTermName = this
- def toTypeName = new TypeName(str)
- }
- implicit val TermNameTag = ClassTag[TermName](classOf[TermName])
-
- class TypeName(str: String) extends Name(str) {
- def isTermName = false
- def isTypeName = true
- def toTermName = new TermName(str)
- def toTypeName = this
- }
- implicit val TypeNameTag = ClassTag[TypeName](classOf[TypeName])
-
- def newTermName(str: String) = new TermName(str)
- def newTypeName(str: String) = new TypeName(str)
-
- object nme extends TermNamesBase {
- type NameType = TermName
- val WILDCARD = newTermName("_")
- val CONSTRUCTOR = newTermName("<init>")
- val ROOTPKG = newTermName("_root_")
- val EMPTY = newTermName("")
- val EMPTY_PACKAGE_NAME = newTermName("<empty>")
- val ROOT = newTermName("<root>")
- val NO_NAME = newTermName("<none>")
- }
-
- object tpnme extends TypeNamesBase {
- type NameType = TypeName
- val WILDCARD = nme.WILDCARD.toTypeName
- val EMPTY = nme.EMPTY.toTypeName
- val WILDCARD_STAR = newTypeName("_*")
- val EMPTY_PACKAGE_NAME = nme.EMPTY_PACKAGE_NAME.toTypeName
- val ROOT = nme.ROOT.toTypeName
- }
-
- type FlagSet = Long
- val NoFlags = 0L
- implicit val FlagSetTag = ClassTag[FlagSet](classOf[FlagSet])
-
- class Modifiers(override val flags: FlagSet,
- override val privateWithin: Name,
- override val annotations: List[Tree]) extends ModifiersBase {
- def hasFlag(flags: FlagSet) = (this.flags & flags) != 0
- }
-
- implicit val ModifiersTag = ClassTag[Modifiers](classOf[Modifiers])
-
- object Modifiers extends ModifiersCreator {
- def apply(flags: Long,
- privateWithin: Name,
- annotations: List[Tree]) = new Modifiers(flags, privateWithin, annotations)
- }
-
- case class Constant(value: Any)
- object Constant extends ConstantExtractor
- implicit val ConstantTag = ClassTag[Constant](classOf[Constant])
-
- case class AnnotationInfo(atp: Type, args: List[Tree], assocs: List[(Name, ClassfileAnnotArg)])
- object AnnotationInfo extends AnnotationInfoExtractor
- implicit val AnnotationInfoTag = ClassTag[AnnotationInfo](classOf[AnnotationInfo])
-
- abstract class ClassfileAnnotArg
- implicit val ClassfileAnnotArgTag = ClassTag[ClassfileAnnotArg](classOf[ClassfileAnnotArg])
-
- case class LiteralAnnotArg(const: Constant) extends ClassfileAnnotArg
- object LiteralAnnotArg extends LiteralAnnotArgExtractor
- implicit val LiteralAnnotArgTag = ClassTag[LiteralAnnotArg](classOf[LiteralAnnotArg])
-
- case class ArrayAnnotArg(args: Array[ClassfileAnnotArg]) extends ClassfileAnnotArg
- object ArrayAnnotArg extends ArrayAnnotArgExtractor
- implicit val ArrayAnnotArgTag = ClassTag[ArrayAnnotArg](classOf[ArrayAnnotArg])
-
- case class NestedAnnotArg(annInfo: AnnotationInfo) extends ClassfileAnnotArg
- object NestedAnnotArg extends NestedAnnotArgExtractor
- implicit val NestedAnnotArgTag = ClassTag[NestedAnnotArg](classOf[NestedAnnotArg])
-
- class Position extends Attachments {
- override type Pos = Position
- def pos = this
- def withPos(newPos: Position) = newPos
- def isRange = false
- def focus = this
- }
- implicit val PositionTag = ClassTag[Position](classOf[Position])
-
- val NoPosition = new Position
-
- def atPos[T <: Tree](pos: Position)(tree: T): T = tree
-
- private val generated = new mutable.HashMap[String, WeakReference[Symbol]]
-
- private def cached(name: String)(symExpr: => Symbol): Symbol =
- generated get name match {
- case Some(WeakReference(sym)) =>
- sym
- case _ =>
- val sym = symExpr
- generated(name) = WeakReference(sym)
- sym
- }
-
- object build extends BuildBase {
- def selectType(owner: Symbol, name: String): TypeSymbol = {
- val clazz = new ClassSymbol(owner, newTypeName(name), NoFlags)
- cached(clazz.fullName)(clazz).asType
- }
-
- def selectTerm(owner: Symbol, name: String): TermSymbol = {
- val valu = new MethodSymbol(owner, newTermName(name), NoFlags)
- cached(valu.fullName)(valu).asTerm
- }
-
- def selectOverloadedMethod(owner: Symbol, name: String, index: Int): MethodSymbol =
- selectTerm(owner, name).asMethod
-
- def newNestedSymbol(owner: Symbol, name: Name, pos: Position, flags: Long, isClass: Boolean): Symbol =
- if (name.isTypeName)
- if (isClass) new ClassSymbol(owner, name.toTypeName, flags)
- else new TypeSymbol(owner, name.toTypeName, flags)
- else new TermSymbol(owner, name.toTermName, flags)
-
- def newFreeTerm(name: String, info: Type, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
- new FreeTermSymbol(rootMirror.RootClass, newTermName(name), flags)
-
- def newFreeType(name: String, info: Type, value: => Any, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
- new FreeTypeSymbol(rootMirror.RootClass, newTypeName(name), flags)
-
- def newFreeExistential(name: String, info: Type, value: => Any, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
- new FreeTypeSymbol(rootMirror.RootClass, newTypeName(name), flags)
-
- def setTypeSignature[S <: Symbol](sym: S, tpe: Type): S = sym
-
- def setAnnotations[S <: Symbol](sym: S, annots: List[AnnotationInfo]): S = sym
-
- def flagsFromBits(bits: Long): FlagSet = bits
-
- object emptyValDef extends ValDef(NoMods, nme.WILDCARD, TypeTree(NoType), EmptyTree) {
- override def isEmpty = true
- }
-
- def This(sym: Symbol): Tree = self.This(sym.name.toTypeName)
-
- def Select(qualifier: Tree, sym: Symbol): Select = self.Select(qualifier, sym.name)
-
- def Ident(sym: Symbol): Ident = self.Ident(sym.name)
-
- def TypeTree(tp: Type): TypeTree = self.TypeTree()
-
- def thisPrefix(sym: Symbol): Type = SingleType(NoPrefix, sym)
-
- def setType[T <: Tree](tree: T, tpe: Type): T = tree
-
- def setSymbol[T <: Tree](tree: T, sym: Symbol): T = tree
- }
-
- import build._
-
- class Mirror extends MirrorOf[self.type] {
- val universe: self.type = self
-
- lazy val RootClass = new ClassSymbol(NoSymbol, tpnme.ROOT, NoFlags) { override def isModuleClass = true }
- lazy val RootPackage = new ModuleSymbol(NoSymbol, nme.ROOT, NoFlags, RootClass)
- lazy val EmptyPackageClass = new ClassSymbol(RootClass, tpnme.EMPTY_PACKAGE_NAME, NoFlags) { override def isModuleClass = true }
- lazy val EmptyPackage = new ModuleSymbol(RootClass, nme.EMPTY_PACKAGE_NAME, NoFlags, EmptyPackageClass)
-
- def staticClass(fullName: String): ClassSymbol =
- mkStatic[ClassSymbol](fullName)
-
- def staticModule(fullName: String): ModuleSymbol =
- mkStatic[ModuleSymbol](fullName)
-
- def staticPackage(fullName: String): ModuleSymbol =
- staticModule(fullName) // this toy universe doesn't care about the distinction between packages and modules
-
- private def mkStatic[S <: Symbol : ClassTag](fullName: String): S =
- cached(fullName) {
- val point = fullName lastIndexOf '.'
- val owner =
- if (point > 0) staticModule(fullName take point).moduleClass
- else rootMirror.RootClass
- val name = fullName drop point + 1
- val symtag = implicitly[ClassTag[S]]
- if (symtag == ClassSymbolTag) new ClassSymbol(owner, newTypeName(name), NoFlags)
- else owner.newModuleAndClassSymbol(newTermName(name))._1
- }.asInstanceOf[S]
- }
-
- lazy val rootMirror = new Mirror
-
- import rootMirror._
-
- object definitions extends DefinitionsBase {
- lazy val ScalaPackage = staticModule("scala")
- lazy val ScalaPackageClass = ScalaPackage.moduleClass.asClass
-
- lazy val AnyClass = staticClass("scala.Any")
- lazy val AnyValClass = staticClass("scala.Any")
- lazy val ObjectClass = staticClass("java.lang.Object")
- lazy val AnyRefClass = ObjectClass
-
- lazy val NullClass = staticClass("scala.Null")
- lazy val NothingClass = staticClass("scala.Nothing")
-
- lazy val UnitClass = staticClass("scala.Unit")
- lazy val ByteClass = staticClass("scala.Byte")
- lazy val ShortClass = staticClass("scala.Short")
- lazy val CharClass = staticClass("scala.Char")
- lazy val IntClass = staticClass("scala.Int")
- lazy val LongClass = staticClass("scala.Long")
- lazy val FloatClass = staticClass("scala.Float")
- lazy val DoubleClass = staticClass("scala.Double")
- lazy val BooleanClass = staticClass("scala.Boolean")
-
- lazy val StringClass = staticClass("java.lang.String")
- lazy val ClassClass = staticClass("java.lang.Class")
- lazy val ArrayClass = staticClass("scala.Array")
- lazy val ListClass = staticClass("scala.List")
-
- lazy val PredefModule = staticModule("scala.Predef")
- }
-
- import definitions._
-
- private def thisModuleType(fullName: String): Type = ThisType(staticModule(fullName).moduleClass)
- private lazy val ScalaPrefix = thisModuleType("scala")
- private lazy val JavaLangPrefix = thisModuleType("java.lang")
-
- lazy val ByteTpe = TypeRef(ScalaPrefix, ByteClass, Nil)
- lazy val ShortTpe = TypeRef(ScalaPrefix, ShortClass, Nil)
- lazy val CharTpe = TypeRef(ScalaPrefix, CharClass, Nil)
- lazy val IntTpe = TypeRef(ScalaPrefix, IntClass, Nil)
- lazy val LongTpe = TypeRef(ScalaPrefix, LongClass, Nil)
- lazy val FloatTpe = TypeRef(ScalaPrefix, FloatClass, Nil)
- lazy val DoubleTpe = TypeRef(ScalaPrefix, DoubleClass, Nil)
- lazy val BooleanTpe = TypeRef(ScalaPrefix, BooleanClass, Nil)
- lazy val UnitTpe = TypeRef(ScalaPrefix, UnitClass, Nil)
- lazy val AnyTpe = TypeRef(ScalaPrefix, AnyClass, Nil)
- lazy val AnyValTpe = TypeRef(ScalaPrefix, AnyValClass, Nil)
- lazy val NothingTpe = TypeRef(ScalaPrefix, NothingClass, Nil)
- lazy val NullTpe = TypeRef(ScalaPrefix, NullClass, Nil)
- lazy val ObjectTpe = TypeRef(JavaLangPrefix, ObjectClass, Nil)
- lazy val AnyRefTpe = ObjectTpe
-
- private var nodeCount = 0 // not synchronized
-
- abstract class Tree extends TreeBase with Product {
- def isDef: Boolean = false
- def isEmpty: Boolean = false
-
- /** The canonical way to test if a Tree represents a term.
- */
- def isTerm: Boolean = this match {
- case _: TermTree => true
- case Bind(name, _) => name.isTermName
- case Select(_, name) => name.isTermName
- case Ident(name) => name.isTermName
- case Annotated(_, arg) => arg.isTerm
- case _ => false
- }
-
- /** The canonical way to test if a Tree represents a type.
- */
- def isType: Boolean = this match {
- case _: TypTree => true
- case Bind(name, _) => name.isTypeName
- case Select(_, name) => name.isTypeName
- case Ident(name) => name.isTypeName
- case Annotated(_, arg) => arg.isType
- case _ => false
- }
- }
-
- def treeToString(tree: Tree) = s"<tree ${tree.getClass}>"
-
- def treeType(tree: Tree) = NoType
-
- trait TermTree extends Tree
-
- trait TypTree extends Tree
-
- trait SymTree extends Tree
-
- trait NameTree extends Tree {
- def name: Name
- }
-
- trait RefTree extends SymTree with NameTree {
- def qualifier: Tree // empty for Idents
- def name: Name
- }
-
- abstract class DefTree extends SymTree with NameTree {
- def name: Name
- override def isDef = true
- }
-
- case object EmptyTree extends TermTree {
- override def isEmpty = true
- }
-
- abstract class MemberDef extends DefTree {
- def mods: Modifiers
- }
-
- case class PackageDef(pid: RefTree, stats: List[Tree])
- extends MemberDef {
- def name = pid.name
- def mods = NoMods
- }
- object PackageDef extends PackageDefExtractor
-
- abstract class ImplDef extends MemberDef {
- def impl: Template
- }
-
- case class ClassDef(mods: Modifiers, name: TypeName, tparams: List[TypeDef], impl: Template)
- extends ImplDef
- object ClassDef extends ClassDefExtractor
-
- case class ModuleDef(mods: Modifiers, name: TermName, impl: Template)
- extends ImplDef
- object ModuleDef extends ModuleDefExtractor
-
- abstract class ValOrDefDef extends MemberDef {
- val name: Name
- val tpt: Tree
- val rhs: Tree
- }
-
- case class ValDef(mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree) extends ValOrDefDef
- object ValDef extends ValDefExtractor
-
- case class DefDef(mods: Modifiers, name: Name, tparams: List[TypeDef],
- vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree) extends ValOrDefDef
- object DefDef extends DefDefExtractor
-
- case class TypeDef(mods: Modifiers, name: TypeName, tparams: List[TypeDef], rhs: Tree)
- extends MemberDef
- object TypeDef extends TypeDefExtractor
-
- case class LabelDef(name: TermName, params: List[Ident], rhs: Tree)
- extends DefTree with TermTree
- object LabelDef extends LabelDefExtractor
-
- case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int)
- object ImportSelector extends ImportSelectorExtractor
-
- case class Import(expr: Tree, selectors: List[ImportSelector])
- extends SymTree
- object Import extends ImportExtractor
-
- case class Template(parents: List[Tree], self: ValDef, body: List[Tree])
- extends SymTree
- object Template extends TemplateExtractor
-
- case class Block(stats: List[Tree], expr: Tree)
- extends TermTree
- object Block extends BlockExtractor
-
- case class CaseDef(pat: Tree, guard: Tree, body: Tree)
- extends Tree
- object CaseDef extends CaseDefExtractor
-
- case class Alternative(trees: List[Tree])
- extends TermTree
- object Alternative extends AlternativeExtractor
-
- case class Star(elem: Tree)
- extends TermTree
- object Star extends StarExtractor
-
- case class Bind(name: Name, body: Tree)
- extends DefTree
- object Bind extends BindExtractor
-
- case class UnApply(fun: Tree, args: List[Tree])
- extends TermTree
- object UnApply extends UnApplyExtractor
-
- case class ArrayValue(elemtpt: Tree, elems: List[Tree])
- extends TermTree
- object ArrayValue extends ArrayValueExtractor
-
- case class Function(vparams: List[ValDef], body: Tree)
- extends TermTree with SymTree
- object Function extends FunctionExtractor
-
- case class Assign(lhs: Tree, rhs: Tree)
- extends TermTree
- object Assign extends AssignExtractor
-
- case class AssignOrNamedArg(lhs: Tree, rhs: Tree)
- extends TermTree
- object AssignOrNamedArg extends AssignOrNamedArgExtractor
-
- case class If(cond: Tree, thenp: Tree, elsep: Tree)
- extends TermTree
- object If extends IfExtractor
-
- case class Match(selector: Tree, cases: List[CaseDef])
- extends TermTree
- object Match extends MatchExtractor
-
- case class Return(expr: Tree)
- extends TermTree with SymTree
- object Return extends ReturnExtractor
-
- case class Try(block: Tree, catches: List[CaseDef], finalizer: Tree)
- extends TermTree
- object Try extends TryExtractor
-
- case class Throw(expr: Tree)
- extends TermTree
- object Throw extends ThrowExtractor
-
- case class New(tpt: Tree) extends TermTree
- object New extends NewExtractor
-
- case class Typed(expr: Tree, tpt: Tree)
- extends TermTree
- object Typed extends TypedExtractor
-
- abstract class GenericApply extends TermTree {
- val fun: Tree
- val args: List[Tree]
- }
-
- case class TypeApply(fun: Tree, args: List[Tree])
- extends GenericApply
- object TypeApply extends TypeApplyExtractor
-
- case class Apply(fun: Tree, args: List[Tree])
- extends GenericApply
- object Apply extends ApplyExtractor
-
- case class ApplyDynamic(qual: Tree, args: List[Tree])
- extends TermTree with SymTree
- object ApplyDynamic extends ApplyDynamicExtractor
-
- case class Super(qual: Tree, mix: TypeName) extends TermTree
- object Super extends SuperExtractor
-
- case class This(qual: TypeName)
- extends TermTree with SymTree
- object This extends ThisExtractor
-
- case class Select(qualifier: Tree, name: Name)
- extends RefTree
- object Select extends SelectExtractor
-
- case class Ident(name: Name) extends RefTree {
- def qualifier: Tree = EmptyTree
- }
- object Ident extends IdentExtractor
-
- case class ReferenceToBoxed(ident: Ident) extends TermTree
- object ReferenceToBoxed extends ReferenceToBoxedExtractor
-
- case class Literal(value: Constant)
- extends TermTree {
- assert(value ne null)
- }
- object Literal extends LiteralExtractor
-
- case class Annotated(annot: Tree, arg: Tree) extends Tree
- object Annotated extends AnnotatedExtractor
-
- case class SingletonTypeTree(ref: Tree)
- extends TypTree
- object SingletonTypeTree extends SingletonTypeTreeExtractor
-
- case class SelectFromTypeTree(qualifier: Tree, name: TypeName)
- extends TypTree with RefTree
- object SelectFromTypeTree extends SelectFromTypeTreeExtractor
-
- case class CompoundTypeTree(templ: Template)
- extends TypTree
- object CompoundTypeTree extends CompoundTypeTreeExtractor
-
- case class AppliedTypeTree(tpt: Tree, args: List[Tree])
- extends TypTree
- object AppliedTypeTree extends AppliedTypeTreeExtractor
-
- case class TypeBoundsTree(lo: Tree, hi: Tree)
- extends TypTree
- object TypeBoundsTree extends TypeBoundsTreeExtractor
-
- case class ExistentialTypeTree(tpt: Tree, whereClauses: List[Tree])
- extends TypTree
- object ExistentialTypeTree extends ExistentialTypeTreeExtractor
-
- case class TypeTree() extends TypTree {
- val original: Tree = null
- override def isEmpty = true
- }
- object TypeTree extends TypeTreeExtractor
-
- implicit val TreeTag = ClassTag[Tree](classOf[Tree])
- implicit val TermTreeTag = ClassTag[TermTree](classOf[TermTree])
- implicit val TypTreeTag = ClassTag[TypTree](classOf[TypTree])
- implicit val SymTreeTag = ClassTag[SymTree](classOf[SymTree])
- implicit val NameTreeTag = ClassTag[NameTree](classOf[NameTree])
- implicit val RefTreeTag = ClassTag[RefTree](classOf[RefTree])
- implicit val DefTreeTag = ClassTag[DefTree](classOf[DefTree])
- implicit val MemberDefTag = ClassTag[MemberDef](classOf[MemberDef])
- implicit val PackageDefTag = ClassTag[PackageDef](classOf[PackageDef])
- implicit val ImplDefTag = ClassTag[ImplDef](classOf[ImplDef])
- implicit val ClassDefTag = ClassTag[ClassDef](classOf[ClassDef])
- implicit val ModuleDefTag = ClassTag[ModuleDef](classOf[ModuleDef])
- implicit val ValOrDefDefTag = ClassTag[ValOrDefDef](classOf[ValOrDefDef])
- implicit val ValDefTag = ClassTag[ValDef](classOf[ValDef])
- implicit val DefDefTag = ClassTag[DefDef](classOf[DefDef])
- implicit val TypeDefTag = ClassTag[TypeDef](classOf[TypeDef])
- implicit val LabelDefTag = ClassTag[LabelDef](classOf[LabelDef])
- implicit val ImportSelectorTag = ClassTag[ImportSelector](classOf[ImportSelector])
- implicit val ImportTag = ClassTag[Import](classOf[Import])
- implicit val TemplateTag = ClassTag[Template](classOf[Template])
- implicit val BlockTag = ClassTag[Block](classOf[Block])
- implicit val CaseDefTag = ClassTag[CaseDef](classOf[CaseDef])
- implicit val AlternativeTag = ClassTag[Alternative](classOf[Alternative])
- implicit val StarTag = ClassTag[Star](classOf[Star])
- implicit val BindTag = ClassTag[Bind](classOf[Bind])
- implicit val UnApplyTag = ClassTag[UnApply](classOf[UnApply])
- implicit val ArrayValueTag = ClassTag[ArrayValue](classOf[ArrayValue])
- implicit val FunctionTag = ClassTag[Function](classOf[Function])
- implicit val AssignTag = ClassTag[Assign](classOf[Assign])
- implicit val AssignOrNamedArgTag = ClassTag[AssignOrNamedArg](classOf[AssignOrNamedArg])
- implicit val IfTag = ClassTag[If](classOf[If])
- implicit val MatchTag = ClassTag[Match](classOf[Match])
- implicit val ReturnTag = ClassTag[Return](classOf[Return])
- implicit val TryTag = ClassTag[Try](classOf[Try])
- implicit val ThrowTag = ClassTag[Throw](classOf[Throw])
- implicit val NewTag = ClassTag[New](classOf[New])
- implicit val TypedTag = ClassTag[Typed](classOf[Typed])
- implicit val GenericApplyTag = ClassTag[GenericApply](classOf[GenericApply])
- implicit val TypeApplyTag = ClassTag[TypeApply](classOf[TypeApply])
- implicit val ApplyTag = ClassTag[Apply](classOf[Apply])
- implicit val ApplyDynamicTag = ClassTag[ApplyDynamic](classOf[ApplyDynamic])
- implicit val SuperTag = ClassTag[Super](classOf[Super])
- implicit val ThisTag = ClassTag[This](classOf[This])
- implicit val SelectTag = ClassTag[Select](classOf[Select])
- implicit val IdentTag = ClassTag[Ident](classOf[Ident])
- implicit val ReferenceToBoxedTag = ClassTag[ReferenceToBoxed](classOf[ReferenceToBoxed])
- implicit val LiteralTag = ClassTag[Literal](classOf[Literal])
- implicit val AnnotatedTag = ClassTag[Annotated](classOf[Annotated])
- implicit val SingletonTypeTreeTag = ClassTag[SingletonTypeTree](classOf[SingletonTypeTree])
- implicit val SelectFromTypeTreeTag = ClassTag[SelectFromTypeTree](classOf[SelectFromTypeTree])
- implicit val CompoundTypeTreeTag = ClassTag[CompoundTypeTree](classOf[CompoundTypeTree])
- implicit val AppliedTypeTreeTag = ClassTag[AppliedTypeTree](classOf[AppliedTypeTree])
- implicit val TypeBoundsTreeTag = ClassTag[TypeBoundsTree](classOf[TypeBoundsTree])
- implicit val ExistentialTypeTreeTag = ClassTag[ExistentialTypeTree](classOf[ExistentialTypeTree])
- implicit val TypeTreeTag = ClassTag[TypeTree](classOf[TypeTree])
-
- // [Eugene++] to be removed after SI-5863 is fixed
- def ClassDef(sym: Symbol, impl: Template): ClassDef = ???
- def ModuleDef(sym: Symbol, impl: Template): ModuleDef = ???
- def ValDef(sym: Symbol, rhs: Tree): ValDef = ???
- def ValDef(sym: Symbol): ValDef = ???
- def DefDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef = ???
- def DefDef(sym: Symbol, vparamss: List[List[ValDef]], rhs: Tree): DefDef = ???
- def DefDef(sym: Symbol, mods: Modifiers, rhs: Tree): DefDef = ???
- def DefDef(sym: Symbol, rhs: Tree): DefDef = ???
- def DefDef(sym: Symbol, rhs: List[List[Symbol]] => Tree): DefDef = ???
- def TypeDef(sym: Symbol, rhs: Tree): TypeDef = ???
- def TypeDef(sym: Symbol): TypeDef = ???
- def LabelDef(sym: Symbol, params: List[Symbol], rhs: Tree): LabelDef = ???
- def CaseDef(pat: Tree, body: Tree): CaseDef = ???
- def Bind(sym: Symbol, body: Tree): Bind = ???
- def Try(body: Tree, cases: (Tree, Tree)*): Try = ???
- def Throw(tpe: Type, args: Tree*): Throw = ???
- def Apply(sym: Symbol, args: Tree*): Tree = ???
- def New(tpt: Tree, argss: List[List[Tree]]): Tree = ???
- def New(tpe: Type, args: Tree*): Tree = ???
- def New(sym: Symbol, args: Tree*): Tree = ???
- def ApplyConstructor(tpt: Tree, args: List[Tree]): Tree = ???
- def Super(sym: Symbol, mix: TypeName): Tree = ???
- def This(sym: Symbol): Tree = ???
- def Select(qualifier: Tree, name: String): Select = ???
- def Select(qualifier: Tree, sym: Symbol): Select = ???
- def Ident(name: String): Ident = ???
- def Ident(sym: Symbol): Ident = ???
- def Block(stats: Tree*): Block = ???
- def TypeTree(tp: Type): TypeTree = ???
-}
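One reusable idea buried in the deleted toy universe is its `cached` helper: symbols are interned by full name through WeakReferences, so reclaimed symbols can simply be rebuilt on demand. A self-contained sketch of that idiom; `WeakInterner` is an invented name:

    import scala.collection.mutable
    import scala.ref.WeakReference

    class WeakInterner[K, V <: AnyRef] {
      private val cache = new mutable.HashMap[K, WeakReference[V]]

      // Return the cached value for `key` if it is still reachable,
      // otherwise build it with `mk` and remember it weakly.
      def cached(key: K)(mk: => V): V = cache.get(key) match {
        case Some(WeakReference(v)) => v
        case _ =>
          val v = mk
          cache(key) = WeakReference(v)
          v
      }
    }
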
diff --git a/src/library/scala/reflect/base/BuildUtils.scala b/src/library/scala/reflect/base/BuildUtils.scala
deleted file mode 100644
index eaba0ec2b7..0000000000
--- a/src/library/scala/reflect/base/BuildUtils.scala
+++ /dev/null
@@ -1,90 +0,0 @@
-package scala.reflect
-package base
-
-trait BuildUtils { self: Universe =>
-
- val build: BuildBase
-
- abstract class BuildBase {
- /** Selects type symbol with given simple name `name` from the defined members of `owner`.
- */
- def selectType(owner: Symbol, name: String): TypeSymbol
-
- /** Selects term symbol with given name and type from the defined members of prefix type
- */
- def selectTerm(owner: Symbol, name: String): TermSymbol
-
- /** Selects overloaded method symbol with given name and index
- */
- def selectOverloadedMethod(owner: Symbol, name: String, index: Int): MethodSymbol
-
- /** A fresh symbol with given name `name`, position `pos` and flags `flags` that has
- * the current symbol as its owner.
- */
- def newNestedSymbol(owner: Symbol, name: Name, pos: Position, flags: FlagSet, isClass: Boolean): Symbol
-
- /** Create a fresh free term symbol.
- * @param name the name of the free variable
- * @param info the type signature of the free variable
- * @param value the value of the free variable at runtime
- * @param flags (optional) flags of the free variable
- * @param origin debug information that tells where this symbol comes from
- */
- def newFreeTerm(name: String, info: Type, value: => Any, flags: FlagSet = NoFlags, origin: String = null): FreeTermSymbol
-
- /** Create a fresh free non-existential type symbol.
- * @param name the name of the free variable
- * @param info the type signature of the free variable
- * @param value a type tag that captures the value of the free variable
- * is completely phantom, since the captured type cannot be propagated to the runtime
- * if it could be, we wouldn't be creating a free type to begin with
- * the only usage for it is preserving the captured symbol for compile-time analysis
- * @param flags (optional) flags of the free variable
- * @param origin debug information that tells where this symbol comes from
- */
- def newFreeType(name: String, info: Type, value: => Any, flags: FlagSet = NoFlags, origin: String = null): FreeTypeSymbol
-
- /** Create a fresh free existential type symbol.
- * @param name the name of the free variable
- * @param info the type signature of the free variable
- * @param value a type tag that captures the value of the free variable
- * is completely phantom, since the captured type cannot be propagated to the runtime
- * if it could be, we wouldn't be creating a free type to begin with
- * the only usage for it is preserving the captured symbol for compile-time analysis
- * @param flags (optional) flags of the free variable
- * @param origin (optional) debug information that tells where this symbol comes from
- * [Martin to Eugene: why needed?]
- */
- def newFreeExistential(name: String, info: Type, value: => Any, flags: FlagSet = NoFlags, origin: String = null): FreeTypeSymbol
-
- /** Set symbol's type signature to given type.
- * @return the symbol itself
- */
- def setTypeSignature[S <: Symbol](sym: S, tpe: Type): S
-
- /** Set symbol's annotations to given annotations `annots`.
- */
- def setAnnotations[S <: Symbol](sym: S, annots: List[AnnotationInfo]): S
-
- def flagsFromBits(bits: Long): FlagSet
-
- // [Eugene++ to Martin] these are necessary for reification
- // on a second thought, I added them to BuildUtils instead of base
-
- def emptyValDef: ValDef
-
- def This(sym: Symbol): Tree
-
- def Select(qualifier: Tree, sym: Symbol): Select
-
- def Ident(sym: Symbol): Ident
-
- def TypeTree(tp: Type): TypeTree
-
- def thisPrefix(sym: Symbol): Type
-
- def setType[T <: Tree](tree: T, tpe: Type): T
-
- def setSymbol[T <: Tree](tree: T, sym: Symbol): T
- }
-}
diff --git a/src/library/scala/reflect/base/Constants.scala b/src/library/scala/reflect/base/Constants.scala
deleted file mode 100644
index ba12b02e92..0000000000
--- a/src/library/scala/reflect/base/Constants.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2012 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.reflect
-package base
-
-trait Constants {
- self: Universe =>
-
- type Constant >: Null <: AnyRef
- implicit val ConstantTag: ClassTag[Constant]
- val Constant: ConstantExtractor
-
- abstract class ConstantExtractor {
- def apply(value: Any): Constant
- def unapply(arg: Constant): Option[Any]
- }
-}
diff --git a/src/library/scala/reflect/base/Exprs.scala b/src/library/scala/reflect/base/Exprs.scala
deleted file mode 100644
index 47af4f3a9d..0000000000
--- a/src/library/scala/reflect/base/Exprs.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2012 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.reflect
-package base
-
-trait Exprs { self: Universe =>
-
- /** An expression tree tagged with its type */
- trait Expr[+T] extends Equals with Serializable {
- val mirror: Mirror
- def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # Expr[T]
-
- def tree: Tree
- def staticType: Type
- def actualType: Type
-
- def splice: T
- val value: T
-
- /** case class accessories */
- override def canEqual(x: Any) = x.isInstanceOf[Expr[_]]
- override def equals(x: Any) = x.isInstanceOf[Expr[_]] && this.mirror == x.asInstanceOf[Expr[_]].mirror && this.tree == x.asInstanceOf[Expr[_]].tree
- override def hashCode = mirror.hashCode * 31 + tree.hashCode
- override def toString = "Expr["+staticType+"]("+tree+")"
- }
-
- object Expr {
- def apply[T: AbsTypeTag](mirror: MirrorOf[self.type], treec: TreeCreator): Expr[T] = new ExprImpl[T](mirror.asInstanceOf[Mirror], treec)
- def unapply[T](expr: Expr[T]): Option[Tree] = Some(expr.tree)
- }
-
- private class ExprImpl[+T: AbsTypeTag](val mirror: Mirror, val treec: TreeCreator) extends Expr[T] {
- def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # Expr[T] = {
- val otherMirror1 = otherMirror.asInstanceOf[MirrorOf[otherMirror.universe.type]]
- val tag1 = (implicitly[AbsTypeTag[T]] in otherMirror).asInstanceOf[otherMirror.universe.AbsTypeTag[T]]
- otherMirror.universe.Expr[T](otherMirror1, treec)(tag1)
- }
-
- lazy val tree: Tree = treec[Exprs.this.type](mirror)
- // [Eugene++] this is important
- // !!! remove when we have improved type inference for singletons
- // search for .type] to find other instances
- lazy val staticType: Type = implicitly[AbsTypeTag[T]].tpe
- def actualType: Type = treeType(tree)
-
- def splice: T = throw new UnsupportedOperationException("""
- |the function you're calling has not been spliced by the compiler.
- |this means there is a cross-stage evaluation involved, and it needs to be invoked explicitly.
- |if you're sure this is not an oversight, add scala-compiler.jar to the classpath,
- |import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.""".trim.stripMargin)
- lazy val value: T = throw new UnsupportedOperationException("""
- |the value you're calling is only meant to be used in cross-stage path-dependent types.
- |if you want to splice the underlying expression, use `<your expr>.splice`.
- |if you want to get a value of the underlying expression, add scala-compiler.jar to the classpath,
- |import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.""".trim.stripMargin)
-
- private def writeReplace(): AnyRef = new SerializedExpr(treec, implicitly[AbsTypeTag[T]].in(scala.reflect.basis.rootMirror))
- }
-}
-
-private[scala] class SerializedExpr(var treec: TreeCreator, var tag: scala.reflect.basis.AbsTypeTag[_]) extends Serializable {
- private def writeObject(out: java.io.ObjectOutputStream): Unit = {
- out.writeObject(treec)
- out.writeObject(tag)
- }
-
- private def readObject(in: java.io.ObjectInputStream): Unit = {
- treec = in.readObject().asInstanceOf[TreeCreator]
- tag = in.readObject().asInstanceOf[scala.reflect.basis.AbsTypeTag[_]]
- }
-
- private def readResolve(): AnyRef = {
- import scala.reflect.basis._
- Expr(rootMirror, treec)(tag)
- }
-} \ No newline at end of file
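Illustrative usage of the Expr API above: a minimal sketch, assuming the runtime universe's reify and showRaw are available.

    import scala.reflect.runtime.universe._

    // reify packages an expression's AST together with its type
    val e: Expr[List[Int]] = reify { List(1, 2, 3) }
    println(e.staticType)     // List[Int]
    println(showRaw(e.tree))  // the underlying Apply(...) tree
    // e.splice is only meaningful inside another reify block; called
    // directly it throws the UnsupportedOperationException shown above.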
diff --git a/src/library/scala/reflect/base/FlagSets.scala b/src/library/scala/reflect/base/FlagSets.scala
deleted file mode 100644
index 43de9970c0..0000000000
--- a/src/library/scala/reflect/base/FlagSets.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package scala.reflect
-package base
-
-trait FlagSets { self: Universe =>
-
- /** An abstract type representing sets of flags that apply to definition trees and symbols */
- type FlagSet
-
- /** A tag that preserves the identity of the `FlagSet` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val FlagSetTag: ClassTag[FlagSet]
-
- /** The empty set of flags */
- val NoFlags: FlagSet
-
- /** The base API all flag bearers support */
- trait HasFlagsBase {
- def flags: FlagSet
- def hasFlag(flags: FlagSet): Boolean
- }
-}
-
diff --git a/src/library/scala/reflect/base/Mirrors.scala b/src/library/scala/reflect/base/Mirrors.scala
deleted file mode 100644
index 50866ef000..0000000000
--- a/src/library/scala/reflect/base/Mirrors.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-package scala.reflect
-package base
-
-trait Mirrors {
- self: Universe =>
-
- /** The type of mirrors of this universe */
- type Mirror >: Null <: MirrorOf[self.type]
-
- /** The root mirror of this universe */
- val rootMirror: Mirror
-}
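Illustrative usage of rootMirror: a minimal sketch, assuming the runtime universe's mirrors support staticClass as in the full reflection API of this era.

    import scala.reflect.runtime.universe._

    // Look up a class symbol through the root mirror.
    val listSym = rootMirror.staticClass("scala.collection.immutable.List")
    println(listSym.fullName)  // scala.collection.immutable.List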
diff --git a/src/library/scala/reflect/base/Names.scala b/src/library/scala/reflect/base/Names.scala
deleted file mode 100644
index 532b780e7e..0000000000
--- a/src/library/scala/reflect/base/Names.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-package scala.reflect
-package base
-
-import language.implicitConversions
-
-/** A trait that manages names.
- * A name is a string in one of two name universes: terms and types.
- * The same string can be a name in both universes.
- * Two names are equal if they represent the same string and they are
- * members of the same universe.
- *
- * Names are interned. That is, for two names `name1` and `name2`,
- * `name1 == name2` implies `name1 eq name2`.
- */
-trait Names {
- /** Intentionally no implicit from String => Name: only the conversions to TermName and TypeName below are provided. */
- implicit def stringToTermName(s: String): TermName = newTermName(s)
- implicit def stringToTypeName(s: String): TypeName = newTypeName(s)
-
- /** The abstract type of names */
- type Name >: Null <: NameBase
- implicit val NameTag: ClassTag[Name]
-
- /** The abstract type of names representing types */
- type TypeName >: Null <: Name
- implicit val TypeNameTag: ClassTag[TypeName]
-
- /** The abstract type of names representing terms */
- type TermName >: Null <: Name
- implicit val TermNameTag: ClassTag[TermName]
-
- /** The base API that all names support */
- abstract class NameBase {
- /** Is this name a term name? */
- def isTermName: Boolean
-
- /** Is this name a type name? */
- def isTypeName: Boolean
-
- /** Returns a term name that represents the same string as this name */
- def toTermName: TermName
-
- /** Returns a type name that represents the same string as this name */
- def toTypeName: TypeName
- }
-
- /** Creates a new term name.
- */
- def newTermName(s: String): TermName
-
- /** Creates a new type name.
- */
- def newTypeName(s: String): TypeName
-
- def EmptyTermName: TermName = newTermName("")
-
- def EmptyTypeName: TypeName = EmptyTermName.toTypeName
-}
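Illustrative usage of the name API above: a minimal sketch against scala.reflect.runtime.universe (any concrete Universe would do).

    import scala.reflect.runtime.universe._

    // Term and type names live in separate namespaces but may share a string.
    val term: TermName = newTermName("List")
    val tpe: TypeName  = newTypeName("List")
    println(term.isTermName)         // true
    println(term.toTypeName == tpe)  // true: same string, same (type) namespace
    val viaImplicit: TermName = "List"  // stringToTermName kicks in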
diff --git a/src/library/scala/reflect/base/Positions.scala b/src/library/scala/reflect/base/Positions.scala
deleted file mode 100644
index cefeb51c9a..0000000000
--- a/src/library/scala/reflect/base/Positions.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-package scala.reflect
-package base
-
-trait Positions {
- self: Universe =>
-
- /** The abstract type of tree positions */
- type Position >: Null <: Attachments { type Pos = Position }
-
- /** A tag that preserves the identity of the `Position` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val PositionTag: ClassTag[Position]
-
- /** A special "missing" position */
- val NoPosition: Position
-
- /** Assigns a given position to all position-less nodes of a given AST.
- */
- def atPos[T <: Tree](pos: Position)(tree: T): T
- // [Eugene++] why do we have this in base?
-}
diff --git a/src/library/scala/reflect/base/StandardDefinitions.scala b/src/library/scala/reflect/base/StandardDefinitions.scala
deleted file mode 100644
index fe32fdb4c2..0000000000
--- a/src/library/scala/reflect/base/StandardDefinitions.scala
+++ /dev/null
@@ -1,74 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2012 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.reflect
-package base
-
-// [Eugene++] not sure whether we need this in the top level of the universe
-trait StandardTypes {
- self: Universe =>
-
- val ByteTpe: Type
- val ShortTpe: Type
- val CharTpe: Type
- val IntTpe: Type
- val LongTpe: Type
- val FloatTpe: Type
- val DoubleTpe: Type
- val BooleanTpe: Type
- val UnitTpe: Type
-
- val AnyTpe: Type
- val AnyValTpe: Type
- val AnyRefTpe: Type
- val ObjectTpe: Type
-
- val NothingTpe: Type
- val NullTpe: Type
-}
-
-trait StandardDefinitions extends StandardTypes {
- self: Universe =>
-
- val definitions: DefinitionsBase
-
- // [Eugene] todo. shortcut to these fields if possible when generating tags
- // todo. also shortcut to StandardTypes, of course
- trait DefinitionsBase {
- // packages
- def ScalaPackageClass: ClassSymbol
- def ScalaPackage: ModuleSymbol
-
- // top types
- def AnyClass : ClassSymbol
- def AnyValClass: ClassSymbol
- def ObjectClass: ClassSymbol
- def AnyRefClass: TypeSymbol
-
- // bottom types
- def NullClass : ClassSymbol
- def NothingClass: ClassSymbol
-
- // the scala value classes
- def UnitClass : ClassSymbol
- def ByteClass : ClassSymbol
- def ShortClass : ClassSymbol
- def CharClass : ClassSymbol
- def IntClass : ClassSymbol
- def LongClass : ClassSymbol
- def FloatClass : ClassSymbol
- def DoubleClass : ClassSymbol
- def BooleanClass: ClassSymbol
-
- // some special classes
- def StringClass : ClassSymbol
- def ClassClass : ClassSymbol
- def ArrayClass : ClassSymbol
- def ListClass : ClassSymbol // [Eugene] I'd say List has earned its right to be here
-
- // the Predef object
- def PredefModule: ModuleSymbol
- }
-}
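Illustrative usage of the standard definitions and types above: a minimal sketch, assuming the runtime universe mixes in these traits.

    import scala.reflect.runtime.universe._

    println(definitions.IntClass.fullName)      // scala.Int
    println(definitions.PredefModule.isModule)  // true
    println(IntTpe)                             // Int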
diff --git a/src/library/scala/reflect/base/StandardNames.scala b/src/library/scala/reflect/base/StandardNames.scala
deleted file mode 100644
index 3e569cd523..0000000000
--- a/src/library/scala/reflect/base/StandardNames.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2012 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.reflect
-package base
-
-// Q: I have a pretty name. Where do I put it - into base.StandardNames or into api.StandardNames?
-// A: Is it necessary to construct trees (like EMPTY or WILDCARD_STAR)? If yes, then it goes to base.StandardNames.
-// Is it necessary to perform reflection (like ERROR or LOCAL_SUFFIX_STRING)? If yes, then it goes to api.StandardNames.
-// Otherwise it goes nowhere - reflection API should stay minimalistic.
-
-trait StandardNames {
- self: Universe =>
-
- val nme: TermNamesBase
- val tpnme: TypeNamesBase
-
- trait NamesBase {
- type NameType >: Null <: Name
- val WILDCARD: NameType
- }
-
- trait TermNamesBase extends NamesBase {
- val CONSTRUCTOR: TermName
- val ROOTPKG: TermName
- }
-
- trait TypeNamesBase extends NamesBase {
- val EMPTY: NameType
- val WILDCARD_STAR: NameType
- }
-}
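Illustrative usage of the standard names above: a minimal sketch, assuming the tree constructors from Trees (deleted further below) and the runtime universe.

    import scala.reflect.runtime.universe._

    // Hand-rolled tree for `new C()`: the constructor is selected via nme.CONSTRUCTOR.
    val newC = Apply(Select(New(Ident(newTypeName("C"))), nme.CONSTRUCTOR), Nil)
    println(showRaw(newC))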
diff --git a/src/library/scala/reflect/base/Symbols.scala b/src/library/scala/reflect/base/Symbols.scala
deleted file mode 100644
index fe857c540f..0000000000
--- a/src/library/scala/reflect/base/Symbols.scala
+++ /dev/null
@@ -1,292 +0,0 @@
-package scala.reflect
-package base
-
-trait Symbols { self: Universe =>
-
- // [Eugene++ to Martin] why is Symbol >: Null, whereas all other symbol types are not nullable?
- // same question goes for Types
-
- /** The abstract type of symbols representing declarations */
- type Symbol >: Null <: SymbolBase
-
- /** A tag that preserves the identity of the `Symbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val SymbolTag: ClassTag[Symbol]
-
- /** The abstract type of type symbols representing type, class, and trait declarations,
- * as well as type parameters
- */
- type TypeSymbol >: Null <: Symbol with TypeSymbolBase
-
- /** A tag that preserves the identity of the `TypeSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TypeSymbolTag: ClassTag[TypeSymbol]
-
- /** The abstract type of term symbols representing val, var, def, and object declarations as
- * well as packages and value parameters.
- */
- type TermSymbol >: Null <: Symbol with TermSymbolBase
-
- /** A tag that preserves the identity of the `TermSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TermSymbolTag: ClassTag[TermSymbol]
-
- /** The abstract type of method symbols representing def declarations */
- type MethodSymbol >: Null <: TermSymbol with MethodSymbolBase
-
- /** A tag that preserves the identity of the `MethodSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val MethodSymbolTag: ClassTag[MethodSymbol]
-
- /** The abstract type of module symbols representing object declarations */
- type ModuleSymbol >: Null <: TermSymbol with ModuleSymbolBase
-
- /** A tag that preserves the identity of the `ModuleSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ModuleSymbolTag: ClassTag[ModuleSymbol]
-
- /** The abstract type of class symbols representing class and trait definitions */
- type ClassSymbol >: Null <: TypeSymbol with ClassSymbolBase
-
- /** A tag that preserves the identity of the `ClassSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ClassSymbolTag: ClassTag[ClassSymbol]
-
- /** The abstract type of free terms introduced by reification */
- type FreeTermSymbol >: Null <: TermSymbol with FreeTermSymbolBase
-
- /** A tag that preserves the identity of the `FreeTermSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val FreeTermSymbolTag: ClassTag[FreeTermSymbol]
-
- /** The abstract type of free types introduced by reification */
- type FreeTypeSymbol >: Null <: TypeSymbol with FreeTypeSymbolBase
-
- /** A tag that preserves the identity of the `FreeTypeSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val FreeTypeSymbolTag: ClassTag[FreeTypeSymbol]
-
- /** A special "missing" symbol */
- val NoSymbol: Symbol
-
- /** The base API that all symbols support */
- trait SymbolBase { this: Symbol =>
-
- /** The owner of this symbol. This is the symbol
- * that directly contains the current symbol's definition.
- * The `NoSymbol` symbol does not have an owner, and calling this method
- * on one causes an internal error.
- * The owner of the Scala root class [[scala.reflect.api.mirror.RootClass]]
- * and the Scala root object [[scala.reflect.api.mirror.RootPackage]] is `NoSymbol`.
- * Every other symbol has a chain of owners that ends in
- * [[scala.reflect.api.mirror.RootClass]].
- */
- def owner: Symbol
-
- /** The type of the symbol name.
- * Can be either `TermName` or `TypeName` depending on whether this is a `TermSymbol` or a `TypeSymbol`.
- *
- * Type name namespaces do not intersect with term name namespaces.
- * This fact is reflected in different types for names of `TermSymbol` and `TypeSymbol`.
- */
- type NameType >: Null <: Name
-
- /** The name of the symbol as a member of the `Name` type.
- */
- def name: Name
-
- /** The encoded full path name of this symbol, where outer names and inner names
- * are separated by periods.
- */
- def fullName: String
-
- /** Does this symbol represent the definition of a type?
- * Note that every symbol is either a term or a type.
- * So for every symbol `sym`, either `sym.isTerm` is true
- * or `sym.isType` is true.
- */
- def isType: Boolean = false
-
- /** This symbol cast to a TypeSymbol.
- * @throws ScalaReflectionException if `isType` is false.
- */
- def asType: TypeSymbol = throw new ScalaReflectionException(s"$this is not a type")
-
- /** Does this symbol represent the definition of a term?
- * Note that every symbol is either a term or a type.
- * So for every symbol `sym`, either `sym.isTerm` is true
- * or `sym.isType` is true.
- */
- def isTerm: Boolean = false
-
- /** This symbol cast to a TermSymbol.
- * @throws ScalaReflectionException if `isTerm` is false.
- */
- def asTerm: TermSymbol = throw new ScalaReflectionException(s"$this is not a term")
-
- /** Does this symbol represent the definition of a method?
- * If yes, `isTerm` is also guaranteed to be true.
- */
- def isMethod: Boolean = false
-
- /** This symbol cast to a MethodSymbol.
- * @throws ScalaReflectionException if `isMethod` is false.
- */
- def asMethod: MethodSymbol = {
- def overloadedMsg =
- "encapsulates multiple overloaded alternatives and cannot be treated as a method. "+
- "Consider invoking `<offending symbol>.asTerm.alternatives` and manually picking the required method"
- def vanillaMsg = "is not a method"
- val msg = if (isOverloadedMethod) overloadedMsg else vanillaMsg
- throw new ScalaReflectionException(s"$this $msg")
- }
-
- /** Used to provide a better error message for `asMethod` */
- protected def isOverloadedMethod = false
-
- /** Does this symbol represent the definition of a module (i.e. does it
- * result from an object definition)?
- * If yes, `isTerm` is also guaranteed to be true.
- */
- def isModule: Boolean = false
-
- /** This symbol cast to a ModuleSymbol defined by an object definition.
- * @throws ScalaReflectionException if `isModule` is false.
- */
- def asModule: ModuleSymbol = throw new ScalaReflectionException(s"$this is not a module")
-
- /** Does this symbol represent the definition of a class or trait?
- * If yes, `isType` is also guaranteed to be true.
- */
- def isClass: Boolean = false
-
- /** Does this symbol represent the definition of a class implicitly associated
- * with an object definition (a "module class" in Scala compiler parlance)?
- * If yes, `isType` is also guaranteed to be true.
- */
- def isModuleClass: Boolean = false
-
- /** This symbol cast to a ClassSymbol representing a class or trait.
- * @throws ScalaReflectionException if `isClass` is false.
- */
- def asClass: ClassSymbol = throw new ScalaReflectionException(s"$this is not a class")
-
- /** Does this symbol represent a free term captured by reification?
- * If yes, `isTerm` is also guaranteed to be true.
- */
- def isFreeTerm: Boolean = false
-
- /** This symbol cast to a free term symbol.
- * @throws ScalaReflectionException if `isFreeTerm` is false.
- */
- def asFreeTerm: FreeTermSymbol = throw new ScalaReflectionException(s"$this is not a free term")
-
- /** Does this symbol represent a free type captured by reification?
- * If yes, `isType` is also guaranteed to be true.
- */
- def isFreeType: Boolean = false
-
- /** This symbol cast to a free type symbol.
- * @throws ScalaReflectionException if `isFreeType` is false.
- */
- def asFreeType: FreeTypeSymbol = throw new ScalaReflectionException(s"$this is not a free type")
-
- def newTermSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TermSymbol
- def newModuleAndClassSymbol(name: Name, pos: Position = NoPosition, flags: FlagSet = NoFlags): (ModuleSymbol, ClassSymbol)
- def newMethodSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): MethodSymbol
- def newTypeSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TypeSymbol
- def newClassSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): ClassSymbol
- }
-
- /** The base API that all type symbols support */
- trait TypeSymbolBase extends SymbolBase { this: TypeSymbol =>
- /** Type symbols have their names of type `TypeName`.
- */
- final type NameType = TypeName
-
- /** The type constructor corresponding to this type symbol.
- * This is different from `toType` in that type parameters
- * are part of results of `toType`, but not of `toTypeConstructor`.
- *
- * Example: Given a class declaration `class C[T] { ... }`, which generates a symbol
- * `C`. Then `C.toType` is the type `C[T]`, but `C.toTypeConstructor` is `C`.
- */
- def toTypeConstructor: Type
-
- /** A type reference that refers to this type symbol seen
- * as a member of given type `site`.
- */
- def toTypeIn(site: Type): Type
-
- /** A type reference that refers to this type symbol.
- * Note that if this symbol is a member of a class, one is almost always
- * interested in `toTypeIn` with a site type instead.
- *
- * Example: Given a class declaration `class C[T] { ... }`, which generates a symbol
- * `C`. Then `C.toType` is the type `C[T]`.
- *
- * By contrast, `C.typeSignature` would be a type signature of form
- * `PolyType(ClassInfoType(...))` that describes type parameters, value
- * parameters, parent types, and members of `C`.
- */
- def toType: Type
-
- override def isType = true
- override def asType = this
- }
-
- /** The base API that all term symbols support */
- trait TermSymbolBase extends SymbolBase { this: TermSymbol =>
- /** Term symbols have their names of type `TermName`.
- */
- final type NameType = TermName
-
- final override def isTerm = true
- final override def asTerm = this
- }
-
- /** The base API that all method symbols support */
- trait MethodSymbolBase extends TermSymbolBase { this: MethodSymbol =>
- final override def isMethod = true
- final override def asMethod = this
- }
-
- /** The base API that all module symbols support */
- trait ModuleSymbolBase extends TermSymbolBase { this: ModuleSymbol =>
- /** The class implicitly associated with the object definition.
- * One can go back from a module class to the associated module symbol
- * by inspecting its `selfType.termSymbol`.
- */
- def moduleClass: Symbol // needed for tree traversals
- // [Eugene++] when this becomes `moduleClass: ClassSymbol`, it will be the happiest day in my life
-
- final override def isModule = true
- final override def asModule = this
- }
-
- /** The base API that all class symbols support */
- trait ClassSymbolBase extends TypeSymbolBase { this: ClassSymbol =>
- final override def isClass = true
- final override def asClass = this
- }
-
- /** The base API that all free type symbols support */
- trait FreeTypeSymbolBase extends TypeSymbolBase { this: FreeTypeSymbol =>
- final override def isFreeType = true
- final override def asFreeType = this
- }
-
- /** The base API that all free term symbols support */
- trait FreeTermSymbolBase extends TermSymbolBase { this: FreeTermSymbol =>
- final override def isFreeTerm = true
- final override def asFreeTerm = this
- }
-}
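Illustrative usage of the symbol tests and casts above: a minimal sketch, assuming the runtime universe and its definitions object.

    import scala.reflect.runtime.universe._

    val listSym = definitions.ListClass         // a ClassSymbol
    println(listSym.isType && listSym.isClass)  // true
    println(listSym.owner.fullName)             // scala.collection.immutable
    println(listSym.asType == listSym)          // asType is the identity on type symbols
    // listSym.asMethod would throw ScalaReflectionException: "... is not a method"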
diff --git a/src/library/scala/reflect/base/TagInterop.scala b/src/library/scala/reflect/base/TagInterop.scala
deleted file mode 100644
index a9f0b60fd2..0000000000
--- a/src/library/scala/reflect/base/TagInterop.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-package scala.reflect
-package base
-
-import scala.runtime.ScalaRunTime._
-
-trait TagInterop { self: Universe =>
- // [Eugene++] `mirror` parameters are now of type `Any`, because I can't make these path-dependent types work
- // if you're brave enough, replace `Any` with `Mirror`, recompile and run interop_typetags_are_manifests.scala
-
- // [Eugene++] would be great if we could approximate the interop without any mirrors
- // todo. think how to implement that
-
- def typeTagToManifest[T: ClassTag](mirror: Any, tag: base.Universe # TypeTag[T]): Manifest[T] =
- throw new UnsupportedOperationException("This universe does not support tag -> manifest conversions. Use scala.reflect.runtime.universe from scala-reflect.jar.")
-
- def manifestToTypeTag[T](mirror: Any, manifest: Manifest[T]): base.Universe # TypeTag[T] =
- throw new UnsupportedOperationException("This universe does not support manifest -> tag conversions. Use scala.reflect.runtime.universe from scala-reflect.jar.")
-}
diff --git a/src/library/scala/reflect/base/TreeCreator.scala b/src/library/scala/reflect/base/TreeCreator.scala
deleted file mode 100644
index c9c8de2307..0000000000
--- a/src/library/scala/reflect/base/TreeCreator.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-package scala.reflect
-package base
-
-abstract class TreeCreator {
- def apply[U <: Universe with Singleton](m: MirrorOf[U]): U # Tree
-}
diff --git a/src/library/scala/reflect/base/Trees.scala b/src/library/scala/reflect/base/Trees.scala
deleted file mode 100644
index 7fa3c90e7d..0000000000
--- a/src/library/scala/reflect/base/Trees.scala
+++ /dev/null
@@ -1,1459 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2012 LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.reflect
-package base
-
-// [Eugene++] of all reflection APIs, this one is in the biggest need of review and documentation
-
-// Syncnote: Trees are currently not thread-safe.
-// [Eugene++] now when trees are finally abstract types, can we do something for this?
-trait Trees { self: Universe =>
-
- /** The base API that all trees support */
- abstract class TreeBase extends Product { this: Tree =>
- /** ... */
- def isDef: Boolean
-
- /** ... */
- def isEmpty: Boolean
-
- /** The canonical way to test if a Tree represents a term.
- */
- def isTerm: Boolean
-
- /** The canonical way to test if a Tree represents a type.
- */
- def isType: Boolean
-
- /** Obtains string representation of a tree */
- override def toString: String = treeToString(this)
- }
-
- /** Obtains string representation of a tree */
- protected def treeToString(tree: Tree): String
-
- /** Obtains the type of the tree (we intentionally don't expose `tree.tpe` in base) */
- protected def treeType(tree: Tree): Type
-
- /** Tree is the basis for scala's abstract syntax. The nodes are
- * implemented as case classes, and the parameters which initialize
- * a given tree are immutable: however Trees have several mutable
- * fields which are manipulated in the course of typechecking,
- * including pos, symbol, and tpe.
- *
- * Newly instantiated trees have tpe set to null (though it
- * may be set immediately thereafter depending on how it is
- * constructed.) When a tree is passed to the typer, typically via
- * `typer.typed(tree)`, under normal circumstances the tpe must be
- * null or the typer will ignore it. Furthermore, the typer is not
- * required to return the same tree it was passed.
- *
- * Trees can be easily traversed with e.g. foreach on the root node;
- * for a more nuanced traversal, subclass Traverser. Transformations
- * can be considerably trickier: see the numerous subclasses of
- * Transformer found around the compiler.
- *
- * Copying Trees should be done with care depending on whether
- * it need be done lazily or strictly (see LazyTreeCopier and
- * StrictTreeCopier) and on whether the contents of the mutable
- * fields should be copied. The tree copiers will copy the mutable
- * attributes to the new tree; calling Tree#duplicate will copy
- * symbol and tpe, but all the positions will be focused.
- *
- * Trees can be coarsely divided into four mutually exclusive categories:
- *
- * - TermTrees, representing terms
- * - TypTrees, representing types. Note that this is `TypTree`, not `TypeTree`.
- * - SymTrees, which may represent types or terms.
- * - Other Trees, which have none of those as parents.
- *
- * SymTrees include important nodes Ident and Select, which are
- * used as both terms and types; they are distinguishable based on
- * whether the Name is a TermName or TypeName. The correct way
- * to test for a type or a term (on any Tree) is to use the isTerm/isType
- * methods on Tree.
- *
- * "Others" are mostly syntactic or short-lived constructs. Examples
- * include CaseDef, which wraps individual match cases: they are
- * neither terms nor types, nor do they carry a symbol. Another
- * example is Parens, which is eliminated during parsing.
- */
- type Tree >: Null <: TreeBase
- // [Eugene++] todo. discuss nullability of abstract types
-
- /** A tag that preserves the identity of the `Tree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TreeTag: ClassTag[Tree]
-
- /** The empty tree */
- val EmptyTree: Tree
-
- /** A tree for a term. Not all terms are TermTrees; use isTerm
- * to reliably identify terms.
- */
- type TermTree >: Null <: AnyRef with Tree
-
- /** A tag that preserves the identity of the `TermTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TermTreeTag: ClassTag[TermTree]
-
- /** A tree for a type. Not all types are TypTrees; use isType
- * to reliably identify types.
- */
- type TypTree >: Null <: AnyRef with Tree
-
- /** A tag that preserves the identity of the `TypTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TypTreeTag: ClassTag[TypTree]
-
- /** A tree with a mutable symbol field, initialized to NoSymbol.
- */
- type SymTree >: Null <: AnyRef with Tree
-
- /** A tag that preserves the identity of the `SymTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val SymTreeTag: ClassTag[SymTree]
-
- /** A tree with a name - effectively, a DefTree or RefTree.
- */
- type NameTree >: Null <: AnyRef with Tree
-
- /** A tag that preserves the identity of the `NameTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val NameTreeTag: ClassTag[NameTree]
-
- /** A tree which references a symbol-carrying entity.
- * References one, as opposed to defining one; definitions
- * are in DefTrees.
- */
- type RefTree >: Null <: SymTree with NameTree
-
- /** A tag that preserves the identity of the `RefTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val RefTreeTag: ClassTag[RefTree]
-
- /** A tree which defines a symbol-carrying entity.
- */
- type DefTree >: Null <: SymTree with NameTree
-
- /** A tag that preserves the identity of the `DefTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val DefTreeTag: ClassTag[DefTree]
-
- /** Common base class for all member definitions: types, classes,
- * objects, packages, vals and vars, defs.
- */
- type MemberDef >: Null <: DefTree
-
- /** A tag that preserves the identity of the `MemberDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val MemberDefTag: ClassTag[MemberDef]
-
- /** A packaging, such as `package pid { stats }`
- */
- type PackageDef >: Null <: MemberDef
-
- /** A tag that preserves the identity of the `PackageDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val PackageDefTag: ClassTag[PackageDef]
-
- /** The constructor/deconstructor for `PackageDef` instances. */
- val PackageDef: PackageDefExtractor
-
- /** An extractor class to create and pattern match with syntax `PackageDef(pid, stats)`.
- * This AST node corresponds to the following Scala code:
- *
- * `package` pid { stats }
- */
- abstract class PackageDefExtractor {
- def apply(pid: RefTree, stats: List[Tree]): PackageDef
- def unapply(packageDef: PackageDef): Option[(RefTree, List[Tree])]
- }
-
- /** A common base class for class and object definitions.
- */
- type ImplDef >: Null <: MemberDef
-
- /** A tag that preserves the identity of the `ImplDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ImplDefTag: ClassTag[ImplDef]
-
- /** A class definition.
- */
- type ClassDef >: Null <: ImplDef
-
- /** A tag that preserves the identity of the `ClassDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ClassDefTag: ClassTag[ClassDef]
-
- /** The constructor/deconstructor for `ClassDef` instances. */
- val ClassDef: ClassDefExtractor
-
- /** An extractor class to create and pattern match with syntax `ClassDef(mods, name, tparams, impl)`.
- * This AST node corresponds to the following Scala code:
- *
- * mods `class` name [tparams] impl
- *
- * Where impl stands for:
- *
- * `extends` parents { defs }
- */
- abstract class ClassDefExtractor {
- def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef], impl: Template): ClassDef
- def unapply(classDef: ClassDef): Option[(Modifiers, TypeName, List[TypeDef], Template)]
- }
-
- /** An object definition, e.g. `object Foo`. Internally, objects are
- * quite frequently called modules to reduce ambiguity.
- * Eliminated by refcheck.
- */
- type ModuleDef >: Null <: ImplDef
-
- /** A tag that preserves the identity of the `ModuleDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ModuleDefTag: ClassTag[ModuleDef]
-
- /** The constructor/deconstructor for `ModuleDef` instances. */
- val ModuleDef: ModuleDefExtractor
-
- /** An extractor class to create and pattern match with syntax `ModuleDef(mods, name, impl)`.
- * This AST node corresponds to the following Scala code:
- *
- * mods `object` name impl
- *
- * Where impl stands for:
- *
- * `extends` parents { defs }
- */
- abstract class ModuleDefExtractor {
- def apply(mods: Modifiers, name: TermName, impl: Template): ModuleDef
- def unapply(moduleDef: ModuleDef): Option[(Modifiers, TermName, Template)]
- }
-
- /** A common base class for ValDefs and DefDefs.
- */
- type ValOrDefDef >: Null <: MemberDef
-
- /** A tag that preserves the identity of the `ValOrDefDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ValOrDefDefTag: ClassTag[ValOrDefDef]
-
- /** Broadly speaking, a value definition. All these are encoded as ValDefs:
- *
- * - immutable values, e.g. "val x"
- * - mutable values, e.g. "var x" - the MUTABLE flag set in mods
- * - lazy values, e.g. "lazy val x" - the LAZY flag set in mods
- * - method parameters, see vparamss in DefDef - the PARAM flag is set in mods
- * - explicit self-types, e.g. class A { self: Bar => } - !!! not sure what is set.
- */
- type ValDef >: Null <: ValOrDefDef
-
- /** A tag that preserves the identity of the `ValDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ValDefTag: ClassTag[ValDef]
-
- /** The constructor/deconstructor for `ValDef` instances. */
- val ValDef: ValDefExtractor
-
- /** An extractor class to create and pattern match with syntax `ValDef(mods, name, tpt, rhs)`.
- * This AST node corresponds to the following Scala code:
- *
- * mods `val` name: tpt = rhs
- *
- * mods `var` name: tpt = rhs
- *
- * mods name: tpt = rhs // in signatures of function and method definitions
- *
- * self: Bar => // self-types (!!! not sure what is set)
- *
- * If the type of a value is not specified explicitly (i.e. is meant to be inferred),
- * this is expressed by having `tpt` set to `TypeTree()` (but not to an `EmptyTree`!).
- */
- abstract class ValDefExtractor {
- def apply(mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree): ValDef
- def unapply(valDef: ValDef): Option[(Modifiers, TermName, Tree, Tree)]
- }
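Illustrative usage of the ValDef extractor above: a minimal sketch, assuming reify from the runtime universe produces the Block/ValDef shape described.

    import scala.reflect.runtime.universe._

    // `val x: Int = 2` reifies to a ValDef inside a Block.
    val Block(List(ValDef(mods, name, tpt, rhs)), _) = reify { val x: Int = 2; x }.tree
    println(name)          // x
    println(showRaw(rhs))  // Literal(Constant(2))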
-
- /** A method or macro definition.
- * @param name The name of the method or macro. Can be a type name in case this is a type macro
- */
- type DefDef >: Null <: ValOrDefDef
-
- /** A tag that preserves the identity of the `DefDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val DefDefTag: ClassTag[DefDef]
-
- /** The constructor/deconstructor for `DefDef` instances. */
- val DefDef: DefDefExtractor
-
- /** An extractor class to create and pattern match with syntax `DefDef(mods, name, tparams, vparamss, tpt, rhs)`.
- * This AST node corresponds to the following Scala code:
- *
- * mods `def` name[tparams](vparams_1)...(vparams_n): tpt = rhs
- *
- * If the return type is not specified explicitly (i.e. is meant to be inferred),
- * this is expressed by having `tpt` set to `TypeTree()` (but not to an `EmptyTree`!).
- */
- abstract class DefDefExtractor {
- def apply(mods: Modifiers, name: Name, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): DefDef
- def unapply(defDef: DefDef): Option[(Modifiers, Name, List[TypeDef], List[List[ValDef]], Tree, Tree)]
- }
-
- /** An abstract type, a type parameter, or a type alias.
- * Eliminated by erasure.
- */
- type TypeDef >: Null <: MemberDef
-
- /** A tag that preserves the identity of the `TypeDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TypeDefTag: ClassTag[TypeDef]
-
- /** The constructor/deconstructor for `TypeDef` instances. */
- val TypeDef: TypeDefExtractor
-
- /** An extractor class to create and pattern match with syntax `TypeDef(mods, name, tparams, rhs)`.
- * This AST node corresponds to the following Scala code:
- *
- * mods `type` name[tparams] = rhs
- *
- * mods `type` name[tparams] >: lo <: hi
- *
- * First usage illustrates `TypeDefs` representing type aliases and type parameters.
- * Second usage illustrates `TypeDefs` representing abstract types,
- * where lo and hi are both `TypeBoundsTrees` and `Modifier.deferred` is set in mods.
- */
- abstract class TypeDefExtractor {
- def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef], rhs: Tree): TypeDef
- def unapply(typeDef: TypeDef): Option[(Modifiers, TypeName, List[TypeDef], Tree)]
- }
-
- /** A labelled expression. Not expressible in language syntax, but
- * generated by the compiler to simulate while/do-while loops, and
- * also by the pattern matcher.
- *
- * The label acts much like a nested function, where `params` represents
- * the incoming parameters. The symbol given to the LabelDef should have
- * a MethodType, as if it were a nested function.
- *
- * Jumps are apply nodes attributed with a label's symbol. The
- * arguments from the apply node will be passed to the label and
- * assigned to the Idents.
- *
- * Forward jumps within a block are allowed.
- */
- type LabelDef >: Null <: DefTree with TermTree
-
- /** A tag that preserves the identity of the `LabelDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val LabelDefTag: ClassTag[LabelDef]
-
- /** The constructor/deconstructor for `LabelDef` instances. */
- val LabelDef: LabelDefExtractor
-
- /** An extractor class to create and pattern match with syntax `LabelDef(name, params, rhs)`.
- *
- * This AST node does not have direct correspondence to Scala code.
- * It is used for tailcalls and the like.
- * For example, while/do are desugared to label defs as follows:
- *
- * while (cond) body ==> LabelDef(L$, List(), if (cond) { body; L$() } else ())
- * do body while (cond) ==> LabelDef(L$, List(), body; if (cond) L$() else ())
- */
- abstract class LabelDefExtractor {
- def apply(name: TermName, params: List[Ident], rhs: Tree): LabelDef
- def unapply(labelDef: LabelDef): Option[(TermName, List[Ident], Tree)]
- }
-
- /** Import selector
- *
- * Representation of an imported name, its optional rename, and their optional positions.
- *
- * Eliminated by typecheck.
- *
- * @param name the imported name
- * @param namePos its position or -1 if undefined
- * @param rename the name the import is renamed to (== name if no renaming)
- * @param renamePos the position of the rename or -1 if undefined
- */
- type ImportSelector >: Null <: AnyRef
-
- /** A tag that preserves the identity of the `ImportSelector` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ImportSelectorTag: ClassTag[ImportSelector]
-
- /** The constructor/deconstructor for `ImportSelector` instances. */
- val ImportSelector: ImportSelectorExtractor
-
- /** An extractor class to create and pattern match with syntax `ImportSelector(name, namePos, rename, renamePos)`.
- * This is not an AST node, it is used as a part of the `Import` node.
- */
- abstract class ImportSelectorExtractor {
- def apply(name: Name, namePos: Int, rename: Name, renamePos: Int): ImportSelector
- def unapply(importSelector: ImportSelector): Option[(Name, Int, Name, Int)]
- }
-
- /** Import clause
- *
- * @param expr
- * @param selectors
- */
- type Import >: Null <: SymTree
-
- /** A tag that preserves the identity of the `Import` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ImportTag: ClassTag[Import]
-
- /** The constructor/deconstructor for `Import` instances. */
- val Import: ImportExtractor
-
- /** An extractor class to create and pattern match with syntax `Import(expr, selectors)`.
- * This AST node corresponds to the following Scala code:
- *
- * import expr.{selectors}
- *
- * Selectors are a list of pairs of names (from, to). // [Eugene++] obviously, they no longer are. please, document!
- * The last (and maybe only) name may be nme.WILDCARD. For instance:
- *
- * import qual.{x, y => z, _}
- *
- * Would be represented as:
- *
- * Import(qual, List(("x", "x"), ("y", "z"), (WILDCARD, null)))
- *
- * The symbol of an `Import` is an import symbol @see Symbol.newImport.
- * It's used primarily as a marker to check that the import has been typechecked.
- */
- abstract class ImportExtractor {
- def apply(expr: Tree, selectors: List[ImportSelector]): Import
- def unapply(import_ : Import): Option[(Tree, List[ImportSelector])]
- }
-
- /** Instantiation template of a class or trait
- *
- * @param parents
- * @param body
- */
- type Template >: Null <: SymTree
-
- /** A tag that preserves the identity of the `Template` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TemplateTag: ClassTag[Template]
-
- /** The constructor/deconstructor for `Template` instances. */
- val Template: TemplateExtractor
-
- /** An extractor class to create and pattern match with syntax `Template(parents, self, body)`.
- * This AST node corresponds to the following Scala code:
- *
- * `extends` parents { self => body }
- *
- * When the self-type annotation is missing, it is represented as
- * an empty value definition with nme.WILDCARD as name and NoType as type.
- *
- * The symbol of a template is a local dummy. @see Symbol.newLocalDummy
- * The owner of the local dummy is the enclosing trait or class.
- * The local dummy is itself the owner of any local blocks. For example:
- *
- * class C {
- * def foo { // owner is C
- * def bar // owner is local dummy
- * }
- * }
- */
- abstract class TemplateExtractor {
- def apply(parents: List[Tree], self: ValDef, body: List[Tree]): Template
- def unapply(template: Template): Option[(List[Tree], ValDef, List[Tree])]
- }
-
- /** Block of expressions (semicolon separated expressions) */
- type Block >: Null <: TermTree
-
- /** A tag that preserves the identity of the `Block` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val BlockTag: ClassTag[Block]
-
- /** The constructor/deconstructor for `Block` instances. */
- val Block: BlockExtractor
-
- /** An extractor class to create and pattern match with syntax `Block(stats, expr)`.
- * This AST node corresponds to the following Scala code:
- *
- * { stats; expr }
- *
- * If the block is empty, the `expr` is set to `Literal(Constant(()))`. // [Eugene++] check this
- */
- abstract class BlockExtractor {
- def apply(stats: List[Tree], expr: Tree): Block
- def unapply(block: Block): Option[(List[Tree], Tree)]
- }
-
- /** Case clause in a pattern match, eliminated during explicitouter
- * (except for occurrences in switch statements).
- * Eliminated by patmat/explicitouter.
- */
- type CaseDef >: Null <: AnyRef with Tree
-
- /** A tag that preserves the identity of the `CaseDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val CaseDefTag: ClassTag[CaseDef]
-
- /** The constructor/deconstructor for `CaseDef` instances. */
- val CaseDef: CaseDefExtractor
-
- /** An extractor class to create and pattern match with syntax `CaseDef(pat, guard, body)`.
- * This AST node corresponds to the following Scala code:
- *
- * `case` pat `if` guard => body
- *
- * If the guard is not present, the `guard` is set to `EmptyTree`. // [Eugene++] check this
- * If the body is not specified, the `body` is set to `EmptyTree`. // [Eugene++] check this
- */
- abstract class CaseDefExtractor {
- def apply(pat: Tree, guard: Tree, body: Tree): CaseDef
- def unapply(caseDef: CaseDef): Option[(Tree, Tree, Tree)]
- }
-
- /** Alternatives of patterns, eliminated by explicitouter, except for
- * occurrences in encoded Switch stmt (=remaining Match(CaseDef(...)))
- * Eliminated by patmat/explicitouter.
- */
- type Alternative >: Null <: TermTree
-
- /** A tag that preserves the identity of the `Alternative` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val AlternativeTag: ClassTag[Alternative]
-
- /** The constructor/deconstructor for `Alternative` instances. */
- val Alternative: AlternativeExtractor
-
- /** An extractor class to create and pattern match with syntax `Alternative(trees)`.
- * This AST node corresponds to the following Scala code:
- *
- * pat1 | ... | patn
- */
- abstract class AlternativeExtractor {
- def apply(trees: List[Tree]): Alternative
- def unapply(alternative: Alternative): Option[List[Tree]]
- }
-
- /** Repetition of pattern.
- * Eliminated by patmat/explicitouter.
- */
- type Star >: Null <: TermTree
-
- /** A tag that preserves the identity of the `Star` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val StarTag: ClassTag[Star]
-
- /** The constructor/deconstructor for `Star` instances. */
- val Star: StarExtractor
-
- /** An extractor class to create and pattern match with syntax `Star(elem)`.
- * This AST node corresponds to the following Scala code:
- *
- * pat*
- */
- abstract class StarExtractor {
- def apply(elem: Tree): Star
- def unapply(star: Star): Option[Tree]
- }
-
- /** Bind of a variable to a rhs pattern, eliminated by explicitouter
- * Eliminated by patmat/explicitouter.
- *
- * @param name
- * @param body
- */
- type Bind >: Null <: DefTree
-
- /** A tag that preserves the identity of the `Bind` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val BindTag: ClassTag[Bind]
-
- /** The constructor/deconstructor for `Bind` instances. */
- val Bind: BindExtractor
-
- /** An extractor class to create and pattern match with syntax `Bind(name, body)`.
- * This AST node corresponds to the following Scala code:
- *
- * name @ body
- */
- abstract class BindExtractor {
- def apply(name: Name, body: Tree): Bind
- def unapply(bind: Bind): Option[(Name, Tree)]
- }
-
- /** No one knows what this is.
- * It is not idempotent w.r.t. typechecking.
- * Can we please remove it?
- * Introduced by typer, eliminated by patmat/explicitouter.
- */
- type UnApply >: Null <: TermTree
-
- /** A tag that preserves the identity of the `UnApply` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val UnApplyTag: ClassTag[UnApply]
-
- /** The constructor/deconstructor for `UnApply` instances. */
- val UnApply: UnApplyExtractor
-
- /** An extractor class to create and pattern match with syntax `UnApply(fun, args)`.
- * This AST node does not have direct correspondence to Scala code,
- * and is introduced when typechecking pattern matches and `try` blocks.
- */
- abstract class UnApplyExtractor {
- def apply(fun: Tree, args: List[Tree]): UnApply
- def unapply(unApply: UnApply): Option[(Tree, List[Tree])]
- }
-
- /** Array of expressions, needs to be translated in backend.
- * This AST node is used to pass arguments to vararg arguments.
- * Introduced by uncurry.
- */
- type ArrayValue >: Null <: TermTree
-
- /** A tag that preserves the identity of the `ArrayValue` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ArrayValueTag: ClassTag[ArrayValue]
-
- /** The constructor/deconstructor for `ArrayValue` instances. */
- val ArrayValue: ArrayValueExtractor
-
- /** An extractor class to create and pattern match with syntax `ArrayValue(elemtpt, elems)`.
- * This AST node does not have direct correspondence to Scala code,
- * and is used to pass arguments to vararg arguments. For instance:
- *
- * printf("%s%d", foo, 42)
- *
- * Is translated after uncurry to:
- *
- * Apply(
- * Ident("printf"),
- * Literal("%s%d"),
- * ArrayValue(<Any>, List(Ident("foo"), Literal(42))))
- */
- abstract class ArrayValueExtractor {
- def apply(elemtpt: Tree, elems: List[Tree]): ArrayValue
- def unapply(arrayValue: ArrayValue): Option[(Tree, List[Tree])]
- }
-
- /** Anonymous function, eliminated by lambdalift */
- type Function >: Null <: TermTree with SymTree
-
- /** A tag that preserves the identity of the `Function` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val FunctionTag: ClassTag[Function]
-
- /** The constructor/deconstructor for `Function` instances. */
- val Function: FunctionExtractor
-
- /** An extractor class to create and pattern match with syntax `Function(vparams, body)`.
- * This AST node corresponds to the following Scala code:
- *
- * vparams => body
- *
- * The symbol of a Function is a synthetic value of name nme.ANON_FUN_NAME.
- * It is the owner of the function's parameters.
- */
- abstract class FunctionExtractor {
- def apply(vparams: List[ValDef], body: Tree): Function
- def unapply(function: Function): Option[(List[ValDef], Tree)]
- }
-
- /** Assignment */
- type Assign >: Null <: TermTree
-
- /** A tag that preserves the identity of the `Assign` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val AssignTag: ClassTag[Assign]
-
- /** The constructor/deconstructor for `Assign` instances. */
- val Assign: AssignExtractor
-
- /** An extractor class to create and pattern match with syntax `Assign(lhs, rhs)`.
- * This AST node corresponds to the following Scala code:
- *
- * lhs = rhs
- */
- abstract class AssignExtractor {
- def apply(lhs: Tree, rhs: Tree): Assign
- def unapply(assign: Assign): Option[(Tree, Tree)]
- }
-
- /** Either an assignment or a named argument. Only appears in argument lists,
- * eliminated by typecheck (doTypedApply), resurrected by reifier.
- */
- type AssignOrNamedArg >: Null <: TermTree
-
- /** A tag that preserves the identity of the `AssignOrNamedArg` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val AssignOrNamedArgTag: ClassTag[AssignOrNamedArg]
-
- /** The constructor/deconstructor for `AssignOrNamedArg` instances. */
- val AssignOrNamedArg: AssignOrNamedArgExtractor
-
- /** An extractor class to create and pattern match with syntax `AssignOrNamedArg(lhs, rhs)`.
- * This AST node corresponds to the following Scala code:
- *
- * @annotation(lhs = rhs)
- *
- * m.f(lhs = rhs)
- */
- abstract class AssignOrNamedArgExtractor {
- def apply(lhs: Tree, rhs: Tree): AssignOrNamedArg
- def unapply(assignOrNamedArg: AssignOrNamedArg): Option[(Tree, Tree)]
- }
-
- /** Conditional expression */
- type If >: Null <: TermTree
-
- /** A tag that preserves the identity of the `If` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val IfTag: ClassTag[If]
-
- /** The constructor/deconstructor for `If` instances. */
- val If: IfExtractor
-
- /** An extractor class to create and pattern match with syntax `If(cond, thenp, elsep)`.
- * This AST node corresponds to the following Scala code:
- *
- * `if` (cond) thenp `else` elsep
- *
- * If the alternative is not present, the `elsep` is set to `EmptyTree`. // [Eugene++] check this
- */
- abstract class IfExtractor {
- def apply(cond: Tree, thenp: Tree, elsep: Tree): If
- def unapply(if_ : If): Option[(Tree, Tree, Tree)]
- }
-
- /** - Pattern matching expression (before explicitouter)
- * - Switch statements (after explicitouter)
- *
- * After explicitouter, cases will satisfy the following constraints:
- *
- * - all guards are `EmptyTree`,
- * - all patterns will be either `Literal(Constant(x:Int))`
- * or `Alternative(lit|...|lit)`
- * - except for an "otherwise" branch, which has pattern
- * `Ident(nme.WILDCARD)`
- */
- type Match >: Null <: TermTree
-
- /** A tag that preserves the identity of the `Match` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val MatchTag: ClassTag[Match]
-
- /** The constructor/deconstructor for `Match` instances. */
- val Match: MatchExtractor
-
- /** An extractor class to create and pattern match with syntax `Match(selector, cases)`.
- * This AST node corresponds to the following Scala code:
- *
- * selector `match` { cases }
- *
- * // [Eugene++] say something about `val (foo, bar) = baz` and likes.
- */
- abstract class MatchExtractor {
- def apply(selector: Tree, cases: List[CaseDef]): Match
- def unapply(match_ : Match): Option[(Tree, List[CaseDef])]
- }
-
- /** Return expression */
- type Return >: Null <: TermTree with SymTree
-
- /** A tag that preserves the identity of the `Return` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ReturnTag: ClassTag[Return]
-
- /** The constructor/deconstructor for `Return` instances. */
- val Return: ReturnExtractor
-
- /** An extractor class to create and pattern match with syntax `Return(expr)`.
- * This AST node corresponds to the following Scala code:
- *
- * `return` expr
- *
- * The symbol of a Return node is the enclosing method
- */
- abstract class ReturnExtractor {
- def apply(expr: Tree): Return
- def unapply(return_ : Return): Option[Tree]
- }
-
- /** Try expression. [Eugene++] comment me! */
- type Try >: Null <: TermTree
-
- /** A tag that preserves the identity of the `Try` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TryTag: ClassTag[Try]
-
- /** The constructor/deconstructor for `Try` instances. */
- val Try: TryExtractor
-
- /** An extractor class to create and pattern match with syntax `Try(block, catches, finalizer)`.
- * This AST node corresponds to the following Scala code:
- *
- * `try` block `catch` { catches } `finally` finalizer
- *
- * If the finalizer is not present, the `finalizer` is set to `EmptyTree`. // [Eugene++] check this
- */
- abstract class TryExtractor {
- def apply(block: Tree, catches: List[CaseDef], finalizer: Tree): Try
- def unapply(try_ : Try): Option[(Tree, List[CaseDef], Tree)]
- }
-
- /** Throw expression */
- type Throw >: Null <: TermTree
-
- /** A tag that preserves the identity of the `Throw` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ThrowTag: ClassTag[Throw]
-
- /** The constructor/deconstructor for `Throw` instances. */
- val Throw: ThrowExtractor
-
- /** An extractor class to create and pattern match with syntax `Throw(expr)`.
- * This AST node corresponds to the following Scala code:
- *
- * `throw` expr
- */
- abstract class ThrowExtractor {
- def apply(expr: Tree): Throw
- def unapply(throw_ : Throw): Option[Tree]
- }
-
- /** Object instantiation
- * One should always use the factory method below to build a user-level `new`.
- *
- * @param tpt a class type
- */
- type New >: Null <: TermTree
-
- /** A tag that preserves the identity of the `New` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val NewTag: ClassTag[New]
-
- /** The constructor/deconstructor for `New` instances. */
- val New: NewExtractor
-
- /** An extractor class to create and pattern match with syntax `New(tpt)`.
- * This AST node corresponds to the following Scala code:
- *
- * `new` T
- *
- * This node always occurs in the following context:
- *
- * (`new` tpt).<init>[targs](args)
- */
- abstract class NewExtractor {
- def apply(tpt: Tree): New
- def unapply(new_ : New): Option[Tree]
- }
-
- /** Type annotation, eliminated by cleanup */
- type Typed >: Null <: TermTree
-
- /** A tag that preserves the identity of the `Typed` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TypedTag: ClassTag[Typed]
-
- /** The constructor/deconstructor for `Typed` instances. */
- val Typed: TypedExtractor
-
- /** An extractor class to create and pattern match with syntax `Typed(expr, tpt)`.
- * This AST node corresponds to the following Scala code:
- *
- * expr: tpt
- */
- abstract class TypedExtractor {
- def apply(expr: Tree, tpt: Tree): Typed
- def unapply(typed: Typed): Option[(Tree, Tree)]
- }
-
- /** Common base class for Apply and TypeApply. This could in principle
- * be a SymTree, but whether or not a Tree is a SymTree isn't used
- * to settle any interesting questions, and it would add a useless
- * field to all the instances (useless, since GenericApply forwards to
- * the underlying fun.)
- */
- type GenericApply >: Null <: TermTree
-
- /** A tag that preserves the identity of the `GenericApply` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val GenericApplyTag: ClassTag[GenericApply]
-
- /** Explicit type application.
- * @PP: All signs point toward it being a requirement that args.nonEmpty,
- * but I can't find that explicitly stated anywhere. Unless your last name
- * is odersky, you should probably treat it as true.
- */
- type TypeApply >: Null <: GenericApply
-
- /** A tag that preserves the identity of the `TypeApply` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TypeApplyTag: ClassTag[TypeApply]
-
- /** The constructor/deconstructor for `TypeApply` instances. */
- val TypeApply: TypeApplyExtractor
-
- /** An extractor class to create and pattern match with syntax `TypeApply(fun, args)`.
- * This AST node corresponds to the following Scala code:
- *
- * fun[args]
- */
- abstract class TypeApplyExtractor {
- def apply(fun: Tree, args: List[Tree]): TypeApply
- def unapply(typeApply: TypeApply): Option[(Tree, List[Tree])]
- }
-
- /** Value application */
- type Apply >: Null <: GenericApply
-
- /** A tag that preserves the identity of the `Apply` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ApplyTag: ClassTag[Apply]
-
- /** The constructor/deconstructor for `Apply` instances. */
- val Apply: ApplyExtractor
-
- /** An extractor class to create and pattern match with syntax `Apply(fun, args)`.
- * This AST node corresponds to the following Scala code:
- *
- * fun(args)
- *
- * For instance:
- *
- * fun[targs](args)
- *
- * Is expressed as:
- *
- * Apply(TypeApply(fun, targs), args)
- */
- abstract class ApplyExtractor {
- def apply(fun: Tree, args: List[Tree]): Apply
- def unapply(apply: Apply): Option[(Tree, List[Tree])]
- }
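For readers new to the AST, the nesting described above can be checked directly in the REPL. A minimal sketch, assuming a 2.10-style `scala.reflect.runtime.universe` (scala-reflect.jar) is on the classpath; the exact sub-tree shapes printed may vary slightly between compiler versions:

{{{
import scala.reflect.runtime.universe._

// reify hands back the compiler-built AST of the enclosed expression
val tree = reify(List[Int](1)).tree

// a type application wrapped in a value application, as documented above
tree match {
  case Apply(TypeApply(fun, targs), args) =>
    println("fun   = " + showRaw(fun))
    println("targs = " + targs)
    println("args  = " + args)
  case other =>
    println("unexpected shape: " + showRaw(other))
}
}}}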
-
- /** Dynamic value application.
- * In a dynamic application q.f(as)
- * - q is stored in qual
- * - as is stored in args
- * - f is stored as the node's symbol field.
- * [Eugene++] what is it used for?
- * Introduced by erasure, eliminated by cleanup.
- */
- type ApplyDynamic >: Null <: TermTree with SymTree
-
- /** A tag that preserves the identity of the `ApplyDynamic` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ApplyDynamicTag: ClassTag[ApplyDynamic]
-
- /** The constructor/deconstructor for `ApplyDynamic` instances. */
- val ApplyDynamic: ApplyDynamicExtractor
-
- /** An extractor class to create and pattern match with syntax `ApplyDynamic(qual, args)`.
- * This AST node corresponds to the following Scala code:
- *
-   *    qual.<symbol>(args)   (the applied method is stored in the node's symbol field)
- *
- * The symbol of an ApplyDynamic is the function symbol of `qual`, or NoSymbol, if there is none.
- */
- abstract class ApplyDynamicExtractor {
- def apply(qual: Tree, args: List[Tree]): ApplyDynamic
- def unapply(applyDynamic: ApplyDynamic): Option[(Tree, List[Tree])]
- }
-
- /** Super reference, qual = corresponding this reference
- * A super reference C.super[M] is represented as Super(This(C), M).
- */
- type Super >: Null <: TermTree
-
- /** A tag that preserves the identity of the `Super` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val SuperTag: ClassTag[Super]
-
- /** The constructor/deconstructor for `Super` instances. */
- val Super: SuperExtractor
-
- /** An extractor class to create and pattern match with syntax `Super(qual, mix)`.
- * This AST node corresponds to the following Scala code:
- *
- * C.super[M]
- *
- * Which is represented as:
- *
- * Super(This(C), M)
- *
- * If `mix` is empty, it is tpnme.EMPTY.
- *
- * The symbol of a Super is the class _from_ which the super reference is made.
- * For instance in C.super(...), it would be C.
- */
- abstract class SuperExtractor {
- def apply(qual: Tree, mix: TypeName): Super
- def unapply(super_ : Super): Option[(Tree, TypeName)]
- }
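As a quick illustration of the `Super(This(C), M)` encoding, the tree for a reference like `C.super[M].f` can be assembled by hand from the extractors documented here. A minimal REPL sketch, assuming `scala.reflect.runtime.universe`; `C`, `M` and `f` are placeholder names:

{{{
import scala.reflect.runtime.universe._

// C.super[M].f, built from Super(This(C), M) as described above
val superRef = Super(This(newTypeName("C")), newTypeName("M"))
val call     = Select(superRef, newTermName("f"))
println(showRaw(call))
}}}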
-
- /** Self reference */
- type This >: Null <: TermTree with SymTree
-
- /** A tag that preserves the identity of the `This` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ThisTag: ClassTag[This]
-
- /** The constructor/deconstructor for `This` instances. */
- val This: ThisExtractor
-
- /** An extractor class to create and pattern match with syntax `This(qual)`.
- * This AST node corresponds to the following Scala code:
- *
- * qual.this
- *
-   *  The symbol of a This is the class to which the this refers.
-   *  For instance in C.this, it would be C.
- */
- abstract class ThisExtractor {
- def apply(qual: TypeName): This
- def unapply(this_ : This): Option[TypeName]
- }
-
- /** Designator <qualifier> . <name> */
- type Select >: Null <: RefTree
-
- /** A tag that preserves the identity of the `Select` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val SelectTag: ClassTag[Select]
-
- /** The constructor/deconstructor for `Select` instances. */
- val Select: SelectExtractor
-
- /** An extractor class to create and pattern match with syntax `Select(qual, name)`.
- * This AST node corresponds to the following Scala code:
- *
- * qualifier.selector
- */
- abstract class SelectExtractor {
- def apply(qualifier: Tree, name: Name): Select
- def unapply(select: Select): Option[(Tree, Name)]
- }
-
- /** Identifier <name> */
- type Ident >: Null <: RefTree
-
- /** A tag that preserves the identity of the `Ident` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val IdentTag: ClassTag[Ident]
-
- /** The constructor/deconstructor for `Ident` instances. */
- val Ident: IdentExtractor
-
-  /** An extractor class to create and pattern match with syntax `Ident(name)`.
- * This AST node corresponds to the following Scala code:
- *
- * name
- *
-   *  The type checker converts idents that refer to enclosing fields or methods into selects.
-   *  For example, name ==> this.name
- */
- abstract class IdentExtractor {
- def apply(name: Name): Ident
- def unapply(ident: Ident): Option[Name]
- }
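The rewrite performed by the type checker (`name ==> this.name`) can be mimicked with hand-built trees. A minimal REPL sketch, assuming `scala.reflect.runtime.universe`; `Enclosing` and `name` are placeholder names:

{{{
import scala.reflect.runtime.universe._

// a bare identifier, and the Select it becomes once attributed to an enclosing member
val bare     = Ident(newTermName("name"))
val expanded = Select(This(newTypeName("Enclosing")), newTermName("name"))
println(showRaw(bare))
println(showRaw(expanded))
}}}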
-
- /** Marks underlying reference to id as boxed.
- * @pre id must refer to a captured variable
-   *  A reference so marked will refer to the boxed entity; no dereferencing
-   *  with `.elem` is done on it.
- * This tree node can be emitted by macros such as reify that call referenceCapturedVariable.
- * It is eliminated in LambdaLift, where the boxing conversion takes place.
- */
- type ReferenceToBoxed >: Null <: TermTree
-
- /** A tag that preserves the identity of the `ReferenceToBoxed` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ReferenceToBoxedTag: ClassTag[ReferenceToBoxed]
-
- /** The constructor/deconstructor for `ReferenceToBoxed` instances. */
- val ReferenceToBoxed: ReferenceToBoxedExtractor
-
- /** An extractor class to create and pattern match with syntax `ReferenceToBoxed(ident)`.
- * This AST node does not have direct correspondence to Scala code,
- * and is emitted by macros to reference capture vars directly without going through `elem`.
- *
- * For example:
- *
- * var x = ...
- * fun { x }
- *
- * Will emit:
- *
- * Ident(x)
- *
- * Which gets transformed to:
- *
- * Select(Ident(x), "elem")
- *
- * If `ReferenceToBoxed` were used instead of Ident, no transformation would be performed.
- */
- abstract class ReferenceToBoxedExtractor {
- def apply(ident: Ident): ReferenceToBoxed
- def unapply(referenceToBoxed: ReferenceToBoxed): Option[Ident]
- }
-
- /** Literal */
- type Literal >: Null <: TermTree
-
- /** A tag that preserves the identity of the `Literal` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val LiteralTag: ClassTag[Literal]
-
- /** The constructor/deconstructor for `Literal` instances. */
- val Literal: LiteralExtractor
-
- /** An extractor class to create and pattern match with syntax `Literal(value)`.
- * This AST node corresponds to the following Scala code:
- *
- * value
- */
- abstract class LiteralExtractor {
- def apply(value: Constant): Literal
- def unapply(literal: Literal): Option[Constant]
- }
-
- /** A tree that has an annotation attached to it. Only used for annotated types and
-   *  annotation ascriptions; annotations on definitions are stored in the Modifiers.
-   *  Eliminated by the typechecker (typedAnnotated); the annotations are then stored in
- * an AnnotatedType.
- */
- type Annotated >: Null <: AnyRef with Tree
-
- /** A tag that preserves the identity of the `Annotated` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val AnnotatedTag: ClassTag[Annotated]
-
- /** The constructor/deconstructor for `Annotated` instances. */
- val Annotated: AnnotatedExtractor
-
- /** An extractor class to create and pattern match with syntax `Annotated(annot, arg)`.
- * This AST node corresponds to the following Scala code:
- *
- * arg @annot // for types
- * arg: @annot // for exprs
- */
- abstract class AnnotatedExtractor {
- def apply(annot: Tree, arg: Tree): Annotated
- def unapply(annotated: Annotated): Option[(Tree, Tree)]
- }
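To see the `arg: @annot` shape concretely, an `Annotated` node can be built and matched by hand. A minimal REPL sketch, assuming `scala.reflect.runtime.universe`; the encoding chosen for the annotation tree (`new unchecked()`) is illustrative only:

{{{
import scala.reflect.runtime.universe._

// tree for the expression ascription `42: @unchecked`
val annot = Apply(Select(New(Ident(newTypeName("unchecked"))), nme.CONSTRUCTOR), Nil)
val tree  = Annotated(annot, Literal(Constant(42)))

tree match {
  case Annotated(a, arg) => println(showRaw(a) + " ascribed to " + showRaw(arg))
}
}}}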
-
- /** Singleton type, eliminated by RefCheck */
- type SingletonTypeTree >: Null <: TypTree
-
- /** A tag that preserves the identity of the `SingletonTypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val SingletonTypeTreeTag: ClassTag[SingletonTypeTree]
-
- /** The constructor/deconstructor for `SingletonTypeTree` instances. */
- val SingletonTypeTree: SingletonTypeTreeExtractor
-
- /** An extractor class to create and pattern match with syntax `SingletonTypeTree(ref)`.
- * This AST node corresponds to the following Scala code:
- *
- * ref.type
- */
- abstract class SingletonTypeTreeExtractor {
- def apply(ref: Tree): SingletonTypeTree
- def unapply(singletonTypeTree: SingletonTypeTree): Option[Tree]
- }
-
- /** Type selection <qualifier> # <name>, eliminated by RefCheck */
- // [Eugene++] don't see why we need it, when we have Select
- type SelectFromTypeTree >: Null <: TypTree with RefTree
-
- /** A tag that preserves the identity of the `SelectFromTypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val SelectFromTypeTreeTag: ClassTag[SelectFromTypeTree]
-
- /** The constructor/deconstructor for `SelectFromTypeTree` instances. */
- val SelectFromTypeTree: SelectFromTypeTreeExtractor
-
- /** An extractor class to create and pattern match with syntax `SelectFromTypeTree(qualifier, name)`.
- * This AST node corresponds to the following Scala code:
- *
- * qualifier # selector
- *
- * Note: a path-dependent type p.T is expressed as p.type # T
- */
- abstract class SelectFromTypeTreeExtractor {
- def apply(qualifier: Tree, name: TypeName): SelectFromTypeTree
- def unapply(selectFromTypeTree: SelectFromTypeTree): Option[(Tree, TypeName)]
- }
-
- /** Intersection type <parent1> with ... with <parentN> { <decls> }, eliminated by RefCheck */
- type CompoundTypeTree >: Null <: TypTree
-
- /** A tag that preserves the identity of the `CompoundTypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val CompoundTypeTreeTag: ClassTag[CompoundTypeTree]
-
- /** The constructor/deconstructor for `CompoundTypeTree` instances. */
- val CompoundTypeTree: CompoundTypeTreeExtractor
-
- /** An extractor class to create and pattern match with syntax `CompoundTypeTree(templ)`.
- * This AST node corresponds to the following Scala code:
- *
- * parent1 with ... with parentN { refinement }
- */
- abstract class CompoundTypeTreeExtractor {
- def apply(templ: Template): CompoundTypeTree
- def unapply(compoundTypeTree: CompoundTypeTree): Option[Template]
- }
-
- /** Applied type <tpt> [ <args> ], eliminated by RefCheck */
- type AppliedTypeTree >: Null <: TypTree
-
- /** A tag that preserves the identity of the `AppliedTypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val AppliedTypeTreeTag: ClassTag[AppliedTypeTree]
-
- /** The constructor/deconstructor for `AppliedTypeTree` instances. */
- val AppliedTypeTree: AppliedTypeTreeExtractor
-
- /** An extractor class to create and pattern match with syntax `AppliedTypeTree(tpt, args)`.
- * This AST node corresponds to the following Scala code:
- *
- * tpt[args]
- */
- abstract class AppliedTypeTreeExtractor {
- def apply(tpt: Tree, args: List[Tree]): AppliedTypeTree
- def unapply(appliedTypeTree: AppliedTypeTree): Option[(Tree, List[Tree])]
- }
-
-  /** Type bounds tree of the form `>: lo <: hi`, used in type parameter and abstract type declarations. */
- type TypeBoundsTree >: Null <: TypTree
-
- /** A tag that preserves the identity of the `TypeBoundsTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TypeBoundsTreeTag: ClassTag[TypeBoundsTree]
-
- /** The constructor/deconstructor for `TypeBoundsTree` instances. */
- val TypeBoundsTree: TypeBoundsTreeExtractor
-
- /** An extractor class to create and pattern match with syntax `TypeBoundsTree(lo, hi)`.
- * This AST node corresponds to the following Scala code:
- *
- * >: lo <: hi
- */
- abstract class TypeBoundsTreeExtractor {
- def apply(lo: Tree, hi: Tree): TypeBoundsTree
- def unapply(typeBoundsTree: TypeBoundsTree): Option[(Tree, Tree)]
- }
-
-  /** Existential type tree of the form `tpt forSome { whereClauses }`. */
- type ExistentialTypeTree >: Null <: TypTree
-
- /** A tag that preserves the identity of the `ExistentialTypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ExistentialTypeTreeTag: ClassTag[ExistentialTypeTree]
-
- /** The constructor/deconstructor for `ExistentialTypeTree` instances. */
- val ExistentialTypeTree: ExistentialTypeTreeExtractor
-
- /** An extractor class to create and pattern match with syntax `ExistentialTypeTree(tpt, whereClauses)`.
- * This AST node corresponds to the following Scala code:
- *
- * tpt forSome { whereClauses }
- */
- abstract class ExistentialTypeTreeExtractor {
- def apply(tpt: Tree, whereClauses: List[Tree]): ExistentialTypeTree
- def unapply(existentialTypeTree: ExistentialTypeTree): Option[(Tree, List[Tree])]
- }
-
-  /** A synthetic tree holding an arbitrary type. Not to be confused
-    * with TypTree, the trait for trees that are only used for type trees.
-    * TypeTrees are inserted in several places, but most notably in
-    * `RefCheck`, where the arbitrary type trees are all replaced by
-    * TypeTrees. */
- type TypeTree >: Null <: TypTree
-
- /** A tag that preserves the identity of the `TypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TypeTreeTag: ClassTag[TypeTree]
-
- /** The constructor/deconstructor for `TypeTree` instances. */
- val TypeTree: TypeTreeExtractor
-
- /** An extractor class to create and pattern match with syntax `TypeTree()`.
- * This AST node does not have direct correspondence to Scala code,
-   *  and is emitted everywhere we want to wrap a `Type` in a `Tree`.
- */
- abstract class TypeTreeExtractor {
- def apply(): TypeTree
- def unapply(typeTree: TypeTree): Boolean
- }
-
-  /** The type of tree modifiers: a set of flags, a privateWithin qualifier and a list of annotations. */
- type Modifiers >: Null <: ModifiersBase
-
- /** A tag that preserves the identity of the `Modifiers` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ModifiersTag: ClassTag[Modifiers]
-
-  /** The base API that all Modifiers support. */
- abstract class ModifiersBase extends HasFlagsBase {
- def privateWithin: Name // default: EmptyTypeName
- def annotations: List[Tree] // default: List()
- def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers =
- Modifiers(flags, privateWithin, f(annotations))
- }
-
- val Modifiers: ModifiersCreator
-
- abstract class ModifiersCreator {
- def apply(): Modifiers = Modifiers(NoFlags, EmptyTypeName, List())
- def apply(flags: FlagSet, privateWithin: Name, annotations: List[Tree]): Modifiers
- }
-
- def Modifiers(flags: FlagSet, privateWithin: Name): Modifiers = Modifiers(flags, privateWithin, List())
- def Modifiers(flags: FlagSet): Modifiers = Modifiers(flags, EmptyTypeName)
-
-  /** The empty set of modifiers: no flags, no privateWithin, no annotations. */
- lazy val NoMods = Modifiers()
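A brief sketch of how `Modifiers` compose, assuming a 2.10-style `scala.reflect.runtime.universe` with its `Flag` bits; the `inline`-like annotation tree is illustrative only and relies on the `mapAnnotations` helper declared above:

{{{
import scala.reflect.runtime.universe._

// NoMods is shorthand for Modifiers(): no flags, no privateWithin, no annotations
val mods = Modifiers(Flag.PRIVATE)
println(mods.hasFlag(Flag.PRIVATE))   // true
println(mods.annotations)             // List()

// mapAnnotations threads a transformation over the annotation trees
val annot     = Apply(Select(New(Ident(newTypeName("inline"))), nme.CONSTRUCTOR), Nil)
val annotated = mods.mapAnnotations(annot :: _)
println(annotated.annotations.size)   // 1
}}}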
-
- // [Eugene++] temporarily moved here until SI-5863 is fixed
-// ---------------------- factories ----------------------------------------------
-
- /** @param sym the class symbol
- * @param impl the implementation template
- */
- def ClassDef(sym: Symbol, impl: Template): ClassDef
-
- /**
- * @param sym the class symbol
- * @param impl the implementation template
- */
- def ModuleDef(sym: Symbol, impl: Template): ModuleDef
-
- def ValDef(sym: Symbol, rhs: Tree): ValDef
-
- def ValDef(sym: Symbol): ValDef
-
- def DefDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef
-
- def DefDef(sym: Symbol, vparamss: List[List[ValDef]], rhs: Tree): DefDef
-
- def DefDef(sym: Symbol, mods: Modifiers, rhs: Tree): DefDef
-
- def DefDef(sym: Symbol, rhs: Tree): DefDef
-
- def DefDef(sym: Symbol, rhs: List[List[Symbol]] => Tree): DefDef
-
-  /** A TypeDef node which defines the given `sym` with the given right-hand side `rhs`. */
- def TypeDef(sym: Symbol, rhs: Tree): TypeDef
-
- /** A TypeDef node which defines abstract type or type parameter for given `sym` */
- def TypeDef(sym: Symbol): TypeDef
-
- def LabelDef(sym: Symbol, params: List[Symbol], rhs: Tree): LabelDef
-
- /** Block factory that flattens directly nested blocks.
- */
- def Block(stats: Tree*): Block
-
-  /** Shorthand for a CaseDef with no guard. */
- def CaseDef(pat: Tree, body: Tree): CaseDef
-
- def Bind(sym: Symbol, body: Tree): Bind
-
- def Try(body: Tree, cases: (Tree, Tree)*): Try
-
- def Throw(tpe: Type, args: Tree*): Throw
-
- /** Factory method for object creation `new tpt(args_1)...(args_n)`
- * A `New(t, as)` is expanded to: `(new t).<init>(as)`
- */
- def New(tpt: Tree, argss: List[List[Tree]]): Tree
-
-  /** A `new` with zero or one argument lists, based on a type.
- */
- def New(tpe: Type, args: Tree*): Tree
-
- def New(sym: Symbol, args: Tree*): Tree
-
- def Apply(sym: Symbol, args: Tree*): Tree
-
- def ApplyConstructor(tpt: Tree, args: List[Tree]): Tree
-
- def Super(sym: Symbol, mix: TypeName): Tree
-
- def This(sym: Symbol): Tree
-
- def Select(qualifier: Tree, name: String): Select
-
- def Select(qualifier: Tree, sym: Symbol): Select
-
- def Ident(name: String): Ident
-
- def Ident(sym: Symbol): Ident
-
- def TypeTree(tp: Type): TypeTree
-} \ No newline at end of file
diff --git a/src/library/scala/reflect/base/TypeCreator.scala b/src/library/scala/reflect/base/TypeCreator.scala
deleted file mode 100644
index 8a14e53dd3..0000000000
--- a/src/library/scala/reflect/base/TypeCreator.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-package scala.reflect
-package base
-
-abstract class TypeCreator {
- def apply[U <: Universe with Singleton](m: MirrorOf[U]): U # Type
-}
diff --git a/src/library/scala/reflect/base/TypeTags.scala b/src/library/scala/reflect/base/TypeTags.scala
deleted file mode 100644
index b673122d00..0000000000
--- a/src/library/scala/reflect/base/TypeTags.scala
+++ /dev/null
@@ -1,274 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2012 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.reflect
-package base
-
-import java.lang.{ Class => jClass }
-import language.implicitConversions
-
-/**
- * Type tags encapsulate a representation of type T.
- * They are supposed to replace the pre-2.10 concept of a [[scala.reflect.Manifest]].
- * TypeTags are much better integrated with reflection than manifests are, and are consequently much simpler.
- *
- * === Overview ===
- *
- * Type tags are organized in a hierarchy of three classes:
- * [[scala.reflect.ClassTag]], [[scala.reflect.base.Universe#TypeTag]] and [[scala.reflect.base.Universe#AbsTypeTag]].
- *
- * A [[scala.reflect.ClassTag]] carries a runtime class that corresponds to the source type T.
-  * As such, it knows how to build single- and multi-dimensional arrays of elements of that type.
-  * It guarantees that the source type T does not contain any references to type parameters or abstract types.
- * [[scala.reflect.ClassTag]] corresponds to a previous notion of [[scala.reflect.ClassManifest]].
- *
- * A [[scala.reflect.base.Universe#AbsTypeTag]] value wraps a full Scala type in its tpe field.
- * A [[scala.reflect.base.Universe#TypeTag]] value is an [[scala.reflect.base.Universe#AbsTypeTag]]
- * that is guaranteed not to contain any references to type parameters or abstract types.
- *
- * [Eugene++] also mention sensitivity to prefixes, i.e. that rb.TypeTag is different from ru.TypeTag
- * [Eugene++] migratability between mirrors and universes is also worth mentioning
- *
- * === Splicing ===
- *
-  * Tags can be spliced, i.e. if the compiler generates a tag for a type that contains references to tagged
-  * type parameters or abstract type members, it will retrieve the corresponding tag and embed it into the result.
-  * As an example that illustrates TypeTag embedding, consider the following function:
- *
- * import reflect.mirror._
- * def f[T: TypeTag, U] = {
- * type L = T => U
- * implicitly[AbsTypeTag[L]]
- * }
- *
- * Then a call of f[String, Int] will yield a result of the form
- *
- * AbsTypeTag(<[ String => U ]>).
- *
- * Note that T has been replaced by String, because it comes with a TypeTag in f, whereas U was left as a type parameter.
- *
- * === AbsTypeTag vs TypeTag ===
- *
- * Be careful with AbsTypeTag, because it will reify types even if these types are abstract.
-  * This makes it easy to forget to tag one of the methods in the call chain and discover it much later at runtime
-  * through cryptic errors far away from their source. For example, consider the following snippet:
- *
- * def bind[T: AbsTypeTag](name: String, value: T): IR.Result = bind((name, value))
- * def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value)
- * object NamedParam {
- * implicit def namedValue[T: AbsTypeTag](name: String, x: T): NamedParam = apply(name, x)
- * def apply[T: AbsTypeTag](name: String, x: T): NamedParam = new Typed[T](name, x)
- * }
- *
-  * This fragment of the Scala REPL implementation defines a `bind` function that carries a named value along with its type
- * into the heart of the REPL. Using a [[scala.reflect.base.Universe#AbsTypeTag]] here is reasonable, because it is desirable
- * to work with all types, even if they are type parameters or abstract type members.
- *
- * However if any of the three `AbsTypeTag` context bounds is omitted, the resulting code will be incorrect,
- * because the missing `AbsTypeTag` will be transparently generated by the compiler, carrying meaningless information.
- * Most likely, this problem will manifest itself elsewhere, making debugging complicated.
- * If `AbsTypeTag` context bounds were replaced with `TypeTag`, then such errors would be reported statically.
- * But in that case we wouldn't be able to use `bind` in arbitrary contexts.
- *
- * === Backward compatibility ===
- *
- * Type tags correspond loosely to manifests.
- *
- * More precisely:
-  * the previous notion of a [[scala.reflect.ClassManifest]] corresponds to scala.reflect.ClassTag,
-  * and the previous notion of a [[scala.reflect.Manifest]] corresponds to scala.reflect.runtime.universe.TypeTag.
-  *
-  * In Scala 2.10, manifests are deprecated, so it's advisable to migrate them to tags,
- * because manifests might be removed in the next major release.
- *
-  * In most cases it will be enough to replace ClassManifests with ClassTags and Manifests with TypeTags;
-  * however, there are a few caveats:
- *
- * 1) The notion of OptManifest is no longer supported. Tags can reify arbitrary types, so they are always available.
- * // [Eugene++] it might be useful, though, to guard against abstractness of the incoming type.
- *
- * 2) There's no equivalent for AnyValManifest. Consider comparing your tag with one of the base tags
- * (defined in the corresponding companion objects) to find out whether it represents a primitive value class.
- * You can also use `<tag>.tpe.typeSymbol.isPrimitiveValueClass` for that purpose (requires scala-reflect.jar).
- *
- * 3) There's no replacement for factory methods defined in `ClassManifest` and `Manifest` companion objects.
- * Consider assembling corresponding types using reflection API provided by Java (for classes) and Scala (for types).
- *
- * 4) Certain manifest functions (such as `<:<`, `>:>` and `typeArguments`) weren't included in the tag API.
- * Consider using reflection API provided by Java (for classes) and Scala (for types) instead.
- */
-// [Eugene++] implement serialization for typetags
-trait TypeTags { self: Universe =>
-
- /**
- * If an implicit value of type u.AbsTypeTag[T] is required, the compiler will make one up on demand.
- * The implicitly created value contains in its tpe field a value of type u.Type that is a reflective representation of T.
- * In that value, any occurrences of type parameters or abstract types U
- * which come themselves with a TypeTag are represented by the type referenced by that TypeTag.
- *
- * @see [[scala.reflect.base.TypeTags]]
- */
- @annotation.implicitNotFound(msg = "No AbsTypeTag available for ${T}")
- trait AbsTypeTag[T] extends Equals with Serializable {
- val mirror: Mirror
- def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # AbsTypeTag[T]
- def tpe: Type
-
- /** case class accessories */
- override def canEqual(x: Any) = x.isInstanceOf[AbsTypeTag[_]]
- override def equals(x: Any) = x.isInstanceOf[AbsTypeTag[_]] && this.mirror == x.asInstanceOf[AbsTypeTag[_]].mirror && this.tpe == x.asInstanceOf[AbsTypeTag[_]].tpe
- override def hashCode = mirror.hashCode * 31 + tpe.hashCode
- override def toString = "AbsTypeTag[" + tpe + "]"
- }
-
- object AbsTypeTag {
- val Byte : AbsTypeTag[scala.Byte] = TypeTag.Byte
- val Short : AbsTypeTag[scala.Short] = TypeTag.Short
- val Char : AbsTypeTag[scala.Char] = TypeTag.Char
- val Int : AbsTypeTag[scala.Int] = TypeTag.Int
- val Long : AbsTypeTag[scala.Long] = TypeTag.Long
- val Float : AbsTypeTag[scala.Float] = TypeTag.Float
- val Double : AbsTypeTag[scala.Double] = TypeTag.Double
- val Boolean : AbsTypeTag[scala.Boolean] = TypeTag.Boolean
- val Unit : AbsTypeTag[scala.Unit] = TypeTag.Unit
- val Any : AbsTypeTag[scala.Any] = TypeTag.Any
- val AnyVal : AbsTypeTag[scala.AnyVal] = TypeTag.AnyVal
- val AnyRef : AbsTypeTag[scala.AnyRef] = TypeTag.AnyRef
- val Object : AbsTypeTag[java.lang.Object] = TypeTag.Object
- val Nothing : AbsTypeTag[scala.Nothing] = TypeTag.Nothing
- val Null : AbsTypeTag[scala.Null] = TypeTag.Null
-
- def apply[T](mirror1: MirrorOf[self.type], tpec1: TypeCreator): AbsTypeTag[T] =
- tpec1(mirror1) match {
- case ByteTpe => AbsTypeTag.Byte.asInstanceOf[AbsTypeTag[T]]
- case ShortTpe => AbsTypeTag.Short.asInstanceOf[AbsTypeTag[T]]
- case CharTpe => AbsTypeTag.Char.asInstanceOf[AbsTypeTag[T]]
- case IntTpe => AbsTypeTag.Int.asInstanceOf[AbsTypeTag[T]]
- case LongTpe => AbsTypeTag.Long.asInstanceOf[AbsTypeTag[T]]
- case FloatTpe => AbsTypeTag.Float.asInstanceOf[AbsTypeTag[T]]
- case DoubleTpe => AbsTypeTag.Double.asInstanceOf[AbsTypeTag[T]]
- case BooleanTpe => AbsTypeTag.Boolean.asInstanceOf[AbsTypeTag[T]]
- case UnitTpe => AbsTypeTag.Unit.asInstanceOf[AbsTypeTag[T]]
- case AnyTpe => AbsTypeTag.Any.asInstanceOf[AbsTypeTag[T]]
- case AnyValTpe => AbsTypeTag.AnyVal.asInstanceOf[AbsTypeTag[T]]
- case AnyRefTpe => AbsTypeTag.AnyRef.asInstanceOf[AbsTypeTag[T]]
- case ObjectTpe => AbsTypeTag.Object.asInstanceOf[AbsTypeTag[T]]
- case NothingTpe => AbsTypeTag.Nothing.asInstanceOf[AbsTypeTag[T]]
- case NullTpe => AbsTypeTag.Null.asInstanceOf[AbsTypeTag[T]]
- case _ => new AbsTypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
- }
-
- def unapply[T](ttag: AbsTypeTag[T]): Option[Type] = Some(ttag.tpe)
- }
-
- private class AbsTypeTagImpl[T](val mirror: Mirror, val tpec: TypeCreator) extends AbsTypeTag[T] {
- lazy val tpe: Type = tpec[self.type](mirror)
- def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # AbsTypeTag[T] = {
- val otherMirror1 = otherMirror.asInstanceOf[MirrorOf[otherMirror.universe.type]]
- otherMirror.universe.AbsTypeTag[T](otherMirror1, tpec)
- }
- private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = false)
- }
-
- /**
-   * If an implicit value of type u.TypeTag[T] is required, the compiler will make one up on demand following the same procedure as for AbsTypeTags.
- * However, if the resulting type still contains references to type parameters or abstract types, a static error results.
- *
- * @see [[scala.reflect.base.TypeTags]]
- */
- @annotation.implicitNotFound(msg = "No TypeTag available for ${T}")
- trait TypeTag[T] extends AbsTypeTag[T] with Equals with Serializable {
- override def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # TypeTag[T]
-
- /** case class accessories */
- override def canEqual(x: Any) = x.isInstanceOf[TypeTag[_]]
- override def equals(x: Any) = x.isInstanceOf[TypeTag[_]] && this.mirror == x.asInstanceOf[TypeTag[_]].mirror && this.tpe == x.asInstanceOf[TypeTag[_]].tpe
- override def hashCode = mirror.hashCode * 31 + tpe.hashCode
- override def toString = "TypeTag[" + tpe + "]"
- }
-
- object TypeTag {
- val Byte: TypeTag[scala.Byte] = new PredefTypeTag[scala.Byte] (ByteTpe, _.TypeTag.Byte)
- val Short: TypeTag[scala.Short] = new PredefTypeTag[scala.Short] (ShortTpe, _.TypeTag.Short)
- val Char: TypeTag[scala.Char] = new PredefTypeTag[scala.Char] (CharTpe, _.TypeTag.Char)
- val Int: TypeTag[scala.Int] = new PredefTypeTag[scala.Int] (IntTpe, _.TypeTag.Int)
- val Long: TypeTag[scala.Long] = new PredefTypeTag[scala.Long] (LongTpe, _.TypeTag.Long)
- val Float: TypeTag[scala.Float] = new PredefTypeTag[scala.Float] (FloatTpe, _.TypeTag.Float)
- val Double: TypeTag[scala.Double] = new PredefTypeTag[scala.Double] (DoubleTpe, _.TypeTag.Double)
- val Boolean: TypeTag[scala.Boolean] = new PredefTypeTag[scala.Boolean] (BooleanTpe, _.TypeTag.Boolean)
- val Unit: TypeTag[scala.Unit] = new PredefTypeTag[scala.Unit] (UnitTpe, _.TypeTag.Unit)
- val Any: TypeTag[scala.Any] = new PredefTypeTag[scala.Any] (AnyTpe, _.TypeTag.Any)
- val AnyVal: TypeTag[scala.AnyVal] = new PredefTypeTag[scala.AnyVal] (AnyValTpe, _.TypeTag.AnyVal)
- val AnyRef: TypeTag[scala.AnyRef] = new PredefTypeTag[scala.AnyRef] (AnyRefTpe, _.TypeTag.AnyRef)
- val Object: TypeTag[java.lang.Object] = new PredefTypeTag[java.lang.Object] (ObjectTpe, _.TypeTag.Object)
- val Nothing: TypeTag[scala.Nothing] = new PredefTypeTag[scala.Nothing] (NothingTpe, _.TypeTag.Nothing)
- val Null: TypeTag[scala.Null] = new PredefTypeTag[scala.Null] (NullTpe, _.TypeTag.Null)
-
- def apply[T](mirror1: MirrorOf[self.type], tpec1: TypeCreator): TypeTag[T] =
- tpec1(mirror1) match {
- case ByteTpe => TypeTag.Byte.asInstanceOf[TypeTag[T]]
- case ShortTpe => TypeTag.Short.asInstanceOf[TypeTag[T]]
- case CharTpe => TypeTag.Char.asInstanceOf[TypeTag[T]]
- case IntTpe => TypeTag.Int.asInstanceOf[TypeTag[T]]
- case LongTpe => TypeTag.Long.asInstanceOf[TypeTag[T]]
- case FloatTpe => TypeTag.Float.asInstanceOf[TypeTag[T]]
- case DoubleTpe => TypeTag.Double.asInstanceOf[TypeTag[T]]
- case BooleanTpe => TypeTag.Boolean.asInstanceOf[TypeTag[T]]
- case UnitTpe => TypeTag.Unit.asInstanceOf[TypeTag[T]]
- case AnyTpe => TypeTag.Any.asInstanceOf[TypeTag[T]]
- case AnyValTpe => TypeTag.AnyVal.asInstanceOf[TypeTag[T]]
- case AnyRefTpe => TypeTag.AnyRef.asInstanceOf[TypeTag[T]]
- case ObjectTpe => TypeTag.Object.asInstanceOf[TypeTag[T]]
- case NothingTpe => TypeTag.Nothing.asInstanceOf[TypeTag[T]]
- case NullTpe => TypeTag.Null.asInstanceOf[TypeTag[T]]
- case _ => new TypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
- }
-
- def unapply[T](ttag: TypeTag[T]): Option[Type] = Some(ttag.tpe)
- }
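The backward-compatibility notes above suggest comparing an incoming tag against the predefined base tags to detect primitive value classes. A minimal REPL sketch of that idea, assuming `scala.reflect.runtime.universe`; it compares the underlying `tpe` values rather than the tag instances themselves:

{{{
import scala.reflect.runtime.universe._

// does the tag denote one of the nine primitive value classes?
def isPrimitiveValueTag(tag: TypeTag[_]): Boolean =
  Seq[TypeTag[_]](TypeTag.Byte, TypeTag.Short, TypeTag.Char, TypeTag.Int, TypeTag.Long,
                  TypeTag.Float, TypeTag.Double, TypeTag.Boolean, TypeTag.Unit)
    .exists(_.tpe =:= tag.tpe)

println(isPrimitiveValueTag(typeTag[Int]))     // true
println(isPrimitiveValueTag(typeTag[String]))  // false
}}}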
-
- private class TypeTagImpl[T](mirror: Mirror, tpec: TypeCreator) extends AbsTypeTagImpl[T](mirror, tpec) with TypeTag[T] {
- override def in[U <: Universe with Singleton](otherMirror: MirrorOf[U]): U # TypeTag[T] = {
- val otherMirror1 = otherMirror.asInstanceOf[MirrorOf[otherMirror.universe.type]]
- otherMirror.universe.TypeTag[T](otherMirror1, tpec)
- }
- private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = true)
- }
-
- private class PredefTypeCreator[T](copyIn: Universe => Universe # TypeTag[T]) extends TypeCreator {
- def apply[U <: Universe with Singleton](m: MirrorOf[U]): U # Type = {
- copyIn(m.universe).asInstanceOf[U # TypeTag[T]].tpe
- }
- }
-
- private class PredefTypeTag[T](_tpe: Type, copyIn: Universe => Universe # TypeTag[T]) extends TypeTagImpl[T](rootMirror, new PredefTypeCreator(copyIn)) {
- override lazy val tpe: Type = _tpe
- private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = true)
- }
-
- // incantations
- def typeTag[T](implicit ttag: TypeTag[T]) = ttag
-
- // big thanks to Viktor Klang for this brilliant idea!
- def typeOf[T](implicit ttag: TypeTag[T]): Type = ttag.tpe
-}
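The two incantations above are the usual entry points in client code. A minimal REPL sketch, assuming `scala.reflect.runtime.universe`:

{{{
import scala.reflect.runtime.universe._

// typeOf[T] is shorthand for typeTag[T].tpe
val t = typeOf[List[Int]]
println(t)                             // List[Int]
println(t =:= typeTag[List[Int]].tpe)  // true
}}}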
-
-private[scala] class SerializedTypeTag(var tpec: TypeCreator, var concrete: Boolean) extends Serializable {
- private def writeObject(out: java.io.ObjectOutputStream): Unit = {
- out.writeObject(tpec)
- out.writeBoolean(concrete)
- }
-
- private def readObject(in: java.io.ObjectInputStream): Unit = {
- tpec = in.readObject().asInstanceOf[TypeCreator]
- concrete = in.readBoolean()
- }
-
- private def readResolve(): AnyRef = {
- import scala.reflect.basis._
- if (concrete) TypeTag(rootMirror, tpec)
- else AbsTypeTag(rootMirror, tpec)
- }
-} \ No newline at end of file
diff --git a/src/library/scala/reflect/base/Types.scala b/src/library/scala/reflect/base/Types.scala
deleted file mode 100644
index 28aaf2d04d..0000000000
--- a/src/library/scala/reflect/base/Types.scala
+++ /dev/null
@@ -1,426 +0,0 @@
-package scala.reflect
-package base
-
-trait Types { self: Universe =>
-
- /** The type of Scala types, and also Scala type signatures.
- * (No difference is internally made between the two).
- */
- type Type >: Null <: TypeBase
-
- /** The base API that all types support */
- abstract class TypeBase
-
- /** A tag that preserves the identity of the `Type` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TypeTagg: ClassTag[Type] // [Eugene++] rename!
-
- /** This constant is used as a special value that indicates that no meaningful type exists.
- */
- val NoType: Type
-
- /** This constant is used as a special value denoting the empty prefix in a path dependent type.
- * For instance `x.type` is represented as `SingleType(NoPrefix, <x>)`, where `<x>` stands for
- * the symbol for `x`.
- */
- val NoPrefix: Type
-
- /** The type of Scala singleton types, i.e. types that are inhabited
-   *  by only one non-null value. These include types of the forms
- * {{{
- * C.this.type
- * C.super.type
- * x.type
- * }}}
- * as well as constant types.
- */
- type SingletonType >: Null <: Type
-
- /** A tag that preserves the identity of the `SingletonType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val SingletonTypeTag: ClassTag[SingletonType]
-
- /** The `ThisType` type describes types of the form on the left with the
-   *  corresponding ThisType representations to the right.
- * {{{
- * C.this.type ThisType(C)
- * }}}
- */
- type ThisType >: Null <: AnyRef with SingletonType
-
- /** A tag that preserves the identity of the `ThisType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ThisTypeTag: ClassTag[ThisType]
-
- /** The constructor/deconstructor for `ThisType` instances. */
- val ThisType: ThisTypeExtractor
-
- /** An extractor class to create and pattern match with syntax `ThisType(sym)`
- * where `sym` is the class prefix of the this type.
- */
- abstract class ThisTypeExtractor {
-    def apply(sym: Symbol): Type // not ThisType because of implementation details
- def unapply(tpe: ThisType): Option[Symbol]
- }
-
- /** The `SingleType` type describes types of any of the forms on the left,
-   *  with their SingleType representations to the right.
- * {{{
- * (T # x).type SingleType(T, x)
- * p.x.type SingleType(p.type, x)
- * x.type SingleType(NoPrefix, x)
- * }}}
- */
- type SingleType >: Null <: AnyRef with SingletonType
-
- /** A tag that preserves the identity of the `SingleType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val SingleTypeTag: ClassTag[SingleType]
-
- /** The constructor/deconstructor for `SingleType` instances. */
- val SingleType: SingleTypeExtractor
-
- /** An extractor class to create and pattern match with syntax `SingleType(pre, sym)`
- * Here, `pre` is the prefix of the single-type, and `sym` is the stable value symbol
- * referred to by the single-type.
- */
- abstract class SingleTypeExtractor {
-    def apply(pre: Type, sym: Symbol): Type // not SingleType because of implementation details
- def unapply(tpe: SingleType): Option[(Type, Symbol)]
- }
-
- /** The `SuperType` type is not directly written, but arises when `C.super` is used
-   *  as a prefix in a `TypeRef` or `SingleType`. Its internal representation is
- * {{{
- * SuperType(thistpe, supertpe)
- * }}}
- * Here, `thistpe` is the type of the corresponding this-type. For instance,
- * in the type arising from C.super, the `thistpe` part would be `ThisType(C)`.
- * `supertpe` is the type of the super class referred to by the `super`.
- */
- type SuperType >: Null <: AnyRef with SingletonType
-
- /** A tag that preserves the identity of the `SuperType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val SuperTypeTag: ClassTag[SuperType]
-
- /** The constructor/deconstructor for `SuperType` instances. */
- val SuperType: SuperTypeExtractor
-
-  /** An extractor class to create and pattern match with syntax `SuperType(thistpe, supertpe)`
- */
- abstract class SuperTypeExtractor {
-    def apply(thistpe: Type, supertpe: Type): Type // not SuperType because of implementation details
- def unapply(tpe: SuperType): Option[(Type, Type)]
- }
-
- /** The `ConstantType` type is not directly written in user programs, but arises as the type of a constant.
- * The REPL expresses constant types like Int(11). Here are some constants with their types.
- * {{{
- * 1 ConstantType(Constant(1))
- * "abc" ConstantType(Constant("abc"))
- * }}}
- */
- type ConstantType >: Null <: AnyRef with SingletonType
-
- /** A tag that preserves the identity of the `ConstantType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ConstantTypeTag: ClassTag[ConstantType]
-
- /** The constructor/deconstructor for `ConstantType` instances. */
- val ConstantType: ConstantTypeExtractor
-
- /** An extractor class to create and pattern match with syntax `ConstantType(constant)`
- * Here, `constant` is the constant value represented by the type.
- */
- abstract class ConstantTypeExtractor {
- def apply(value: Constant): ConstantType
- def unapply(tpe: ConstantType): Option[Constant]
- }
-
- /** The `TypeRef` type describes types of any of the forms on the left,
- * with their TypeRef representations to the right.
- * {{{
- * T # C[T_1, ..., T_n] TypeRef(T, C, List(T_1, ..., T_n))
- * p.C[T_1, ..., T_n] TypeRef(p.type, C, List(T_1, ..., T_n))
- * C[T_1, ..., T_n] TypeRef(NoPrefix, C, List(T_1, ..., T_n))
- * T # C TypeRef(T, C, Nil)
- * p.C TypeRef(p.type, C, Nil)
- * C TypeRef(NoPrefix, C, Nil)
- * }}}
- */
- type TypeRef >: Null <: AnyRef with Type
-
- /** A tag that preserves the identity of the `TypeRef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TypeRefTag: ClassTag[TypeRef]
-
- /** The constructor/deconstructor for `TypeRef` instances. */
- val TypeRef: TypeRefExtractor
-
- /** An extractor class to create and pattern match with syntax `TypeRef(pre, sym, args)`
- * Here, `pre` is the prefix of the type reference, `sym` is the symbol
-   *  referred to by the type reference, and `args` is a possibly empty list of
-   *  type arguments.
- */
- abstract class TypeRefExtractor {
-    def apply(pre: Type, sym: Symbol, args: List[Type]): Type // not TypeRef because of implementation details
- def unapply(tpe: TypeRef): Option[(Type, Symbol, List[Type])]
- }
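The table above can be verified by destructuring an applied type. A minimal REPL sketch, assuming `scala.reflect.runtime.universe`:

{{{
import scala.reflect.runtime.universe._

// C[T_1, ..., T_n] ==> TypeRef(pre, C, List(T_1, ..., T_n))
typeOf[List[Int]] match {
  case TypeRef(pre, sym, args) =>
    println(sym)    // the symbol of class List
    println(args)   // List(Int)
  case t =>
    println("not a TypeRef: " + showRaw(t))
}
}}}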
-
- /** A subtype of Type representing refined types as well as `ClassInfo` signatures.
- */
- type CompoundType >: Null <: AnyRef with Type
-
- /** A tag that preserves the identity of the `CompoundType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val CompoundTypeTag: ClassTag[CompoundType]
-
- /** The `RefinedType` type defines types of any of the forms on the left,
- * with their RefinedType representations to the right.
- * {{{
- * P_1 with ... with P_m { D_1; ...; D_n} RefinedType(List(P_1, ..., P_m), Scope(D_1, ..., D_n))
- * P_1 with ... with P_m RefinedType(List(P_1, ..., P_m), Scope())
- * { D_1; ...; D_n} RefinedType(List(AnyRef), Scope(D_1, ..., D_n))
- * }}}
- */
- type RefinedType >: Null <: AnyRef with CompoundType
-
- /** A tag that preserves the identity of the `RefinedType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val RefinedTypeTag: ClassTag[RefinedType]
-
- /** The constructor/deconstructor for `RefinedType` instances. */
- val RefinedType: RefinedTypeExtractor
-
- /** An extractor class to create and pattern match with syntax `RefinedType(parents, decls)`
- * Here, `parents` is the list of parent types of the class, and `decls` is the scope
- * containing all declarations in the class.
- */
- abstract class RefinedTypeExtractor {
- def apply(parents: List[Type], decls: Scope): RefinedType
-
-    /** An alternative constructor that passes in the synthetic class symbol
- * that backs the refined type. (Normally, a fresh class symbol is created automatically).
- */
- def apply(parents: List[Type], decls: Scope, clazz: Symbol): RefinedType
- def unapply(tpe: RefinedType): Option[(List[Type], Scope)]
- }
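A compound type written in source decomposes as documented above. A minimal REPL sketch, assuming `scala.reflect.runtime.universe`:

{{{
import scala.reflect.runtime.universe._

// P_1 with P_2 ==> RefinedType(List(P_1, P_2), Scope())
typeOf[java.io.Serializable with java.lang.Cloneable] match {
  case RefinedType(parents, decls) => println(parents)
  case t                           => println(showRaw(t))
}
}}}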
-
- /** The `ClassInfo` type signature is used to define parents and declarations
- * of classes, traits, and objects. If a class, trait, or object C is declared like this
- * {{{
- * C extends P_1 with ... with P_m { D_1; ...; D_n}
- * }}}
- * its `ClassInfo` type has the following form:
- * {{{
- * ClassInfo(List(P_1, ..., P_m), Scope(D_1, ..., D_n), C)
- * }}}
- */
- type ClassInfoType >: Null <: AnyRef with CompoundType
-
- /** A tag that preserves the identity of the `ClassInfoType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ClassInfoTypeTag: ClassTag[ClassInfoType]
-
- /** The constructor/deconstructor for `ClassInfoType` instances. */
- val ClassInfoType: ClassInfoTypeExtractor
-
- /** An extractor class to create and pattern match with syntax `ClassInfo(parents, decls, clazz)`
- * Here, `parents` is the list of parent types of the class, `decls` is the scope
- * containing all declarations in the class, and `clazz` is the symbol of the class
- * itself.
- */
- abstract class ClassInfoTypeExtractor {
- def apply(parents: List[Type], decls: Scope, typeSymbol: Symbol): ClassInfoType
- def unapply(tpe: ClassInfoType): Option[(List[Type], Scope, Symbol)]
- }
-
- /** The `MethodType` type signature is used to indicate parameters and result type of a method
- */
- type MethodType >: Null <: AnyRef with Type
-
- /** A tag that preserves the identity of the `MethodType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val MethodTypeTag: ClassTag[MethodType]
-
- /** The constructor/deconstructor for `MethodType` instances. */
- val MethodType: MethodTypeExtractor
-
-  /** An extractor class to create and pattern match with syntax `MethodType(params, restpe)`
- * Here, `params` is a potentially empty list of parameter symbols of the method,
- * and `restpe` is the result type of the method. If the method is curried, `restpe` would
- * be another `MethodType`.
- * Note: `MethodType(Nil, Int)` would be the type of a method defined with an empty parameter list.
- * {{{
- * def f(): Int
- * }}}
- * If the method is completely parameterless, as in
- * {{{
- * def f: Int
- * }}}
- * its type is a `NullaryMethodType`.
- */
- abstract class MethodTypeExtractor {
- def apply(params: List[Symbol], resultType: Type): MethodType
- def unapply(tpe: MethodType): Option[(List[Symbol], Type)]
- }
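The distinction drawn above between `MethodType(Nil, Int)` and `NullaryMethodType` can be observed on member signatures. A minimal REPL sketch, assuming `scala.reflect.runtime.universe`; class `C` exists only for the example and the raw output shown in the comments is approximate:

{{{
import scala.reflect.runtime.universe._

class C { def f(): Int = 1; def g: Int = 2 }

// f has an empty parameter list, g has no parameter lists at all
println(showRaw(typeOf[C].member(newTermName("f")).typeSignature))
// e.g. MethodType(List(), TypeRef(..., Int, ...))
println(showRaw(typeOf[C].member(newTermName("g")).typeSignature))
// e.g. NullaryMethodType(TypeRef(..., Int, ...))
}}}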
-
- /** The `NullaryMethodType` type signature is used for parameterless methods
- * with declarations of the form `def foo: T`
- */
- type NullaryMethodType >: Null <: AnyRef with Type
-
- /** A tag that preserves the identity of the `NullaryMethodType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val NullaryMethodTypeTag: ClassTag[NullaryMethodType]
-
- /** The constructor/deconstructor for `NullaryMethodType` instances. */
- val NullaryMethodType: NullaryMethodTypeExtractor
-
- /** An extractor class to create and pattern match with syntax `NullaryMethodType(resultType)`.
- * Here, `resultType` is the result type of the parameterless method.
- */
- abstract class NullaryMethodTypeExtractor {
- def apply(resultType: Type): NullaryMethodType
- def unapply(tpe: NullaryMethodType): Option[(Type)]
- }
-
- /** The `PolyType` type signature is used for polymorphic methods
- * that have at least one type parameter.
- */
- type PolyType >: Null <: AnyRef with Type
-
- /** A tag that preserves the identity of the `PolyType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val PolyTypeTag: ClassTag[PolyType]
-
- /** The constructor/deconstructor for `PolyType` instances. */
- val PolyType: PolyTypeExtractor
-
- /** An extractor class to create and pattern match with syntax `PolyType(typeParams, resultType)`.
- * Here, `typeParams` are the type parameters of the method and `resultType`
- * is the type signature following the type parameters.
- */
- abstract class PolyTypeExtractor {
- def apply(typeParams: List[Symbol], resultType: Type): PolyType
- def unapply(tpe: PolyType): Option[(List[Symbol], Type)]
- }
-
- /** The `ExistentialType` type signature is used for existential types and
- * wildcard types.
- */
- type ExistentialType >: Null <: AnyRef with Type
-
- /** A tag that preserves the identity of the `ExistentialType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val ExistentialTypeTag: ClassTag[ExistentialType]
-
- /** The constructor/deconstructor for `ExistentialType` instances. */
- val ExistentialType: ExistentialTypeExtractor
-
- /** An extractor class to create and pattern match with syntax
- * `ExistentialType(quantified, underlying)`.
- * Here, `quantified` are the type variables bound by the existential type and `underlying`
- * is the type that's existentially quantified.
- */
- abstract class ExistentialTypeExtractor {
- def apply(quantified: List[Symbol], underlying: Type): ExistentialType
- def unapply(tpe: ExistentialType): Option[(List[Symbol], Type)]
- }
-
- /** The `AnnotatedType` type signature is used for annotated types of the
-   *  form `<type> @<annotation>`.
- */
- type AnnotatedType >: Null <: AnyRef with Type
-
- /** A tag that preserves the identity of the `AnnotatedType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val AnnotatedTypeTag: ClassTag[AnnotatedType]
-
- /** The constructor/deconstructor for `AnnotatedType` instances. */
- val AnnotatedType: AnnotatedTypeExtractor
-
- /** An extractor class to create and pattern match with syntax
- * `AnnotatedType(annotations, underlying, selfsym)`.
- * Here, `annotations` are the annotations decorating the underlying type `underlying`.
-   *  `selfsym` is a symbol representing the annotated type itself.
- */
- abstract class AnnotatedTypeExtractor {
- def apply(annotations: List[AnnotationInfo], underlying: Type, selfsym: Symbol): AnnotatedType
- def unapply(tpe: AnnotatedType): Option[(List[AnnotationInfo], Type, Symbol)]
- }
-
- /** The `TypeBounds` type signature is used to indicate lower and upper type bounds
- * of type parameters and abstract types. It is not a first-class type.
- * If an abstract type or type parameter is declared with any of the forms
- * on the left, its type signature is the TypeBounds type on the right.
- * {{{
- * T >: L <: U TypeBounds(L, U)
- * T >: L TypeBounds(L, Any)
- * T <: U TypeBounds(Nothing, U)
- * }}}
- */
- type TypeBounds >: Null <: AnyRef with Type
-
- /** A tag that preserves the identity of the `TypeBounds` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val TypeBoundsTag: ClassTag[TypeBounds]
-
- /** The constructor/deconstructor for `TypeBounds` instances. */
- val TypeBounds: TypeBoundsExtractor
-
-  /** An extractor class to create and pattern match with syntax `TypeBounds(lo, hi)`
-   *  Here, `lo` is the lower bound of the `TypeBounds` pair, and `hi` is
-   *  the upper bound.
- */
- abstract class TypeBoundsExtractor {
- def apply(lo: Type, hi: Type): TypeBounds
- def unapply(tpe: TypeBounds): Option[(Type, Type)]
- }
-
- /** An object representing an unknown type, used during type inference.
- * If you see WildcardType outside of inference it is almost certainly a bug.
- */
- val WildcardType: Type
-
- /** BoundedWildcardTypes, used only during type inference, are created in
- * two places that I can find:
- *
- * 1. If the expected type of an expression is an existential type,
- * its hidden symbols are replaced with bounded wildcards.
- * 2. When an implicit conversion is being sought based in part on
- * the name of a method in the converted type, a HasMethodMatching
- * type is created: a MethodType with parameters typed as
- * BoundedWildcardTypes.
- */
- type BoundedWildcardType >: Null <: AnyRef with Type
-
- /** A tag that preserves the identity of the `BoundedWildcardType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- */
- implicit val BoundedWildcardTypeTag: ClassTag[BoundedWildcardType]
-
- val BoundedWildcardType: BoundedWildcardTypeExtractor
-
- abstract class BoundedWildcardTypeExtractor {
- def apply(bounds: TypeBounds): BoundedWildcardType
- def unapply(tpe: BoundedWildcardType): Option[TypeBounds]
- }
-}
diff --git a/src/library/scala/reflect/base/Universe.scala b/src/library/scala/reflect/base/Universe.scala
deleted file mode 100644
index f098876c18..0000000000
--- a/src/library/scala/reflect/base/Universe.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-package scala.reflect
-package base
-
-abstract class Universe extends Symbols
- with Types
- with FlagSets
- with Scopes
- with Names
- with Trees
- with Constants
- with AnnotationInfos
- with Positions
- with Exprs
- with TypeTags
- with TagInterop
- with StandardDefinitions
- with StandardNames
- with BuildUtils
- with Mirrors
-{
- /** Given an expression, generate a tree that when compiled and executed produces the original tree.
- * The produced tree will be bound to the Universe it was called from.
- *
- * For instance, given the abstract syntax tree representation of the <[ x + 1 ]> expression:
- *
- * {{{
- * Apply(Select(Ident("x"), "+"), List(Literal(Constant(1))))
- * }}}
- *
- * The reifier transforms it to the following expression:
- *
- * {{{
- * <[
- * val $u: u.type = u // where u is a reference to the Universe that calls the reify
- * $u.Expr[Int]($u.Apply($u.Select($u.Ident($u.newFreeVar("x", <Int>, x), "+"), List($u.Literal($u.Constant(1))))))
- * ]>
- * }}}
- *
- * Reification performs expression splicing (when processing Expr.splice)
- * and type splicing (for every type T that has a TypeTag[T] implicit in scope):
- *
- * {{{
- * val two = mirror.reify(2) // Literal(Constant(2))
- * val four = mirror.reify(two.splice + two.splice) // Apply(Select(two.tree, newTermName("$plus")), List(two.tree))
- *
- * def macroImpl[T](c: Context) = {
- * ...
- * // T here is just a type parameter, so the tree produced by reify won't be of much use in a macro expansion
- * // however, if T were annotated with c.AbsTypeTag (which would declare an implicit parameter for macroImpl)
- * // then reification would substitute T with the TypeTree that was used in a TypeApply of this particular macro invocation
- * val factory = c.reify{ new Queryable[T] }
- * ...
- * }
- * }}}
- *
- * The transformation looks mostly straightforward, but it has its tricky parts:
- * * Reifier retains symbols and types defined outside the reified tree, however
- * locally defined entities get erased and replaced with their original trees
- * * Free variables are detected and wrapped in symbols of the type FreeVar
- * * Mutable variables that are accessed from a local function are wrapped in refs
- * * Since reified trees can be compiled outside of the scope they've been created in,
- * special measures are taken to ensure that all members accessed in the reifee remain visible
- */
- // implementation is magically hardwired to `scala.reflect.reify.Taggers`
- def reify[T](expr: T): Expr[T] = ??? // macro
-} \ No newline at end of file
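The splicing behaviour described in the comment above can be tried directly. A minimal REPL sketch, assuming a 2.10-style `scala.reflect.runtime.universe`, where `reify` is available on the universe itself; the printed shape is approximate:

{{{
import scala.reflect.runtime.universe._

val two  = reify(2)                        // Expr wrapping Literal(Constant(2))
val four = reify(two.splice + two.splice)  // splice composes the reified trees
println(showRaw(four.tree))
// roughly Apply(Select(Literal(Constant(2)), newTermName("$plus")), List(Literal(Constant(2))))
}}}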
diff --git a/src/library/scala/reflect/macros/internal/macroImpl.scala b/src/library/scala/reflect/macros/internal/macroImpl.scala
index a7b2bf482c..b281fb7d12 100644
--- a/src/library/scala/reflect/macros/internal/macroImpl.scala
+++ b/src/library/scala/reflect/macros/internal/macroImpl.scala
@@ -15,4 +15,4 @@ package internal
* To lessen the weirdness we define this annotation as `private[scala]`.
* It will not prevent pickling, but it will prevent application developers (and scaladocs) from seeing the annotation.
*/
-private[scala] class macroImpl(val referenceToMacroImpl: Any) extends annotation.StaticAnnotation
+private[scala] class macroImpl(val referenceToMacroImpl: Any) extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/reflect/macros/internal/package.scala b/src/library/scala/reflect/macros/internal/package.scala
deleted file mode 100644
index 912db53ed4..0000000000
--- a/src/library/scala/reflect/macros/internal/package.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package scala.reflect.macros
-
-import scala.reflect.base.{Universe => BaseUniverse}
-import scala.reflect.ClassTag
-
-// anchors for materialization macros emitted during tag materialization in Implicits.scala
-// implementation is magically hardwired into `scala.reflect.reify.Taggers`
-//
-// todo. once we have implicit macros for tag generation, we can remove these anchors
-// [Eugene++] how do I hide this from scaladoc?
-package object internal {
- private[scala] def materializeClassTag[T](u: BaseUniverse): ClassTag[T] = ??? // macro
- private[scala] def materializeAbsTypeTag[T](u: BaseUniverse): u.AbsTypeTag[T] = ??? // macro
- private[scala] def materializeTypeTag[T](u: BaseUniverse): u.TypeTag[T] = ??? // macro
-}
diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala
index d97f2ec633..4f1cc03dc8 100644
--- a/src/library/scala/reflect/package.scala
+++ b/src/library/scala/reflect/package.scala
@@ -2,8 +2,6 @@ package scala
package object reflect {
- lazy val basis: base.Universe = new base.Base
-
// in the new scheme of things ClassManifests are aliased to ClassTags
   // this is done because we want `toArray` in collections to work with ClassTags
// but changing it to use the ClassTag context bound without aliasing ClassManifest
@@ -42,13 +40,12 @@ package object reflect {
val Manifest = ManifestFactory
def classTag[T](implicit ctag: ClassTag[T]) = ctag
- // typeTag incantation is defined inside scala.reflect.basis and scala.reflect.runtime.universe
-
- // ClassTag class is defined in ClassTag.scala
- type TypeTag[T] = scala.reflect.basis.TypeTag[T]
- // ClassTag object is defined in ClassTag.scala
- lazy val TypeTag = scala.reflect.basis.TypeTag
+ // anchor for the class tag materialization macro emitted during tag materialization in Implicits.scala
+ // implementation is hardwired into `scala.reflect.reify.Taggers`
+ // using the mechanism implemented in `scala.tools.reflect.FastTrack`
+ // todo. once we have implicit macros for tag generation, we can remove this anchor
+ private[scala] def materializeClassTag[T](): ClassTag[T] = ??? // macro
@deprecated("Use `@scala.beans.BeanDescription` instead", "2.10.0")
type BeanDescription = scala.beans.BeanDescription
diff --git a/src/library/scala/remote.scala b/src/library/scala/remote.scala
index bf6b440031..36893da298 100644
--- a/src/library/scala/remote.scala
+++ b/src/library/scala/remote.scala
@@ -24,4 +24,4 @@ package scala
* }
* }}}
*/
-class remote extends annotation.StaticAnnotation {}
+class remote extends scala.annotation.StaticAnnotation {}
diff --git a/src/library/scala/runtime/AbstractPartialFunction.scala b/src/library/scala/runtime/AbstractPartialFunction.scala
index f499350ce9..c1f245590b 100644
--- a/src/library/scala/runtime/AbstractPartialFunction.scala
+++ b/src/library/scala/runtime/AbstractPartialFunction.scala
@@ -8,7 +8,8 @@
package scala.runtime
-/** `AbstractPartialFunction` reformulates all operations of its supertrait `PartialFunction` in terms of `isDefinedAt` and `applyOrElse`.
+/** `AbstractPartialFunction` reformulates all operations of its supertrait `PartialFunction`
+ * in terms of `isDefinedAt` and `applyOrElse`.
*
* This allows more efficient implementations in many cases:
* - optimized `orElse` method supports chained `orElse` in linear time,
@@ -16,12 +17,7 @@ package scala.runtime
* - optimized `lift` method helps to avoid double evaluation of pattern matchers & guards
* of partial function literals.
*
- * This trait is used as a basis for implementation of all partial function literals
- * with non-exhaustive matchers.
- *
- * Use of `AbstractPartialFunction` instead of `PartialFunction` as a base trait for
- * user-defined partial functions may result in better performance
- * and more predictable behavior w.r.t. side effects.
+ * This trait is used as a basis for implementation of all partial function literals.
*
* @author Pavel Pavlov
* @since 2.10
@@ -35,34 +31,4 @@ abstract class AbstractPartialFunction[@specialized(scala.Int, scala.Long, scala
// probably okay to make final since classes compiled before have overridden against the old version of AbstractPartialFunction
// let's not make it final so as not to confuse anyone
/*final*/ def apply(x: T1): R = applyOrElse(x, PartialFunction.empty)
-
- @annotation.unspecialized override final def andThen[C](k: R => C) : PartialFunction[T1, C] =
- new AbstractPartialFunction[T1, C] {
- def isDefinedAt(x: T1): Boolean = self.isDefinedAt(x)
- override def applyOrElse[A1 <: T1, C1 >: C](x: A1, default: A1 => C1): C1 =
- self.applyOrElse(x, PartialFunction.fallbackToken) match {
- case PartialFunction.FallbackToken => default(x)
- case z => k(z)
- }
- }
-
- // TODO: remove
- protected def missingCase(x: T1): R = throw new MatchError(x)
-}
-
-
-/** `AbstractTotalFunction` is a partial function whose `isDefinedAt` method always returns `true`.
- *
- * This class is used as base class for partial function literals with
- * certainly exhaustive pattern matchers.
- *
- * @author Pavel Pavlov
- * @since 2.10
- */
-abstract class AbstractTotalFunction[@specialized(scala.Int, scala.Long, scala.Float, scala.Double, scala.AnyRef) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double, scala.AnyRef) +R] extends Function1[T1, R] with PartialFunction[T1, R] {
- final def isDefinedAt(x: T1): Boolean = true
- @annotation.unspecialized override final def applyOrElse[A1 <: T1, B1 >: R](x: A1, default: A1 => B1): B1 = apply(x)
- @annotation.unspecialized override final def orElse[A1 <: T1, B1 >: R](that: PartialFunction[A1, B1]): PartialFunction[A1, B1] = this
- //TODO: check generated code for PF literal here
- @annotation.unspecialized override final def andThen[C](k: R => C): PartialFunction[T1, C] = { case x => k(apply(x)) }
}
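For context on the doc change above: `applyOrElse` lets one pattern-match evaluation stand in for the usual `isDefinedAt`-then-`apply` pair, which is how `lift` avoids the double evaluation mentioned in the scaladoc. A small usage sketch:

  val pf: PartialFunction[Int, String] = { case n if n > 0 => "positive" }

  // One evaluation of the (possibly expensive) guard, with a fallback for undefined inputs.
  pf.applyOrElse(0, (_: Int) => "non-positive")   // "non-positive"

  // lift goes through the same path, so guards are not evaluated twice.
  pf.lift(0)                                      // None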
diff --git a/src/library/scala/runtime/RichBoolean.scala b/src/library/scala/runtime/RichBoolean.scala
index a14160a71e..92cc6ccf98 100644
--- a/src/library/scala/runtime/RichBoolean.scala
+++ b/src/library/scala/runtime/RichBoolean.scala
@@ -8,6 +8,6 @@
package scala.runtime
-final class RichBoolean(val self: Boolean) extends OrderedProxy[Boolean] {
- protected val ord = math.Ordering[Boolean]
+final class RichBoolean(val self: Boolean) extends AnyVal with OrderedProxy[Boolean] {
+ protected def ord = scala.math.Ordering.Boolean
}
diff --git a/src/library/scala/runtime/RichByte.scala b/src/library/scala/runtime/RichByte.scala
index c42a2dd183..9d88ed3689 100644
--- a/src/library/scala/runtime/RichByte.scala
+++ b/src/library/scala/runtime/RichByte.scala
@@ -8,4 +8,7 @@
package scala.runtime
-final class RichByte(val self: Byte) extends ScalaWholeNumberProxy[Byte] { }
+final class RichByte(val self: Byte) extends AnyVal with ScalaWholeNumberProxy[Byte] {
+ protected def num = scala.math.Numeric.ByteIsIntegral
+ protected def ord = scala.math.Ordering.Byte
+}
diff --git a/src/library/scala/runtime/RichChar.scala b/src/library/scala/runtime/RichChar.scala
index ba939d6633..918fe70f5c 100644
--- a/src/library/scala/runtime/RichChar.scala
+++ b/src/library/scala/runtime/RichChar.scala
@@ -10,7 +10,10 @@ package scala.runtime
import java.lang.Character
-final class RichChar(val self: Char) extends IntegralProxy[Char] {
+final class RichChar(val self: Char) extends AnyVal with IntegralProxy[Char] {
+ protected def num = scala.math.Numeric.CharIsIntegral
+ protected def ord = scala.math.Ordering.Char
+
def asDigit: Int = Character.digit(self, Character.MAX_RADIX)
def isControl: Boolean = Character.isISOControl(self)
diff --git a/src/library/scala/runtime/RichDouble.scala b/src/library/scala/runtime/RichDouble.scala
index 396323d1e8..d7d2603ef7 100644
--- a/src/library/scala/runtime/RichDouble.scala
+++ b/src/library/scala/runtime/RichDouble.scala
@@ -6,10 +6,13 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
-final class RichDouble(val self: Double) extends FractionalProxy[Double] {
- protected val integralNum = Numeric.DoubleAsIfIntegral
+final class RichDouble(val self: Double) extends AnyVal with FractionalProxy[Double] {
+ protected def num = scala.math.Numeric.DoubleIsFractional
+ protected def ord = scala.math.Ordering.Double
+ protected def integralNum = scala.math.Numeric.DoubleAsIfIntegral
def round: Long = math.round(self)
def ceil: Double = math.ceil(self)
diff --git a/src/library/scala/runtime/RichException.scala b/src/library/scala/runtime/RichException.scala
index bb20ec61bb..b9289562f8 100644
--- a/src/library/scala/runtime/RichException.scala
+++ b/src/library/scala/runtime/RichException.scala
@@ -8,7 +8,7 @@
package scala.runtime
-import compat.Platform.EOL
+import scala.compat.Platform.EOL
final class RichException(exc: Throwable) {
def getStackTraceString = exc.getStackTrace().mkString("", EOL, EOL)
diff --git a/src/library/scala/runtime/RichFloat.scala b/src/library/scala/runtime/RichFloat.scala
index 4fc9e8864a..9c3a14d3be 100644
--- a/src/library/scala/runtime/RichFloat.scala
+++ b/src/library/scala/runtime/RichFloat.scala
@@ -6,10 +6,13 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
-final class RichFloat(val self: Float) extends FractionalProxy[Float] {
- protected val integralNum = Numeric.FloatAsIfIntegral
+final class RichFloat(val self: Float) extends AnyVal with FractionalProxy[Float] {
+ protected def num = scala.math.Numeric.FloatIsFractional
+ protected def ord = scala.math.Ordering.Float
+ protected def integralNum = scala.math.Numeric.FloatAsIfIntegral
def round: Int = math.round(self)
def ceil: Float = math.ceil(self).toFloat
diff --git a/src/library/scala/runtime/RichInt.scala b/src/library/scala/runtime/RichInt.scala
index d03968212f..619574264a 100644
--- a/src/library/scala/runtime/RichInt.scala
+++ b/src/library/scala/runtime/RichInt.scala
@@ -12,7 +12,9 @@ import scala.collection.immutable.Range
// Note that this does not implement IntegralProxy[Int] so that it can return
// the Int-specific Range class from until/to.
-final class RichInt(val self: Int) extends ScalaNumberProxy[Int] with RangedProxy[Int] {
+final class RichInt(val self: Int) extends AnyVal with ScalaNumberProxy[Int] with RangedProxy[Int] {
+ protected def num = scala.math.Numeric.IntIsIntegral
+ protected def ord = scala.math.Ordering.Int
type ResultWithoutStep = Range
/**
diff --git a/src/library/scala/runtime/RichLong.scala b/src/library/scala/runtime/RichLong.scala
index 5784934ffd..7c052851a9 100644
--- a/src/library/scala/runtime/RichLong.scala
+++ b/src/library/scala/runtime/RichLong.scala
@@ -8,7 +8,10 @@
package scala.runtime
-final class RichLong(val self: Long) extends IntegralProxy[Long] {
+final class RichLong(val self: Long) extends AnyVal with IntegralProxy[Long] {
+ protected def num = scala.math.Numeric.LongIsIntegral
+ protected def ord = scala.math.Ordering.Long
+
def toBinaryString: String = java.lang.Long.toBinaryString(self)
def toHexString: String = java.lang.Long.toHexString(self)
def toOctalString: String = java.lang.Long.toOctalString(self)
diff --git a/src/library/scala/runtime/RichShort.scala b/src/library/scala/runtime/RichShort.scala
index a174438c06..4dfa237b38 100644
--- a/src/library/scala/runtime/RichShort.scala
+++ b/src/library/scala/runtime/RichShort.scala
@@ -8,4 +8,7 @@
package scala.runtime
-final class RichShort(val self: Short) extends ScalaWholeNumberProxy[Short] { }
+final class RichShort(val self: Short) extends AnyVal with ScalaWholeNumberProxy[Short] {
+ protected def num = scala.math.Numeric.ShortIsIntegral
+ protected def ord = scala.math.Ordering.Short
+}
diff --git a/src/library/scala/runtime/ScalaNumberProxy.scala b/src/library/scala/runtime/ScalaNumberProxy.scala
index d9b9a7843f..df2d209e3e 100644
--- a/src/library/scala/runtime/ScalaNumberProxy.scala
+++ b/src/library/scala/runtime/ScalaNumberProxy.scala
@@ -9,7 +9,7 @@
package scala.runtime
import scala.collection.{ mutable, immutable }
-import math.ScalaNumericConversions
+import scala.math.ScalaNumericConversions
import immutable.NumericRange
import Proxy.Typed
@@ -20,9 +20,8 @@ import Proxy.Typed
* @version 2.9
* @since 2.9
*/
-abstract class ScalaNumberProxy[T: Numeric] extends ScalaNumericConversions with Typed[T] with OrderedProxy[T] {
- private val num = implicitly[Numeric[T]]
- protected val ord: Ordering[T] = num
+trait ScalaNumberProxy[T] extends Any with ScalaNumericConversions with Typed[T] with OrderedProxy[T] {
+ protected implicit def num: Numeric[T]
def underlying() = self.asInstanceOf[AnyRef]
def doubleValue() = num.toDouble(self)
@@ -35,11 +34,11 @@ abstract class ScalaNumberProxy[T: Numeric] extends ScalaNumericConversions with
def abs = num.abs(self)
def signum = num.signum(self)
}
-abstract class ScalaWholeNumberProxy[T: Numeric] extends ScalaNumberProxy[T] {
+trait ScalaWholeNumberProxy[T] extends Any with ScalaNumberProxy[T] {
def isWhole() = true
}
-abstract class IntegralProxy[T : Integral] extends ScalaWholeNumberProxy[T] with RangedProxy[T] {
- private lazy val num = implicitly[Integral[T]]
+trait IntegralProxy[T] extends Any with ScalaWholeNumberProxy[T] with RangedProxy[T] {
+ protected implicit def num: Integral[T]
type ResultWithoutStep = NumericRange[T]
def until(end: T): NumericRange.Exclusive[T] = NumericRange(self, end, num.one)
@@ -47,17 +46,17 @@ abstract class IntegralProxy[T : Integral] extends ScalaWholeNumberProxy[T] with
def to(end: T): NumericRange.Inclusive[T] = NumericRange.inclusive(self, end, num.one)
def to(end: T, step: T): NumericRange.Inclusive[T] = NumericRange.inclusive(self, end, step)
}
-abstract class FractionalProxy[T : Fractional] extends ScalaNumberProxy[T] with RangedProxy[T] {
- def isWhole() = false
+trait FractionalProxy[T] extends Any with ScalaNumberProxy[T] with RangedProxy[T] {
+ protected implicit def num: Fractional[T]
+ protected implicit def integralNum: Integral[T]
/** In order to supply predictable ranges, we require an Integral[T] which provides
* us with discrete operations on the (otherwise fractional) T. See Numeric.DoubleAsIfIntegral
* for an example.
*/
- protected implicit def integralNum: Integral[T]
- private lazy val num = implicitly[Fractional[T]]
type ResultWithoutStep = Range.Partial[T, NumericRange[T]]
+ def isWhole() = false
def until(end: T): ResultWithoutStep = new Range.Partial(NumericRange(self, end, _))
def until(end: T, step: T): NumericRange.Exclusive[T] = NumericRange(self, end, step)
def to(end: T): ResultWithoutStep = new Range.Partial(NumericRange.inclusive(self, end, _))
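The pattern applied throughout these proxies: they become universal traits (`extends Any`, `def` members only) so the Rich* wrappers can be value classes. A hedged sketch of the same shape with made-up names (`OrderedBy`, `RichWeight`):

  // Universal trait: extends Any and contains only defs, so a value class may mix it in.
  trait OrderedBy[T] extends Any {
    protected def ord: Ordering[T]
    def self: T
    def isAfter(that: T): Boolean = ord.gt(self, that)
  }

  // Value class: the single val parameter is its only field; everything else must be a def.
  final class RichWeight(val self: Int) extends AnyVal with OrderedBy[Int] {
    protected def ord = Ordering.Int
  }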
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index e5f5e9dc5d..5c9e36450b 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator }
import scala.collection.mutable.WrappedArray
@@ -15,6 +16,7 @@ import scala.collection.generic.{ Sorted }
import scala.reflect.{ ClassTag, classTag }
import scala.util.control.ControlThrowable
import scala.xml.{ Node, MetaData }
+import java.lang.{ Class => jClass }
import java.lang.Double.doubleToLongBits
import java.lang.reflect.{ Modifier, Method => JMethod }
@@ -28,10 +30,10 @@ object ScalaRunTime {
def isArray(x: Any, atLevel: Int): Boolean =
x != null && isArrayClass(x.getClass, atLevel)
- private def isArrayClass(clazz: Class[_], atLevel: Int): Boolean =
+ private def isArrayClass(clazz: jClass[_], atLevel: Int): Boolean =
clazz.isArray && (atLevel == 1 || isArrayClass(clazz.getComponentType, atLevel - 1))
- def isValueClass(clazz: Class[_]) = clazz.isPrimitive()
+ def isValueClass(clazz: jClass[_]) = clazz.isPrimitive()
def isTuple(x: Any) = x != null && tupleNames(x.getClass.getName)
def isAnyVal(x: Any) = x match {
case _: Byte | _: Short | _: Char | _: Int | _: Long | _: Float | _: Double | _: Boolean | _: Unit => true
@@ -50,7 +52,7 @@ object ScalaRunTime {
/** Return the class object representing an array with element class `clazz`.
*/
- def arrayClass(clazz: Class[_]): Class[_] = {
+ def arrayClass(clazz: jClass[_]): jClass[_] = {
// newInstance throws an exception if the erasure is Void.TYPE. see SI-5680
if (clazz == java.lang.Void.TYPE) classOf[Array[Unit]]
else java.lang.reflect.Array.newInstance(clazz, 0).getClass
@@ -58,18 +60,19 @@ object ScalaRunTime {
/** Return the class object representing elements in arrays described by a given schematic.
*/
- def arrayElementClass(schematic: Any): Class[_] = schematic match {
- case cls: Class[_] => cls.getComponentType
+ def arrayElementClass(schematic: Any): jClass[_] = schematic match {
+ case cls: jClass[_] => cls.getComponentType
case tag: ClassTag[_] => tag.runtimeClass
- case _ => throw new UnsupportedOperationException("unsupported schematic %s (%s)".format(schematic, if (schematic == null) "null" else schematic.getClass))
+ case _ =>
+ throw new UnsupportedOperationException(s"unsupported schematic $schematic (${schematic.getClass})")
}
/** Return the class object representing an unboxed value type,
* e.g. classOf[int], not classOf[java.lang.Integer]. The compiler
* rewrites expressions like 5.getClass to come here.
*/
- def anyValClass[T <: AnyVal : ClassTag](value: T): Class[T] =
- classTag[T].runtimeClass.asInstanceOf[Class[T]]
+ def anyValClass[T <: AnyVal : ClassTag](value: T): jClass[T] =
+ classTag[T].runtimeClass.asInstanceOf[jClass[T]]
/** Retrieve generic array element */
def array_apply(xs: AnyRef, idx: Int): Any = xs match {
@@ -144,7 +147,7 @@ object ScalaRunTime {
dest
}
- def toArray[T](xs: collection.Seq[T]) = {
+ def toArray[T](xs: scala.collection.Seq[T]) = {
val arr = new Array[AnyRef](xs.length)
var i = 0
for (x <- xs) {
@@ -167,35 +170,6 @@ object ScalaRunTime {
def checkInitialized[T <: AnyRef](x: T): T =
if (x == null) throw new UninitializedError else x
- abstract class Try[+A] {
- def Catch[B >: A](handler: PartialFunction[Throwable, B]): B
- def Finally(fin: => Unit): A
- }
-
- def Try[A](block: => A): Try[A] = new Try[A] with Runnable {
- private var result: A = _
- private var exception: Throwable =
- try { run() ; null }
- catch {
- case e: ControlThrowable => throw e // don't catch non-local returns etc
- case e: Throwable => e
- }
-
- def run() { result = block }
-
- def Catch[B >: A](handler: PartialFunction[Throwable, B]): B =
- if (exception == null) result
- else if (handler isDefinedAt exception) handler(exception)
- else throw exception
-
- def Finally(fin: => Unit): A = {
- fin
-
- if (exception == null) result
- else throw exception
- }
- }
-
def _toString(x: Product): String =
x.productIterator.mkString(x.productPrefix + "(", ",", ")")
@@ -234,12 +208,12 @@ object ScalaRunTime {
// Note that these are the implementations called by ##, so they
// must not call ## themselves.
- @inline def hash(x: Any): Int =
+ def hash(x: Any): Int =
if (x == null) 0
else if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.hashFromNumber(x.asInstanceOf[java.lang.Number])
else x.hashCode
- @inline def hash(dv: Double): Int = {
+ def hash(dv: Double): Int = {
val iv = dv.toInt
if (iv == dv) return iv
@@ -249,7 +223,7 @@ object ScalaRunTime {
val fv = dv.toFloat
if (fv == dv) fv.hashCode else dv.hashCode
}
- @inline def hash(fv: Float): Int = {
+ def hash(fv: Float): Int = {
val iv = fv.toInt
if (iv == fv) return iv
@@ -257,29 +231,29 @@ object ScalaRunTime {
if (lv == fv) return hash(lv)
else fv.hashCode
}
- @inline def hash(lv: Long): Int = {
+ def hash(lv: Long): Int = {
val low = lv.toInt
val lowSign = low >>> 31
val high = (lv >>> 32).toInt
low ^ (high + lowSign)
}
- @inline def hash(x: Number): Int = runtime.BoxesRunTime.hashFromNumber(x)
+ def hash(x: Number): Int = runtime.BoxesRunTime.hashFromNumber(x)
// The remaining overloads are here for completeness, but the compiler
// inlines these definitions directly so they're not generally used.
- @inline def hash(x: Int): Int = x
- @inline def hash(x: Short): Int = x.toInt
- @inline def hash(x: Byte): Int = x.toInt
- @inline def hash(x: Char): Int = x.toInt
- @inline def hash(x: Boolean): Int = if (x) true.hashCode else false.hashCode
- @inline def hash(x: Unit): Int = 0
+ def hash(x: Int): Int = x
+ def hash(x: Short): Int = x.toInt
+ def hash(x: Byte): Int = x.toInt
+ def hash(x: Char): Int = x.toInt
+ def hash(x: Boolean): Int = if (x) true.hashCode else false.hashCode
+ def hash(x: Unit): Int = 0
/** A helper method for constructing case class equality methods,
* because existential types get in the way of a clean outcome and
* it's performing a series of Any/Any equals comparisons anyway.
* See ticket #2867 for specifics.
*/
- def sameElements(xs1: collection.Seq[Any], xs2: collection.Seq[Any]) = xs1 sameElements xs2
+ def sameElements(xs1: scala.collection.Seq[Any], xs2: scala.collection.Seq[Any]) = xs1 sameElements xs2
/** Given any Scala value, convert it to a String.
*
@@ -346,7 +320,7 @@ object ScalaRunTime {
case x: String => if (x.head.isWhitespace || x.last.isWhitespace) "\"" + x + "\"" else x
case x if useOwnToString(x) => x.toString
case x: AnyRef if isArray(x) => arrayToString(x)
- case x: collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")")
+ case x: scala.collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")")
case x: Iterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
case x: Traversable[_] => x take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
case x: Product1[_] if isTuple(x) => "(" + inner(x._1) + ",)" // that special trailing comma
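As the comment above notes, these hash overloads back `##`, so equal numeric values must keep hashing identically across the primitive types even after the `@inline` annotations are dropped. A quick illustrative check:

  import scala.runtime.ScalaRunTime

  ScalaRunTime.hash(1)      // 1
  ScalaRunTime.hash(1L)     // 1
  ScalaRunTime.hash(1.0)    // 1
  1.## == 1L.## && 1L.## == 1.0.##   // true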
diff --git a/src/library/scala/runtime/SeqCharSequence.scala b/src/library/scala/runtime/SeqCharSequence.scala
index 8ef1a9a33e..8cb958c05f 100644
--- a/src/library/scala/runtime/SeqCharSequence.scala
+++ b/src/library/scala/runtime/SeqCharSequence.scala
@@ -6,11 +6,12 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
import java.util.Arrays.copyOfRange
-final class SeqCharSequence(val xs: collection.IndexedSeq[Char]) extends CharSequence {
+final class SeqCharSequence(val xs: scala.collection.IndexedSeq[Char]) extends CharSequence {
def length: Int = xs.length
def charAt(index: Int): Char = xs(index)
def subSequence(start: Int, end: Int): CharSequence = new SeqCharSequence(xs.slice(start, end))
diff --git a/src/library/scala/runtime/StringAdd.scala b/src/library/scala/runtime/StringAdd.scala
index a7e78ea9a3..f074b5407e 100644
--- a/src/library/scala/runtime/StringAdd.scala
+++ b/src/library/scala/runtime/StringAdd.scala
@@ -9,14 +9,6 @@
package scala.runtime
/** A wrapper class that adds string concatenation `+` to any value */
-final class StringAdd(val self: Any) {
-
- // Note: The implicit conversion from Any to StringAdd is one of two
- // implicit conversions from Any to AnyRef in Predef. It is important to have at least
- // two such conversions, so that silent conversions from value types to AnyRef
- // are avoided. If StringFormat should become a value class, another
- // implicit conversion from Any to AnyRef has to be introduced in Predef
-
+final class StringAdd(val self: Any) extends AnyVal {
def +(other: String) = String.valueOf(self) + other
-
}
diff --git a/src/library/scala/runtime/StringFormat.scala b/src/library/scala/runtime/StringFormat.scala
index c120cbb14d..7d34e82812 100644
--- a/src/library/scala/runtime/StringFormat.scala
+++ b/src/library/scala/runtime/StringFormat.scala
@@ -10,18 +10,10 @@ package scala.runtime
/** A wrapper class that adds a `formatted` operation to any value
*/
-final class StringFormat(val self: Any) {
-
- // Note: The implicit conversion from Any to StringFormat is one of two
- // implicit conversions from Any to AnyRef in Predef. It is important to have at least
- // two such conversions, so that silent conversions from value types to AnyRef
- // are avoided. If StringFormat should become a value class, another
- // implicit conversion from Any to AnyRef has to be introduced in Predef
-
+final class StringFormat(val self: Any) extends AnyVal {
/** Returns string formatted according to given `format` string.
* Format strings are as for `String.format`
* (@see java.lang.String.format).
*/
@inline def formatted(fmtstr: String): String = fmtstr format self
-
}
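Both wrappers above are now value classes, so the `+` and `formatted` enrichments can generally avoid allocating a wrapper object. Usage sketch (normally reached through the implicit conversions in `Predef` rather than `new`):

  new scala.runtime.StringAdd(3) + " apples"                  // "3 apples"
  new scala.runtime.StringFormat(3.14159).formatted("%.2f")   // "3.14"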
diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala
index dce7eef08d..6030c9ea90 100644
--- a/src/library/scala/runtime/Tuple2Zipped.scala
+++ b/src/library/scala/runtime/Tuple2Zipped.scala
@@ -10,33 +10,32 @@ package scala.runtime
import scala.collection.{ TraversableLike, IterableLike }
import scala.collection.generic.{ CanBuildFrom => CBF }
-import language.{ higherKinds, implicitConversions }
+import scala.language.{ higherKinds, implicitConversions }
/** This interface is intended as a minimal interface, not complicated
* by the requirement to resolve type constructors, for implicit search (which only
* needs to find an implicit conversion to Traversable for our purposes.)
*/
-trait ZippedTraversable2[+El1, +El2] {
+trait ZippedTraversable2[+El1, +El2] extends Any {
def foreach[U](f: (El1, El2) => U): Unit
}
object ZippedTraversable2 {
implicit def zippedTraversable2ToTraversable[El1, El2](zz: ZippedTraversable2[El1, El2]): Traversable[(El1, El2)] = {
- new collection.AbstractTraversable[(El1, El2)] {
+ new scala.collection.AbstractTraversable[(El1, El2)] {
def foreach[U](f: ((El1, El2)) => U): Unit = zz foreach Function.untupled(f)
}
}
}
-class Tuple2Zipped[El1, Repr1, El2, Repr2](
- coll1: TraversableLike[El1, Repr1],
- coll2: IterableLike[El2, Repr2]
-) extends ZippedTraversable2[El1, El2] {
+final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1, Repr1], IterableLike[El2, Repr2])) extends AnyVal with ZippedTraversable2[El1, El2] {
+ // This would be better as "private def coll1 = colls._1" but
+ // SI-6215 precludes private methods in value classes.
def map[B, To](f: (El1, El2) => B)(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- b.sizeHint(coll1)
- val elems2 = coll2.iterator
+ val b = cbf(colls._1.repr)
+ b.sizeHint(colls._1)
+ val elems2 = colls._2.iterator
- for (el1 <- coll1) {
+ for (el1 <- colls._1) {
if (elems2.hasNext)
b += f(el1, elems2.next)
else
@@ -47,10 +46,10 @@ class Tuple2Zipped[El1, Repr1, El2, Repr2](
}
def flatMap[B, To](f: (El1, El2) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- val elems2 = coll2.iterator
+ val b = cbf(colls._1.repr)
+ val elems2 = colls._2.iterator
- for (el1 <- coll1) {
+ for (el1 <- colls._1) {
if (elems2.hasNext)
b ++= f(el1, elems2.next)
else
@@ -61,11 +60,11 @@ class Tuple2Zipped[El1, Repr1, El2, Repr2](
}
def filter[To1, To2](f: (El1, El2) => Boolean)(implicit cbf1: CBF[Repr1, El1, To1], cbf2: CBF[Repr2, El2, To2]): (To1, To2) = {
- val b1 = cbf1(coll1.repr)
- val b2 = cbf2(coll2.repr)
- val elems2 = coll2.iterator
+ val b1 = cbf1(colls._1.repr)
+ val b2 = cbf2(colls._2.repr)
+ val elems2 = colls._2.iterator
- for (el1 <- coll1) {
+ for (el1 <- colls._1) {
if (elems2.hasNext) {
val el2 = elems2.next
if (f(el1, el2)) {
@@ -80,9 +79,9 @@ class Tuple2Zipped[El1, Repr1, El2, Repr2](
}
def exists(f: (El1, El2) => Boolean): Boolean = {
- val elems2 = coll2.iterator
+ val elems2 = colls._2.iterator
- for (el1 <- coll1) {
+ for (el1 <- colls._1) {
if (elems2.hasNext) {
if (f(el1, elems2.next))
return true
@@ -96,9 +95,9 @@ class Tuple2Zipped[El1, Repr1, El2, Repr2](
!exists((x, y) => !f(x, y))
def foreach[U](f: (El1, El2) => U): Unit = {
- val elems2 = coll2.iterator
+ val elems2 = colls._2.iterator
- for (el1 <- coll1) {
+ for (el1 <- colls._1) {
if (elems2.hasNext)
f(el1, elems2.next)
else
@@ -108,24 +107,24 @@ class Tuple2Zipped[El1, Repr1, El2, Repr2](
}
object Tuple2Zipped {
- class Ops[T1, T2](x: (T1, T2)) {
+ final class Ops[T1, T2](val x: (T1, T2)) extends AnyVal {
def invert[El1, CC1[X] <: TraversableOnce[X], El2, CC2[X] <: TraversableOnce[X], That]
(implicit w1: T1 <:< CC1[El1],
w2: T2 <:< CC2[El2],
- bf: collection.generic.CanBuildFrom[CC1[_], (El1, El2), That]
+ bf: scala.collection.generic.CanBuildFrom[CC1[_], (El1, El2), That]
): That = {
val buf = bf(x._1)
val it1 = x._1.toIterator
val it2 = x._2.toIterator
while (it1.hasNext && it2.hasNext)
buf += ((it1.next, it2.next))
-
+
buf.result
}
def zipped[El1, Repr1, El2, Repr2]
(implicit w1: T1 => TraversableLike[El1, Repr1],
w2: T2 => IterableLike[El2, Repr2]
- ): Tuple2Zipped[El1, Repr1, El2, Repr2] = new Tuple2Zipped(x._1, x._2)
+ ): Tuple2Zipped[El1, Repr1, El2, Repr2] = new Tuple2Zipped((x._1, x._2))
}
}
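Since `Tuple2Zipped` now wraps the pair itself (so it can be a value class; see the SI-6215 note above), the usual entry point is unchanged. Sketch, assuming the standard `zipped` enrichment on tuples provided via `Predef`:

  val sums = (List(1, 2, 3), List(10, 20, 30)).zipped map (_ + _)   // List(11, 22, 33)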
diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala
index f3ca08649d..3970c9973d 100644
--- a/src/library/scala/runtime/Tuple3Zipped.scala
+++ b/src/library/scala/runtime/Tuple3Zipped.scala
@@ -10,31 +10,29 @@ package scala.runtime
import scala.collection.{ TraversableLike, IterableLike }
import scala.collection.generic.{ CanBuildFrom => CBF }
-import language.{ higherKinds, implicitConversions }
+import scala.language.{ higherKinds, implicitConversions }
/** See comment on ZippedTraversable2. */
-trait ZippedTraversable3[+El1, +El2, +El3] {
+trait ZippedTraversable3[+El1, +El2, +El3] extends Any {
def foreach[U](f: (El1, El2, El3) => U): Unit
}
object ZippedTraversable3 {
implicit def zippedTraversable3ToTraversable[El1, El2, El3](zz: ZippedTraversable3[El1, El2, El3]): Traversable[(El1, El2, El3)] = {
- new collection.AbstractTraversable[(El1, El2, El3)] {
+ new scala.collection.AbstractTraversable[(El1, El2, El3)] {
def foreach[U](f: ((El1, El2, El3)) => U): Unit = zz foreach Function.untupled(f)
}
}
}
-class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](
- coll1: TraversableLike[El1, Repr1],
- coll2: IterableLike[El2, Repr2],
- coll3: IterableLike[El3, Repr3]
-) extends ZippedTraversable3[El1, El2, El3] {
+final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (TraversableLike[El1, Repr1], IterableLike[El2, Repr2], IterableLike[El3, Repr3]))
+ extends AnyVal with ZippedTraversable3[El1, El2, El3] {
+
def map[B, To](f: (El1, El2, El3) => B)(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
+ val b = cbf(colls._1.repr)
+ val elems2 = colls._2.iterator
+ val elems3 = colls._3.iterator
- for (el1 <- coll1) {
+ for (el1 <- colls._1) {
if (elems2.hasNext && elems3.hasNext)
b += f(el1, elems2.next, elems3.next)
else
@@ -44,11 +42,11 @@ class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](
}
def flatMap[B, To](f: (El1, El2, El3) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
+ val b = cbf(colls._1.repr)
+ val elems2 = colls._2.iterator
+ val elems3 = colls._3.iterator
- for (el1 <- coll1) {
+ for (el1 <- colls._1) {
if (elems2.hasNext && elems3.hasNext)
b ++= f(el1, elems2.next, elems3.next)
else
@@ -61,14 +59,14 @@ class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](
implicit cbf1: CBF[Repr1, El1, To1],
cbf2: CBF[Repr2, El2, To2],
cbf3: CBF[Repr3, El3, To3]): (To1, To2, To3) = {
- val b1 = cbf1(coll1.repr)
- val b2 = cbf2(coll2.repr)
- val b3 = cbf3(coll3.repr)
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
+ val b1 = cbf1(colls._1.repr)
+ val b2 = cbf2(colls._2.repr)
+ val b3 = cbf3(colls._3.repr)
+ val elems2 = colls._2.iterator
+ val elems3 = colls._3.iterator
def result = (b1.result, b2.result, b3.result)
- for (el1 <- coll1) {
+ for (el1 <- colls._1) {
if (elems2.hasNext && elems3.hasNext) {
val el2 = elems2.next
val el3 = elems3.next
@@ -86,10 +84,10 @@ class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](
}
def exists(f: (El1, El2, El3) => Boolean): Boolean = {
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
+ val elems2 = colls._2.iterator
+ val elems3 = colls._3.iterator
- for (el1 <- coll1) {
+ for (el1 <- colls._1) {
if (elems2.hasNext && elems3.hasNext) {
if (f(el1, elems2.next, elems3.next))
return true
@@ -103,10 +101,10 @@ class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](
!exists((x, y, z) => !f(x, y, z))
def foreach[U](f: (El1, El2, El3) => U): Unit = {
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
+ val elems2 = colls._2.iterator
+ val elems3 = colls._3.iterator
- for (el1 <- coll1) {
+ for (el1 <- colls._1) {
if (elems2.hasNext && elems3.hasNext)
f(el1, elems2.next, elems3.next)
else
@@ -116,12 +114,12 @@ class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](
}
object Tuple3Zipped {
- class Ops[T1, T2, T3](x: (T1, T2, T3)) {
+ final class Ops[T1, T2, T3](val x: (T1, T2, T3)) extends AnyVal {
def invert[El1, CC1[X] <: TraversableOnce[X], El2, CC2[X] <: TraversableOnce[X], El3, CC3[X] <: TraversableOnce[X], That]
(implicit w1: T1 <:< CC1[El1],
w2: T2 <:< CC2[El2],
w3: T3 <:< CC3[El3],
- bf: collection.generic.CanBuildFrom[CC1[_], (El1, El2, El3), That]
+ bf: scala.collection.generic.CanBuildFrom[CC1[_], (El1, El2, El3), That]
): That = {
val buf = bf(x._1)
val it1 = x._1.toIterator
@@ -129,14 +127,14 @@ object Tuple3Zipped {
val it3 = x._3.toIterator
while (it1.hasNext && it2.hasNext && it3.hasNext)
buf += ((it1.next, it2.next, it3.next))
-
+
buf.result
}
-
+
def zipped[El1, Repr1, El2, Repr2, El3, Repr3]
(implicit w1: T1 => TraversableLike[El1, Repr1],
w2: T2 => IterableLike[El2, Repr2],
w3: T3 => IterableLike[El3, Repr3]
- ): Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3] = new Tuple3Zipped(x._1, x._2, x._3)
+ ): Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3] = new Tuple3Zipped((x._1, x._2, x._3))
}
}
diff --git a/src/library/scala/runtime/WorksheetSupport.scala b/src/library/scala/runtime/WorksheetSupport.scala
index 6f2a4d382d..a003bba034 100644
--- a/src/library/scala/runtime/WorksheetSupport.scala
+++ b/src/library/scala/runtime/WorksheetSupport.scala
@@ -40,9 +40,9 @@ object WorksheetSupport {
write((currentOffset+" ").getBytes)
}
out.write(c)
- col =
+ col =
if (c == '\n') -1
- else if (c == '\t') (col / tabInc) * tabInc + tabInc
+ else if (c == '\t') (col / tabInc) * tabInc + tabInc
else col + 1
if (col >= width) writeOne('\n')
}
@@ -86,7 +86,7 @@ object WorksheetSupport {
def $stop() = throw new StopException
- def $show(x: Any): String = stringOf(x, scala.Int.MaxValue)
+ def $show(x: Any): String = stringOf(x)
}
class StopException extends Exception
diff --git a/src/library/scala/specialized.scala b/src/library/scala/specialized.scala
index 761c7cb25e..d349b7e0c2 100644
--- a/src/library/scala/specialized.scala
+++ b/src/library/scala/specialized.scala
@@ -24,9 +24,9 @@ import Specializable._
*
* @since 2.8
*/
-// class tspecialized[T](group: Group[T]) extends annotation.StaticAnnotation {
+// class tspecialized[T](group: Group[T]) extends scala.annotation.StaticAnnotation {
-class specialized(group: SpecializedGroup) extends annotation.StaticAnnotation {
+class specialized(group: SpecializedGroup) extends scala.annotation.StaticAnnotation {
def this(types: Specializable*) = this(new Group(types.toList))
def this() = this(Primitives)
}
diff --git a/src/library/scala/sys/BooleanProp.scala b/src/library/scala/sys/BooleanProp.scala
index 45fc6f5897..7213fdeb65 100644
--- a/src/library/scala/sys/BooleanProp.scala
+++ b/src/library/scala/sys/BooleanProp.scala
@@ -8,7 +8,7 @@
package scala.sys
-import language.implicitConversions
+import scala.language.implicitConversions
/** A few additional conveniences for Boolean properties.
*/
diff --git a/src/library/scala/sys/Prop.scala b/src/library/scala/sys/Prop.scala
index 687a32cf7d..123a729748 100644
--- a/src/library/scala/sys/Prop.scala
+++ b/src/library/scala/sys/Prop.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
/** A lightweight interface wrapping a property contained in some
* unspecified map. Generally it'll be the system properties but this
diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala
index d5777922b4..5777c255c3 100644
--- a/src/library/scala/sys/SystemProperties.scala
+++ b/src/library/scala/sys/SystemProperties.scala
@@ -11,7 +11,7 @@ package scala.sys
import scala.collection.{ mutable, Iterator }
import scala.collection.JavaConverters._
import java.security.AccessControlException
-import language.implicitConversions
+import scala.language.implicitConversions
/** A bidirectional map wrapping the java System properties.
diff --git a/src/library/scala/sys/package.scala b/src/library/scala/sys/package.scala
index 119ab59c22..445b30e480 100644
--- a/src/library/scala/sys/package.scala
+++ b/src/library/scala/sys/package.scala
@@ -9,7 +9,7 @@
package scala
import scala.collection.immutable
-import collection.JavaConverters._
+import scala.collection.JavaConverters._
/** The package object `scala.sys` contains methods for reading
* and altering core aspects of the virtual machine as well as the
@@ -85,4 +85,4 @@ package object sys {
tarray take got
}
-}
\ No newline at end of file
+}
diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala
index 77e36f6196..94a2125393 100644
--- a/src/library/scala/sys/process/BasicIO.scala
+++ b/src/library/scala/sys/process/BasicIO.scala
@@ -45,7 +45,7 @@ object BasicIO {
val q = new LinkedBlockingQueue[Either[Int, T]]
def next(): Stream[T] = q.take match {
case Left(0) => Stream.empty
- case Left(code) => if (nonzeroException) sys.error("Nonzero exit code: " + code) else Stream.empty
+ case Left(code) => if (nonzeroException) scala.sys.error("Nonzero exit code: " + code) else Stream.empty
case Right(s) => Stream.cons(s, next)
}
new Streamed((s: T) => q put Right(s), code => q put Left(code), () => next())
diff --git a/src/library/scala/sys/process/Process.scala b/src/library/scala/sys/process/Process.scala
index d56c6f2c9d..4950758a1a 100644
--- a/src/library/scala/sys/process/Process.scala
+++ b/src/library/scala/sys/process/Process.scala
@@ -11,7 +11,7 @@ package process
import processInternal._
import ProcessBuilder._
-import language.implicitConversions
+import scala.language.implicitConversions
/** Represents a process that is running or has finished running.
* It may be a compound process with several underlying native processes (such as `a #&& b`).
diff --git a/src/library/scala/sys/process/ProcessBuilderImpl.scala b/src/library/scala/sys/process/ProcessBuilderImpl.scala
index 58f06e1039..2c83a59e4f 100644
--- a/src/library/scala/sys/process/ProcessBuilderImpl.scala
+++ b/src/library/scala/sys/process/ProcessBuilderImpl.scala
@@ -128,7 +128,7 @@ private[process] trait ProcessBuilderImpl {
val code = this ! BasicIO(withIn, buffer, log)
if (code == 0) buffer.toString
- else sys.error("Nonzero exit value: " + code)
+ else scala.sys.error("Nonzero exit value: " + code)
}
private[this] def lines(
@@ -213,4 +213,4 @@ private[process] trait ProcessBuilderImpl {
) extends SequentialBuilder(first, second, "###") {
override def createProcess(io: ProcessIO) = new ProcessSequence(first, second, io)
}
-}
\ No newline at end of file
+}
diff --git a/src/library/scala/sys/process/ProcessImpl.scala b/src/library/scala/sys/process/ProcessImpl.scala
index b7549eeb06..cdf7d72caa 100644
--- a/src/library/scala/sys/process/ProcessImpl.scala
+++ b/src/library/scala/sys/process/ProcessImpl.scala
@@ -84,7 +84,7 @@ private[process] trait ProcessImpl {
private[process] abstract class CompoundProcess extends BasicProcess {
def destroy() = destroyer()
- def exitValue() = getExitValue() getOrElse sys.error("No exit code: process destroyed.")
+ def exitValue() = getExitValue() getOrElse scala.sys.error("No exit code: process destroyed.")
def start() = getExitValue
protected lazy val (getExitValue, destroyer) = {
diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala
index c1bf470831..7c73fd587c 100644
--- a/src/library/scala/sys/process/package.scala
+++ b/src/library/scala/sys/process/package.scala
@@ -205,7 +205,7 @@ package scala.sys {
package object process extends ProcessImplicits {
/** The arguments passed to `java` when creating this process */
def javaVmArguments: List[String] = {
- import collection.JavaConversions._
+ import scala.collection.JavaConversions._
java.lang.management.ManagementFactory.getRuntimeMXBean().getInputArguments().toList
}
diff --git a/src/library/scala/testing/Benchmark.scala b/src/library/scala/testing/Benchmark.scala
index 9acae34d4e..3794fb3f2b 100644
--- a/src/library/scala/testing/Benchmark.scala
+++ b/src/library/scala/testing/Benchmark.scala
@@ -8,7 +8,7 @@
package scala.testing
-import compat.Platform
+import scala.compat.Platform
/** `Benchmark` can be used to quickly turn an existing class into a
* benchmark. Here is a short example:
@@ -33,6 +33,7 @@ import compat.Platform
*
* @author Iulian Dragos, Burak Emir
*/
+@deprecated("This class will be removed.", "2.10.0")
trait Benchmark {
/** this method should be implemented by the concrete benchmark.
diff --git a/src/library/scala/testing/Show.scala b/src/library/scala/testing/Show.scala
index 5ab46b8985..da1868c7f6 100644
--- a/src/library/scala/testing/Show.scala
+++ b/src/library/scala/testing/Show.scala
@@ -25,6 +25,7 @@ package scala.testing
* where `&lt;result&gt;` is the result of evaluating the call.
*
*/
+@deprecated("This class will be removed.", "2.10.0")
trait Show {
/** An implicit definition that adds an apply method to Symbol which forwards to `test`.
diff --git a/src/library/scala/throws.scala b/src/library/scala/throws.scala
index 4621c789ab..0aa0d31c9f 100644
--- a/src/library/scala/throws.scala
+++ b/src/library/scala/throws.scala
@@ -23,4 +23,4 @@ package scala
* @version 1.0, 19/05/2006
* @since 2.1
*/
-class throws(clazz: Class[_]) extends annotation.StaticAnnotation
+class throws(clazz: Class[_]) extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/transient.scala b/src/library/scala/transient.scala
index 3dcff0664c..36dcb996cf 100644
--- a/src/library/scala/transient.scala
+++ b/src/library/scala/transient.scala
@@ -10,7 +10,7 @@
package scala
-import annotation.meta._
+import scala.annotation.meta._
@field
-class transient extends annotation.StaticAnnotation
+class transient extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/unchecked.scala b/src/library/scala/unchecked.scala
index 5b05792d97..281f2ef4d7 100644
--- a/src/library/scala/unchecked.scala
+++ b/src/library/scala/unchecked.scala
@@ -33,4 +33,4 @@ package scala
*
* @since 2.4
*/
-class unchecked extends annotation.Annotation {}
+class unchecked extends scala.annotation.Annotation {}
diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala
index dcfdc16d33..f0253eee07 100644
--- a/src/library/scala/util/Either.scala
+++ b/src/library/scala/util/Either.scala
@@ -10,7 +10,7 @@
package scala.util
-import language.implicitConversions
+import scala.language.implicitConversions
/** Represents a value of one of two possible types (a disjoint union.)
* Instances of Either are either an instance of [[scala.util.Left]] or [[scala.util.Right]].
diff --git a/src/library/scala/util/Marshal.scala b/src/library/scala/util/Marshal.scala
index 2d3f54a95e..79476bdc16 100644
--- a/src/library/scala/util/Marshal.scala
+++ b/src/library/scala/util/Marshal.scala
@@ -37,8 +37,6 @@ object Marshal {
val in = new ObjectInputStream(new ByteArrayInputStream(buffer))
val found = in.readObject.asInstanceOf[ClassTag[_]]
try {
- // [Eugene] needs review
- // previously was: found <:< expected
found.runtimeClass.asSubclass(expected.runtimeClass)
in.readObject.asInstanceOf[A]
} catch {
diff --git a/src/library/scala/util/MurmurHash.scala b/src/library/scala/util/MurmurHash.scala
index 029fe095af..c087b0d8c8 100644
--- a/src/library/scala/util/MurmurHash.scala
+++ b/src/library/scala/util/MurmurHash.scala
@@ -178,7 +178,7 @@ object MurmurHash {
* where the order of appearance of elements does not matter.
* This is useful for hashing sets, for example.
*/
- def symmetricHash[T](xs: collection.TraversableOnce[T], seed: Int) = {
+ def symmetricHash[T](xs: scala.collection.TraversableOnce[T], seed: Int) = {
var a,b,n = 0
var c = 1
xs.seq.foreach(i => {
diff --git a/src/library/scala/util/Random.scala b/src/library/scala/util/Random.scala
index 65a1b8c685..85ac27e95c 100644
--- a/src/library/scala/util/Random.scala
+++ b/src/library/scala/util/Random.scala
@@ -8,10 +8,10 @@
package scala.util
-import collection.mutable.ArrayBuffer
-import collection.generic.CanBuildFrom
+import scala.collection.mutable.ArrayBuffer
+import scala.collection.generic.CanBuildFrom
import scala.collection.immutable.{ List, Stream }
-import language.{implicitConversions, higherKinds}
+import scala.language.{implicitConversions, higherKinds}
/**
* @author Stephane Micheloud
diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala
index 5f0edf964f..276e157f55 100644
--- a/src/library/scala/util/Sorting.scala
+++ b/src/library/scala/util/Sorting.scala
@@ -6,10 +6,11 @@
** |/ **
\* */
-package scala.util
+package scala
+package util
import scala.reflect.{ ClassTag, classTag }
-import scala.math.Ordering
+import scala.math.{ Ordering, max, min }
/** The Sorting object provides functions that can sort various kinds of
* objects. You can provide a comparison function, or you can request a sort
diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala
index f381a18b0c..fe409c2d7a 100644
--- a/src/library/scala/util/Try.scala
+++ b/src/library/scala/util/Try.scala
@@ -8,9 +8,9 @@
package scala.util
-import collection.Seq
+import scala.collection.Seq
import scala.util.control.NonFatal
-import language.implicitConversions
+import scala.language.implicitConversions
/**
* The `Try` type represents a computation that may either result in an exception, or return a
@@ -52,6 +52,8 @@ import language.implicitConversions
* ''Note'': only non-fatal exceptions are caught by the combinators on `Try` (see [[scala.util.control.NonFatal]]).
* Serious system errors, on the other hand, will be thrown.
*
+ * ''Note:'' all Try combinators will catch exceptions and return a `Failure` unless otherwise specified in the documentation.
+ *
* `Try` comes to the Scala standard library after years of use as an integral part of Twitter's stack.
*
* @author based on Twitter's original implementation in com.twitter.util.
@@ -68,12 +70,19 @@ sealed abstract class Try[+T] {
def isSuccess: Boolean
/** Returns the value from this `Success` or the given `default` argument if this is a `Failure`.
+ *
+ * ''Note:'' This will throw an exception if it is not a success and `default` throws an exception.
*/
- def getOrElse[U >: T](default: => U) = if (isSuccess) get else default
+ def getOrElse[U >: T](default: => U): U =
+ if (isSuccess) get else default
/** Returns this `Try` if it's a `Success` or the given `default` argument if this is a `Failure`.
*/
- def orElse[U >: T](default: => Try[U]) = if (isSuccess) this else default
+ def orElse[U >: T](default: => Try[U]): Try[U] =
+ try if (isSuccess) this else default
+ catch {
+ case NonFatal(e) => Failure(e)
+ }
/** Returns the value from this `Success` or throws the exception if this is a `Failure`.
*/
@@ -81,6 +90,8 @@ sealed abstract class Try[+T] {
/**
* Applies the given function `f` if this is a `Success`, otherwise returns `Unit` if this is a `Failure`.
+ *
+ * ''Note:'' If `f` throws, then this method may throw an exception.
*/
def foreach[U](f: T => U): Unit
@@ -114,7 +125,7 @@ sealed abstract class Try[+T] {
/**
* Returns `None` if this is a `Failure` or a `Some` containing the value if this is a `Success`.
*/
- def toOption = if (isSuccess) Some(get) else None
+ def toOption: Option[T] = if (isSuccess) Some(get) else None
/**
* Transforms a nested `Try`, ie, a `Try` of type `Try[Try[T]]`,
@@ -131,20 +142,25 @@ sealed abstract class Try[+T] {
/** Completes this `Try` by applying the function `f` to this if this is of type `Failure`, or conversely, by applying
* `s` if this is a `Success`.
*/
- def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] = this match {
- case Success(v) => s(v)
- case Failure(e) => f(e)
- }
+ def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] =
+ try this match {
+ case Success(v) => s(v)
+ case Failure(e) => f(e)
+ } catch {
+ case NonFatal(e) => Failure(e)
+ }
}
object Try {
-
- def apply[T](r: => T): Try[T] = {
- try { Success(r) } catch {
+ /** Constructs a `Try` using the by-name parameter. This
+ * method will ensure any non-fatal exception is caught and a
+ * `Failure` object is returned.
+ */
+ def apply[T](r: => T): Try[T] =
+ try Success(r) catch {
case NonFatal(e) => Failure(e)
}
- }
}
@@ -152,24 +168,25 @@ final case class Failure[+T](val exception: Throwable) extends Try[T] {
def isFailure: Boolean = true
def isSuccess: Boolean = false
def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] =
- if (f.isDefinedAt(exception)) f(exception) else this
+ try {
+ if (f isDefinedAt exception) f(exception) else this
+ } catch {
+ case NonFatal(e) => Failure(e)
+ }
def get: T = throw exception
- def flatMap[U](f: T => Try[U]): Try[U] = Failure[U](exception)
- def flatten[U](implicit ev: T <:< Try[U]): Try[U] = Failure[U](exception)
- def foreach[U](f: T => U): Unit = {}
- def map[U](f: T => U): Try[U] = Failure[U](exception)
+ def flatMap[U](f: T => Try[U]): Try[U] = this.asInstanceOf[Try[U]]
+ def flatten[U](implicit ev: T <:< Try[U]): Try[U] = this.asInstanceOf[Try[U]]
+ def foreach[U](f: T => U): Unit = ()
+ def map[U](f: T => U): Try[U] = this.asInstanceOf[Try[U]]
def filter(p: T => Boolean): Try[T] = this
- def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] = {
+ def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] =
try {
- if (rescueException.isDefinedAt(exception)) {
+ if (rescueException isDefinedAt exception) {
Try(rescueException(exception))
- } else {
- this
- }
+ } else this
} catch {
case NonFatal(e) => Failure(e)
}
- }
def failed: Try[Throwable] = Success(exception)
}
@@ -177,7 +194,7 @@ final case class Failure[+T](val exception: Throwable) extends Try[T] {
final case class Success[+T](value: T) extends Try[T] {
def isFailure: Boolean = false
def isSuccess: Boolean = true
- def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] = Success(value)
+ def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] = this
def get = value
def flatMap[U](f: T => Try[U]): Try[U] =
try f(value)
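A short usage sketch of the behaviour documented above (the `parsePort` name is made up): every combinator, including the reworked `orElse` and `transform`, turns non-fatal exceptions into `Failure` instead of letting them escape.

  import scala.util.Try

  def parsePort(s: String): Try[Int] = Try(s.toInt)   // non-fatal exceptions become Failure

  parsePort("8080") getOrElse 80                                       // 8080
  parsePort("oops") recover { case _: NumberFormatException => 80 }    // Success(80)
  parsePort("oops") orElse parsePort("80")                             // Success(80); orElse now also traps a throwing default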
diff --git a/src/library/scala/util/automata/SubsetConstruction.scala b/src/library/scala/util/automata/SubsetConstruction.scala
index 1cdcd734cd..25ac86183c 100644
--- a/src/library/scala/util/automata/SubsetConstruction.scala
+++ b/src/library/scala/util/automata/SubsetConstruction.scala
@@ -19,8 +19,8 @@ class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) {
def determinize: DetWordAutom[T] = {
// for assigning numbers to bitsets
- var indexMap = collection.Map[immutable.BitSet, Int]()
- var invIndexMap = collection.Map[Int, immutable.BitSet]()
+ var indexMap = scala.collection.Map[immutable.BitSet, Int]()
+ var invIndexMap = scala.collection.Map[Int, immutable.BitSet]()
var ix = 0
// we compute the dfa with states = bitsets
diff --git a/src/library/scala/util/control/Exception.scala b/src/library/scala/util/control/Exception.scala
index 2ee053c92b..28e4db2038 100644
--- a/src/library/scala/util/control/Exception.scala
+++ b/src/library/scala/util/control/Exception.scala
@@ -6,12 +6,13 @@
** |/ **
\* */
-package scala.util.control
+package scala.util
+package control
-import collection.immutable.List
-import reflect.{ ClassTag, classTag }
+import scala.collection.immutable.List
+import scala.reflect.{ ClassTag, classTag }
import java.lang.reflect.InvocationTargetException
-import language.implicitConversions
+import scala.language.implicitConversions
/** Classes representing the components of exception handling.
@@ -25,6 +26,10 @@ import language.implicitConversions
* val x2 = catching(classOf[MalformedURLException], classOf[NullPointerException]) either new URL(s)
* }}}
*
+ * This class differs from `scala.util.Try` in that it focuses on composing exception handlers rather than
+ * composing behavior. All behavior should be composed first and fed to a `Catch` object using one of the
+ * `opt` or `either` methods.
+ *
* @author Paul Phillips
*/
@@ -118,6 +123,11 @@ object Exception {
*/
def either[U >: T](body: => U): Either[Throwable, U] = toEither(Right(body))
+ /** Apply this catch logic to the supplied body, mapping the result
+ * into Try[T] - Failure if an exception was caught, Success(T) otherwise.
+ */
+ def withTry[U >: T](body: => U): scala.util.Try[U] = toTry(Success(body))
+
/** Create a `Catch` object with the same `isDefinedAt` logic as this one,
* but with the supplied `apply` method replacing the current one. */
def withApply[U](f: Throwable => U): Catch[U] = {
@@ -131,35 +141,11 @@ object Exception {
/** Convenience methods. */
def toOption: Catch[Option[T]] = withApply(_ => None)
def toEither: Catch[Either[Throwable, T]] = withApply(Left(_))
- }
-
- /** A container class for Try logic */
- class Try[+T] private[Exception](body: => T, val catcher: Catch[T]) {
- /** Execute "body" using catch/finally logic "catcher" */
- def apply(): T = catcher(body)
- def apply[U >: T](other: => U): U = catcher(other)
-
- /** As apply, but map caught exceptions to `None` and success to `Some(T)`. */
- def opt(): Option[T] = catcher opt body
- def opt[U >: T](other: => U): Option[U] = catcher opt other
-
- /** As apply, but map caught exceptions to `Left(ex)` and success to Right(x) */
- def either(): Either[Throwable, T] = catcher either body
- def either[U >: T](other: => U): Either[Throwable, U] = catcher either other
-
- /** Create a `Try` object with the supplied body replacing the current body. */
- def tryInstead[U >: T](other: => U) = new Try(other, catcher)
-
- /** Create a `Try` object with the supplied logic appended to the existing Catch logic. */
- def or[U >: T](pf: Catcher[U]) = new Try(body, catcher or pf)
-
- /** Create a `Try`object with the supplied code appended to the existing `Finally`. */
- def andFinally(fin: => Unit) = new Try(body, catcher andFinally fin)
-
- override def toString() = List("Try(<body>)", catcher.toString) mkString " "
+ def toTry: Catch[scala.util.Try[T]] = withApply(x => Failure(x))
}
final val nothingCatcher: Catcher[Nothing] = mkThrowableCatcher(_ => false, throw _)
+ final def nonFatalCatcher[T]: Catcher[T] = mkThrowableCatcher({ case NonFatal(_) => true; case _ => false }, throw _)
final def allCatcher[T]: Catcher[T] = mkThrowableCatcher(_ => true, throw _)
/** The empty `Catch` object. */
@@ -168,6 +154,9 @@ object Exception {
/** A `Catch` object which catches everything. */
final def allCatch[T]: Catch[T] = new Catch(allCatcher[T]) withDesc "<everything>"
+ /** A `Catch` object which catches non-fatal exceptions. */
+ final def nonFatalCatch[T]: Catch[T] = new Catch(nonFatalCatcher[T]) withDesc "<non-fatal>"
+
/** Creates a `Catch` object which will catch any of the supplied exceptions.
* Since the returned `Catch` object has no specific logic defined and will simply
* rethrow the exceptions it catches, you will typically want to call `opt` or
@@ -229,7 +218,7 @@ object Exception {
}
/** Private **/
- private def wouldMatch(x: Throwable, classes: collection.Seq[Class[_]]): Boolean =
+ private def wouldMatch(x: Throwable, classes: scala.collection.Seq[Class[_]]): Boolean =
classes exists (_ isAssignableFrom x.getClass)
private def pfFromExceptions(exceptions: Class[_]*): PartialFunction[Throwable, Nothing] =
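The new `withTry` and `nonFatalCatch` members compose with the existing `catching` machinery; a sketch (the file name is illustrative):

  import scala.util.control.Exception._

  val port: scala.util.Try[Int] =
    nonFatalCatch withTry "8080".toInt                    // Success(8080)

  val contents: scala.util.Try[String] =
    catching(classOf[java.io.IOException]) withTry scala.io.Source.fromFile("app.conf").mkString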
diff --git a/src/library/scala/util/control/NoStackTrace.scala b/src/library/scala/util/control/NoStackTrace.scala
index c2b5dbca22..4409358785 100644
--- a/src/library/scala/util/control/NoStackTrace.scala
+++ b/src/library/scala/util/control/NoStackTrace.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.control
+package scala
+package util.control
/** A trait for exceptions which, for efficiency reasons, do not
* fill in the stack trace. Stack trace suppression can be disabled
diff --git a/src/library/scala/util/hashing/Hashing.scala b/src/library/scala/util/hashing/Hashing.scala
index 84b549f35e..97d32af2b0 100644
--- a/src/library/scala/util/hashing/Hashing.scala
+++ b/src/library/scala/util/hashing/Hashing.scala
@@ -8,6 +8,8 @@
package scala.util.hashing
+import scala.annotation.implicitNotFound
+
/** `Hashing` is a trait whose instances each represent a strategy for hashing
* instances of a type.
*
@@ -16,27 +18,22 @@ package scala.util.hashing
*
* Note: when using a custom `Hashing`, make sure to use it with the `Equiv`
* such that if any two objects are equal, then their hash codes must be equal.
- *
+ *
* @since 2.10
*/
-@annotation.implicitNotFound(msg = "No implicit Hashing defined for ${T}.")
+@implicitNotFound(msg = "No implicit Hashing defined for ${T}.")
trait Hashing[T] extends Serializable {
-
def hash(x: T): Int
-
}
-
object Hashing {
-
final class Default[T] extends Hashing[T] {
def hash(x: T) = x.##
}
-
+
implicit def default[T] = new Default[T]
-
+
def fromFunction[T](f: T => Int) = new Hashing[T] {
def hash(x: T) = f(x)
}
-
}
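Usage sketch of the `Hashing` factory methods shown above:

  import scala.util.hashing.Hashing

  val byLength: Hashing[String] = Hashing.fromFunction(_.length)
  byLength.hash("scala")                                // 5
  Hashing.default[String].hash("scala") == "scala".##   // true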
diff --git a/src/library/scala/util/hashing/MurmurHash3.scala b/src/library/scala/util/hashing/MurmurHash3.scala
index 3efd5b5e72..8174f09bb2 100644
--- a/src/library/scala/util/hashing/MurmurHash3.scala
+++ b/src/library/scala/util/hashing/MurmurHash3.scala
@@ -157,6 +157,20 @@ private[hashing] class MurmurHash3 {
// Finalization
finalizeHash(h, data.length)
}
+
+ final def listHash(xs: scala.collection.immutable.List[_], seed: Int): Int = {
+ var n = 0
+ var h = seed
+ var elems = xs
+ while (!elems.isEmpty) {
+ val head = elems.head
+ val tail = elems.tail
+ h = mix(h, head.##)
+ n += 1
+ elems = tail
+ }
+ finalizeHash(h, n)
+ }
}
/**
@@ -199,41 +213,45 @@ object MurmurHash3 extends MurmurHash3 {
/** To offer some potential for optimization.
*/
- def seqHash(xs: collection.Seq[_]): Int = orderedHash(xs, seqSeed)
- def mapHash(xs: collection.Map[_, _]): Int = unorderedHash(xs, mapSeed)
- def setHash(xs: collection.Set[_]): Int = unorderedHash(xs, setSeed)
+ def seqHash(xs: scala.collection.Seq[_]): Int = xs match {
+ case xs: List[_] => listHash(xs, seqSeed)
+ case xs => orderedHash(xs, seqSeed)
+ }
+
+ def mapHash(xs: scala.collection.Map[_, _]): Int = unorderedHash(xs, mapSeed)
+ def setHash(xs: scala.collection.Set[_]): Int = unorderedHash(xs, setSeed)
class ArrayHashing[@specialized T] extends Hashing[Array[T]] {
def hash(a: Array[T]) = arrayHash(a)
}
-
+
def arrayHashing[@specialized T] = new ArrayHashing[T]
-
+
def bytesHashing = new Hashing[Array[Byte]] {
def hash(data: Array[Byte]) = bytesHash(data)
}
-
+
def orderedHashing = new Hashing[TraversableOnce[Any]] {
def hash(xs: TraversableOnce[Any]) = orderedHash(xs)
}
-
+
def productHashing = new Hashing[Product] {
def hash(x: Product) = productHash(x)
}
-
+
def stringHashing = new Hashing[String] {
def hash(x: String) = stringHash(x)
}
-
+
def unorderedHashing = new Hashing[TraversableOnce[Any]] {
def hash(xs: TraversableOnce[Any]) = unorderedHash(xs)
}
-
+
/** All this trouble and foreach still appears faster.
* Leaving in place in case someone would like to investigate further.
*/
/**
- def linearSeqHash(xs: collection.LinearSeq[_], seed: Int): Int = {
+ def linearSeqHash(xs: scala.collection.LinearSeq[_], seed: Int): Int = {
var n = 0
var h = seed
var elems = xs
@@ -245,7 +263,7 @@ object MurmurHash3 extends MurmurHash3 {
finalizeHash(h, n)
}
- def indexedSeqHash(xs: collection.IndexedSeq[_], seed: Int): Int = {
+ def indexedSeqHash(xs: scala.collection.IndexedSeq[_], seed: Int): Int = {
var n = 0
var h = seed
val len = xs.length
@@ -258,10 +276,10 @@ object MurmurHash3 extends MurmurHash3 {
*/
@deprecated("Use unorderedHash", "2.10.0")
- final def symmetricHash[T](xs: collection.GenTraversableOnce[T], seed: Int = symmetricSeed): Int =
+ final def symmetricHash[T](xs: scala.collection.GenTraversableOnce[T], seed: Int = symmetricSeed): Int =
unorderedHash(xs.seq, seed)
@deprecated("Use orderedHash", "2.10.0")
- final def traversableHash[T](xs: collection.GenTraversableOnce[T], seed: Int = traversableSeed): Int =
+ final def traversableHash[T](xs: scala.collection.GenTraversableOnce[T], seed: Int = traversableSeed): Int =
orderedHash(xs.seq, seed)
}
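The new `listHash` fast path has to stay consistent with `orderedHash`, so a `List` and any other `Seq` with the same elements in the same order still produce the same hash. A quick REPL-style check (not part of the patch):

  import scala.util.hashing.MurmurHash3

  val fromList   = MurmurHash3.seqHash(List(1, 2, 3))   // dispatched to listHash
  val fromVector = MurmurHash3.seqHash(Vector(1, 2, 3)) // falls back to orderedHash
  assert(fromList == fromVector)  // same elements in the same order => same hash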
diff --git a/src/library/scala/util/logging/ConsoleLogger.scala b/src/library/scala/util/logging/ConsoleLogger.scala
index 58284797b4..1d9a4deb62 100644
--- a/src/library/scala/util/logging/ConsoleLogger.scala
+++ b/src/library/scala/util/logging/ConsoleLogger.scala
@@ -17,6 +17,7 @@ package scala.util.logging
* @author Burak Emir
* @version 1.0
*/
+@deprecated("This class will be removed.", "2.10.0")
trait ConsoleLogger extends Logged {
/** logs argument to Console using [[scala.Console.println]]
diff --git a/src/library/scala/util/logging/Logged.scala b/src/library/scala/util/logging/Logged.scala
index d23b38c569..1476c8bf08 100644
--- a/src/library/scala/util/logging/Logged.scala
+++ b/src/library/scala/util/logging/Logged.scala
@@ -22,6 +22,7 @@ package scala.util.logging
* }}}
* and the logging is sent to the [[scala.util.logging.ConsoleLogger]] object.
*/
+@deprecated("This class will be removed.", "2.10.0")
trait Logged {
/** This method should log the message given as argument somewhere
* as a side-effect.
diff --git a/src/library/scala/util/parsing/ast/Binders.scala b/src/library/scala/util/parsing/ast/Binders.scala
index b93c24fde4..fc3b36a4e0 100644
--- a/src/library/scala/util/parsing/ast/Binders.scala
+++ b/src/library/scala/util/parsing/ast/Binders.scala
@@ -10,7 +10,7 @@ package scala.util.parsing.ast
import scala.collection.AbstractIterable
import scala.collection.mutable
-import language.implicitConversions
+import scala.language.implicitConversions
//DISCLAIMER: this code is highly experimental!
diff --git a/src/library/scala/util/parsing/combinator/ImplicitConversions.scala b/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
index 270ac680a9..5b616e9e13 100644
--- a/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
+++ b/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
@@ -9,7 +9,7 @@
package scala.util.parsing.combinator
-import language.implicitConversions
+import scala.language.implicitConversions
/** This object contains implicit conversions that come in handy when using the `^^` combinator.
*
diff --git a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala b/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
index 06567ea348..520ac8cc2c 100644
--- a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
@@ -9,7 +9,7 @@
package scala.util.parsing.combinator
-import annotation.migration
+import scala.annotation.migration
/** `JavaTokenParsers` differs from [[scala.util.parsing.combinator.RegexParsers]]
* by adding the following definitions:
diff --git a/src/library/scala/util/parsing/combinator/PackratParsers.scala b/src/library/scala/util/parsing/combinator/PackratParsers.scala
index 9516df0093..91642da229 100644
--- a/src/library/scala/util/parsing/combinator/PackratParsers.scala
+++ b/src/library/scala/util/parsing/combinator/PackratParsers.scala
@@ -11,7 +11,7 @@ package scala.util.parsing.combinator
import scala.util.parsing.combinator._
import scala.util.parsing.input.{ Reader, Position }
import scala.collection.mutable
-import language.implicitConversions
+import scala.language.implicitConversions
/**
* `PackratParsers` is a component that extends the parser combinators
diff --git a/src/library/scala/util/parsing/combinator/Parsers.scala b/src/library/scala/util/parsing/combinator/Parsers.scala
index 66e0a496d8..5d990eee78 100644
--- a/src/library/scala/util/parsing/combinator/Parsers.scala
+++ b/src/library/scala/util/parsing/combinator/Parsers.scala
@@ -11,8 +11,8 @@ package scala.util.parsing.combinator
import scala.util.parsing.input._
import scala.collection.mutable.ListBuffer
import scala.annotation.tailrec
-import annotation.migration
-import language.implicitConversions
+import scala.annotation.migration
+import scala.language.implicitConversions
import scala.util.DynamicVariable
// TODO: better error handling (labelling like parsec's <?>)
@@ -155,14 +155,20 @@ trait Parsers {
val successful = true
}
- private lazy val lastNoSuccess = new DynamicVariable[Option[NoSuccess]](None)
+ private lazy val lastNoSuccessVar = new DynamicVariable[Option[NoSuccess]](None)
+
+ @deprecated("lastNoSuccess was not thread-safe and will be removed in 2.11.0", "2.10.0")
+ def lastNoSuccess: NoSuccess = lastNoSuccessVar.value.orNull
+
+ @deprecated("lastNoSuccess was not thread-safe and will be removed in 2.11.0", "2.10.0")
+ def lastNoSuccess_=(x: NoSuccess): Unit = lastNoSuccessVar.value = Option(x)
/** A common super-class for unsuccessful parse results. */
sealed abstract class NoSuccess(val msg: String, override val next: Input) extends ParseResult[Nothing] { // when we don't care about the difference between Failure and Error
val successful = false
- if (lastNoSuccess.value map { v => !(next.pos < v.next.pos) } getOrElse true)
- lastNoSuccess.value = Some(this)
+ if (lastNoSuccessVar.value forall (v => !(next.pos < v.next.pos)))
+ lastNoSuccessVar.value = Some(this)
def map[U](f: Nothing => U) = this
def mapPartial[U](f: PartialFunction[Nothing, U], error: Nothing => String): ParseResult[U] = this
@@ -172,7 +178,7 @@ trait Parsers {
def filterWithError(p: Nothing => Boolean, error: Nothing => String, position: Input): ParseResult[Nothing] = this
- def get: Nothing = sys.error("No result when parsing failed")
+ def get: Nothing = scala.sys.error("No result when parsing failed")
}
/** An extractor so `NoSuccess(msg, next)` can be used in matches. */
object NoSuccess {
@@ -881,14 +887,14 @@ trait Parsers {
* if `p` consumed all the input.
*/
def phrase[T](p: Parser[T]) = new Parser[T] {
- def apply(in: Input) = lastNoSuccess.withValue(None) {
+ def apply(in: Input) = lastNoSuccessVar.withValue(None) {
p(in) match {
case s @ Success(out, in1) =>
if (in1.atEnd)
s
else
- lastNoSuccess.value filterNot { _.next.pos < in1.pos } getOrElse Failure("end of input expected", in1)
- case ns => lastNoSuccess.value.getOrElse(ns)
+ lastNoSuccessVar.value filterNot { _.next.pos < in1.pos } getOrElse Failure("end of input expected", in1)
+ case ns => lastNoSuccessVar.value.getOrElse(ns)
}
}
}
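Replacing the shared `lastNoSuccess` field with a `DynamicVariable` matters exactly at `phrase`: every `phrase`/`parseAll` invocation now tracks its own best failure, so concurrent parsers no longer interfere. A small sketch of the affected entry point (the `Ints` grammar is illustrative):

  import scala.util.parsing.combinator.RegexParsers

  object Ints extends RegexParsers {
    def int: Parser[Int]        = """\d+""".r ^^ (_.toInt)
    def ints: Parser[List[Int]] = rep(int)
  }

  // parseAll wraps the parser in phrase, which scopes lastNoSuccessVar per invocation
  Ints.parseAll(Ints.ints, "1 2 3")   // Success(List(1, 2, 3), ...)
  Ints.parseAll(Ints.ints, "1 two 3") // the best NoSuccess recorded in the scoped variable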
diff --git a/src/library/scala/util/parsing/combinator/RegexParsers.scala b/src/library/scala/util/parsing/combinator/RegexParsers.scala
index d685329ef1..9a2c497eab 100644
--- a/src/library/scala/util/parsing/combinator/RegexParsers.scala
+++ b/src/library/scala/util/parsing/combinator/RegexParsers.scala
@@ -13,7 +13,7 @@ import java.util.regex.Pattern
import scala.util.matching.Regex
import scala.util.parsing.input._
import scala.collection.immutable.PagedSeq
-import language.implicitConversions
+import scala.language.implicitConversions
/** The ''most important'' differences between `RegexParsers` and
* [[scala.util.parsing.combinator.Parsers]] are:
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
index 215b8b792f..03979d43b7 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
@@ -13,7 +13,7 @@ package syntactical
import token._
import lexical.StdLexical
-import language.implicitConversions
+import scala.language.implicitConversions
/** This component provides primitive parsers for the standard tokens defined in `StdTokens`.
*
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
index 7aa6178df9..a3b94e2562 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
@@ -14,7 +14,7 @@ package syntactical
import token._
import scala.collection.mutable
-import language.implicitConversions
+import scala.language.implicitConversions
/** This component provides primitive parsers for the standard tokens defined in `StdTokens`.
*
diff --git a/src/library/scala/util/parsing/combinator/testing/RegexTest.scala b/src/library/scala/util/parsing/combinator/testing/RegexTest.scala
index ff3554a6af..80e9b0df39 100644
--- a/src/library/scala/util/parsing/combinator/testing/RegexTest.scala
+++ b/src/library/scala/util/parsing/combinator/testing/RegexTest.scala
@@ -3,7 +3,7 @@ package scala.util.parsing.combinator.testing
import scala.util.parsing.combinator._
import scala.util.parsing.input._
-import language.postfixOps
+import scala.language.postfixOps
@deprecated("This class will be removed", "2.10.0")
case class Ident(s: String)
diff --git a/src/library/scala/util/parsing/input/OffsetPosition.scala b/src/library/scala/util/parsing/input/OffsetPosition.scala
index 57a2c9c4c2..3366584ab2 100644
--- a/src/library/scala/util/parsing/input/OffsetPosition.scala
+++ b/src/library/scala/util/parsing/input/OffsetPosition.scala
@@ -8,7 +8,7 @@
package scala.util.parsing.input
-import collection.mutable.ArrayBuffer
+import scala.collection.mutable.ArrayBuffer
/** `OffsetPosition` is a standard class for positions
* represented as offsets into a source ``document''.
diff --git a/src/library/scala/volatile.scala b/src/library/scala/volatile.scala
index 88726d9336..1290e54f3a 100644
--- a/src/library/scala/volatile.scala
+++ b/src/library/scala/volatile.scala
@@ -10,7 +10,7 @@
package scala
-import annotation.meta._
+import scala.annotation.meta._
@field
-class volatile extends annotation.StaticAnnotation
+class volatile extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/xml/Elem.scala b/src/library/scala/xml/Elem.scala
index f140fd1e07..2ca1dbfcd0 100755
--- a/src/library/scala/xml/Elem.scala
+++ b/src/library/scala/xml/Elem.scala
@@ -73,7 +73,7 @@ extends Node with Serializable
throw new IllegalArgumentException("prefix of zero length, use null instead")
if (scope == null)
- throw new IllegalArgumentException("scope is null, use xml.TopScope for empty scope")
+ throw new IllegalArgumentException("scope is null, use scala.xml.TopScope for empty scope")
//@todo: copy the children,
// setting namespace scope if necessary
diff --git a/src/library/scala/xml/Equality.scala b/src/library/scala/xml/Equality.scala
index 07651adb90..0efbb4c511 100644
--- a/src/library/scala/xml/Equality.scala
+++ b/src/library/scala/xml/Equality.scala
@@ -13,7 +13,7 @@ package scala.xml
* all the `xml` classes go through the `xml.Equality trait`. There are two
* forms of `xml` comparison.
*
- * 1. `'''def''' strict_==(other: xml.Equality)`
+ * 1. `'''def''' strict_==(other: scala.xml.Equality)`
*
* This one tries to honor the little things like symmetry and hashCode
* contracts. The `equals` method routes all comparisons through this.
diff --git a/src/library/scala/xml/MetaData.scala b/src/library/scala/xml/MetaData.scala
index e98ec90aca..15b3cb6d4a 100644
--- a/src/library/scala/xml/MetaData.scala
+++ b/src/library/scala/xml/MetaData.scala
@@ -9,7 +9,7 @@
package scala.xml
import Utility.sbToString
-import annotation.tailrec
+import scala.annotation.tailrec
import scala.collection.{ AbstractIterable, Iterator }
/**
diff --git a/src/library/scala/xml/NodeSeq.scala b/src/library/scala/xml/NodeSeq.scala
index 40ddc7d85c..e50e68d4fd 100644
--- a/src/library/scala/xml/NodeSeq.scala
+++ b/src/library/scala/xml/NodeSeq.scala
@@ -8,10 +8,10 @@
package scala.xml
-import collection.{ mutable, immutable, generic, SeqLike, AbstractSeq }
+import scala.collection.{ mutable, immutable, generic, SeqLike, AbstractSeq }
import mutable.{ Builder, ListBuffer }
import generic.{ CanBuildFrom }
-import language.implicitConversions
+import scala.language.implicitConversions
/** This object ...
*
diff --git a/src/library/scala/xml/Utility.scala b/src/library/scala/xml/Utility.scala
index bae529c85c..50a284d7cd 100755
--- a/src/library/scala/xml/Utility.scala
+++ b/src/library/scala/xml/Utility.scala
@@ -10,7 +10,7 @@ package scala.xml
import scala.collection.mutable
import parsing.XhtmlEntities
-import language.implicitConversions
+import scala.language.implicitConversions
/**
* The `Utility` object provides utility functions for processing instances
diff --git a/src/library/scala/xml/dtd/ContentModel.scala b/src/library/scala/xml/dtd/ContentModel.scala
index a5d2a6bd7e..f98aff5709 100644
--- a/src/library/scala/xml/dtd/ContentModel.scala
+++ b/src/library/scala/xml/dtd/ContentModel.scala
@@ -11,9 +11,9 @@
package scala.xml
package dtd
-import util.regexp.WordExp
-import util.automata._
-import Utility.sbToString
+import scala.util.regexp.WordExp
+import scala.util.automata._
+import scala.xml.Utility.sbToString
import PartialFunction._
object ContentModel extends WordExp {
diff --git a/src/library/scala/xml/dtd/ContentModelParser.scala b/src/library/scala/xml/dtd/ContentModelParser.scala
index 2d87bc0764..5d183df04b 100644
--- a/src/library/scala/xml/dtd/ContentModelParser.scala
+++ b/src/library/scala/xml/dtd/ContentModelParser.scala
@@ -6,7 +6,6 @@
** |/ **
\* */
-
package scala.xml
package dtd
@@ -21,10 +20,10 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
def accept(tok: Int) = {
if (token != tok) {
if ((tok == STAR) && (token == END)) // common mistake
- sys.error("in DTDs, \n"+
+ scala.sys.error("in DTDs, \n"+
"mixed content models must be like (#PCDATA|Name|Name|...)*");
else
- sys.error("expected "+token2string(tok)+
+ scala.sys.error("expected "+token2string(tok)+
", got unexpected token:"+token2string(token));
}
nextToken
@@ -45,7 +44,7 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
case NAME => value match {
case "ANY" => ANY
case "EMPTY" => EMPTY
- case _ => sys.error("expected ANY, EMPTY or '(' instead of " + value );
+ case _ => scala.sys.error("expected ANY, EMPTY or '(' instead of " + value );
}
case LPAREN =>
@@ -65,12 +64,12 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
accept( STAR );
res
case _ =>
- sys.error("unexpected token:" + token2string(token) );
+ scala.sys.error("unexpected token:" + token2string(token) );
}
}
case _ =>
- sys.error("unexpected token:" + token2string(token) );
+ scala.sys.error("unexpected token:" + token2string(token) );
}
// sopt ::= S?
def sOpt() = if( token == S ) nextToken;
@@ -118,12 +117,12 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
def particle = token match {
case LPAREN => nextToken; sOpt; regexp;
case NAME => val a = Letter(ElemName(value)); nextToken; maybeSuffix(a)
- case _ => sys.error("expected '(' or Name, got:"+token2string(token));
+ case _ => scala.sys.error("expected '(' or Name, got:"+token2string(token));
}
// atom ::= name
def atom = token match {
case NAME => val a = Letter(ElemName(value)); nextToken; a
- case _ => sys.error("expected Name, got:"+token2string(token));
+ case _ => scala.sys.error("expected Name, got:"+token2string(token));
}
}
diff --git a/src/library/scala/xml/dtd/Scanner.scala b/src/library/scala/xml/dtd/Scanner.scala
index 82a8d1af2f..2e753a7590 100644
--- a/src/library/scala/xml/dtd/Scanner.scala
+++ b/src/library/scala/xml/dtd/Scanner.scala
@@ -44,7 +44,7 @@ class Scanner extends Tokens with parsing.TokenTests {
final def next() = if (it.hasNext) c = it.next else c = ENDCH
final def acc(d: Char) {
- if (c == d) next else sys.error("expected '"+d+"' found '"+c+"' !");
+ if (c == d) next else scala.sys.error("expected '"+d+"' found '"+c+"' !");
}
final def accS(ds: Seq[Char]) { ds foreach acc }
@@ -65,7 +65,7 @@ class Scanner extends Tokens with parsing.TokenTests {
case ENDCH => END
case _ =>
if (isNameStart(c)) name; // NAME
- else sys.error("unexpected character:" + c)
+ else scala.sys.error("unexpected character:" + c)
}
final def name = {
diff --git a/src/library/scala/xml/factory/NodeFactory.scala b/src/library/scala/xml/factory/NodeFactory.scala
index 61d4855b2e..c543b8751b 100644
--- a/src/library/scala/xml/factory/NodeFactory.scala
+++ b/src/library/scala/xml/factory/NodeFactory.scala
@@ -18,7 +18,7 @@ trait NodeFactory[A <: Node] {
val ignoreProcInstr = false
/* default behaviour is to use hash-consing */
- val cache = new collection.mutable.HashMap[Int, List[A]]
+ val cache = new scala.collection.mutable.HashMap[Int, List[A]]
protected def create(pre: String, name: String, attrs: MetaData, scope: NamespaceBinding, children:Seq[Node]): A
diff --git a/src/library/scala/xml/include/sax/XIncluder.scala b/src/library/scala/xml/include/sax/XIncluder.scala
index f4d69ffe44..2af66f4f16 100644
--- a/src/library/scala/xml/include/sax/XIncluder.scala
+++ b/src/library/scala/xml/include/sax/XIncluder.scala
@@ -62,7 +62,7 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit
val value = atts.getValue(i);
// @todo Need to use character references if the encoding
// can't support the character
- out.write(xml.Utility.escape(value))
+ out.write(scala.xml.Utility.escape(value))
out.write("'");
i += 1
}
diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala
index af9b5f47cf..d4dc6da14d 100755
--- a/src/library/scala/xml/parsing/MarkupParser.scala
+++ b/src/library/scala/xml/parsing/MarkupParser.scala
@@ -56,7 +56,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
// See ticket #3720 for motivations.
private class WithLookAhead(underlying: Source) extends Source {
- private val queue = collection.mutable.Queue[Char]()
+ private val queue = scala.collection.mutable.Queue[Char]()
def lookahead(): BufferedIterator[Char] = {
val iter = queue.iterator ++ new Iterator[Char] {
def hasNext = underlying.hasNext
@@ -897,7 +897,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
new PublicID(pubID, sysID)
} else {
reportSyntaxError("PUBLIC or SYSTEM expected");
- sys.error("died parsing notationdecl")
+ scala.sys.error("died parsing notationdecl")
}
xSpaceOpt
xToken('>')
diff --git a/src/library/scala/xml/parsing/MarkupParserCommon.scala b/src/library/scala/xml/parsing/MarkupParserCommon.scala
index 096f8a8f38..219c3d6679 100644
--- a/src/library/scala/xml/parsing/MarkupParserCommon.scala
+++ b/src/library/scala/xml/parsing/MarkupParserCommon.scala
@@ -21,7 +21,7 @@ import Utility.SU
* All members should be accessed through those.
*/
private[scala] trait MarkupParserCommon extends TokenTests {
- protected def unreachable = sys.error("Cannot be reached.")
+ protected def unreachable = scala.sys.error("Cannot be reached.")
// type HandleType // MarkupHandler, SymbolicXMLBuilder
type InputType // Source, CharArrayReader
@@ -82,7 +82,7 @@ private[scala] trait MarkupParserCommon extends TokenTests {
case `end` => return buf.toString
case ch => buf append ch
}
- sys.error("Expected '%s'".format(end))
+ scala.sys.error("Expected '%s'".format(end))
}
/** [42] '<' xmlEndTag ::= '<' '/' Name S? '>'
diff --git a/src/library/scala/xml/pull/XMLEventReader.scala b/src/library/scala/xml/pull/XMLEventReader.scala
index c764d042c8..07fab27957 100755
--- a/src/library/scala/xml/pull/XMLEventReader.scala
+++ b/src/library/scala/xml/pull/XMLEventReader.scala
@@ -24,7 +24,7 @@ import scala.xml.parsing.{ ExternalSources, MarkupHandler, MarkupParser }
* @author Paul Phillips
*/
class XMLEventReader(src: Source)
-extends collection.AbstractIterator[XMLEvent]
+extends scala.collection.AbstractIterator[XMLEvent]
with ProducerConsumerIterator[XMLEvent] {
// We implement a pull parser as an iterator, but since we may be operating on
diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala
index a1450ee876..6df0cec7fe 100644
--- a/src/partest/scala/tools/partest/CompilerTest.scala
+++ b/src/partest/scala/tools/partest/CompilerTest.scala
@@ -5,7 +5,7 @@
package scala.tools.partest
-import scala.reflect.{basis => rb}
+import scala.reflect.runtime.{universe => ru}
import scala.tools.nsc._
/** For testing compiler internals directly.
@@ -34,7 +34,7 @@ abstract class CompilerTest extends DirectTest {
// Utility functions
class MkType(sym: Symbol) {
- def apply[M](implicit t: rb.TypeTag[M]): Type =
+ def apply[M](implicit t: ru.TypeTag[M]): Type =
if (sym eq NoSymbol) NoType
else appliedType(sym, compilerTypeFromTag(t))
}
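The `basis` universe is dropped here in favour of the full runtime universe, whose `TypeTag`s carry complete type information. A tiny sketch of the kind of tag `MkType.apply` now expects (the `tagOf` helper is illustrative):

  import scala.reflect.runtime.{universe => ru}

  def tagOf[T](implicit t: ru.TypeTag[T]): ru.TypeTag[T] = t

  tagOf[List[Int]].tpe  // the fully reconstructed type, e.g. List[Int]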
diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala
index ff047daf9e..af2fc986fa 100644
--- a/src/partest/scala/tools/partest/DirectTest.scala
+++ b/src/partest/scala/tools/partest/DirectTest.scala
@@ -37,7 +37,7 @@ abstract class DirectTest extends App {
}
// new compiler
def newCompiler(args: String*): Global = {
- val settings = newSettings((CommandLineParser tokenize extraSettings) ++ args.toList)
+ val settings = newSettings((CommandLineParser tokenize ("-d \"" + testOutput.path + "\" " + extraSettings)) ++ args.toList)
if (settings.Yrangepos.value) new Global(settings) with interactive.RangePositions
else new Global(settings)
}
diff --git a/src/partest/scala/tools/partest/MemoryTest.scala b/src/partest/scala/tools/partest/MemoryTest.scala
new file mode 100644
index 0000000000..58d25d2f01
--- /dev/null
+++ b/src/partest/scala/tools/partest/MemoryTest.scala
@@ -0,0 +1,38 @@
+package scala.tools.partest
+
+abstract class MemoryTest {
+ def maxDelta: Double
+ def calcsPerIter: Int
+ def calc(): Unit
+
+ def main(args: Array[String]) {
+ val rt = Runtime.getRuntime()
+ def memUsage() = {
+ import java.lang.management._
+ import scala.collection.JavaConverters._
+ val pools = ManagementFactory.getMemoryPoolMXBeans.asScala
+ pools.map(_.getUsage.getUsed).sum / 1000000d
+ }
+
+ val history = scala.collection.mutable.ListBuffer[Double]()
+ def stressTestIter() = {
+ var i = 0
+ while (i < calcsPerIter) { calc(); i += 1 }
+ 1 to 5 foreach (_ => rt.gc())
+ history += memUsage
+ }
+
+ 1 to 5 foreach (_ => stressTestIter())
+ val reference = memUsage()
+ 1 to 5 foreach (_ => stressTestIter())
+ 1 to 5 foreach (_ => rt.gc())
+ val result = memUsage()
+ history += result
+
+ val delta = result - reference
+ if (delta > maxDelta) {
+ println("FAILED")
+ history foreach (mb => println(mb + " Mb"))
+ }
+ }
+}
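A hypothetical test built on the new harness only fixes the leak threshold, the per-iteration workload count, and the workload itself; the inherited `main` does the warm-up, GC cycles and comparison:

  import scala.tools.partest.MemoryTest

  object Test extends MemoryTest {
    def maxDelta = 10.0        // fail if the heap grows by more than ~10 MB after warm-up
    def calcsPerIter = 100000
    def calc() {
      val sb = new StringBuilder
      sb.append("partest")
      sb.toString
    }
  }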
diff --git a/src/partest/scala/tools/partest/PartestDefaults.scala b/src/partest/scala/tools/partest/PartestDefaults.scala
index 73a7b92778..b27ce6ff75 100644
--- a/src/partest/scala/tools/partest/PartestDefaults.scala
+++ b/src/partest/scala/tools/partest/PartestDefaults.scala
@@ -2,7 +2,7 @@ package scala.tools
package partest
import nsc.io.{ File, Path, Directory }
-import util.{ PathResolver }
+import scala.tools.util.PathResolver
import nsc.Properties.{ propOrElse, propOrNone, propOrEmpty }
import java.lang.Runtime.getRuntime
diff --git a/src/partest/scala/tools/partest/TestUtil.scala b/src/partest/scala/tools/partest/TestUtil.scala
index b86a8e2c7f..9bfd444180 100644
--- a/src/partest/scala/tools/partest/TestUtil.scala
+++ b/src/partest/scala/tools/partest/TestUtil.scala
@@ -1,5 +1,7 @@
package scala.tools.partest
+import scala.reflect.{ classTag, ClassTag }
+
trait TestUtil {
/** Given function and block of code, evaluates code block,
* calls function with nanoseconds elapsed, and returns block result.
@@ -29,8 +31,16 @@ trait TestUtil {
assert(mult <= acceptableMultiple, "Performance difference too great: multiple = " + mult)
}
+
+ def intercept[T <: Exception : ClassTag](code: => Unit): Unit =
+ try {
+ code
+ assert(false, "did not throw " + classTag[T])
+ } catch {
+ case ex: Exception if classTag[T].runtimeClass isInstance ex =>
+ }
}
object TestUtil extends TestUtil {
-} \ No newline at end of file
+}
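A minimal sketch of the new `intercept` helper: the block must throw an exception of the expected class, otherwise the assert inside `intercept` fails the test.

  import scala.tools.partest.TestUtil.intercept

  intercept[IllegalArgumentException] {
    require(false, "boom")  // throws IllegalArgumentException, so intercept is satisfied
  }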
diff --git a/src/partest/scala/tools/partest/nest/CompileManager.scala b/src/partest/scala/tools/partest/nest/CompileManager.scala
index 604e53e64c..0f2806214f 100644
--- a/src/partest/scala/tools/partest/nest/CompileManager.scala
+++ b/src/partest/scala/tools/partest/nest/CompileManager.scala
@@ -9,6 +9,7 @@ package scala.tools.partest
package nest
import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError, io }
+import scala.tools.nsc.io.{ File => SFile }
import scala.tools.nsc.interactive.RangePositions
import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter }
import scala.tools.nsc.util.{ ClassPath, FakePos }
@@ -94,7 +95,17 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
val logFile = basename(log.getName)
val flagsFileName = "%s.flags" format (logFile.substring(0, logFile.lastIndexOf("-")))
val argString = (io.File(log).parent / flagsFileName) ifFile (x => updatePluginPath(x.slurp())) getOrElse ""
- val allOpts = fileManager.SCALAC_OPTS.toList ::: argString.split(' ').toList.filter(_.length > 0)
+
+ // slurp local flags (e.g., "A_1.flags")
+ val fstFile = SFile(files(0))
+ def isInGroup(num: Int) = fstFile.stripExtension endsWith ("_" + num)
+ val inGroup = (1 to 9) flatMap (group => if (isInGroup(group)) List(group) else List())
+ val localFlagsList = if (inGroup.nonEmpty) {
+ val localArgString = (fstFile.parent / (fstFile.stripExtension + ".flags")) ifFile (x => updatePluginPath(x.slurp())) getOrElse ""
+ localArgString.split(' ').toList.filter(_.length > 0)
+ } else List()
+
+ val allOpts = fileManager.SCALAC_OPTS.toList ::: argString.split(' ').toList.filter(_.length > 0) ::: localFlagsList
val args = allOpts.toList
NestUI.verbose("scalac options: "+allOpts)
diff --git a/src/partest/scala/tools/partest/nest/DirectRunner.scala b/src/partest/scala/tools/partest/nest/DirectRunner.scala
index a07705322d..a890a57f14 100644
--- a/src/partest/scala/tools/partest/nest/DirectRunner.scala
+++ b/src/partest/scala/tools/partest/nest/DirectRunner.scala
@@ -37,6 +37,8 @@ trait DirectRunner {
})
}
def runTestsForFiles(_kindFiles: List[File], kind: String): immutable.Map[String, TestState] = {
+ System.setProperty("line.separator", "\n")
+
// @partest maintainer: we cannot create a fresh file manager here
// since the FM must respect --buildpath and --classpath from the command line
// for example, see how it's done in ReflectiveRunner
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
index 064b82da85..512c718040 100644
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ b/src/partest/scala/tools/partest/nest/FileManager.scala
@@ -13,7 +13,7 @@ import java.io.{File, FilenameFilter, IOException, StringWriter,
FileReader, PrintWriter, FileWriter}
import java.net.URI
import scala.tools.nsc.io.{ Path, Directory, File => SFile }
-import sys.process._
+import scala.sys.process._
import scala.collection.mutable
trait FileUtil {
diff --git a/src/partest/scala/tools/partest/nest/RunnerManager.scala b/src/partest/scala/tools/partest/nest/RunnerManager.scala
index 20d61d0831..7a42853749 100644
--- a/src/partest/scala/tools/partest/nest/RunnerManager.scala
+++ b/src/partest/scala/tools/partest/nest/RunnerManager.scala
@@ -138,12 +138,29 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP
catch exHandler(output, "javac command failed:\n" + args.map(" " + _ + "\n").mkString + "\n", CompilerCrashed)
}
- /** Runs command redirecting standard out and
- * error out to output file.
+ /** Runs command redirecting standard out and error out to output file.
+ * Overloaded to accept a sequence of arguments.
*/
private def runCommand(args: Seq[String], outFile: File): Boolean = {
NestUI.verbose("running command:\n"+args.map(" " + _ + "\n").mkString)
- (Process(args) #> outFile !) == 0
+ runCommandImpl(Process(args), outFile)
+ }
+
+ /** Runs command redirecting standard out and error out to output file.
+ * Overloaded to accept a single string containing the command and its arguments.
+ */
+ private def runCommand(command: String, outFile: File): Boolean = {
+ NestUI.verbose("running command:"+command)
+ runCommandImpl(Process(command), outFile)
+ }
+
+ private def runCommandImpl(process: => ProcessBuilder, outFile: File): Boolean = {
+ val exitCode = (process #> outFile !)
+ // normalize line endings
+ // System.getProperty("line.separator") should be "\n" here
+ // so reading a file and writing it back should convert all CRLFs to LFs
+ SFile(outFile).printlnAll(SFile(outFile).lines.toList: _*)
+ exitCode == 0
}
@inline private def isJava(f: File) = SFile(f) hasExtension "java"
@@ -217,6 +234,7 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP
"-Dpartest.output="+outDir.getAbsolutePath,
"-Dpartest.lib="+LATEST_LIB,
"-Dpartest.reflect="+LATEST_REFLECT,
+ "-Dpartest.comp="+LATEST_COMP,
"-Dpartest.cwd="+outDir.getParent,
"-Dpartest.test-path="+testFullPath,
"-Dpartest.testname="+fileBase,
@@ -312,8 +330,8 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP
val testFiles = dir.listFiles.toList filter isJavaOrScala
def isInGroup(f: File, num: Int) = SFile(f).stripExtension endsWith ("_" + num)
- val groups = (0 to 9).toList map (num => testFiles filter (f => isInGroup(f, num)))
- val noGroupSuffix = testFiles filterNot (groups.flatten contains)
+ val groups = (0 to 9).toList map (num => (testFiles filter (f => isInGroup(f, num))).sorted)
+ val noGroupSuffix = (testFiles filterNot (groups.flatten contains)).sorted
noGroupSuffix :: groups filterNot (_.isEmpty)
}
@@ -766,7 +784,7 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP
}
else file.getAbsolutePath
- val ok = ((cmdString+argString) #> logFile !) == 0
+ val ok = runCommand(cmdString+argString, logFile)
( ok && diffCheck(file, compareOutput(file.getParentFile, logFile)) )
}
catch { case e: Exception => NestUI.verbose("caught "+e) ; false }
diff --git a/src/partest/scala/tools/partest/nest/SBTRunner.scala b/src/partest/scala/tools/partest/nest/SBTRunner.scala
index 266153d9d3..206ee19c76 100644
--- a/src/partest/scala/tools/partest/nest/SBTRunner.scala
+++ b/src/partest/scala/tools/partest/nest/SBTRunner.scala
@@ -4,7 +4,7 @@ package nest
import java.io.File
import scala.tools.nsc.io.{ Directory }
import scala.util.Properties.setProp
-import collection.JavaConverters._
+import scala.collection.JavaConverters._
object SBTRunner extends DirectRunner {
@@ -34,7 +34,7 @@ object SBTRunner extends DirectRunner {
scalacOptions: Seq[String] = Seq(),
justFailedTests: Boolean = false)
- def mainReflect(args: Array[String]): java.util.Map[String, TestState] = {
+ def mainReflect(args: Array[String]): java.util.Map[String, String] = {
setProp("partest.debug", "true")
val Argument = new scala.util.matching.Regex("-(.*)")
@@ -73,9 +73,13 @@ object SBTRunner extends DirectRunner {
(for {
(testType, files) <- runs
(path, result) <- reflectiveRunTestsForFiles(files,testType).asScala
- } yield (path, result)).seq.asJava
+ } yield (path, fixResult(result))).seq.asJava
+ }
+ def fixResult(result: TestState): String = result match {
+ case TestState.Ok => "OK"
+ case TestState.Fail => "FAIL"
+ case TestState.Timeout => "TIMEOUT"
}
-
def main(args: Array[String]): Unit = {
val failures = (
for ((path, result) <- mainReflect(args).asScala ; if result != TestState.Ok) yield
diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala
index 08934ef143..df1c296d47 100644
--- a/src/partest/scala/tools/partest/package.scala
+++ b/src/partest/scala/tools/partest/package.scala
@@ -6,7 +6,7 @@ package scala.tools
import java.io.{ FileNotFoundException, File => JFile }
import nsc.io.{ Path, Directory, File => SFile }
-import util.{ PathResolver }
+import scala.tools.util.PathResolver
import nsc.Properties.{ propOrElse, propOrNone, propOrEmpty }
import scala.sys.process.javaVmArguments
import java.util.concurrent.Callable
@@ -30,8 +30,8 @@ package object partest {
implicit private[partest] def temporaryPath2File(x: Path): JFile = x.jfile
implicit private[partest] def temporaryFile2Path(x: JFile): Path = Path(x)
- implicit lazy val postfixOps = language.postfixOps
- implicit lazy val implicitConversions = language.implicitConversions
+ implicit lazy val postfixOps = scala.language.postfixOps
+ implicit lazy val implicitConversions = scala.language.implicitConversions
def timed[T](body: => T): (T, Long) = {
val t1 = System.currentTimeMillis
@@ -62,7 +62,7 @@ package object partest {
)
def allPropertiesString = {
- import collection.JavaConversions._
+ import scala.collection.JavaConversions._
System.getProperties.toList.sorted map { case (k, v) => "%s -> %s\n".format(k, v) } mkString ""
}
@@ -73,4 +73,54 @@ package object partest {
def isPartestDebug: Boolean =
propOrEmpty("partest.debug") == "true"
+
+
+ import scala.language.experimental.macros
+
+ /**
+ * `trace("".isEmpty)` will return `true` and as a side effect print the following to standard out.
+ * {{{
+ * trace> "".isEmpty
+ * res: Boolean = true
+ *
+ * }}}
+ *
+ * An alternative to [[scala.tools.partest.ReplTest]] that avoids the inconvenience of embedding
+ * test code in a string.
+ */
+ def trace[A](a: A) = macro traceImpl[A]
+
+ import scala.reflect.macros.Context
+ def traceImpl[A: c.WeakTypeTag](c: Context)(a: c.Expr[A]): c.Expr[A] = {
+ import c.universe._
+ import definitions._
+
+ // xeno.by: reify shouldn't be used explicitly before the final release of 2.10.0,
+ // because this impairs reflection refactorings
+ //
+ // val exprCode = c.literal(show(a.tree))
+ // val exprType = c.literal(show(a.actualType))
+ // reify {
+ // println(s"trace> ${exprCode.splice}\nres: ${exprType.splice} = ${a.splice}\n")
+ // a.splice
+ // }
+
+ c.Expr(Block(
+ List(Apply(
+ Select(Ident(PredefModule), newTermName("println")),
+ List(Apply(
+ Select(Apply(
+ Select(Ident(ScalaPackage), newTermName("StringContext")),
+ List(
+ Literal(Constant("trace> ")),
+ Literal(Constant("\\nres: ")),
+ Literal(Constant(" = ")),
+ Literal(Constant("\\n")))),
+ newTermName("s")),
+ List(
+ Literal(Constant(show(a.tree))),
+ Literal(Constant(show(a.actualType))),
+ a.tree))))),
+ a.tree))
+ }
}
diff --git a/src/reflect/scala/reflect/api/AnnotationInfos.scala b/src/reflect/scala/reflect/api/AnnotationInfos.scala
deleted file mode 100644
index d9f35024d9..0000000000
--- a/src/reflect/scala/reflect/api/AnnotationInfos.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-package scala.reflect
-package api
-
-trait AnnotationInfos extends base.AnnotationInfos { self: Universe =>
-
- override type AnnotationInfo >: Null <: AnyRef with AnnotationInfoApi
- trait AnnotationInfoApi {
- def atp: Type
- def args: List[Tree]
- def assocs: List[(Name, ClassfileAnnotArg)]
- }
-
- override type LiteralAnnotArg >: Null <: ClassfileAnnotArg with LiteralAnnotArgApi
- trait LiteralAnnotArgApi {
- def const: Constant
- }
-
- override type ArrayAnnotArg >: Null <: ClassfileAnnotArg with ArrayAnnotArgApi
- trait ArrayAnnotArgApi {
- def args: Array[ClassfileAnnotArg]
- }
-
- override type NestedAnnotArg >: Null <: ClassfileAnnotArg with NestedAnnotArgApi
- trait NestedAnnotArgApi {
- def annInfo: AnnotationInfo
- }
-} \ No newline at end of file
diff --git a/src/reflect/scala/reflect/api/Annotations.scala b/src/reflect/scala/reflect/api/Annotations.scala
new file mode 100644
index 0000000000..37882a9f3c
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Annotations.scala
@@ -0,0 +1,124 @@
+package scala.reflect
+package api
+
+import scala.collection.immutable.ListMap
+
+/**
+ * Defines the type hierarchy for annotations.
+ */
+trait Annotations { self: Universe =>
+
+ /** Typed information about an annotation. It can be attached to either a symbol or an annotated type.
+ *
+ * Annotations are either ''Scala annotations'', which conform to [[scala.annotation.StaticAnnotation]]
+ * or ''Java annotations'', which conform to [[scala.annotation.ClassfileAnnotation]].
+ * Trait `ClassfileAnnotation` is automatically added to every Java annotation by the scalac classfile parser.
+ */
+ type Annotation >: Null <: AnyRef with AnnotationApi
+
+ /** A tag that preserves the identity of the `Annotation` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val AnnotationTag: ClassTag[Annotation]
+
+ /** The constructor/deconstructor for `Annotation` instances. */
+ val Annotation: AnnotationExtractor
+
+ /** An extractor class to create and pattern match with syntax `Annotation(atp, scalaArgs, javaArgs)`.
+ * Here, `atp` is the annotation type, `scalaArgs` the arguments, and `javaArgs` the annotation's key-value
+ * pairs.
+ *
+ * Annotations are pickled, i.e. written to scala symtab attribute in the classfile.
+ * Annotations are written to the classfile as Java annotations if `atp` conforms to `ClassfileAnnotation`.
+ *
+ * For Scala annotations, arguments are stored in `scalaArgs` and `javaArgs` is empty. Arguments in
+ * `scalaArgs` are represented as typed trees. Note that these trees are not transformed by any phases
+ * following the type-checker. For Java annotations, `scalaArgs` is empty and arguments are stored in
+ * `javaArgs`.
+ */
+ abstract class AnnotationExtractor {
+ def apply(tpe: Type, scalaArgs: List[Tree], javaArgs: ListMap[Name, JavaArgument]): Annotation
+ def unapply(ann: Annotation): Option[(Type, List[Tree], ListMap[Name, JavaArgument])]
+ }
+
+ trait AnnotationApi {
+ def tpe: Type
+ def scalaArgs: List[Tree]
+ def javaArgs: ListMap[Name, JavaArgument]
+ }
+
+ /** A Java annotation argument */
+ type JavaArgument >: Null <: AnyRef
+ implicit val JavaArgumentTag: ClassTag[JavaArgument]
+
+ /** A literal argument to a Java annotation as `"Use X instead"` in `@Deprecated("Use X instead")`*/
+ type LiteralArgument >: Null <: AnyRef with JavaArgument with LiteralArgumentApi
+
+ /** A tag that preserves the identity of the `LiteralArgument` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val LiteralArgumentTag: ClassTag[LiteralArgument]
+
+ /** The constructor/deconstructor for `LiteralArgument` instances. */
+ val LiteralArgument: LiteralArgumentExtractor
+
+ /** An extractor class to create and pattern match with syntax `LiteralArgument(value)`
+ * where `value` is the constant argument.
+ */
+ abstract class LiteralArgumentExtractor {
+ def apply(value: Constant): LiteralArgument
+ def unapply(arg: LiteralArgument): Option[Constant]
+ }
+
+ trait LiteralArgumentApi {
+ def value: Constant
+ }
+
+ /** An array argument to a Java annotation as in `@Target(value={TYPE,FIELD,METHOD,PARAMETER})`
+ */
+ type ArrayArgument >: Null <: AnyRef with JavaArgument with ArrayArgumentApi
+
+ /** A tag that preserves the identity of the `ArrayArgument` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ArrayArgumentTag: ClassTag[ArrayArgument]
+
+ /** The constructor/deconstructor for `ArrayArgument` instances. */
+ val ArrayArgument: ArrayArgumentExtractor
+
+ /** An extractor class to create and pattern match with syntax `ArrayArgument(args)`
+ * where `args` is the argument array.
+ */
+ abstract class ArrayArgumentExtractor {
+ def apply(args: Array[JavaArgument]): ArrayArgument
+ def unapply(arg: ArrayArgument): Option[Array[JavaArgument]]
+ }
+
+ trait ArrayArgumentApi {
+ def args: Array[JavaArgument]
+ }
+
+ /** A nested annotation argument to a Java annotation as `@Nested` in `@Outer(@Nested)`.
+ */
+ type NestedArgument >: Null <: AnyRef with JavaArgument with NestedArgumentApi
+
+ /** A tag that preserves the identity of the `NestedArgument` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val NestedArgumentTag: ClassTag[NestedArgument]
+
+ /** The constructor/deconstructor for `NestedArgument` instances. */
+ val NestedArgument: NestedArgumentExtractor
+
+ /** An extractor class to create and pattern match with syntax `NestedArgument(annotation)`
+ * where `annotation` is the nested annotation.
+ */
+ abstract class NestedArgumentExtractor {
+ def apply(annotation: Annotation): NestedArgument
+ def unapply(arg: NestedArgument): Option[Annotation]
+ }
+
+ trait NestedArgumentApi {
+ def annotation: Annotation
+ }
+} \ No newline at end of file
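A hedged, REPL-style sketch of the new extractor against the runtime universe (the annotated class `A` is purely illustrative):

  import scala.reflect.runtime.universe._

  @deprecated("use B instead", "1.0") class A

  typeOf[A].typeSymbol.annotations foreach {
    case Annotation(tpe, scalaArgs, javaArgs) =>
      // deprecated is a Scala annotation, so its arguments arrive as typed trees in scalaArgs
      println(tpe + ": " + scalaArgs.size + " Scala args, " + javaArgs.size + " Java args")
  }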
diff --git a/src/reflect/scala/reflect/api/Attachments.scala b/src/reflect/scala/reflect/api/Attachments.scala
new file mode 100644
index 0000000000..edbb0131ca
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Attachments.scala
@@ -0,0 +1,50 @@
+package scala.reflect
+package api
+
+/** Attachments is a generalization of Position. Typically it stores a Position of a tree, but this can be extended to
+ * encompass arbitrary payloads. Payloads are stored in type-indexed slots, which can be read with `get[T]`, written
+ * with `update[T]`, and cleared with `remove[T]`.
+ *
+ * Attachments always carry positions because we don't want to introduce an additional field for attachments in `Tree`,
+ * which would impose an unnecessary memory tax for something that will not be used in most cases.
+ */
+abstract class Attachments { self =>
+
+ /** The position type of this attachment */
+ type Pos >: Null
+
+ /** The underlying position */
+ def pos: Pos
+
+ /** Creates a copy of this attachment with the position replaced by `newPos` */
+ def withPos(newPos: Pos): Attachments { type Pos = self.Pos }
+
+ /** The underlying payload with the guarantee that no two elements have the same type. */
+ def all: Set[Any] = Set.empty
+
+ private def matchesTag[T: ClassTag](datum: Any) =
+ classTag[T].runtimeClass == datum.getClass
+
+ /** An underlying payload of the given class type `T`. */
+ def get[T: ClassTag]: Option[T] =
+ (all filter matchesTag[T]).headOption.asInstanceOf[Option[T]]
+
+ /** Creates a copy of this attachment with the payload slot of T added/updated with the provided value.
+ *
+ * Replaces an existing payload of the same type, if exists.
+ */
+ def update[T: ClassTag](attachment: T): Attachments { type Pos = self.Pos } =
+ new NonemptyAttachments(this.pos, remove[T].all + attachment)
+
+ /** Creates a copy of this attachment with the payload of the given class type `T` removed. */
+ def remove[T: ClassTag]: Attachments { type Pos = self.Pos } = {
+ val newAll = all filterNot matchesTag[T]
+ if (newAll.isEmpty) pos.asInstanceOf[Attachments { type Pos = self.Pos }]
+ else new NonemptyAttachments(this.pos, newAll)
+ }
+
+ private class NonemptyAttachments(override val pos: Pos, override val all: Set[Any]) extends Attachments {
+ type Pos = self.Pos
+ def withPos(newPos: Pos) = new NonemptyAttachments(newPos, all)
+ }
+}
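To see the slot semantics without compiler internals, here is a toy subclass (purely illustrative, not part of the patch) whose position type is itself an `Attachments`, mirroring how compiler `Position`s serve as the empty attachment:

  import scala.reflect.api.Attachments

  class ToyPos extends Attachments {
    type Pos = ToyPos
    def pos = this                   // the position doubles as the empty attachments
    def withPos(newPos: ToyPos) = newPos
  }

  case class Payload(msg: String)

  val empty = new ToyPos
  val att   = empty.update(Payload("hello"))  // one slot per payload class
  att.get[Payload]                            // Some(Payload(hello))
  att.remove[Payload].get[Payload]            // None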
diff --git a/src/reflect/scala/reflect/api/BuildUtils.scala b/src/reflect/scala/reflect/api/BuildUtils.scala
new file mode 100644
index 0000000000..b0de7f8d5a
--- /dev/null
+++ b/src/reflect/scala/reflect/api/BuildUtils.scala
@@ -0,0 +1,75 @@
+package scala.reflect
+package api
+
+/**
+ * This is an internal implementation class.
+ */
+private[reflect] trait BuildUtils { self: Universe =>
+
+ val build: BuildApi
+
+ // this API abstracts away the functionality necessary for reification
+ // it's too gimmicky and unstructured to be exposed directly in the universe
+ // but we need it in a publicly available place for reification to work
+
+ abstract class BuildApi {
+ /** Selects type symbol with given simple name `name` from the defined members of `owner`.
+ */
+ def selectType(owner: Symbol, name: String): TypeSymbol
+
+ /** Selects term symbol with given name and type from the defined members of prefix type
+ */
+ def selectTerm(owner: Symbol, name: String): TermSymbol
+
+ /** Selects overloaded method symbol with given name and index
+ */
+ def selectOverloadedMethod(owner: Symbol, name: String, index: Int): MethodSymbol
+
+ /** A fresh symbol with given name `name`, position `pos` and flags `flags` that has
+ * the current symbol as its owner.
+ */
+ def newNestedSymbol(owner: Symbol, name: Name, pos: Position, flags: FlagSet, isClass: Boolean): Symbol
+
+ /** Create a fresh free term symbol.
+ * @param name the name of the free variable
+ * @param value the value of the free variable at runtime
+ * @param flags (optional) flags of the free variable
+ * @param origin debug information that tells where this symbol comes from
+ */
+ def newFreeTerm(name: String, value: => Any, flags: FlagSet = NoFlags, origin: String = null): FreeTermSymbol
+
+ /** Create a fresh free type symbol.
+ * @param name the name of the free variable
+ * @param flags (optional) flags of the free variable
+ * @param origin debug information that tells where this symbol comes from
+ */
+ def newFreeType(name: String, flags: FlagSet = NoFlags, origin: String = null): FreeTypeSymbol
+
+ /** Set symbol's type signature to given type.
+ * @return the symbol itself
+ */
+ def setTypeSignature[S <: Symbol](sym: S, tpe: Type): S
+
+ /** Set symbol's annotations to given annotations `annots`.
+ */
+ def setAnnotations[S <: Symbol](sym: S, annots: List[Annotation]): S
+
+ def flagsFromBits(bits: Long): FlagSet
+
+ def emptyValDef: ValDef
+
+ def This(sym: Symbol): Tree
+
+ def Select(qualifier: Tree, sym: Symbol): Select
+
+ def Ident(sym: Symbol): Ident
+
+ def TypeTree(tp: Type): TypeTree
+
+ def thisPrefix(sym: Symbol): Type
+
+ def setType[T <: Tree](tree: T, tpe: Type): T
+
+ def setSymbol[T <: Tree](tree: T, sym: Symbol): T
+ }
+}
diff --git a/src/reflect/scala/reflect/api/Constants.scala b/src/reflect/scala/reflect/api/Constants.scala
index 6657245003..f2d8ef2eb9 100644
--- a/src/reflect/scala/reflect/api/Constants.scala
+++ b/src/reflect/scala/reflect/api/Constants.scala
@@ -6,10 +6,33 @@
package scala.reflect
package api
-trait Constants extends base.Constants {
+/**
+ * Defines the type hierachy for compile-time constants.
+ *
+ * @see [[scala.reflect]] for a description on how the class hierarchy is encoded here.
+ */
+trait Constants {
self: Universe =>
- override type Constant >: Null <: AnyRef with ConstantApi
+ /** The type of compile-time constants.
+ */
+ type Constant >: Null <: AnyRef with ConstantApi
+
+ /** A tag that preserves the identity of the `Constant` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ConstantTag: ClassTag[Constant]
+
+ /** The constructor/deconstructor for `Constant` instances. */
+ val Constant: ConstantExtractor
+
+ /** An extractor class to create and pattern match with syntax `Constant(value)`
+ * where `value` is the Scala value of the constant.
+ */
+ abstract class ConstantExtractor {
+ def apply(value: Any): Constant
+ def unapply(arg: Constant): Option[Any]
+ }
abstract class ConstantApi {
val value: Any
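With the extractor documented here, a `Constant` can be built and destructured directly; `Literal` trees are the usual carrier (runtime universe, REPL-style sketch):

  import scala.reflect.runtime.universe._

  val lit = Literal(Constant(42))
  lit match {
    case Literal(Constant(value)) => println("compile-time constant: " + value)
  }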
diff --git a/src/reflect/scala/reflect/api/Exprs.scala b/src/reflect/scala/reflect/api/Exprs.scala
new file mode 100644
index 0000000000..b86f36420d
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Exprs.scala
@@ -0,0 +1,143 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package api
+
+import scala.reflect.runtime.{universe => ru}
+
+trait Exprs { self: Universe =>
+
+ /** Expr wraps an expression tree and tags it with its type. */
+ trait Expr[+T] extends Equals with Serializable {
+ /**
+ * Underlying mirror of this expr.
+ */
+ val mirror: Mirror
+
+ /**
+ * Migrates the expression into another mirror, jumping into a different universe if necessary.
+ *
+ * This means that all symbolic references to classes/objects/packages in the expression
+ * will be re-resolved within the new mirror (typically using that mirror's classloader).
+ */
+ def in[U <: Universe with Singleton](otherMirror: scala.reflect.api.Mirror[U]): U # Expr[T]
+
+ /**
+ * The Scala syntax tree representing the wrapped expression.
+ */
+ def tree: Tree
+
+ /**
+ * Representation of the type of the wrapped expression tree as found via type tags.
+ */
+ def staticType: Type
+ /**
+ * Representation of the type of the wrapped expression tree as found in the tree.
+ */
+ def actualType: Type
+
+ /**
+ * A dummy method to mark expression splicing in reification.
+ *
+ * It should only be used within a `reify` call, which eliminates the `splice` call and embeds
+ * the wrapped tree into the reified surrounding expression.
+ * If used alone `splice` throws an exception when called at runtime.
+ *
+ * If you want to use an Expr in reification of some Scala code, you need to splice it in.
+ * For an expr of type `Expr[T]`, where `T` has a method `foo`, the following code
+ * {{{
+ * reify{ expr.splice.foo }
+ * }}}
+ * uses splice to turn an expr of type Expr[T] into a value of type T in the context of `reify`.
+ *
+ * It is equivalent to
+ * {{{
+ * Select( expr.tree, newTermName("foo") )
+ * }}}
+ *
+ * The following example code however does not compile
+ * {{{
+ * reify{ expr.foo }
+ * }}}
+ * because expr of type Expr[T] itself does not have a method foo.
+ */
+ def splice: T
+ /**
+ * A dummy value to denote cross-stage path-dependent type dependencies.
+ *
+ * For example for the following macro definition:
+ * {{{
+ * class X { type T }
+ * object Macros { def foo(x: X): x.T = macro Impls.foo_impl }
+ * }}}
+ *
+ * The corresponding macro implementation should have the following signature (note how the return type denotes path-dependency on x):
+ * {{{
+ * object Impls { def foo_impl(c: Context)(x: c.Expr[X]): c.Expr[x.value.T] = ... }
+ * }}}
+ */
+ val value: T
+
+ /** case class accessories */
+ override def canEqual(x: Any) = x.isInstanceOf[Expr[_]]
+ override def equals(x: Any) = x.isInstanceOf[Expr[_]] && this.mirror == x.asInstanceOf[Expr[_]].mirror && this.tree == x.asInstanceOf[Expr[_]].tree
+ override def hashCode = mirror.hashCode * 31 + tree.hashCode
+ override def toString = "Expr["+staticType+"]("+tree+")"
+ }
+
+ /**
+ * Constructor/Extractor for Expr.
+ *
+ * Can be useful, when having a tree and wanting to splice it in reify call,
+ * in which case the tree first needs to be wrapped in an expr.
+ */
+ object Expr {
+ def apply[T: WeakTypeTag](mirror: scala.reflect.api.Mirror[self.type], treec: TreeCreator): Expr[T] = new ExprImpl[T](mirror.asInstanceOf[Mirror], treec)
+ def unapply[T](expr: Expr[T]): Option[Tree] = Some(expr.tree)
+ }
+
+ private class ExprImpl[+T: WeakTypeTag](val mirror: Mirror, val treec: TreeCreator) extends Expr[T] {
+ def in[U <: Universe with Singleton](otherMirror: scala.reflect.api.Mirror[U]): U # Expr[T] = {
+ val otherMirror1 = otherMirror.asInstanceOf[scala.reflect.api.Mirror[otherMirror.universe.type]]
+ val tag1 = (implicitly[WeakTypeTag[T]] in otherMirror).asInstanceOf[otherMirror.universe.WeakTypeTag[T]]
+ otherMirror.universe.Expr[T](otherMirror1, treec)(tag1)
+ }
+
+ lazy val tree: Tree = treec(mirror)
+ lazy val staticType: Type = implicitly[WeakTypeTag[T]].tpe
+ def actualType: Type = tree.tpe
+
+ def splice: T = throw new UnsupportedOperationException("""
+ |the function you're calling has not been spliced by the compiler.
+ |this means there is a cross-stage evaluation involved, and it needs to be invoked explicitly.
+ |if you're sure this is not an oversight, add scala-compiler.jar to the classpath,
+ |import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.""".trim.stripMargin)
+ lazy val value: T = throw new UnsupportedOperationException("""
+ |the value you're calling is only meant to be used in cross-stage path-dependent types.
+ |if you want to splice the underlying expression, use `<your expr>.splice`.
+ |if you want to get a value of the underlying expression, add scala-compiler.jar to the classpath,
+ |import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.""".trim.stripMargin)
+
+ private def writeReplace(): AnyRef = new SerializedExpr(treec, implicitly[WeakTypeTag[T]].in(ru.rootMirror))
+ }
+}
+
+private[scala] class SerializedExpr(var treec: TreeCreator, var tag: ru.WeakTypeTag[_]) extends Serializable {
+ private def writeObject(out: java.io.ObjectOutputStream): Unit = {
+ out.writeObject(treec)
+ out.writeObject(tag)
+ }
+
+ private def readObject(in: java.io.ObjectInputStream): Unit = {
+ treec = in.readObject().asInstanceOf[TreeCreator]
+ tag = in.readObject().asInstanceOf[ru.WeakTypeTag[_]]
+ }
+
+ private def readResolve(): AnyRef = {
+ import ru._
+ Expr(rootMirror, treec)(tag)
+ }
+} \ No newline at end of file
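A minimal sketch of the splice mechanics described above, using runtime `reify` (nothing here goes beyond the documented API):

  import scala.reflect.runtime.universe._

  val two: Expr[Int]  = reify(2)
  val four: Expr[Int] = reify(two.splice + two.splice)  // splice embeds the wrapped tree
  show(four.tree)                                       // prints roughly: 2.+(2)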
diff --git a/src/reflect/scala/reflect/api/FlagSets.scala b/src/reflect/scala/reflect/api/FlagSets.scala
index 6d105c9d20..599c4ca426 100644
--- a/src/reflect/scala/reflect/api/FlagSets.scala
+++ b/src/reflect/scala/reflect/api/FlagSets.scala
@@ -5,11 +5,16 @@ import scala.language.implicitConversions
trait FlagSets { self: Universe =>
+ /** An abstract type representing sets of flags (like private, final, etc.) that apply to definition trees and symbols */
type FlagSet
+ /** A tag that preserves the identity of the `FlagSet` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val FlagSetTag: ClassTag[FlagSet]
+
trait FlagOps extends Any {
def | (right: FlagSet): FlagSet
- def hasFlag(flags: FlagSet): Boolean
}
implicit def addFlagOps(left: FlagSet): FlagOps
@@ -18,84 +23,89 @@ trait FlagSets { self: Universe =>
type FlagValues >: Null <: FlagValuesApi
+ // Q: I have a pretty flag. Can I put it here?
+ // A: Only if there's a tree that cannot be built without it.
+ // If you want to put a flag here so that it can be tested against,
+ // introduce an `isXXX` method in one of the `api.Symbols` classes instead.
+
trait FlagValuesApi {
- /** Flag indicating that symbol or tree represents a trait */
+ /** Flag indicating that tree represents a trait */
val TRAIT: FlagSet
- /** Flag indicating that symbol or tree represents a module or its internal module class */
- val MODULE: FlagSet
+ /** Flag indicating that a tree is an interface (i.e. a trait which defines only abstract methods) */
+ val INTERFACE: FlagSet
- /** Flag indicating that symbol or tree represents a mutable variable */
+ /** Flag indicating that tree represents a mutable variable */
val MUTABLE: FlagSet
- /** Flag indicating that symbol or tree represents a package or its internal package class */
- val PACKAGE: FlagSet
-
- /** Flag indicating that symbol or tree represents a method */
- val METHOD: FlagSet
-
- /** Flag indicating that symbol or tree represents a macro definition. */
+ /** Flag indicating that tree represents a macro definition. */
val MACRO: FlagSet
- /** Flag indicating that symbol or tree represents an abstract type, method, or value */
+ /** Flag indicating that tree represents an abstract type, method, or value */
val DEFERRED: FlagSet
- /** Flag indicating that symbol or tree represents an abstract class */
+ /** Flag indicating that tree represents an abstract class */
val ABSTRACT: FlagSet
- /** Flag indicating that symbol or tree has `final` modifier set */
+ /** Flag indicating that tree has `final` modifier set */
val FINAL: FlagSet
- /** Flag indicating that symbol or tree has `sealed` modifier set */
+ /** Flag indicating that tree has `sealed` modifier set */
val SEALED: FlagSet
- /** Flag indicating that symbol or tree has `implicit` modifier set */
+ /** Flag indicating that tree has `implicit` modifier set */
val IMPLICIT: FlagSet
- /** Flag indicating that symbol or tree has `lazy` modifier set */
+ /** Flag indicating that tree has `lazy` modifier set */
val LAZY: FlagSet
- /** Flag indicating that symbol or tree has `override` modifier set */
+ /** Flag indicating that tree has `override` modifier set */
val OVERRIDE: FlagSet
- /** Flag indicating that symbol or tree has `private` modifier set */
+ /** Flag indicating that tree has `private` modifier set */
val PRIVATE: FlagSet
- /** Flag indicating that symbol or tree has `protected` modifier set */
+ /** Flag indicating that tree has `protected` modifier set */
val PROTECTED: FlagSet
- /** Flag indicating that symbol or tree has `case` modifier set */
+ /** Flag indicating that tree represents a member local to the current class
+ * (i.e. private[this] or protected[this]).
+ * This requires having either PRIVATE or PROTECTED set as well.
+ */
+ val LOCAL: FlagSet
+
+ /** Flag indicating that tree has `case` modifier set */
val CASE: FlagSet
- /** Flag indicating that symbol or tree has `abstract` and `override` modifiers set */
+ /** Flag indicating that tree has `abstract` and `override` modifiers set */
val ABSOVERRIDE: FlagSet
- /** Flag indicating that symbol or tree represents a by-name parameter */
+ /** Flag indicating that tree represents a by-name parameter */
val BYNAMEPARAM: FlagSet
- /** Flag indicating that symbol or tree represents a class or parameter.
+ /** Flag indicating that tree represents a parameter of a class or a method.
* Both type and value parameters carry the flag. */
val PARAM: FlagSet
- /** Flag indicating that symbol or tree represents a field of a class
- * that was generated from a parameter of that class */
- val PARAMACCESSOR: FlagSet
-
- /** Flag indicating that symbol or tree represents a field of a case class
- * that corresponds to a parameter in the first parameter list of the
- * primary constructor of that class */
- val CASEACCESSOR: FlagSet
-
- /** Flag indicating that symbol or tree represents a contravariant
+ /** Flag indicating that tree represents a covariant
* type parameter (marked with `+`). */
val COVARIANT: FlagSet
- /** Flag indicating that symbol or tree represents a contravariant
+ /** Flag indicating that tree represents a contravariant
* type parameter (marked with `-`). */
val CONTRAVARIANT: FlagSet
- /** Flag indicating that parameter has a default value */
+ /** Flag indicating that tree represents a parameter that has a default value */
val DEFAULTPARAM: FlagSet
+
+ /** Flag indicating that tree represents an early definition */
+ val PRESUPER: FlagSet
+
+ /** Flag indicating that tree represents a variable or a member initialized to the default value */
+ val DEFAULTINIT: FlagSet
}
+
+ /** The empty set of flags */
+ val NoFlags: FlagSet
}
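
A minimal usage sketch of the flag API above: flags are combined with `|` (via `FlagOps`) and attached to trees through `Modifiers`. It assumes the universe exposes the flag values through a `Flag` member and the standard tree constructors of the released 2.10 API.

    import scala.reflect.runtime.universe._

    // PRIVATE and LAZY combined into one FlagSet and attached to a ValDef.
    val mods = Modifiers(Flag.PRIVATE | Flag.LAZY)
    val vdef = ValDef(mods, newTermName("x"), Ident(newTypeName("Int")), Literal(Constant(42)))

    // showRaw prints the raw tree, including the flags carried by its Modifiers.
    println(showRaw(vdef))
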
diff --git a/src/reflect/scala/reflect/api/FrontEnds.scala b/src/reflect/scala/reflect/api/FrontEnds.scala
deleted file mode 100644
index a201b83444..0000000000
--- a/src/reflect/scala/reflect/api/FrontEnds.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-package scala.reflect
-package api
-
-// [Martin to Eugene] Todo: Needs to be evicted from API
-// [Eugene++ to Martin] but how? we need them for macros
-trait FrontEnds {
-
- type Position >: Null
-
- trait FrontEnd {
- object severity extends Enumeration
- class Severity(val id: Int) extends severity.Value {
- var count: Int = 0
- override def toString() = this match {
- case INFO => "INFO"
- case WARNING => "WARNING"
- case ERROR => "ERROR"
- case _ => "<unknown>"
- }
- }
- val INFO = new Severity(0)
- val WARNING = new Severity(1)
- val ERROR = new Severity(2)
-
- def hasErrors = ERROR.count > 0
- def hasWarnings = WARNING.count > 0
-
- case class Info(val pos: Position, val msg: String, val severity: Severity)
- val infos = new collection.mutable.LinkedHashSet[Info]
-
- /** Handles incoming info */
- def log(pos: Position, msg: String, severity: Severity) {
- infos += new Info(pos, msg, severity)
- severity.count += 1
- display(infos.last)
- }
-
- /** Displays incoming info */
- def display(info: Info): Unit
-
- /** Services a request to drop into interactive mode */
- def interactive(): Unit
-
- /** Refreshes the UI */
- def flush(): Unit = {}
-
- /** Resets the reporter */
- def reset(): Unit = {
- INFO.count = 0
- WARNING.count = 0
- ERROR.count = 0
- infos.clear()
- }
- }
-
- class SilentFrontEnd extends FrontEnd {
- def display(info: Info) {}
- def interactive() {}
- }
-
- /** Creates a UI-less reporter that simply accumulates all the messages
- */
- def mkSilentFrontEnd(): FrontEnd = new SilentFrontEnd()
-
- /** Creates a reporter that prints messages to the console according to the settings.
- *
- * ``minSeverity'' determines minimum severity of the messages to be printed.
- * 0 stands for INFO, 1 stands for WARNING and 2 stands for ERROR.
- */
- // todo. untangle warningsAsErrors from Reporters. I don't feel like moving this flag here!
- def mkConsoleFrontEnd(minSeverity: Int = 1): FrontEnd
-} \ No newline at end of file
diff --git a/src/reflect/scala/reflect/api/Importers.scala b/src/reflect/scala/reflect/api/Importers.scala
index de540a9605..fbc29a514e 100644
--- a/src/reflect/scala/reflect/api/Importers.scala
+++ b/src/reflect/scala/reflect/api/Importers.scala
@@ -1,8 +1,6 @@
package scala.reflect
package api
-// [Martin] Importers need to be made mirror aware.
-// [Eugene++] this is important
trait Importers { self: Universe =>
def mkImporter(from0: Universe): Importer { val from: from0.type }
diff --git a/src/reflect/scala/reflect/api/JavaUniverse.scala b/src/reflect/scala/reflect/api/JavaUniverse.scala
index 8bf62a357c..cc703e833d 100644
--- a/src/reflect/scala/reflect/api/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/api/JavaUniverse.scala
@@ -1,19 +1,40 @@
package scala.reflect
package api
-// [Martin] Moved to compiler because it needs to see runtime.Universe
-// The two will be united in scala-reflect anyway.
-trait JavaUniverse extends Universe with Mirrors with TagInterop { self =>
+trait JavaUniverse extends Universe with Mirrors { self =>
type RuntimeClass = java.lang.Class[_]
override type Mirror >: Null <: JavaMirror
- trait JavaMirror extends MirrorOf[self.type] with RuntimeMirror {
+ trait JavaMirror extends scala.reflect.api.Mirror[self.type] with RuntimeMirror {
val classLoader: ClassLoader
override def toString = s"JavaMirror with ${runtime.ReflectionUtils.show(classLoader)}"
}
def runtimeMirror(cl: ClassLoader): Mirror
-}
+ override def typeTagToManifest[T: ClassTag](mirror0: Any, tag: Universe # TypeTag[T]): Manifest[T] = {
+ // SI-6239: make this conversion more precise
+ val mirror = mirror0.asInstanceOf[Mirror]
+ val runtimeClass = mirror.runtimeClass(tag.in(mirror).tpe)
+ Manifest.classType(runtimeClass).asInstanceOf[Manifest[T]]
+ }
+
+ override def manifestToTypeTag[T](mirror0: Any, manifest: Manifest[T]): Universe # TypeTag[T] =
+ TypeTag(mirror0.asInstanceOf[Mirror], new TypeCreator {
+ def apply[U <: Universe with Singleton](mirror: scala.reflect.api.Mirror[U]): U # Type = {
+ mirror.universe match {
+ case ju: JavaUniverse =>
+ val jm = mirror.asInstanceOf[ju.Mirror]
+ val sym = jm.classSymbol(manifest.erasure)
+ val tpe =
+ if (manifest.typeArguments.isEmpty) sym.toType
+ else ju.appliedType(sym.toTypeConstructor, manifest.typeArguments map (targ => ju.manifestToTypeTag(jm, targ)) map (_.in(jm).tpe))
+ tpe.asInstanceOf[U # Type]
+ case u =>
+ u.manifestToTypeTag(mirror.asInstanceOf[u.Mirror], manifest).in(mirror).tpe
+ }
+ }
+ })
+}
diff --git a/src/library/scala/reflect/base/MirrorOf.scala b/src/reflect/scala/reflect/api/Mirror.scala
index 6dc8090eee..2de0d7120e 100644
--- a/src/library/scala/reflect/base/MirrorOf.scala
+++ b/src/reflect/scala/reflect/api/Mirror.scala
@@ -1,16 +1,33 @@
package scala.reflect
-package base
+package api
-// [Eugene++ to Martin] why was this a member of `scala.reflect`, but not `scala.reflect.base`?
-
-abstract class MirrorOf[U <: base.Universe with Singleton] {
- /** .. */
+/**
+ * The base interface for all mirrors.
+ *
+ * @tparam U the type of the universe this mirror belongs to.
+ *
+ * This is defined outside the reflection universe cake pattern implementation
+ * so that it can be referenced from outside. For example, TypeCreator and TreeCreator
+ * reference Mirror and also need to be defined outside the cake as they are
+ * used by type tags, which can be migrated between different universes and consequently
+ * cannot be bound to a fixed one.
+ *
+ * @see [[Mirrors]]
+ */
+abstract class Mirror[U <: Universe with Singleton] {
+ /** The universe this mirror belongs to. */
val universe: U
- /** .. */
+ /** The class symbol of the `_root_` package */
def RootClass: U#ClassSymbol
+
+ /** The module symbol of the `_root_` package */
def RootPackage: U#ModuleSymbol
+
+ /** The module class symbol of the default (unnamed) package */
def EmptyPackageClass: U#ClassSymbol
+
+ /** The module symbol of the default (unnamed) package */
def EmptyPackage: U#ModuleSymbol
/** The symbol corresponding to the globally accessible class with the
diff --git a/src/reflect/scala/reflect/api/Mirrors.scala b/src/reflect/scala/reflect/api/Mirrors.scala
index f2f96645e3..bff899daa4 100644
--- a/src/reflect/scala/reflect/api/Mirrors.scala
+++ b/src/reflect/scala/reflect/api/Mirrors.scala
@@ -1,23 +1,31 @@
package scala.reflect
package api
+/**
+ * Defines a type hierarchy for mirrors.
+ *
+ * Every universe has one or more mirrors. A mirror defines a hierarchy of symbols starting with the root package `_root_`
+ * and provides methods to locate and define classes and singleton objects in that hierarchy.
+ *
+ * On the JVM, there is a one-to-one correspondence between class loaders and mirrors.
+ */
trait Mirrors { self: Universe =>
+ /** The base type of all mirrors of this universe.
+ *
+ * This abstract type conforms to the base interface for all mirrors defined in [[scala.reflect.api.Mirror]]
+ * and is gradually refined in specific universes (e.g. `Mirror` of a [[scala.reflect.api.JavaUniverse]] is capable of reflection).
+ */
+ type Mirror >: Null <: scala.reflect.api.Mirror[self.type]
+
+ /** The root mirror of this universe. This mirror contains standard Scala classes and types such as `Any`, `AnyRef`, `AnyVal`,
+ * `Nothing`, `Null`, and all classes loaded from scala-library, which are shared across all mirrors within the enclosing universe.
+ */
+ val rootMirror: Mirror
+
type RuntimeClass >: Null
- // [Eugene] also, it might make sense to provide shortcuts for the API
- //
- // for example, right now to invoke the same method for several different instances, you need:
- // 1) get the method symbol
- // 2) get the instance mirror for every instance
- // 3) call reflectMethod on the instance mirrors for every instance
- // 4) call apply for every instance (okay, this can be united with step #3, but still)
- //
- // I have several suggestions that we can discuss later:
- // 1) For every `reflectXXX(sym: Symbol): XXXMirror`, add `reflectXXX(name: String, types: Type*): XXXMirror` and `reflectXXXs(): List[XXXMirror]`
- // 2) Provide a way to skip obtaining InstanceMirror (step #2 in the outline provided above)
-
- // [Eugene] another improvement would be have mirrors reproduce the structure of the reflection domain
+ // todo. an improvement might be having mirrors reproduce the structure of the reflection domain
// e.g. a ClassMirror could also have a list of fields, methods, constructors and so on
// read up more on the proposed design in "Reflecting Scala" by Y. Coppel
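
A short sketch of the mirror-based entry points described above, reflecting an instance via a runtime mirror; `declaration` on types and `reflectMethod` on instance mirrors are assumed from the companion Types/Mirrors APIs of the same release.

    import scala.reflect.runtime.{universe => ru}

    case class Person(name: String)

    // One mirror per class loader on the JVM; `reflect` turns an object into an
    // InstanceMirror from which method and field mirrors can be obtained.
    val m  = ru.runtimeMirror(getClass.getClassLoader)
    val im = m.reflect(Person("Jane"))

    // Look up the `name` accessor and invoke it reflectively.
    val nameGetter = ru.typeOf[Person].declaration(ru.newTermName("name")).asMethod
    println(im.reflectMethod(nameGetter)())   // prints "Jane"
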
@@ -162,7 +170,7 @@ trait Mirrors { self: Universe =>
def runtimeClass: RuntimeClass
/** True if the mirror represents the static part
- * if a runtime class or the companion object of a Scala class.
+ * of a runtime class or the companion object of a Scala class.
* One has:
*
* this.isStatic == this.isInstanceOf[ModuleMirror]
@@ -204,7 +212,7 @@ trait Mirrors { self: Universe =>
* Otherwise, if the mirror represents the static part of a runtime class, the
* mirror representing the instance part of the same class.
*/
- def companion: Option[ClassMirror]
+ override def companion: Option[ClassMirror]
}
/** A mirror that reflects the instance parts of a runtime class */
@@ -231,11 +239,11 @@ trait Mirrors { self: Universe =>
* Otherwise, if the mirror represents a runtime instance class, a mirror representing the static
* part of the same class.
*/
- def companion: Option[ModuleMirror]
+ override def companion: Option[ModuleMirror]
}
/** A mirror that reflects instances and static classes */
- trait ReflectiveMirror extends MirrorOf[Mirrors.this.type] {
+ trait ReflectiveMirror extends scala.reflect.api.Mirror[Mirrors.this.type] {
/** A reflective mirror for the given object.
*
diff --git a/src/reflect/scala/reflect/api/Names.scala b/src/reflect/scala/reflect/api/Names.scala
index d6868c26ab..e8665ca736 100644
--- a/src/reflect/scala/reflect/api/Names.scala
+++ b/src/reflect/scala/reflect/api/Names.scala
@@ -2,23 +2,52 @@ package scala.reflect
package api
/** A trait that manages names.
- * A name is a string in one of two name universes: terms and types.
- * The same string can be a name in both universes.
- * Two names are equal if they represent the same string and they are
- * members of the same universe.
*
- * Names are interned. That is, for two names `name11 and `name2`,
- * `name1 == name2` implies `name1 eq name2`.
+ * @see TermName
+ * @see TypeName
*/
-trait Names extends base.Names {
+trait Names {
+ // Intentionally no implicit from String => Name.
+ implicit def stringToTermName(s: String): TermName = newTermName(s)
+ implicit def stringToTypeName(s: String): TypeName = newTypeName(s)
- /** The abstract type of names */
+ /**
+ * The abstract type of names
+ *
+ * A Name wraps a string as the name for either a type ([[TypeName]]) or a term ([[TermName]]).
+ * Two names are equal if the wrapped strings are equal and they are either both `TypeName` or both `TermName`.
+ * The same string can co-exist as a `TypeName` and a `TermName`, but they would not be equal.
+ * Names are interned. That is, for two names `name1` and `name2`,
+ * `name1 == name2` implies `name1 eq name2`.
+ *
+ * One of the reasons for using names rather than plain strings is to be explicit about whether an identifier represents a type or a term.
+ */
type Name >: Null <: NameApi
+ implicit val NameTag: ClassTag[Name]
+
+ /** The abstract type of names representing types */
+ type TypeName >: Null <: Name
+ implicit val TypeNameTag: ClassTag[TypeName]
- /** The extended API of names that's supported on reflect mirror via an
+ /** The abstract type of names representing terms */
+ type TermName >: Null <: Name
+ implicit val TermNameTag: ClassTag[TermName]
+
+ /** The API of names that's supported on reflect mirror via an
* implicit conversion in reflect.ops
*/
- abstract class NameApi extends NameBase {
+ abstract class NameApi {
+ /** Checks whether the name is a term name */
+ def isTermName: Boolean
+
+ /** Checks whether the name is a type name */
+ def isTypeName: Boolean
+
+ /** Returns a term name that wraps the same string as `this` */
+ def toTermName: TermName
+
+ /** Returns a type name that wraps the same string as `this` */
+ def toTypeName: TypeName
/** Replaces all occurrences of \$op_names in this name by corresponding operator symbols.
* Example: `foo_\$plus\$eq` becomes `foo_+=`
@@ -38,4 +67,20 @@ trait Names extends base.Names {
*/
def encodedName: Name
}
+
+ /** Creates a new term name.
+ */
+ def newTermName(s: String): TermName
+
+ /** Creates a new type name.
+ */
+ def newTypeName(s: String): TypeName
+
+ /** Wraps the empty string. Can be used as the null object for a term name.
+ */
+ def EmptyTermName: TermName = newTermName("")
+
+ /** Wraps the empty string. Can be used as the null object for a type name.
+ */
+ def EmptyTypeName: TypeName = EmptyTermName.toTypeName
}
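
A small sketch of the name API above, showing that term and type names live in separate namespaces even when they wrap the same string.

    import scala.reflect.runtime.universe._

    val term: TermName = newTermName("List")
    val tpe:  TypeName = newTypeName("List")

    println((term: Name) == (tpe: Name))        // false: same string, different namespaces
    println(term.toTypeName == tpe)             // true: converted into the same namespace
    println(term.isTermName && tpe.isTypeName)  // true
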
diff --git a/src/reflect/scala/reflect/api/Position.scala b/src/reflect/scala/reflect/api/Position.scala
new file mode 100644
index 0000000000..9c63e4becf
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Position.scala
@@ -0,0 +1,167 @@
+package scala.reflect
+package api
+
+/** The Position class and its subclasses represent positions of ASTs and symbols.
+ * Except for NoPosition and FakePos, every position refers to a SourceFile
+ * and to an offset in the sourcefile (its `point`). For batch compilation,
+ * that's all. For interactive IDE's there are also RangePositions
+ * and TransparentPositions. A RangePosition indicates a start and an end
+ * in addition to its point. TransparentPositions are a subclass of RangePositions.
+ * Range positions that are not transparent are called opaque.
+ * Trees with RangePositions need to satisfy the following invariants.
+ *
+ * INV1: A tree with an offset position never contains a child
+ * with a range position
+ * INV2: If the child of a tree with a range position also has a range position,
+ * then the child's range is contained in the parent's range.
+ * INV3: Opaque range positions of children of the same node are non-overlapping
+ * (this means their overlap is at most a single point).
+ *
+ * The following tests are useful on positions:
+ *
+ * pos.isDefined true if position is neither a NoPosition nor a FakePosition
+ * pos.isRange true if position is a range
+ * pos.isOpaqueRange true if position is an opaque range
+ *
+ * The following accessor methods are provided:
+ *
+ * pos.source The source file of the position, which must be defined
+ * pos.point The offset of the position's point, which must be defined
+ * pos.start The start of the position, which must be a range
+ * pos.end The end of the position, which must be a range
+ *
+ * There are also convenience methods, such as
+ *
+ * pos.startOrPoint
+ * pos.endOrPoint
+ * pos.pointOrElse(default)
+ *
+ * These are less strict about the kind of position on which they can be applied.
+ *
+ * The following conversion methods are often used:
+ *
+ * pos.focus converts a range position to an offset position, keeping its point;
+ * returns all other positions unchanged.
+ * pos.makeTransparent converts an opaque range position into a transparent one.
+ * returns all other positions unchanged.
+ */
+trait Position extends Attachments {
+
+ type Pos >: Null <: Position
+
+ /** The source file of this position.
+ *
+ * The return type is [[scala.reflect.internal.util.SourceFile]], which belongs to an internal part of Scala reflection.
+ * It should not be used unless you know what you are doing. In subsequent releases, this API will be refined
+ * and exposed as a part of scala.reflect.api.
+ */
+ def source: scala.reflect.internal.util.SourceFile
+
+ /** Is this position neither a NoPosition nor a FakePosition?
+ * If isDefined is true, offset and source are both defined.
+ */
+ def isDefined: Boolean
+
+ /** Is this position a range position? */
+ def isRange: Boolean
+
+ /** Is this position a transparent position? */
+ def isTransparent: Boolean
+
+ /** Is this position a non-transparent range position? */
+ def isOpaqueRange: Boolean
+
+ /** if opaque range, make this position transparent */
+ def makeTransparent: Pos
+
+ /** The start of the position's range, error if not a range position */
+ def start: Int
+
+ /** The start of the position's range, or point if not a range position */
+ def startOrPoint: Int
+
+ /** The point (where the ^ is) of the position */
+ def point: Int
+
+ /** The point (where the ^ is) of the position, or else `default` if undefined */
+ def pointOrElse(default: Int): Int
+
+ /** The end of the position's range, error if not a range position */
+ def end: Int
+
+ /** The end of the position's range, or point if not a range position */
+ def endOrPoint: Int
+
+ /** The same position with a different start value (if a range) */
+ def withStart(off: Int): Pos
+
+ /** The same position with a different end value (if a range) */
+ def withEnd(off: Int): Pos
+
+ /** The same position with a different point value (if a range or offset) */
+ def withPoint(off: Int): Pos
+
+ /** If this is a range, the union with the other range, with the point of this position.
+ * Otherwise, this position
+ */
+ def union(pos: Pos): Pos
+
+ /** If this is a range position, the offset position of its point.
+ * Otherwise the position itself
+ */
+ def focus: Pos
+
+ /** If this is a range position, the offset position of its start.
+ * Otherwise the position itself
+ */
+ def focusStart: Pos
+
+ /** If this is a range position, the offset position of its end.
+ * Otherwise the position itself
+ */
+ def focusEnd: Pos
+
+ /** Does this position include the given position `pos`?
+ * This holds if `this` is a range position and its range [start..end]
+ * is the same or covers the range of the given position, which may or may not be a range position.
+ */
+ def includes(pos: Pos): Boolean
+
+ /** Does this position properly include the given position `pos` ("properly" meaning their
+ * ranges are not the same)?
+ */
+ def properlyIncludes(pos: Pos): Boolean
+
+ /** Does this position precede that position?
+ * This holds if both positions are defined and the end point of this position
+ * is not larger than the start point of the given position.
+ */
+ def precedes(pos: Pos): Boolean
+
+ /** Does this position properly precede the given position `pos` ("properly" meaning their ranges
+ * do not share a common point).
+ */
+ def properlyPrecedes(pos: Pos): Boolean
+
+ /** Does this position overlap with that position?
+ * This holds if both positions are ranges and there is an interval of
+ * non-zero length that is shared by both position ranges.
+ */
+ def overlaps(pos: Pos): Boolean
+
+ /** Does this position cover the same range as that position?
+ * Holds only if both positions are ranges.
+ */
+ def sameRange(pos: Pos): Boolean
+
+ def line: Int
+
+ def column: Int
+
+ /** Convert this to a position around `point` that spans a single source line */
+ def toSingleLine: Pos
+
+ def lineContent: String
+
+ def show: String
+}
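
A defensive helper over the Position API above, as a sketch: it prefers the range start when available and otherwise falls back to the point.

    import scala.reflect.runtime.universe._

    // Only range positions define `start`; offset positions only define `point`.
    def startOf(pos: Position): Option[Int] =
      if (!pos.isDefined) None
      else if (pos.isRange) Some(pos.start)
      else Some(pos.point)
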
diff --git a/src/reflect/scala/reflect/api/Positions.scala b/src/reflect/scala/reflect/api/Positions.scala
index 9d3d90d9f8..5c530e7e70 100644
--- a/src/reflect/scala/reflect/api/Positions.scala
+++ b/src/reflect/scala/reflect/api/Positions.scala
@@ -1,11 +1,28 @@
package scala.reflect
package api
-trait Positions extends base.Positions {
+/**
+ * Defines the type hierarchy for positions.
+ *
+ * @see [[scala.reflect]] for a description of how the class hierarchy is encoded here.
+ */
+trait Positions {
self: Universe =>
/** .. */
- type Position >: Null <: PositionApi { type Pos = Position }
+ type Position >: Null <: scala.reflect.api.Position { type Pos = Position }
+
+ /** A tag that preserves the identity of the `Position` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val PositionTag: ClassTag[Position]
+
+ /** A special "missing" position. */
+ val NoPosition: Position
+
+ /** Assigns a given position to all position-less nodes of a given AST.
+ */
+ def atPos[T <: Tree](pos: Position)(tree: T): T
/** A position that wraps a set of trees.
* The point of the wrapping position is the point of the default position.
@@ -20,177 +37,4 @@ trait Positions extends base.Positions {
* Otherwise returns a synthetic offset position to point.
*/
def wrappingPos(trees: List[Tree]): Position
-
- /** Ensure that given tree has no positions that overlap with
- * any of the positions of `others`. This is done by
- * shortening the range or assigning TransparentPositions
- * to some of the nodes in `tree`.
- */
- //def ensureNonOverlapping(tree: Tree, others: List[Tree])
- // [Eugene++] can this method be of use for macros?
-}
-
-/** The Position class and its subclasses represent positions of ASTs and symbols.
- * Except for NoPosition and FakePos, every position refers to a SourceFile
- * and to an offset in the sourcefile (its `point`). For batch compilation,
- * that's all. For interactive IDE's there are also RangePositions
- * and TransparentPositions. A RangePosition indicates a start and an end
- * in addition to its point. TransparentPositions are a subclass of RangePositions.
- * Range positions that are not transparent are called opaque.
- * Trees with RangePositions need to satisfy the following invariants.
- *
- * INV1: A tree with an offset position never contains a child
- * with a range position
- * INV2: If the child of a tree with a range position also has a range position,
- * then the child's range is contained in the parent's range.
- * INV3: Opaque range positions of children of the same node are non-overlapping
- * (this means their overlap is at most a single point).
- *
- * The following tests are useful on positions:
- *
- * pos.isDefined true if position is not a NoPosition nor a FakePosition
- * pos.isRange true if position is a range
- * pos.isOpaqueRange true if position is an opaque range
- *
- * The following accessor methods are provided:
- *
- * pos.source The source file of the position, which must be defined
- * pos.point The offset of the position's point, which must be defined
- * pos.start The start of the position, which must be a range
- * pos.end The end of the position, which must be a range
- *
- * There are also convenience methods, such as
- *
- * pos.startOrPoint
- * pos.endOrPoint
- * pos.pointOrElse(default)
- *
- * These are less strict about the kind of position on which they can be applied.
- *
- * The following conversion methods are often used:
- *
- * pos.focus converts a range position to an offset position, keeping its point;
- * returns all other positions unchanged.
- * pos.makeTransparent converts an opaque range position into a transparent one.
- * returns all other positions unchanged.
- */
-trait PositionApi extends Attachments {
-
- type Pos >: Null <: PositionApi
-
- /** Java file corresponding to the source file of this position.
- */
- def fileInfo: java.io.File
-
- /** Content of the source file that contains this position.
- */
- def fileContent: Array[Char]
-
- /** Is this position neither a NoPosition nor a FakePosition?
- * If isDefined is true, offset and source are both defined.
- */
- def isDefined: Boolean
-
- /** Is this position a range position? */
- def isRange: Boolean
-
- /** Is this position a transparent position? */
- def isTransparent: Boolean
-
- /** Is this position a non-transparent range position? */
- def isOpaqueRange: Boolean
-
- /** if opaque range, make this position transparent */
- def makeTransparent: Pos
-
- /** The start of the position's range, error if not a range position */
- def start: Int
-
- /** The start of the position's range, or point if not a range position */
- def startOrPoint: Int
-
- /** The point (where the ^ is) of the position */
- def point: Int
-
- /** The point (where the ^ is) of the position, or else `default` if undefined */
- def pointOrElse(default: Int): Int
-
- /** The end of the position's range, error if not a range position */
- def end: Int
-
- /** The end of the position's range, or point if not a range position */
- def endOrPoint: Int
-
- /** The same position with a different start value (if a range) */
- def withStart(off: Int): Pos
-
- /** The same position with a different end value (if a range) */
- def withEnd(off: Int): Pos
-
- /** The same position with a different point value (if a range or offset) */
- def withPoint(off: Int): Pos
-
- /** If this is a range, the union with the other range, with the point of this position.
- * Otherwise, this position
- */
- def union(pos: Pos): Pos
-
- /** If this is a range position, the offset position of its point.
- * Otherwise the position itself
- */
- def focus: Pos
-
- /** If this is a range position, the offset position of its start.
- * Otherwise the position itself
- */
- def focusStart: Pos
-
- /** If this is a range position, the offset position of its end.
- * Otherwise the position itself
- */
- def focusEnd: Pos
-
- /** Does this position include the given position `pos`.
- * This holds if `this` is a range position and its range [start..end]
- * is the same or covers the range of the given position, which may or may not be a range position.
- */
- def includes(pos: Pos): Boolean
-
- /** Does this position properly include the given position `pos` ("properly" meaning their
- * ranges are not the same)?
- */
- def properlyIncludes(pos: Pos): Boolean
-
- /** Does this position precede that position?
- * This holds if both positions are defined and the end point of this position
- * is not larger than the start point of the given position.
- */
- def precedes(pos: Pos): Boolean
-
- /** Does this position properly precede the given position `pos` ("properly" meaning their ranges
- * do not share a common point).
- */
- def properlyPrecedes(pos: Pos): Boolean
-
- /** Does this position overlap with that position?
- * This holds if both positions are ranges and there is an interval of
- * non-zero length that is shared by both position ranges.
- */
- def overlaps(pos: Pos): Boolean
-
- /** Does this position cover the same range as that position?
- * Holds only if both position are ranges
- */
- def sameRange(pos: Pos): Boolean
-
- def line: Int
-
- def column: Int
-
- /** Convert this to a position around `point` that spans a single source line */
- def toSingleLine: Pos
-
- def lineContent: String
-
- def show: String
}
diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala
index 27d3b8ba7d..65ff2ed9fa 100644
--- a/src/reflect/scala/reflect/api/Printers.scala
+++ b/src/reflect/scala/reflect/api/Printers.scala
@@ -23,7 +23,7 @@ trait Printers { self: Universe =>
case class BooleanFlag(val value: Option[Boolean])
object BooleanFlag {
- import language.implicitConversions
+ import scala.language.implicitConversions
implicit def booleanToBooleanFlag(value: Boolean): BooleanFlag = BooleanFlag(Some(value))
implicit def optionToBooleanFlag(value: Option[Boolean]): BooleanFlag = BooleanFlag(value)
}
diff --git a/src/library/scala/reflect/base/Scopes.scala b/src/reflect/scala/reflect/api/Scopes.scala
index a388fdc392..d30da07ad5 100644
--- a/src/library/scala/reflect/base/Scopes.scala
+++ b/src/reflect/scala/reflect/api/Scopes.scala
@@ -1,24 +1,33 @@
package scala.reflect
-package base
+package api
+/**
+ * Defines the type hierarchy for scopes.
+ *
+ * @see [[scala.reflect]] for a description of how the class hierarchy is encoded here.
+ */
trait Scopes { self: Universe =>
- type Scope >: Null <: ScopeBase
+ /** The base type of all scopes. A scope object generally maps names to symbols available in the current lexical scope.
+ * Scopes can be nested. This base type, however, only exposes a minimal interface, representing a scope as an iterable of symbols.
+ */
+ type Scope >: Null <: ScopeApi
- /** The base API that all scopes support */
- trait ScopeBase extends Iterable[Symbol]
+ /** The API that all scopes support */
+ trait ScopeApi extends Iterable[Symbol]
/** A tag that preserves the identity of the `Scope` abstract type from erasure.
* Can be used for pattern matching, instance tests, serialization and likes.
*/
implicit val ScopeTag: ClassTag[Scope]
- type MemberScope >: Null <: Scope with MemberScopeBase
+ /** The type of member scopes, as in class definitions, for example. */
+ type MemberScope >: Null <: Scope with MemberScopeApi
- /** The base API that all member scopes support */
- trait MemberScopeBase extends ScopeBase {
+ /** The API that all member scopes support */
+ trait MemberScopeApi extends ScopeApi {
/** Sorts the symbols included in this scope so that:
- * 1) Symbols appear the linearization order of their owners.
+ * 1) Symbols appear in the linearization order of their owners.
* 2) Symbols with the same owner appear in reverse order of their declarations.
* 3) Synthetic members (e.g. getters/setters for vals/vars) might appear in arbitrary order.
*/
@@ -30,12 +39,12 @@ trait Scopes { self: Universe =>
*/
implicit val MemberScopeTag: ClassTag[MemberScope]
- /** Create a new scope */
+ /** Create a new scope. */
def newScope: Scope
- /** Create a new scope nested in another one with which it shares its elements */
+ /** Create a new scope nested in another one with which it shares its elements. */
def newNestedScope(outer: Scope): Scope
- /** Create a new scope with given initial elements */
+ /** Create a new scope with the given initial elements. */
def newScopeWith(elems: Symbol*): Scope
} \ No newline at end of file
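
A sketch of working with member scopes, assuming the sorting method documented above is exposed as `sorted` and that member scopes are obtained from types via `declarations`, as in the released 2.10 API.

    import scala.reflect.runtime.universe._

    // A MemberScope is an Iterable[Symbol]; `sorted` orders members by the
    // linearization of their owners and, within an owner, in reverse declaration order.
    val members: MemberScope = typeOf[List[Int]].declarations
    members.sorted.take(5).foreach(sym => println(sym.fullName))
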
diff --git a/src/reflect/scala/reflect/api/StandardDefinitions.scala b/src/reflect/scala/reflect/api/StandardDefinitions.scala
index c6f02f1a33..03f2a6b0aa 100644
--- a/src/reflect/scala/reflect/api/StandardDefinitions.scala
+++ b/src/reflect/scala/reflect/api/StandardDefinitions.scala
@@ -5,12 +5,59 @@
package scala.reflect
package api
-trait StandardDefinitions extends base.StandardDefinitions {
+/**
+ * Defines standard symbols and types.
+ */
+trait StandardDefinitions {
self: Universe =>
+ /** A value containing all standard definitions. */
val definitions: DefinitionsApi
- trait DefinitionsApi extends DefinitionsBase {
+ /** Defines standard symbols (and types via its base trait). */
+ trait DefinitionsApi extends StandardTypes {
+ /** The class symbol of package `scala`. */
+ def ScalaPackageClass: ClassSymbol
+
+ /** The module class symbol of package `scala`. */
+ def ScalaPackage: ModuleSymbol
+
+ // top types
+ def AnyClass : ClassSymbol
+ def AnyValClass: ClassSymbol
+ def ObjectClass: ClassSymbol
+ def AnyRefClass: TypeSymbol
+
+ // bottom types
+ def NullClass : ClassSymbol
+ def NothingClass: ClassSymbol
+
+ // the scala value classes
+ def UnitClass : ClassSymbol
+ def ByteClass : ClassSymbol
+ def ShortClass : ClassSymbol
+ def CharClass : ClassSymbol
+ def IntClass : ClassSymbol
+ def LongClass : ClassSymbol
+ def FloatClass : ClassSymbol
+ def DoubleClass : ClassSymbol
+ def BooleanClass: ClassSymbol
+
+ /** The class symbol of class `String`. */
+ def StringClass : ClassSymbol
+
+ /** The class symbol of class `Class`. */
+ def ClassClass : ClassSymbol
+
+ /** The class symbol of class `Array`. */
+ def ArrayClass : ClassSymbol
+
+ /** The class symbol of class `List`. */
+ def ListClass : ClassSymbol
+
+ /** The module symbol of `scala.Predef`. */
+ def PredefModule: ModuleSymbol
+
def JavaLangPackageClass: ClassSymbol
def JavaLangPackage: ModuleSymbol
def ArrayModule: ModuleSymbol
@@ -45,4 +92,52 @@ trait StandardDefinitions extends base.StandardDefinitions {
def ScalaPrimitiveValueClasses: List[ClassSymbol]
def ScalaNumericValueClasses: List[ClassSymbol]
}
+
+ /** Defines standard types. */
+ trait StandardTypes {
+ /** The `Type` of type `Unit`. */
+ val UnitTpe: Type
+
+ /** The `Type` of primitive type `Byte`. */
+ val ByteTpe: Type
+
+ /** The `Type` of primitive type `Short`. */
+ val ShortTpe: Type
+
+ /** The `Type` of primitive type `Char`. */
+ val CharTpe: Type
+
+ /** The `Type` of primitive type `Int`. */
+ val IntTpe: Type
+
+ /** The `Type` of primitive type `Long`. */
+ val LongTpe: Type
+
+ /** The `Type` of primitive type `Float`. */
+ val FloatTpe: Type
+
+ /** The `Type` of primitive type `Double`. */
+ val DoubleTpe: Type
+
+ /** The `Type` of primitive type `Boolean`. */
+ val BooleanTpe: Type
+
+ /** The `Type` of type `Any`. */
+ val AnyTpe: Type
+
+ /** The `Type` of type `AnyVal`. */
+ val AnyValTpe: Type
+
+ /** The `Type` of type `AnyRef`. */
+ val AnyRefTpe: Type
+
+ /** The `Type` of type `Object`. */
+ val ObjectTpe: Type
+
+ /** The `Type` of type `Nothing`. */
+ val NothingTpe: Type
+
+ /** The `Type` of type `Null`. */
+ val NullTpe: Type
+ }
}
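
A quick sketch showing how the pre-resolved symbols and types above are reached through `definitions` on a runtime universe.

    import scala.reflect.runtime.universe._

    println(definitions.IntClass.fullName)       // "scala.Int"
    println(definitions.ListClass.fullName)      // the List class used by Scala collections
    println(definitions.IntTpe =:= typeOf[Int])  // true: the same type
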
diff --git a/src/reflect/scala/reflect/api/StandardNames.scala b/src/reflect/scala/reflect/api/StandardNames.scala
index 65d87ad7f0..354a9f9328 100644
--- a/src/reflect/scala/reflect/api/StandardNames.scala
+++ b/src/reflect/scala/reflect/api/StandardNames.scala
@@ -5,26 +5,40 @@
package scala.reflect
package api
-// Q: I have a pretty name. Where do I put it - into base.StandardNames or into api.StandardNames?
-// A: <see base.StandardNames>
+// Q: I have a pretty name. Can I put it here?
+// A: Is it necessary to construct trees (like EMPTY or WILDCARD_STAR)? If yes, then sure.
+// Is it necessary to perform reflection (like ERROR or LOCAL_SUFFIX_STRING)? If yes, then sure.
+// Otherwise you'd better not - reflection API should stay minimalistic.
-trait StandardNames extends base.StandardNames {
+// TODO: document better
+/**
+ * Names necessary to create Scala trees.
+ */
+trait StandardNames {
self: Universe =>
val nme: TermNamesApi
val tpnme: TypeNamesApi
- trait NamesApi extends NamesBase {
+ trait NamesApi {
+ type NameType >: Null <: Name
+ val WILDCARD: NameType
val ROOT: NameType
val EMPTY: NameType
val ERROR: NameType
val PACKAGE: NameType
}
- trait TermNamesApi extends NamesApi with TermNamesBase {
+ trait TermNamesApi extends NamesApi {
+ type NameType = TermName
+ val CONSTRUCTOR: NameType
+ val ROOTPKG: NameType
val LOCAL_SUFFIX_STRING: String
}
- trait TypeNamesApi extends NamesApi with TypeNamesBase {
+ trait TypeNamesApi extends NamesApi {
+ type NameType = TypeName
+ val EMPTY: NameType
+ val WILDCARD_STAR: NameType
}
}
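
A sketch of a typical reflective use of these names; the `declaration` lookup on types and `alternatives` on term symbols are assumed from the companion Types/Symbols APIs of the same release.

    import scala.reflect.runtime.universe._

    // nme.CONSTRUCTOR is how constructors are looked up among a type's declarations.
    val ctor = typeOf[StringBuilder].declaration(nme.CONSTRUCTOR).asTerm
    println(ctor.isOverloaded)         // true: StringBuilder has several constructors
    println(ctor.alternatives.length)  // number of constructor alternatives
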
diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala
index 448382973a..8b24b953ae 100644
--- a/src/reflect/scala/reflect/api/Symbols.scala
+++ b/src/reflect/scala/reflect/api/Symbols.scala
@@ -1,47 +1,224 @@
package scala.reflect
package api
-trait Symbols extends base.Symbols { self: Universe =>
-
- override type Symbol >: Null <: SymbolApi
- override type TypeSymbol >: Null <: Symbol with TypeSymbolApi
- override type TermSymbol >: Null <: Symbol with TermSymbolApi
- override type MethodSymbol >: Null <: TermSymbol with MethodSymbolApi
- override type ModuleSymbol >: Null <: TermSymbol with ModuleSymbolApi
- override type ClassSymbol >: Null <: TypeSymbol with ClassSymbolApi
- override type FreeTermSymbol >: Null <: TermSymbol with FreeTermSymbolApi
- override type FreeTypeSymbol >: Null <: TypeSymbol with FreeTypeSymbolApi
+/**
+ * Defines the type hierarchy for symbols.
+ *
+ * @see [[scala.reflect]] for a description of how the class hierarchy is encoded here.
+ */
+trait Symbols { self: Universe =>
+
+ /** The type of symbols representing declarations */
+ type Symbol >: Null <: SymbolApi
+
+ /** A tag that preserves the identity of the `Symbol` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val SymbolTag: ClassTag[Symbol]
+
+ /** The type of type symbols representing type, class, and trait declarations,
+ * as well as type parameters
+ */
+ type TypeSymbol >: Null <: Symbol with TypeSymbolApi
+
+ /** A tag that preserves the identity of the `TypeSymbol` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val TypeSymbolTag: ClassTag[TypeSymbol]
+
+ /** The type of term symbols representing val, var, def, and object declarations as
+ * well as packages and value parameters.
+ */
+ type TermSymbol >: Null <: Symbol with TermSymbolApi
+
+ /** A tag that preserves the identity of the `TermSymbol` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val TermSymbolTag: ClassTag[TermSymbol]
+
+ /** The type of method symbols representing def declarations */
+ type MethodSymbol >: Null <: TermSymbol with MethodSymbolApi
+
+ /** A tag that preserves the identity of the `MethodSymbol` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val MethodSymbolTag: ClassTag[MethodSymbol]
+
+ /** The type of module symbols representing object declarations */
+ type ModuleSymbol >: Null <: TermSymbol with ModuleSymbolApi
+
+ /** A tag that preserves the identity of the `ModuleSymbol` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ModuleSymbolTag: ClassTag[ModuleSymbol]
+
+ /** The type of class symbols representing class and trait definitions */
+ type ClassSymbol >: Null <: TypeSymbol with ClassSymbolApi
+
+ /** A tag that preserves the identity of the `ClassSymbol` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ClassSymbolTag: ClassTag[ClassSymbol]
+
+ /** The type of free terms introduced by reification */
+ type FreeTermSymbol >: Null <: TermSymbol with FreeTermSymbolApi
+
+ /** A tag that preserves the identity of the `FreeTermSymbol` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val FreeTermSymbolTag: ClassTag[FreeTermSymbol]
+
+ /** The type of free types introduced by reification */
+ type FreeTypeSymbol >: Null <: TypeSymbol with FreeTypeSymbolApi
+
+ /** A tag that preserves the identity of the `FreeTypeSymbol` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val FreeTypeSymbolTag: ClassTag[FreeTypeSymbol]
+
+ /** A special "missing" symbol */
+ val NoSymbol: Symbol
/** The API of symbols */
- trait SymbolApi extends SymbolBase with HasFlagsBase { this: Symbol =>
+ trait SymbolApi { this: Symbol =>
+
+ /** The owner of this symbol. This is the symbol
+ * that directly contains the current symbol's definition.
+ * The `NoSymbol` symbol does not have an owner, and calling this method
+ * on one causes an internal error.
+ * The owner of the Scala root class [[scala.reflect.api.Mirror.RootClass]]
+ * and the Scala root object [[scala.reflect.api.Mirror.RootPackage]] is `NoSymbol`.
+ * Every other symbol has a chain of owners that ends in
+ * [[scala.reflect.api.Mirror.RootClass]].
+ */
+ def owner: Symbol
+
+ /** The type of the symbol name.
+ * Can be either `TermName` or `TypeName` depending on whether this is a `TermSymbol` or a `TypeSymbol`.
+ *
+ * Type name namespaces do not intersect with term name namespaces.
+ * This fact is reflected in different types for names of `TermSymbol` and `TypeSymbol`.
+ */
+ type NameType >: Null <: Name
+
+ /** The name of the symbol as a member of the `Name` type.
+ */
+ def name: Name
+
+ /** The encoded full path name of this symbol, where outer names and inner names
+ * are separated by periods.
+ */
+ def fullName: String
+
+ /** Does this symbol represent the definition of a type?
+ * Note that every symbol is either a term or a type.
+ * So for every symbol `sym` (except for `NoSymbol`),
+ * either `sym.isTerm` is true or `sym.isType` is true.
+ */
+ def isType: Boolean = false
+
+ /** This symbol cast to a TypeSymbol.
+ * @throws ScalaReflectionException if `isType` is false.
+ */
+ def asType: TypeSymbol = throw new ScalaReflectionException(s"$this is not a type")
+
+ /** Does this symbol represent the definition of a term?
+ * Note that every symbol is either a term or a type.
+ * So for every symbol `sym` (except for `NoSymbol`),
+ * either `sym.isTerm` is true or `sym.isType` is true.
+ */
+ def isTerm: Boolean = false
+
+ /** This symbol cast to a TermSymbol.
+ * @throws ScalaReflectionException if `isTerm` is false.
+ */
+ def asTerm: TermSymbol = throw new ScalaReflectionException(s"$this is not a term")
+
+ /** Does this symbol represent the definition of a method?
+ * If yes, `isTerm` is also guaranteed to be true.
+ */
+ def isMethod: Boolean = false
+
+ /** This symbol cast to a MethodSymbol.
+ * @throws ScalaReflectionException if `isMethod` is false.
+ */
+ def asMethod: MethodSymbol = {
+ def overloadedMsg =
+ "encapsulates multiple overloaded alternatives and cannot be treated as a method. "+
+ "Consider invoking `<offending symbol>.asTerm.alternatives` and manually picking the required method"
+ def vanillaMsg = "is not a method"
+ val msg = if (isOverloadedMethod) overloadedMsg else vanillaMsg
+ throw new ScalaReflectionException(s"$this $msg")
+ }
+
+ /** Used to provide a better error message for `asMethod` */
+ protected def isOverloadedMethod = false
+
+ /** Does this symbol represent the definition of a module (i.e. it
+ * results from an object definition)?
+ * If yes, `isTerm` is also guaranteed to be true.
+ */
+ def isModule: Boolean = false
+
+ /** This symbol cast to a ModuleSymbol defined by an object definition.
+ * @throws ScalaReflectionException if `isModule` is false.
+ */
+ def asModule: ModuleSymbol = throw new ScalaReflectionException(s"$this is not a module")
+
+ /** Does this symbol represent the definition of a class or trait?
+ * If yes, `isType` is also guaranteed to be true.
+ */
+ def isClass: Boolean = false
+
+ /** Does this symbol represent the definition of a class implicitly associated
+ * with an object definition (module class in scala compiler parlance).
+ * If yes, `isType` is also guaranteed to be true.
+ */
+ def isModuleClass: Boolean = false
+
+ /** This symbol cast to a ClassSymbol representing a class or trait.
+ * @throws ScalaReflectionException if `isClass` is false.
+ */
+ def asClass: ClassSymbol = throw new ScalaReflectionException(s"$this is not a class")
+
+ /** Does this symbol represent a free term captured by reification?
+ * If yes, `isTerm` is also guaranteed to be true.
+ */
+ def isFreeTerm: Boolean = false
+
+ /** This symbol cast to a free term symbol.
+ * @throws ScalaReflectionException if `isFreeTerm` is false.
+ */
+ def asFreeTerm: FreeTermSymbol = throw new ScalaReflectionException(s"$this is not a free term")
+
+ /** Does this symbol represent a free type captured by reification?
+ * If yes, `isType` is also guaranteed to be true.
+ */
+ def isFreeType: Boolean = false
+
+ /** This symbol cast to a free type symbol.
+ * @throws ScalaReflectionException if `isFreeType` is false.
+ */
+ def asFreeType: FreeTypeSymbol = throw new ScalaReflectionException(s"$this is not a free type")
+
+ def newTermSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TermSymbol
+ def newModuleAndClassSymbol(name: Name, pos: Position = NoPosition, flags: FlagSet = NoFlags): (ModuleSymbol, ClassSymbol)
+ def newMethodSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): MethodSymbol
+ def newTypeSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TypeSymbol
+ def newClassSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): ClassSymbol
- /** The position of this symbol
+ /** Source file if this symbol is created during this compilation run,
+ * or a class file if this symbol is loaded from a *.class or *.jar.
+ *
+ * The return type is [[scala.reflect.io.AbstractFile]], which belongs to an experimental part of Scala reflection.
+ * It should not be used unless you know what you are doing. In subsequent releases, this API will be refined
+ * and exposed as a part of scala.reflect.api.
*/
- def pos: Position
+ def associatedFile: scala.reflect.io.AbstractFile
/** A list of annotations attached to this Symbol.
*/
- // [Eugene++] we cannot expose the `annotations` method because it doesn't auto-initialize a symbol (see SI-5423)
- // there was an idea to use the `isCompilerUniverse` flag and auto-initialize symbols in `annotations` whenever this flag is false
- // but it doesn't work, because the unpickler (that is shared between reflective universes and global universes) is very picky about initialization
- // scala.reflect.internal.Types$TypeError: bad reference while unpickling scala.collection.immutable.Nil: type Nothing not found in scala.type not found.
- // at scala.reflect.internal.pickling.UnPickler$Scan.toTypeError(UnPickler.scala:836)
- // at scala.reflect.internal.pickling.UnPickler$Scan$LazyTypeRef.complete(UnPickler.scala:849) // auto-initialize goes boom
- // at scala.reflect.internal.Symbols$Symbol.info(Symbols.scala:1140)
- // at scala.reflect.internal.Symbols$Symbol.initialize(Symbols.scala:1272) // this triggers auto-initialize
- // at scala.reflect.internal.Symbols$Symbol.annotations(Symbols.scala:1438) // unpickler first tries to get pre-existing annotations
- // at scala.reflect.internal.Symbols$Symbol.addAnnotation(Symbols.scala:1458) // unpickler tries to add the annotation being read
- // at scala.reflect.internal.pickling.UnPickler$Scan.readSymbolAnnotation(UnPickler.scala:489) // unpickler detects an annotation
- // at scala.reflect.internal.pickling.UnPickler$Scan.run(UnPickler.scala:88)
- // at scala.reflect.internal.pickling.UnPickler.unpickle(UnPickler.scala:37)
- // at scala.reflect.runtime.JavaMirrors$JavaMirror.unpickleClass(JavaMirrors.scala:253) // unpickle from within a reflexive mirror
- // def annotations: List[AnnotationInfo]
- def getAnnotations: List[AnnotationInfo]
-
- /** Whether this symbol carries an annotation for which the given
- * symbol is its typeSymbol.
- */
- def hasAnnotation(sym: Symbol): Boolean
+ def annotations: List[Annotation]
/** For a class: the module or case class factory with the same name in the same package.
* For a module: the class with the same name in the same package.
@@ -59,6 +236,9 @@ trait Symbols extends base.Symbols { self: Universe =>
*/
def typeSignature: Type
+ /** Returns all symbols overridden by this symbol. */
+ def allOverriddenSymbols: List[Symbol]
+
/******************* tests *******************/
/** Does this symbol represent a synthetic (i.e. a compiler-generated) entity?
@@ -67,6 +247,11 @@ trait Symbols extends base.Symbols { self: Universe =>
*/
def isSynthetic: Boolean
+ /** Does this symbol represent an implementation artifact that isn't meant for public use?
+ * Examples of such artifacts are erasure bridges and $outer fields.
+ */
+ def isImplementationArtifact: Boolean
+
/** Does this symbol represent a local declaration or definition?
*
* If yes, either `isPrivate` or `isProtected` are guaranteed to be true.
@@ -110,10 +295,10 @@ trait Symbols extends base.Symbols { self: Universe =>
*
* The java access levels translate as follows:
*
- * java private: hasFlag(PRIVATE) && (privateWithin == NoSymbol)
- * java package: !hasFlag(PRIVATE | PROTECTED) && (privateWithin == enclosingPackage)
- * java protected: hasFlag(PROTECTED) && (privateWithin == enclosingPackage)
- * java public: !hasFlag(PRIVATE | PROTECTED) && (privateWithin == NoSymbol)
+ * java private: isPrivate && (privateWithin == NoSymbol)
+ * java package: !isPrivate && !isProtected && (privateWithin == enclosingPackage)
+ * java protected: isProtected && (privateWithin == enclosingPackage)
+ * java public: !isPrivate && !isProtected && (privateWithin == NoSymbol)
*/
def privateWithin: Symbol
@@ -131,18 +316,6 @@ trait Symbols extends base.Symbols { self: Universe =>
*/
def isErroneous : Boolean
- /** Can this symbol be loaded by a reflective mirror?
- *
- * Scalac relies on `ScalaSignature' annotation to retain symbols across compilation runs.
- * Such annotations (also called "pickles") are applied on top-level classes and include information
- * about all symbols reachable from the annotee. However, local symbols (e.g. classes or definitions local to a block)
- * are typically unreachable and information about them gets lost.
- *
- * This method is useful for macro writers who wish to save certain ASTs to be used at runtime.
- * With `isLocatable' it's possible to check whether a tree can be retained as is, or it needs special treatment.
- */
- def isLocatable: Boolean
-
/** Is this symbol static (i.e. with no outer instance)?
* Q: When exactly is a sym marked as STATIC?
* A: If it's a member of a toplevel object, or of an object contained in a toplevel object, or any number of levels deep.
@@ -174,6 +347,14 @@ trait Symbols extends base.Symbols { self: Universe =>
*/
def isSpecialized: Boolean
+ /** Is this symbol defined by Java?
+ */
+ def isJava: Boolean
+
+ /** Does this symbol represent an implicit value, definition, class or parameter?
+ */
+ def isImplicit: Boolean
+
/******************* helpers *******************/
/** ...
@@ -192,24 +373,27 @@ trait Symbols extends base.Symbols { self: Universe =>
/** ...
*/
def suchThat(cond: Symbol => Boolean): Symbol
-
- /** The string discriminator of this symbol; useful for debugging */
- def kind: String
}
/** The API of term symbols */
- trait TermSymbolApi extends SymbolApi with TermSymbolBase { this: TermSymbol =>
- /** Does this symbol represent a value, i.e. not a module and not a method?
- * [Eugene++] I need a review of the implementation
+ trait TermSymbolApi extends SymbolApi { this: TermSymbol =>
+ /** Term symbols have their names of type `TermName`.
*/
- def isValue: Boolean
+ final type NameType = TermName
+
+ final override def isTerm = true
+ final override def asTerm = this
+
+ /** Is this symbol introduced as `val`?
+ */
+ def isVal: Boolean
/** Does this symbol denote a stable value? */
def isStable: Boolean
- /** Does this symbol represent a mutable value?
+ /** Is this symbol introduced as `var`?
*/
- def isVariable: Boolean
+ def isVar: Boolean
/** Does this symbol represent a getter or a setter?
*/
@@ -230,10 +414,6 @@ trait Symbols extends base.Symbols { self: Universe =>
*/
def isOverloaded : Boolean
- /** Does this symbol represent an implicit value, definition or parameter?
- */
- def isImplicit: Boolean
-
/** Does this symbol represent a lazy value?
*/
def isLazy: Boolean
@@ -277,7 +457,41 @@ trait Symbols extends base.Symbols { self: Universe =>
}
/** The API of type symbols */
- trait TypeSymbolApi extends SymbolApi with TypeSymbolBase { this: TypeSymbol =>
+ trait TypeSymbolApi extends SymbolApi { this: TypeSymbol =>
+ /** Type symbols have their names of type `TypeName`.
+ */
+ final type NameType = TypeName
+
+ /** The type constructor corresponding to this type symbol.
+ * This is different from `toType` in that type parameters
+ * are part of results of `toType`, but not of `toTypeConstructor`.
+ *
+ * Example: Given a class declaration `class C[T] { ... } `, that generates a symbol
+ * `C`. Then `C.toType` is the type `C[T]`, but `C.toTypeConstructor` is `C`.
+ */
+ def toTypeConstructor: Type
+
+ /** A type reference that refers to this type symbol seen
+ * as a member of given type `site`.
+ */
+ def toTypeIn(site: Type): Type
+
+ /** A type reference that refers to this type symbol.
+ * Note that if the symbol is a member of a class, one is almost always interested
+ * in `toTypeIn` with a site type instead.
+ *
+ * Example: Given a class declaration `class C[T] { ... } `, that generates a symbol
+ * `C`. Then `C.toType` is the type `C[T]`.
+ *
+ * By contrast, `C.typeSignature` would be a type signature of form
+ * `PolyType(ClassInfoType(...))` that describes type parameters, value
+ * parameters, parent types, and members of `C`.
+ */
+ def toType: Type
+
+ final override def isType = true
+ final override def asType = this
+
/** Is the type parameter represented by this symbol contravariant?
*/
def isContravariant : Boolean
@@ -308,7 +522,10 @@ trait Symbols extends base.Symbols { self: Universe =>
}
/** The API of method symbols */
- trait MethodSymbolApi extends TermSymbolApi with MethodSymbolBase { this: MethodSymbol =>
+ trait MethodSymbolApi extends TermSymbolApi { this: MethodSymbol =>
+ final override def isMethod = true
+ final override def asMethod = this
+
/** Does this method represent a constructor?
*
* If `owner` is a class, then this is a vanilla JVM constructor.
@@ -316,6 +533,9 @@ trait Symbols extends base.Symbols { self: Universe =>
*/
def isConstructor: Boolean
+ /** Does this symbol denote the primary constructor of its enclosing class? */
+ def isPrimaryConstructor: Boolean
+
/** For a polymorphic method, its type parameters, the empty list for all other methods */
def typeParams: List[Symbol]
@@ -336,11 +556,23 @@ trait Symbols extends base.Symbols { self: Universe =>
}
/** The API of module symbols */
- trait ModuleSymbolApi extends TermSymbolApi with ModuleSymbolBase { this: ModuleSymbol =>
+ trait ModuleSymbolApi extends TermSymbolApi { this: ModuleSymbol =>
+ /** The class implicitly associated with the object definition.
+ * One can go back from a module class to the associated module symbol
+ * by inspecting its `selfType.termSymbol`.
+ */
+ def moduleClass: Symbol // needed for tree traversals
+ // when this becomes `moduleClass: ClassSymbol`, it will be the happiest day in my life
+
+ final override def isModule = true
+ final override def asModule = this
}
/** The API of class symbols */
- trait ClassSymbolApi extends TypeSymbolApi with ClassSymbolBase { this: ClassSymbol =>
+ trait ClassSymbolApi extends TypeSymbolApi { this: ClassSymbol =>
+ final override def isClass = true
+ final override def asClass = this
+
/** Does this symbol represent the definition of a primitive class?
* Namely, is it one of [[scala.Double]], [[scala.Float]], [[scala.Long]], [[scala.Int]], [[scala.Char]],
* [[scala.Short]], [[scala.Byte]], [[scala.Unit]] or [[scala.Boolean]]?
@@ -374,6 +606,22 @@ trait Symbols extends base.Symbols { self: Universe =>
*/
def isSealed: Boolean
+ /** If this is a sealed class, its known direct subclasses.
+ * Otherwise, the empty set.
+ */
+ def knownDirectSubclasses: Set[Symbol]
+
+ /** The list of all base classes of this type (including its own typeSymbol)
+ * in reverse linearization order, starting with the class itself and ending
+ * in class Any.
+ */
+ def baseClasses: List[Symbol]
+
+ /** The module corresponding to this module class,
+ * or NoSymbol if this symbol is not a module class.
+ */
+ def module: Symbol
+
/** If this symbol is a class or trait, its self type, otherwise the type
* of the symbol itself.
*/
@@ -387,7 +635,10 @@ trait Symbols extends base.Symbols { self: Universe =>
}
/** The API of free term symbols */
- trait FreeTermSymbolApi extends TermSymbolApi with FreeTermSymbolBase { this: FreeTermSymbol =>
+ trait FreeTermSymbolApi extends TermSymbolApi { this: FreeTermSymbol =>
+ final override def isFreeTerm = true
+ final override def asFreeTerm = this
+
/** The place where this symbol has been spawned */
def origin: String
@@ -396,7 +647,10 @@ trait Symbols extends base.Symbols { self: Universe =>
}
/** The API of free type symbols */
- trait FreeTypeSymbolApi extends TypeSymbolApi with FreeTypeSymbolBase { this: FreeTypeSymbol =>
+ trait FreeTypeSymbolApi extends TypeSymbolApi { this: FreeTypeSymbol =>
+ final override def isFreeType = true
+ final override def asFreeType = this
+
/** The place where this symbol has been spawned */
def origin: String
}
diff --git a/src/reflect/scala/reflect/api/TagInterop.scala b/src/reflect/scala/reflect/api/TagInterop.scala
index 4d2254cb9f..fc0558d717 100644
--- a/src/reflect/scala/reflect/api/TagInterop.scala
+++ b/src/reflect/scala/reflect/api/TagInterop.scala
@@ -1,38 +1,27 @@
package scala.reflect
package api
-import scala.reflect.base.TypeCreator
-import scala.reflect.base.{Universe => BaseUniverse}
+trait TagInterop { self: Universe =>
+ // TODO `mirror` parameters are now of type `Any`, because I can't make these path-dependent types work
+ // if you're brave enough, replace `Any` with `Mirror`, recompile and run interop_typetags_are_manifests.scala
-// [Martin] Moved to compiler because it needs to see runtime.Universe
-// The two will be united in scala-reflect anyway.
-trait TagInterop { self: JavaUniverse =>
+ /**
+ * Convert a typetag to a pre-Scala-2.10 manifest.
+ * For example
+ * {{{
+ * typeTagToManifest( scala.reflect.runtime.currentMirror, implicitly[TypeTag[String]] )
+ * }}}
+ */
+ def typeTagToManifest[T: ClassTag](mirror: Any, tag: Universe#TypeTag[T]): Manifest[T] =
+ throw new UnsupportedOperationException("This universe does not support tag -> manifest conversions. Use a JavaUniverse, e.g. the scala.reflect.runtime.universe.")
- // [Eugene++] would be great if we could approximate the interop without any mirrors
- // todo. think how to implement that
-
- override def typeTagToManifest[T: ClassTag](mirror0: Any, tag: base.Universe # TypeTag[T]): Manifest[T] = {
- // [Eugene++] implement more sophisticated logic
- // Martin said it'd be okay to simply copypaste `Implicits.manifestOfType`
- val mirror = mirror0.asInstanceOf[Mirror]
- val runtimeClass = mirror.runtimeClass(tag.in(mirror).tpe)
- Manifest.classType(runtimeClass).asInstanceOf[Manifest[T]]
- }
-
- override def manifestToTypeTag[T](mirror0: Any, manifest: Manifest[T]): base.Universe # TypeTag[T] =
- TypeTag(mirror0.asInstanceOf[Mirror], new TypeCreator {
- def apply[U <: BaseUniverse with Singleton](mirror: MirrorOf[U]): U # Type = {
- mirror.universe match {
- case ju: JavaUniverse =>
- val jm = mirror.asInstanceOf[ju.Mirror]
- val sym = jm.classSymbol(manifest.erasure)
- val tpe =
- if (manifest.typeArguments.isEmpty) sym.toType
- else ju.appliedType(sym.toTypeConstructor, manifest.typeArguments map (targ => ju.manifestToTypeTag(jm, targ)) map (_.in(jm).tpe))
- tpe.asInstanceOf[U # Type]
- case u =>
- u.manifestToTypeTag(mirror.asInstanceOf[u.Mirror], manifest).in(mirror).tpe
- }
- }
- })
+ /**
+ * Convert a pre-Scala-2.10 manifest to a typetag.
+ * For example
+ * {{{
+ * manifestToTypeTag( scala.reflect.runtime.currentMirror, implicitly[Manifest[String]] )
+ * }}}
+ */
+ def manifestToTypeTag[T](mirror: Any, manifest: Manifest[T]): Universe#TypeTag[T] =
+ throw new UnsupportedOperationException("This universe does not support manifest -> tag conversions. Use a JavaUniverse, e.g. the scala.reflect.runtime.universe.")
}
diff --git a/src/reflect/scala/reflect/api/TreeCreator.scala b/src/reflect/scala/reflect/api/TreeCreator.scala
new file mode 100644
index 0000000000..a8e8ae1b58
--- /dev/null
+++ b/src/reflect/scala/reflect/api/TreeCreator.scala
@@ -0,0 +1,26 @@
+package scala.reflect
+package api
+
+/** A mirror-aware factory for trees.
+ *
+ * In the reflection API, artifacts are specific to universes and
+ * symbolic references used in artifacts (e.g. `scala.Int`) are resolved by mirrors.
+ *
+ * Therefore to build a tree one needs to know a universe that the tree is going to be bound to
+ * and a mirror that is going to resolve symbolic references (e.g. to determine that `scala.Int`
+ * points to a core class `Int` from scala-library.jar).
+ *
+ * `TreeCreator` implements this notion by providing a standalone tree factory.
+ *
+ * This is immediately useful for reification. When the compiler reifies an expression,
+ * the end result needs to make sense in any mirror. That's because the compiler knows
+ * the universe it's reifying an expression into (specified by the target of the `reify` call),
+ * but it cannot know in advance the mirror to instantiate the result in (e.g. on JVM
+ * it doesn't know what classloader to use to resolve symbolic names in the reifee).
+ *
+ * Due to a typechecker restriction (no eta-expansion for dependent method types),
+ * `TreeCreator` can't have a functional type, so it's implemented as a class with an apply method.
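+ *
+ * A minimal hand-written sketch (illustrative only; in practice such creators are
+ * generated by the compiler during reification):
+ * {{{
+ * val fortyTwo = new TreeCreator {
+ *   def apply[U <: Universe with Singleton](m: scala.reflect.api.Mirror[U]): U # Tree =
+ *     m.universe.Literal(m.universe.Constant(42)) // builds the tree in the mirror's universe
+ * }
+ * }}}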
+ */
+abstract class TreeCreator {
+ def apply[U <: Universe with Singleton](m: scala.reflect.api.Mirror[U]): U # Tree
+}
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
index 5522693b29..1f15ee6070 100644
--- a/src/reflect/scala/reflect/api/Trees.scala
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -6,12 +6,75 @@ package scala.reflect
package api
// Syncnote: Trees are currently not thread-safe.
-trait Trees extends base.Trees { self: Universe =>
+trait Trees { self: Universe =>
+
+ /** Tree is the basis for Scala's abstract syntax. The nodes are
+ * implemented as case classes, and the parameters which initialize
+ * a given tree are immutable; however, Trees have several mutable
+ * fields which are manipulated in the course of typechecking,
+ * including pos, symbol, and tpe.
+ *
+ * Newly instantiated trees have tpe set to null (though it
+ * may be set immediately thereafter depending on how it is
+ * constructed). When a tree is passed to the typer, typically via
+ * `typer.typed(tree)`, under normal circumstances the tpe must be
+ * null or the typer will ignore it. Furthermore, the typer is not
+ * required to return the same tree it was passed.
+ *
+ * Trees can be easily traversed with e.g. foreach on the root node;
+ * for a more nuanced traversal, subclass Traverser. Transformations
+ * can be considerably trickier: see the numerous subclasses of
+ * Transformer found around the compiler.
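+ *
+ * As a rough sketch (runtime universe assumed), counting `Ident` nodes with a `Traverser`:
+ * {{{
+ * import scala.reflect.runtime.universe._
+ * val identCounter = new Traverser {
+ *   var count = 0
+ *   override def traverse(tree: Tree): Unit = {
+ *     tree match {
+ *       case Ident(_) => count += 1 // identifier node
+ *       case _        =>
+ *     }
+ *     super.traverse(tree) // keep walking the children
+ *   }
+ * }
+ * identCounter.traverse(reify(List(1, 2, 3).map(x => x + 1)).tree)
+ * }}}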
+ *
+ * Copying Trees should be done with care depending on whether
+ * it needs to be done lazily or strictly (see LazyTreeCopier and
+ * StrictTreeCopier) and on whether the contents of the mutable
+ * fields should be copied. The tree copiers will copy the mutable
+ * attributes to the new tree; calling Tree#duplicate will copy
+ * symbol and tpe, but all the positions will be focused.
+ *
+ * Trees can be coarsely divided into four mutually exclusive categories:
+ *
+ * - TermTrees, representing terms
+ * - TypTrees, representing types. Note that this is `TypTree`, not `TypeTree`.
+ * - SymTrees, which may represent types or terms.
+ * - Other Trees, which have none of those as parents.
+ *
+ * SymTrees include important nodes Ident and Select, which are
+ * used as both terms and types; they are distinguishable based on
+ * whether the Name is a TermName or TypeName. The correct way
+ * to test any Tree for a type or a term is to use the `isTerm`/`isType`
+ * methods on Tree.
+ *
+ * "Others" are mostly syntactic or short-lived constructs. Examples
+ * include CaseDef, which wraps individual match cases: they are
+ * neither terms nor types, nor do they carry a symbol. Another
+ * example is Parens, which is eliminated during parsing.
+ */
+ type Tree >: Null <: TreeApi
- override type Tree >: Null <: TreeApi
+ /** A tag that preserves the identity of the `Tree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val TreeTag: ClassTag[Tree]
- /** ... */
- trait TreeApi extends TreeBase { this: Tree =>
+ /** The API that all trees support */
+ trait TreeApi extends Product { this: Tree =>
+ // TODO
+ /** ... */
+ def isDef: Boolean
+
+ // TODO
+ /** ... */
+ def isEmpty: Boolean
+
+ /** The canonical way to test if a Tree represents a term.
+ */
+ def isTerm: Boolean
+
+ /** The canonical way to test if a Tree represents a type.
+ */
+ def isType: Boolean
/** ... */
def pos: Position
@@ -107,37 +170,83 @@ trait Trees extends base.Trees { self: Universe =>
* in this tree will be found when searching by position).
*/
def duplicate: this.type
+
+ /** Obtains string representation of a tree */
+ override def toString: String = treeToString(this)
}
- override protected def treeType(tree: Tree) = tree.tpe
+ /** Obtains string representation of a tree */
+ protected def treeToString(tree: Tree): String
- override type TermTree >: Null <: Tree with TermTreeApi
+ /** The empty tree */
+ val EmptyTree: Tree
+
+ /** A tree for a term. Not all trees representing terms are TermTrees; use isTerm
+ * to reliably identify terms.
+ */
+ type TermTree >: Null <: AnyRef with Tree with TermTreeApi
+
+ /** A tag that preserves the identity of the `TermTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val TermTreeTag: ClassTag[TermTree]
/** The API that all term trees support */
trait TermTreeApi extends TreeApi { this: TermTree =>
}
- override type TypTree >: Null <: Tree with TypTreeApi
+ /** A tree for a type. Not all trees representing types are TypTrees; use isType
+ * to reliably identify types.
+ */
+ type TypTree >: Null <: AnyRef with Tree with TypTreeApi
+
+ /** A tag that preserves the identity of the `TypTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val TypTreeTag: ClassTag[TypTree]
/** The API that all typ trees support */
trait TypTreeApi extends TreeApi { this: TypTree =>
}
- override type SymTree >: Null <: Tree with SymTreeApi
+ /** A tree with a mutable symbol field, initialized to NoSymbol.
+ */
+ type SymTree >: Null <: AnyRef with Tree with SymTreeApi
+
+ /** A tag that preserves the identity of the `SymTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val SymTreeTag: ClassTag[SymTree]
/** The API that all sym trees support */
trait SymTreeApi extends TreeApi { this: SymTree =>
def symbol: Symbol
}
- override type NameTree >: Null <: Tree with NameTreeApi
+ /** A tree with a name - effectively, a DefTree or RefTree.
+ */
+ type NameTree >: Null <: AnyRef with Tree with NameTreeApi
+
+ /** A tag that preserves the identity of the `NameTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val NameTreeTag: ClassTag[NameTree]
/** The API that all name trees support */
trait NameTreeApi extends TreeApi { this: NameTree =>
def name: Name
}
- override type RefTree >: Null <: SymTree with NameTree with RefTreeApi
+ /** A tree which references a symbol-carrying entity.
+ * References one, as opposed to defining one; definitions
+ * are in DefTrees.
+ */
+ type RefTree >: Null <: SymTree with NameTree with RefTreeApi
+
+ /** A tag that preserves the identity of the `RefTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val RefTreeTag: ClassTag[RefTree]
/** The API that all ref trees support */
trait RefTreeApi extends SymTreeApi with NameTreeApi { this: RefTree =>
@@ -145,21 +254,56 @@ trait Trees extends base.Trees { self: Universe =>
def name: Name
}
- override type DefTree >: Null <: SymTree with NameTree with DefTreeApi
+ /** A tree which defines a symbol-carrying entity.
+ */
+ type DefTree >: Null <: SymTree with NameTree with DefTreeApi
+
+ /** A tag that preserves the identity of the `DefTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val DefTreeTag: ClassTag[DefTree]
/** The API that all def trees support */
trait DefTreeApi extends SymTreeApi with NameTreeApi { this: DefTree =>
def name: Name
}
- override type MemberDef >: Null <: DefTree with MemberDefApi
+ /** Common base class for all member definitions: types, classes,
+ * objects, packages, vals and vars, defs.
+ */
+ type MemberDef >: Null <: DefTree with MemberDefApi
+
+ /** A tag that preserves the identity of the `MemberDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val MemberDefTag: ClassTag[MemberDef]
/** The API that all member defs support */
trait MemberDefApi extends DefTreeApi { this: MemberDef =>
def mods: Modifiers
}
- override type PackageDef >: Null <: MemberDef with PackageDefApi
+ /** A packaging, such as `package pid { stats }`
+ */
+ type PackageDef >: Null <: MemberDef with PackageDefApi
+
+ /** A tag that preserves the identity of the `PackageDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val PackageDefTag: ClassTag[PackageDef]
+
+ /** The constructor/deconstructor for `PackageDef` instances. */
+ val PackageDef: PackageDefExtractor
+
+ /** An extractor class to create and pattern match with syntax `PackageDef(pid, stats)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * `package` pid { stats }
+ */
+ abstract class PackageDefExtractor {
+ def apply(pid: RefTree, stats: List[Tree]): PackageDef
+ def unapply(packageDef: PackageDef): Option[(RefTree, List[Tree])]
+ }
/** The API that all package defs support */
trait PackageDefApi extends MemberDefApi { this: PackageDef =>
@@ -167,14 +311,45 @@ trait Trees extends base.Trees { self: Universe =>
val stats: List[Tree]
}
- override type ImplDef >: Null <: MemberDef with ImplDefApi
+ /** A common base class for class and object definitions.
+ */
+ type ImplDef >: Null <: MemberDef with ImplDefApi
+
+ /** A tag that preserves the identity of the `ImplDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ImplDefTag: ClassTag[ImplDef]
/** The API that all impl defs support */
trait ImplDefApi extends MemberDefApi { this: ImplDef =>
val impl: Template
}
- override type ClassDef >: Null <: ImplDef with ClassDefApi
+ /** A class definition.
+ */
+ type ClassDef >: Null <: ImplDef with ClassDefApi
+
+ /** A tag that preserves the identity of the `ClassDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ClassDefTag: ClassTag[ClassDef]
+
+ /** The constructor/deconstructor for `ClassDef` instances. */
+ val ClassDef: ClassDefExtractor
+
+ /** An extractor class to create and pattern match with syntax `ClassDef(mods, name, tparams, impl)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * mods `class` name [tparams] impl
+ *
+ * Where impl stands for:
+ *
+ * `extends` parents { defs }
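+ *
+ * For example, a rough sketch of deconstructing a class definition
+ * (`tree` is assumed to be some already-obtained Tree):
+ * {{{
+ * tree match {
+ *   case ClassDef(mods, name, tparams, impl) =>
+ *     println("class " + name + " with " + tparams.length + " type parameter(s)")
+ *   case _ =>
+ * }
+ * }}}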
+ */
+ abstract class ClassDefExtractor {
+ def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef], impl: Template): ClassDef
+ def unapply(classDef: ClassDef): Option[(Modifiers, TypeName, List[TypeDef], Template)]
+ }
/** The API that all class defs support */
trait ClassDefApi extends ImplDefApi { this: ClassDef =>
@@ -184,7 +359,33 @@ trait Trees extends base.Trees { self: Universe =>
val impl: Template
}
- override type ModuleDef >: Null <: ImplDef with ModuleDefApi
+ /** An object definition, e.g. `object Foo`. Internally, objects are
+ * quite frequently called modules to reduce ambiguity.
+ * Eliminated by compiler phase refcheck.
+ */
+ type ModuleDef >: Null <: ImplDef with ModuleDefApi
+
+ /** A tag that preserves the identity of the `ModuleDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ModuleDefTag: ClassTag[ModuleDef]
+
+ /** The constructor/deconstructor for `ModuleDef` instances. */
+ val ModuleDef: ModuleDefExtractor
+
+ /** An extractor class to create and pattern match with syntax `ModuleDef(mods, name, impl)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * mods `object` name impl
+ *
+ * Where impl stands for:
+ *
+ * `extends` parents { defs }
+ */
+ abstract class ModuleDefExtractor {
+ def apply(mods: Modifiers, name: TermName, impl: Template): ModuleDef
+ def unapply(moduleDef: ModuleDef): Option[(Modifiers, TermName, Template)]
+ }
/** The API that all module defs support */
trait ModuleDefApi extends ImplDefApi { this: ModuleDef =>
@@ -193,7 +394,14 @@ trait Trees extends base.Trees { self: Universe =>
val impl: Template
}
- override type ValOrDefDef >: Null <: MemberDef with ValOrDefDefApi
+ /** A common base class for ValDefs and DefDefs.
+ */
+ type ValOrDefDef >: Null <: MemberDef with ValOrDefDefApi
+
+ /** A tag that preserves the identity of the `ValOrDefDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ValOrDefDefTag: ClassTag[ValOrDefDef]
/** The API that all val defs and def defs support */
trait ValOrDefDefApi extends MemberDefApi { this: ValOrDefDef =>
@@ -202,7 +410,42 @@ trait Trees extends base.Trees { self: Universe =>
def rhs: Tree
}
- override type ValDef >: Null <: ValOrDefDef with ValDefApi
+ /** Broadly speaking, a value definition. All these are encoded as ValDefs:
+ *
+ * - immutable values, e.g. "val x"
+ * - mutable values, e.g. "var x" - the MUTABLE flag set in mods
+ * - lazy values, e.g. "lazy val x" - the LAZY flag set in mods
+ * - method parameters, see vparamss in [[scala.reflect.api.Trees#DefDef]] - the PARAM flag is set in mods
+ * - explicit self-types, e.g. class A { self: Bar => }
+ */
+ type ValDef >: Null <: ValOrDefDef with ValDefApi
+
+ /** A tag that preserves the identity of the `ValDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ValDefTag: ClassTag[ValDef]
+
+ /** The constructor/deconstructor for `ValDef` instances. */
+ val ValDef: ValDefExtractor
+
+ /** An extractor class to create and pattern match with syntax `ValDef(mods, name, tpt, rhs)`.
+ * This AST node corresponds to any of the following Scala code:
+ *
+ * mods `val` name: tpt = rhs
+ *
+ * mods `var` name: tpt = rhs
+ *
+ * mods name: tpt = rhs // in signatures of function and method definitions
+ *
+ * self: Bar => // self-types
+ *
+ * If the type of a value is not specified explicitly (i.e. is meant to be inferred),
+ * this is expressed by having `tpt` set to `TypeTree()` (but not to an `EmptyTree`!).
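+ *
+ * As an approximate sketch, `val x = 2` (before typechecking) is represented as:
+ * {{{
+ * ValDef(Modifiers(), newTermName("x"), TypeTree(), Literal(Constant(2)))
+ * }}}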
+ */
+ abstract class ValDefExtractor {
+ def apply(mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree): ValDef
+ def unapply(valDef: ValDef): Option[(Modifiers, TermName, Tree, Tree)]
+ }
/** The API that all val defs support */
trait ValDefApi extends ValOrDefDefApi { this: ValDef =>
@@ -212,7 +455,31 @@ trait Trees extends base.Trees { self: Universe =>
val rhs: Tree
}
- override type DefDef >: Null <: ValOrDefDef with DefDefApi
+ /** A method or macro definition.
+ * @param name The name of the method or macro. Can be a type name in case this is a type macro
+ */
+ type DefDef >: Null <: ValOrDefDef with DefDefApi
+
+ /** A tag that preserves the identity of the `DefDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val DefDefTag: ClassTag[DefDef]
+
+ /** The constructor/deconstructor for `DefDef` instances. */
+ val DefDef: DefDefExtractor
+
+ /** An extractor class to create and pattern match with syntax `DefDef(mods, name, tparams, vparamss, tpt, rhs)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * mods `def` name[tparams](vparams_1)...(vparams_n): tpt = rhs
+ *
+ * If the return type is not specified explicitly (i.e. is meant to be inferred),
+ * this is expressed by having `tpt` set to `TypeTree()` (but not to an `EmptyTree`!).
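+ *
+ * As an approximate sketch (flags and name encodings elided), `def inc(x: Int): Int = body` is:
+ * {{{
+ * DefDef(Modifiers(), newTermName("inc"), List(),
+ *   List(List(ValDef(Modifiers(Flag.PARAM), newTermName("x"), Ident(newTypeName("Int")), EmptyTree))),
+ *   Ident(newTypeName("Int")),
+ *   body) // where `body` is the tree of the right-hand side
+ * }}}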
+ */
+ abstract class DefDefExtractor {
+ def apply(mods: Modifiers, name: Name, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): DefDef
+ def unapply(defDef: DefDef): Option[(Modifiers, Name, List[TypeDef], List[List[ValDef]], Tree, Tree)]
+ }
/** The API that all def defs support */
trait DefDefApi extends ValOrDefDefApi { this: DefDef =>
@@ -224,7 +491,34 @@ trait Trees extends base.Trees { self: Universe =>
val rhs: Tree
}
- override type TypeDef >: Null <: MemberDef with TypeDefApi
+ /** An abstract type, a type parameter, or a type alias.
+ * Eliminated by erasure.
+ */
+ type TypeDef >: Null <: MemberDef with TypeDefApi
+
+ /** A tag that preserves the identity of the `TypeDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val TypeDefTag: ClassTag[TypeDef]
+
+ /** The constructor/deconstructor for `TypeDef` instances. */
+ val TypeDef: TypeDefExtractor
+
+ /** An extractor class to create and pattern match with syntax `TypeDef(mods, name, tparams, rhs)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * mods `type` name[tparams] = rhs
+ *
+ * mods `type` name[tparams] >: lo <: hi
+ *
+ * First usage illustrates `TypeDefs` representing type aliases and type parameters.
+ * Second usage illustrates `TypeDefs` representing abstract types,
+ * where lo and hi are both `TypeBoundsTrees` and `Modifier.deferred` is set in mods.
+ */
+ abstract class TypeDefExtractor {
+ def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef], rhs: Tree): TypeDef
+ def unapply(typeDef: TypeDef): Option[(Modifiers, TypeName, List[TypeDef], Tree)]
+ }
/** The API that all type defs support */
trait TypeDefApi extends MemberDefApi { this: TypeDef =>
@@ -234,7 +528,46 @@ trait Trees extends base.Trees { self: Universe =>
val rhs: Tree
}
- override type LabelDef >: Null <: DefTree with TermTree with LabelDefApi
+ /** A labelled expression. Not expressible in language syntax, but
+ * generated by the compiler to simulate while/do-while loops, and
+ * also by the pattern matcher.
+ *
+ * The label acts much like a nested function, where `params` represents
+ * the incoming parameters. The symbol given to the LabelDef should have
+ * a MethodType, as if it were a nested function.
+ *
+ * Jumps are apply nodes attributed with a label's symbol. The
+ * arguments from the apply node will be passed to the label and
+ * assigned to the Idents.
+ *
+ * Forward jumps within a block are allowed.
+ */
+ type LabelDef >: Null <: DefTree with TermTree with LabelDefApi
+
+ /** A tag that preserves the identity of the `LabelDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val LabelDefTag: ClassTag[LabelDef]
+
+ /** The constructor/deconstructor for `LabelDef` instances. */
+ val LabelDef: LabelDefExtractor
+
+ /** An extractor class to create and pattern match with syntax `LabelDef(name, params, rhs)`.
+ *
+ * This AST node does not have direct correspondence to Scala code.
+ * It is used for tailcalls and the like.
+ * For example, while/do are desugared to label defs as follows:
+ * {{{
+ * while (cond) body ==> LabelDef($L, List(), if (cond) { body; $L() } else ())
+ * }}}
+ * {{{
+ * do body while (cond) ==> LabelDef($L, List(), body; if (cond) $L() else ())
+ * }}}
+ */
+ abstract class LabelDefExtractor {
+ def apply(name: TermName, params: List[Ident], rhs: Tree): LabelDef
+ def unapply(labelDef: LabelDef): Option[(TermName, List[Ident], Tree)]
+ }
/** The API that all label defs support */
trait LabelDefApi extends DefTreeApi with TermTreeApi { this: LabelDef =>
@@ -243,7 +576,34 @@ trait Trees extends base.Trees { self: Universe =>
val rhs: Tree
}
- override type ImportSelector >: Null <: ImportSelectorApi
+ /** Import selector
+ *
+ * Representation of an imported name, its optional rename, and their optional positions.
+ *
+ * Eliminated by typecheck.
+ *
+ * @param name the imported name
+ * @param namePos its position or -1 if undefined
+ * @param rename the name the import is renamed to (== name if no renaming)
+ * @param renamePos the position of the rename or -1 if undefined
+ */
+ type ImportSelector >: Null <: AnyRef with ImportSelectorApi
+
+ /** A tag that preserves the identity of the `ImportSelector` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ImportSelectorTag: ClassTag[ImportSelector]
+
+ /** The constructor/deconstructor for `ImportSelector` instances. */
+ val ImportSelector: ImportSelectorExtractor
+
+ /** An extractor class to create and pattern match with syntax `ImportSelector(name, namePos, rename, renamePos)`.
+ * This is not an AST node; it is used as part of the `Import` node.
+ */
+ abstract class ImportSelectorExtractor {
+ def apply(name: Name, namePos: Int, rename: Name, renamePos: Int): ImportSelector
+ def unapply(importSelector: ImportSelector): Option[(Name, Int, Name, Int)]
+ }
/** The API that all import selectors support */
trait ImportSelectorApi { this: ImportSelector =>
@@ -253,7 +613,42 @@ trait Trees extends base.Trees { self: Universe =>
val renamePos: Int
}
- override type Import >: Null <: SymTree with ImportApi
+ /** Import clause
+ *
+ * @param expr
+ * @param selectors
+ */
+ type Import >: Null <: SymTree with ImportApi
+
+ /** A tag that preserves the identity of the `Import` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ImportTag: ClassTag[Import]
+
+ /** The constructor/deconstructor for `Import` instances. */
+ val Import: ImportExtractor
+
+ /** An extractor class to create and pattern match with syntax `Import(expr, selectors)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * import expr.{selectors}
+ *
+ * Selectors are a list of ImportSelectors, which conceptually are pairs of names (from, to).
+ * The last (and maybe only) name may be nme.WILDCARD. For instance:
+ *
+ * import qual.{x, y => z, _}
+ *
+ * Would be represented as:
+ *
+ * Import(qual, List(("x", "x"), ("y", "z"), (WILDCARD, null)))
+ *
+ * The symbol of an `Import` is an import symbol @see Symbol.newImport.
+ * It's used primarily as a marker to check that the import has been typechecked.
+ */
+ abstract class ImportExtractor {
+ def apply(expr: Tree, selectors: List[ImportSelector]): Import
+ def unapply(import_ : Import): Option[(Tree, List[ImportSelector])]
+ }
/** The API that all imports support */
trait ImportApi extends SymTreeApi { this: Import =>
@@ -261,7 +656,43 @@ trait Trees extends base.Trees { self: Universe =>
val selectors: List[ImportSelector]
}
- override type Template >: Null <: SymTree with TemplateApi
+ /** Instantiation template of a class or trait
+ *
+ * @param parents
+ * @param body
+ */
+ type Template >: Null <: SymTree with TemplateApi
+
+ /** A tag that preserves the identity of the `Template` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val TemplateTag: ClassTag[Template]
+
+ /** The constructor/deconstructor for `Template` instances. */
+ val Template: TemplateExtractor
+
+ /** An extractor class to create and pattern match with syntax `Template(parents, self, body)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * `extends` parents { self => body }
+ *
+ * When the self-type annotation is missing, it is represented as
+ * an empty value definition with nme.WILDCARD as its name and NoType as its type.
+ *
+ * The symbol of a template is a local dummy. @see Symbol.newLocalDummy
+ * The owner of the local dummy is the enclosing trait or class.
+ * The local dummy is itself the owner of any local blocks. For example:
+ *
+ * class C {
+ * def foo { // owner is C
+ * def bar // owner is local dummy
+ * }
+ * }
+ */
+ abstract class TemplateExtractor {
+ def apply(parents: List[Tree], self: ValDef, body: List[Tree]): Template
+ def unapply(template: Template): Option[(List[Tree], ValDef, List[Tree])]
+ }
/** The API that all templates support */
trait TemplateApi extends SymTreeApi { this: Template =>
@@ -270,7 +701,28 @@ trait Trees extends base.Trees { self: Universe =>
val body: List[Tree]
}
- override type Block >: Null <: TermTree with BlockApi
+ /** Block of expressions (semicolon-separated) */
+ type Block >: Null <: TermTree with BlockApi
+
+ /** A tag that preserves the identity of the `Block` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val BlockTag: ClassTag[Block]
+
+ /** The constructor/deconstructor for `Block` instances. */
+ val Block: BlockExtractor
+
+ /** An extractor class to create and pattern match with syntax `Block(stats, expr)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * { stats; expr }
+ *
+ * If the block is empty, the `expr` is set to `Literal(Constant(()))`.
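+ *
+ * As an approximate sketch, `{ val x = 1; x + 1 }` is represented as:
+ * {{{
+ * Block(
+ *   List(ValDef(Modifiers(), newTermName("x"), TypeTree(), Literal(Constant(1)))),
+ *   expr) // where `expr` is the tree of `x + 1`
+ * }}}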
+ */
+ abstract class BlockExtractor {
+ def apply(stats: List[Tree], expr: Tree): Block
+ def unapply(block: Block): Option[(List[Tree], Tree)]
+ }
/** The API that all blocks support */
trait BlockApi extends TermTreeApi { this: Block =>
@@ -278,7 +730,32 @@ trait Trees extends base.Trees { self: Universe =>
val expr: Tree
}
- override type CaseDef >: Null <: Tree with CaseDefApi
+ /** Case clause in a pattern match
+ * (except for occurrences in switch statements).
+ * Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher).
+ */
+ type CaseDef >: Null <: AnyRef with Tree with CaseDefApi
+
+ /** A tag that preserves the identity of the `CaseDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val CaseDefTag: ClassTag[CaseDef]
+
+ /** The constructor/deconstructor for `CaseDef` instances. */
+ val CaseDef: CaseDefExtractor
+
+ /** An extractor class to create and pattern match with syntax `CaseDef(pat, guard, body)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * `case` pat `if` guard => body
+ *
+ * If the guard is not present, the `guard` is set to `EmptyTree`.
+ * If the body is not specified, the `body` is set to `Literal(Constant(()))`.
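+ *
+ * As an approximate sketch, `case 1 => "one"` is represented as:
+ * {{{
+ * CaseDef(Literal(Constant(1)), EmptyTree, Literal(Constant("one")))
+ * }}}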
+ */
+ abstract class CaseDefExtractor {
+ def apply(pat: Tree, guard: Tree, body: Tree): CaseDef
+ def unapply(caseDef: CaseDef): Option[(Tree, Tree, Tree)]
+ }
/** The API that all case defs support */
trait CaseDefApi extends TreeApi { this: CaseDef =>
@@ -287,21 +764,92 @@ trait Trees extends base.Trees { self: Universe =>
val body: Tree
}
- override type Alternative >: Null <: TermTree with AlternativeApi
+ /** Alternatives of patterns.
+ *
+ * Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher),
+ * except for occurrences in encoded switch statements (i.e. remaining Match(CaseDef(...)) nodes).
+ */
+ type Alternative >: Null <: TermTree with AlternativeApi
+
+ /** A tag that preserves the identity of the `Alternative` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val AlternativeTag: ClassTag[Alternative]
+
+ /** The constructor/deconstructor for `Alternative` instances. */
+ val Alternative: AlternativeExtractor
+
+ /** An extractor class to create and pattern match with syntax `Alternative(trees)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * pat1 | ... | patn
+ */
+ abstract class AlternativeExtractor {
+ def apply(trees: List[Tree]): Alternative
+ def unapply(alternative: Alternative): Option[List[Tree]]
+ }
/** The API that all alternatives support */
trait AlternativeApi extends TermTreeApi { this: Alternative =>
val trees: List[Tree]
}
- override type Star >: Null <: TermTree with StarApi
+ /** Repetition of pattern.
+ *
+ * Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher).
+ */
+ type Star >: Null <: TermTree with StarApi
+
+ /** A tag that preserves the identity of the `Star` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val StarTag: ClassTag[Star]
+
+ /** The constructor/deconstructor for `Star` instances. */
+ val Star: StarExtractor
+
+ /** An extractor class to create and pattern match with syntax `Star(elem)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * pat*
+ */
+ abstract class StarExtractor {
+ def apply(elem: Tree): Star
+ def unapply(star: Star): Option[Tree]
+ }
/** The API that all stars support */
trait StarApi extends TermTreeApi { this: Star =>
val elem: Tree
}
- override type Bind >: Null <: DefTree with BindApi
+ /** Bind a variable to a rhs pattern.
+ *
+ * Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher).
+ *
+ * @param name
+ * @param body
+ */
+ type Bind >: Null <: DefTree with BindApi
+
+ /** A tag that preserves the identity of the `Bind` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val BindTag: ClassTag[Bind]
+
+ /** The constructor/deconstructor for `Bind` instances. */
+ val Bind: BindExtractor
+
+ /** An extractor class to create and pattern match with syntax `Bind(name, body)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * name @ body
+ */
+ abstract class BindExtractor {
+ def apply(name: Name, body: Tree): Bind
+ def unapply(bind: Bind): Option[(Name, Tree)]
+ }
/** The API that all binds support */
trait BindApi extends DefTreeApi { this: Bind =>
@@ -309,7 +857,51 @@ trait Trees extends base.Trees { self: Universe =>
val body: Tree
}
- override type UnApply >: Null <: TermTree with UnApplyApi
+ /**
+ * Used to represent `unapply` methods in pattern matching.
+ *
+ * For example:
+ * {{{
+ * 2 match { case Foo(x) => x }
+ * }}}
+ *
+ * Is represented as:
+ * {{{
+ * Match(
+ * Literal(Constant(2)),
+ * List(
+ * CaseDef(
+ * UnApply(
+ * // a dummy node that carries the type of unapplication to patmat
+ * // the <unapply-selector> here doesn't have an underlying symbol
+ * // it only has a type assigned, therefore after `resetAllAttrs` this tree is no longer typeable
+ * Apply(Select(Ident(Foo), newTermName("unapply")), List(Ident(newTermName("<unapply-selector>")))),
+ * // arguments of the unapply => nothing synthetic here
+ * List(Bind(newTermName("x"), Ident(nme.WILDCARD)))),
+ * EmptyTree,
+ * Ident(newTermName("x")))))
+ * }}}
+ *
+ * Introduced by typer. Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher).
+ */
+ type UnApply >: Null <: TermTree with UnApplyApi
+
+ /** A tag that preserves the identity of the `UnApply` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val UnApplyTag: ClassTag[UnApply]
+
+ /** The constructor/deconstructor for `UnApply` instances. */
+ val UnApply: UnApplyExtractor
+
+ /** An extractor class to create and pattern match with syntax `UnApply(fun, args)`.
+ * This AST node does not have direct correspondence to Scala code,
+ * and is introduced when typechecking pattern matches and `try` blocks.
+ */
+ abstract class UnApplyExtractor {
+ def apply(fun: Tree, args: List[Tree]): UnApply
+ def unapply(unApply: UnApply): Option[(Tree, List[Tree])]
+ }
/** The API that all unapplies support */
trait UnApplyApi extends TermTreeApi { this: UnApply =>
@@ -317,23 +909,56 @@ trait Trees extends base.Trees { self: Universe =>
val args: List[Tree]
}
- override type ArrayValue >: Null <: TermTree with ArrayValueApi
+ /** Anonymous function, eliminated by compiler phase lambdalift */
+ type Function >: Null <: TermTree with SymTree with FunctionApi
- /** The API that all array values support */
- trait ArrayValueApi extends TermTreeApi { this: ArrayValue =>
- val elemtpt: Tree
- val elems: List[Tree]
+ /** A tag that preserves the identity of the `Function` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val FunctionTag: ClassTag[Function]
+
+ /** The constructor/deconstructor for `Function` instances. */
+ val Function: FunctionExtractor
+
+ /** An extractor class to create and pattern match with syntax `Function(vparams, body)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * vparams => body
+ *
+ * The symbol of a Function is a synthetic TermSymbol.
+ * It is the owner of the function's parameters.
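+ *
+ * As an approximate sketch (flags elided), `(x: Int) => body` is represented as:
+ * {{{
+ * Function(
+ *   List(ValDef(Modifiers(Flag.PARAM), newTermName("x"), Ident(newTypeName("Int")), EmptyTree)),
+ *   body)
+ * }}}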
+ */
+ abstract class FunctionExtractor {
+ def apply(vparams: List[ValDef], body: Tree): Function
+ def unapply(function: Function): Option[(List[ValDef], Tree)]
}
- override type Function >: Null <: TermTree with SymTree with FunctionApi
-
/** The API that all functions support */
trait FunctionApi extends TermTreeApi with SymTreeApi { this: Function =>
val vparams: List[ValDef]
val body: Tree
}
- override type Assign >: Null <: TermTree with AssignApi
+ /** Assignment */
+ type Assign >: Null <: TermTree with AssignApi
+
+ /** A tag that preserves the identity of the `Assign` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val AssignTag: ClassTag[Assign]
+
+ /** The constructor/deconstructor for `Assign` instances. */
+ val Assign: AssignExtractor
+
+ /** An extractor class to create and pattern match with syntax `Assign(lhs, rhs)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * lhs = rhs
+ */
+ abstract class AssignExtractor {
+ def apply(lhs: Tree, rhs: Tree): Assign
+ def unapply(assign: Assign): Option[(Tree, Tree)]
+ }
/** The API that all assigns support */
trait AssignApi extends TermTreeApi { this: Assign =>
@@ -341,7 +966,34 @@ trait Trees extends base.Trees { self: Universe =>
val rhs: Tree
}
- override type AssignOrNamedArg >: Null <: TermTree with AssignOrNamedArgApi
+ /** Either an assignment or a named argument. Only appears in argument lists,
+ * eliminated by compiler phase typecheck (doTypedApply), resurrected by reifier.
+ */
+ type AssignOrNamedArg >: Null <: TermTree with AssignOrNamedArgApi
+
+ /** A tag that preserves the identity of the `AssignOrNamedArg` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val AssignOrNamedArgTag: ClassTag[AssignOrNamedArg]
+
+ /** The constructor/deconstructor for `AssignOrNamedArg` instances. */
+ val AssignOrNamedArg: AssignOrNamedArgExtractor
+
+ /** An extractor class to create and pattern match with syntax `AssignOrNamedArg(lhs, rhs)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * {{{
+ * m.f(lhs = rhs)
+ * }}}
+ * {{{
+ * @annotation(lhs = rhs)
+ * }}}
+ *
+ */
+ abstract class AssignOrNamedArgExtractor {
+ def apply(lhs: Tree, rhs: Tree): AssignOrNamedArg
+ def unapply(assignOrNamedArg: AssignOrNamedArg): Option[(Tree, Tree)]
+ }
/** The API that all assigns and named arguments support */
trait AssignOrNamedArgApi extends TermTreeApi { this: AssignOrNamedArg =>
@@ -349,7 +1001,28 @@ trait Trees extends base.Trees { self: Universe =>
val rhs: Tree
}
- override type If >: Null <: TermTree with IfApi
+ /** Conditional expression */
+ type If >: Null <: TermTree with IfApi
+
+ /** A tag that preserves the identity of the `If` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val IfTag: ClassTag[If]
+
+ /** The constructor/deconstructor for `If` instances. */
+ val If: IfExtractor
+
+ /** An extractor class to create and pattern match with syntax `If(cond, thenp, elsep)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * `if` (cond) thenp `else` elsep
+ *
+ * If the alternative is not present, the `elsep` is set to `Literal(Constant(()))`.
+ */
+ abstract class IfExtractor {
+ def apply(cond: Tree, thenp: Tree, elsep: Tree): If
+ def unapply(if_ : If): Option[(Tree, Tree, Tree)]
+ }
/** The API that all ifs support */
trait IfApi extends TermTreeApi { this: If =>
@@ -358,7 +1031,38 @@ trait Trees extends base.Trees { self: Universe =>
val elsep: Tree
}
- override type Match >: Null <: TermTree with MatchApi
+ /** - Pattern matching expression (before compiler phase explicitouter before 2.10 / patmat from 2.10)
+ * - Switch statements (after compiler phase explicitouter before 2.10 / patmat from 2.10)
+ *
+ * After compiler phase explicitouter before 2.10 / patmat from 2.10, cases will satisfy the following constraints:
+ *
+ * - all guards are `EmptyTree`,
+ * - all patterns will be either `Literal(Constant(x:Int))`
+ * or `Alternative(lit|...|lit)`
+ * - except for an "otherwise" branch, which has pattern
+ * `Ident(nme.WILDCARD)`
+ */
+ type Match >: Null <: TermTree with MatchApi
+
+ /** A tag that preserves the identity of the `Match` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val MatchTag: ClassTag[Match]
+
+ /** The constructor/deconstructor for `Match` instances. */
+ val Match: MatchExtractor
+
+ /** An extractor class to create and pattern match with syntax `Match(selector, cases)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * selector `match` { cases }
+ *
+ * `Match` is also used in pattern matching assignments like `val (foo, bar) = baz`.
+ */
+ abstract class MatchExtractor {
+ def apply(selector: Tree, cases: List[CaseDef]): Match
+ def unapply(match_ : Match): Option[(Tree, List[CaseDef])]
+ }
/** The API that all matches support */
trait MatchApi extends TermTreeApi { this: Match =>
@@ -366,14 +1070,56 @@ trait Trees extends base.Trees { self: Universe =>
val cases: List[CaseDef]
}
- override type Return >: Null <: TermTree with SymTree with ReturnApi
+ /** Return expression */
+ type Return >: Null <: TermTree with SymTree with ReturnApi
+
+ /** A tag that preserves the identity of the `Return` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ReturnTag: ClassTag[Return]
+
+ /** The constructor/deconstructor for `Return` instances. */
+ val Return: ReturnExtractor
+
+ /** An extractor class to create and pattern match with syntax `Return(expr)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * `return` expr
+ *
+ * The symbol of a Return node is the enclosing method.
+ */
+ abstract class ReturnExtractor {
+ def apply(expr: Tree): Return
+ def unapply(return_ : Return): Option[Tree]
+ }
/** The API that all returns support */
trait ReturnApi extends TermTreeApi { this: Return =>
val expr: Tree
}
- override type Try >: Null <: TermTree with TryApi
+ /** Try expression: a `try` block with `catch` clauses and an optional finalizer */
+ type Try >: Null <: TermTree with TryApi
+
+ /** A tag that preserves the identity of the `Try` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val TryTag: ClassTag[Try]
+
+ /** The constructor/deconstructor for `Try` instances. */
+ val Try: TryExtractor
+
+ /** An extractor class to create and pattern match with syntax `Try(block, catches, finalizer)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * `try` block `catch` { catches } `finally` finalizer
+ *
+ * If the finalizer is not present, the `finalizer` is set to `EmptyTree`.
+ */
+ abstract class TryExtractor {
+ def apply(block: Tree, catches: List[CaseDef], finalizer: Tree): Try
+ def unapply(try_ : Try): Option[(Tree, List[CaseDef], Tree)]
+ }
/** The API that all tries support */
trait TryApi extends TermTreeApi { this: Try =>
@@ -382,21 +1128,89 @@ trait Trees extends base.Trees { self: Universe =>
val finalizer: Tree
}
- override type Throw >: Null <: TermTree with ThrowApi
+ /** Throw expression */
+ type Throw >: Null <: TermTree with ThrowApi
+
+ /** A tag that preserves the identity of the `Throw` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ThrowTag: ClassTag[Throw]
+
+ /** The constructor/deconstructor for `Throw` instances. */
+ val Throw: ThrowExtractor
+
+ /** An extractor class to create and pattern match with syntax `Throw(expr)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * `throw` expr
+ */
+ abstract class ThrowExtractor {
+ def apply(expr: Tree): Throw
+ def unapply(throw_ : Throw): Option[Tree]
+ }
/** The API that all throws support */
trait ThrowApi extends TermTreeApi { this: Throw =>
val expr: Tree
}
- override type New >: Null <: TermTree with NewApi
+ /** Object instantiation
+ */
+ type New >: Null <: TermTree with NewApi
+
+ /** A tag that preserves the identity of the `New` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val NewTag: ClassTag[New]
+
+ /** The constructor/deconstructor for `New` instances.
+ */
+ val New: NewExtractor
+
+ /** An extractor class to create and pattern match with syntax `New(tpt)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * `new` T
+ *
+ * This node always occurs in the following context:
+ *
+ * (`new` tpt).<init>[targs](args)
+ */
+ abstract class NewExtractor {
+ /** A user level `new`.
+ * One should always use this factory method to build a user level `new`.
+ *
+ * @param tpt a class type
+ */
+ def apply(tpt: Tree): New
+ def unapply(new_ : New): Option[Tree]
+ }
/** The API that all news support */
trait NewApi extends TermTreeApi { this: New =>
val tpt: Tree
}
- override type Typed >: Null <: TermTree with TypedApi
+ /** Type annotation, eliminated by compiler phase cleanup */
+ type Typed >: Null <: TermTree with TypedApi
+
+ /** A tag that preserves the identity of the `Typed` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val TypedTag: ClassTag[Typed]
+
+ /** The constructor/deconstructor for `Typed` instances. */
+ val Typed: TypedExtractor
+
+ /** An extractor class to create and pattern match with syntax `Typed(expr, tpt)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * expr: tpt
+ */
+ abstract class TypedExtractor {
+ def apply(expr: Tree, tpt: Tree): Typed
+ def unapply(typed: Typed): Option[(Tree, Tree)]
+ }
/** The API that all typeds support */
trait TypedApi extends TermTreeApi { this: Typed =>
@@ -404,7 +1218,14 @@ trait Trees extends base.Trees { self: Universe =>
val tpt: Tree
}
- override type GenericApply >: Null <: TermTree with GenericApplyApi
+ /** Common base class for Apply and TypeApply.
+ */
+ type GenericApply >: Null <: TermTree with GenericApplyApi
+
+ /** A tag that preserves the identity of the `GenericApply` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val GenericApplyTag: ClassTag[GenericApply]
/** The API that all applies support */
trait GenericApplyApi extends TermTreeApi { this: GenericApply =>
@@ -412,42 +1233,157 @@ trait Trees extends base.Trees { self: Universe =>
val args: List[Tree]
}
- override type TypeApply >: Null <: GenericApply with TypeApplyApi
+ /* @PP: All signs point toward it being a requirement that args.nonEmpty,
+ * but I can't find that explicitly stated anywhere. Unless your last name
+ * is odersky, you should probably treat it as true.
+ */
+ /** Explicit type application. */
+ type TypeApply >: Null <: GenericApply with TypeApplyApi
+
+ /** A tag that preserves the identity of the `TypeApply` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val TypeApplyTag: ClassTag[TypeApply]
+
+ /** The constructor/deconstructor for `TypeApply` instances. */
+ val TypeApply: TypeApplyExtractor
+
+ /** An extractor class to create and pattern match with syntax `TypeApply(fun, args)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * fun[args]
+ */
+ abstract class TypeApplyExtractor {
+ def apply(fun: Tree, args: List[Tree]): TypeApply
+ def unapply(typeApply: TypeApply): Option[(Tree, List[Tree])]
+ }
/** The API that all type applies support */
trait TypeApplyApi extends GenericApplyApi { this: TypeApply =>
}
- override type Apply >: Null <: GenericApply with ApplyApi
+ /** Value application */
+ type Apply >: Null <: GenericApply with ApplyApi
+
+ /** A tag that preserves the identity of the `Apply` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ApplyTag: ClassTag[Apply]
+
+ /** The constructor/deconstructor for `Apply` instances. */
+ val Apply: ApplyExtractor
+
+ /** An extractor class to create and pattern match with syntax `Apply(fun, args)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * fun(args)
+ *
+ * For instance:
+ *
+ * fun[targs](args)
+ *
+ * Is expressed as:
+ *
+ * Apply(TypeApply(fun, targs), args)
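+ *
+ * As an approximate sketch, `println("Hello")` (before typechecking) is represented as:
+ * {{{
+ * Apply(Ident(newTermName("println")), List(Literal(Constant("Hello"))))
+ * }}}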
+ */
+ abstract class ApplyExtractor {
+ def apply(fun: Tree, args: List[Tree]): Apply
+ def unapply(apply: Apply): Option[(Tree, List[Tree])]
+ }
/** The API that all applies support */
trait ApplyApi extends GenericApplyApi { this: Apply =>
}
- override type ApplyDynamic >: Null <: TermTree with SymTree with ApplyDynamicApi
+ /** Super reference, where `qual` is the corresponding `this` reference.
+ * A super reference `C.super[M]` is represented as `Super(This(C), M)`.
+ */
+ type Super >: Null <: TermTree with SuperApi
- /** The API that all apply dynamics support */
- trait ApplyDynamicApi extends TermTreeApi with SymTreeApi { this: ApplyDynamic =>
- val qual: Tree
- val args: List[Tree]
+ /** A tag that preserves the identity of the `Super` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val SuperTag: ClassTag[Super]
+
+ /** The constructor/deconstructor for `Super` instances. */
+ val Super: SuperExtractor
+
+ /** An extractor class to create and pattern match with syntax `Super(qual, mix)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * C.super[M]
+ *
+ * Which is represented as:
+ *
+ * Super(This(C), M)
+ *
+ * If `mix` is empty, it is tpnme.EMPTY.
+ *
+ * The symbol of a Super is the class _from_ which the super reference is made.
+ * For instance in C.super(...), it would be C.
+ */
+ abstract class SuperExtractor {
+ def apply(qual: Tree, mix: TypeName): Super
+ def unapply(super_ : Super): Option[(Tree, TypeName)]
}
- override type Super >: Null <: TermTree with SuperApi
-
/** The API that all supers support */
trait SuperApi extends TermTreeApi { this: Super =>
val qual: Tree
val mix: TypeName
}
- override type This >: Null <: TermTree with SymTree with ThisApi
+ /** Self reference */
+ type This >: Null <: TermTree with SymTree with ThisApi
+
+ /** A tag that preserves the identity of the `This` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ThisTag: ClassTag[This]
+
+ /** The constructor/deconstructor for `This` instances. */
+ val This: ThisExtractor
+
+ /** An extractor class to create and pattern match with syntax `This(qual)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * qual.this
+ *
+ * The symbol of a This is the class to which the this refers.
+ * For instance in C.this, it would be C.
+ *
+ * If `mix` is empty, then ???
+ */
+ abstract class ThisExtractor {
+ def apply(qual: TypeName): This
+ def unapply(this_ : This): Option[TypeName]
+ }
/** The API that all thises support */
trait ThisApi extends TermTreeApi with SymTreeApi { this: This =>
val qual: TypeName
}
- override type Select >: Null <: RefTree with SelectApi
+ /** Designator <qualifier> . <name> */
+ type Select >: Null <: RefTree with SelectApi
+
+ /** A tag that preserves the identity of the `Select` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val SelectTag: ClassTag[Select]
+
+ /** The constructor/deconstructor for `Select` instances. */
+ val Select: SelectExtractor
+
+ /** An extractor class to create and pattern match with syntax `Select(qual, name)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * qualifier.selector
+ */
+ abstract class SelectExtractor {
+ def apply(qualifier: Tree, name: Name): Select
+ def unapply(select: Select): Option[(Tree, Name)]
+ }
/** The API that all selects support */
trait SelectApi extends RefTreeApi { this: Select =>
@@ -455,28 +1391,132 @@ trait Trees extends base.Trees { self: Universe =>
val name: Name
}
- override type Ident >: Null <: RefTree with IdentApi
+ /** Identifier <name> */
+ type Ident >: Null <: RefTree with IdentApi
+
+ /** A tag that preserves the identity of the `Ident` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val IdentTag: ClassTag[Ident]
+
+ /** The constructor/deconstructor for `Ident` instances. */
+ val Ident: IdentExtractor
+
+ /** An extractor class to create and pattern match with syntax `Ident(name)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * name
+ *
+ * The type checker converts idents that refer to enclosing fields or methods into selects.
+ * For example, name ==> this.name
+ */
+ abstract class IdentExtractor {
+ def apply(name: Name): Ident
+ def unapply(ident: Ident): Option[Name]
+ }
/** The API that all idents support */
trait IdentApi extends RefTreeApi { this: Ident =>
val name: Name
}
- override type ReferenceToBoxed >: Null <: TermTree with ReferenceToBoxedApi
+ /** Marks underlying reference to id as boxed.
+ * @pre id must refer to a captured variable
+ * A reference so marked will refer to the boxed entity; no dereferencing
+ * with `.elem` is done on it.
+ * This tree node can be emitted by macros such as reify that call referenceCapturedVariable.
+ * It is eliminated in LambdaLift, where the boxing conversion takes place.
+ */
+ type ReferenceToBoxed >: Null <: TermTree with ReferenceToBoxedApi
+
+ /** A tag that preserves the identity of the `ReferenceToBoxed` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ReferenceToBoxedTag: ClassTag[ReferenceToBoxed]
+
+ /** The constructor/deconstructor for `ReferenceToBoxed` instances. */
+ val ReferenceToBoxed: ReferenceToBoxedExtractor
+
+ /** An extractor class to create and pattern match with syntax `ReferenceToBoxed(ident)`.
+ * This AST node does not have direct correspondence to Scala code,
+ * and is emitted by macros to reference captured variables directly without going through `elem`.
+ *
+ * For example:
+ *
+ * var x = ...
+ * fun { x }
+ *
+ * Will emit:
+ *
+ * Ident(x)
+ *
+ * Which gets transformed to:
+ *
+ * Select(Ident(x), "elem")
+ *
+ * If `ReferenceToBoxed` were used instead of Ident, no transformation would be performed.
+ */
+ abstract class ReferenceToBoxedExtractor {
+ def apply(ident: Ident): ReferenceToBoxed
+ def unapply(referenceToBoxed: ReferenceToBoxed): Option[Ident]
+ }
/** The API that all references support */
trait ReferenceToBoxedApi extends TermTreeApi { this: ReferenceToBoxed =>
val ident: Tree
}
- override type Literal >: Null <: TermTree with LiteralApi
+ /** Literal */
+ type Literal >: Null <: TermTree with LiteralApi
+
+ /** A tag that preserves the identity of the `Literal` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val LiteralTag: ClassTag[Literal]
+
+ /** The constructor/deconstructor for `Literal` instances. */
+ val Literal: LiteralExtractor
+
+ /** An extractor class to create and pattern match with syntax `Literal(value)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * value
+ */
+ abstract class LiteralExtractor {
+ def apply(value: Constant): Literal
+ def unapply(literal: Literal): Option[Constant]
+ }
/** The API that all literals support */
trait LiteralApi extends TermTreeApi { this: Literal =>
val value: Constant
}
- override type Annotated >: Null <: Tree with AnnotatedApi
+ /** A tree that has an annotation attached to it. Only used for annotated types and
+ * annotation ascriptions; annotations on definitions are stored in the Modifiers.
+ * Eliminated by the typechecker (typedAnnotated); the annotations are then stored in
+ * an AnnotatedType.
+ */
+ type Annotated >: Null <: AnyRef with Tree with AnnotatedApi
+
+ /** A tag that preserves the identity of the `Annotated` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val AnnotatedTag: ClassTag[Annotated]
+
+ /** The constructor/deconstructor for `Annotated` instances. */
+ val Annotated: AnnotatedExtractor
+
+ /** An extractor class to create and pattern match with syntax `Annotated(annot, arg)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * arg @annot // for types
+ * arg: @annot // for exprs
+ */
+ abstract class AnnotatedExtractor {
+ def apply(annot: Tree, arg: Tree): Annotated
+ def unapply(annotated: Annotated): Option[(Tree, Tree)]
+ }
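A rough sketch of how the ascription `x: @unchecked` could be assembled by hand with the runtime universe (the annotation tree is the usual `new unchecked()` application; the names are illustrative only):
{{{
import scala.reflect.runtime.universe._

val annot = Apply(Select(New(Ident(newTypeName("unchecked"))), nme.CONSTRUCTOR), Nil)
val tree  = Annotated(annot, Ident(newTermName("x")))   // x: @unchecked
}}}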
/** The API that all annotateds support */
trait AnnotatedApi extends TreeApi { this: Annotated =>
@@ -484,14 +1524,55 @@ trait Trees extends base.Trees { self: Universe =>
val arg: Tree
}
- override type SingletonTypeTree >: Null <: TypTree with SingletonTypeTreeApi
+ /** Singleton type, eliminated by RefCheck */
+ type SingletonTypeTree >: Null <: TypTree with SingletonTypeTreeApi
+
+ /** A tag that preserves the identity of the `SingletonTypeTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val SingletonTypeTreeTag: ClassTag[SingletonTypeTree]
+
+ /** The constructor/deconstructor for `SingletonTypeTree` instances. */
+ val SingletonTypeTree: SingletonTypeTreeExtractor
+
+ /** An extractor class to create and pattern match with syntax `SingletonTypeTree(ref)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * ref.type
+ */
+ abstract class SingletonTypeTreeExtractor {
+ def apply(ref: Tree): SingletonTypeTree
+ def unapply(singletonTypeTree: SingletonTypeTree): Option[Tree]
+ }
/** The API that all singleton type trees support */
trait SingletonTypeTreeApi extends TypTreeApi { this: SingletonTypeTree =>
val ref: Tree
}
- override type SelectFromTypeTree >: Null <: TypTree with RefTree with SelectFromTypeTreeApi
+ /** Type selection <qualifier> # <name>, eliminated by RefCheck */
+ // [Eugene++] don't see why we need it, when we have Select
+ type SelectFromTypeTree >: Null <: TypTree with RefTree with SelectFromTypeTreeApi
+
+ /** A tag that preserves the identity of the `SelectFromTypeTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val SelectFromTypeTreeTag: ClassTag[SelectFromTypeTree]
+
+ /** The constructor/deconstructor for `SelectFromTypeTree` instances. */
+ val SelectFromTypeTree: SelectFromTypeTreeExtractor
+
+ /** An extractor class to create and pattern match with syntax `SelectFromTypeTree(qualifier, name)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * qualifier # selector
+ *
+ * Note: a path-dependent type p.T is expressed as p.type # T
+ */
+ abstract class SelectFromTypeTreeExtractor {
+ def apply(qualifier: Tree, name: TypeName): SelectFromTypeTree
+ def unapply(selectFromTypeTree: SelectFromTypeTree): Option[(Tree, TypeName)]
+ }
/** The API that all selects from type trees support */
trait SelectFromTypeTreeApi extends TypTreeApi with RefTreeApi { this: SelectFromTypeTree =>
@@ -499,14 +1580,52 @@ trait Trees extends base.Trees { self: Universe =>
val name: TypeName
}
- override type CompoundTypeTree >: Null <: TypTree with CompoundTypeTreeApi
+ /** Intersection type <parent1> with ... with <parentN> { <decls> }, eliminated by RefCheck */
+ type CompoundTypeTree >: Null <: TypTree with CompoundTypeTreeApi
+
+ /** A tag that preserves the identity of the `CompoundTypeTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val CompoundTypeTreeTag: ClassTag[CompoundTypeTree]
+
+ /** The constructor/deconstructor for `CompoundTypeTree` instances. */
+ val CompoundTypeTree: CompoundTypeTreeExtractor
+
+ /** An extractor class to create and pattern match with syntax `CompoundTypeTree(templ)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * parent1 with ... with parentN { refinement }
+ */
+ abstract class CompoundTypeTreeExtractor {
+ def apply(templ: Template): CompoundTypeTree
+ def unapply(compoundTypeTree: CompoundTypeTree): Option[Template]
+ }
/** The API that all compound type trees support */
trait CompoundTypeTreeApi extends TypTreeApi { this: CompoundTypeTree =>
val templ: Template
}
- override type AppliedTypeTree >: Null <: TypTree with AppliedTypeTreeApi
+ /** Applied type <tpt> [ <args> ], eliminated by RefCheck */
+ type AppliedTypeTree >: Null <: TypTree with AppliedTypeTreeApi
+
+ /** A tag that preserves the identity of the `AppliedTypeTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val AppliedTypeTreeTag: ClassTag[AppliedTypeTree]
+
+ /** The constructor/deconstructor for `AppliedTypeTree` instances. */
+ val AppliedTypeTree: AppliedTypeTreeExtractor
+
+ /** An extractor class to create and pattern match with syntax `AppliedTypeTree(tpt, args)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * tpt[args]
+ */
+ abstract class AppliedTypeTreeExtractor {
+ def apply(tpt: Tree, args: List[Tree]): AppliedTypeTree
+ def unapply(appliedTypeTree: AppliedTypeTree): Option[(Tree, List[Tree])]
+ }
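For example, a hand-built (unresolved) tree for the applied type `List[Int]`, sketched with the runtime universe:
{{{
import scala.reflect.runtime.universe._

val listOfInt = AppliedTypeTree(Ident(newTypeName("List")), List(Ident(newTypeName("Int"))))
}}}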
/** The API that all applied type trees support */
trait AppliedTypeTreeApi extends TypTreeApi { this: AppliedTypeTree =>
@@ -514,7 +1633,26 @@ trait Trees extends base.Trees { self: Universe =>
val args: List[Tree]
}
- override type TypeBoundsTree >: Null <: TypTree with TypeBoundsTreeApi
+ /** Document me! */
+ type TypeBoundsTree >: Null <: TypTree with TypeBoundsTreeApi
+
+ /** A tag that preserves the identity of the `TypeBoundsTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val TypeBoundsTreeTag: ClassTag[TypeBoundsTree]
+
+ /** The constructor/deconstructor for `TypeBoundsTree` instances. */
+ val TypeBoundsTree: TypeBoundsTreeExtractor
+
+ /** An extractor class to create and pattern match with syntax `TypeBoundsTree(lo, hi)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * >: lo <: hi
+ */
+ abstract class TypeBoundsTreeExtractor {
+ def apply(lo: Tree, hi: Tree): TypeBoundsTree
+ def unapply(typeBoundsTree: TypeBoundsTree): Option[(Tree, Tree)]
+ }
/** The API that all type bound trees support */
trait TypeBoundsTreeApi extends TypTreeApi { this: TypeBoundsTree =>
@@ -522,7 +1660,26 @@ trait Trees extends base.Trees { self: Universe =>
val hi: Tree
}
- override type ExistentialTypeTree >: Null <: TypTree with ExistentialTypeTreeApi
+ /** Document me! */
+ type ExistentialTypeTree >: Null <: TypTree with ExistentialTypeTreeApi
+
+ /** A tag that preserves the identity of the `ExistentialTypeTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ExistentialTypeTreeTag: ClassTag[ExistentialTypeTree]
+
+ /** The constructor/deconstructor for `ExistentialTypeTree` instances. */
+ val ExistentialTypeTree: ExistentialTypeTreeExtractor
+
+ /** An extractor class to create and pattern match with syntax `ExistentialTypeTree(tpt, whereClauses)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * tpt forSome { whereClauses }
+ */
+ abstract class ExistentialTypeTreeExtractor {
+ def apply(tpt: Tree, whereClauses: List[Tree]): ExistentialTypeTree
+ def unapply(existentialTypeTree: ExistentialTypeTree): Option[(Tree, List[Tree])]
+ }
/** The API that all existential type trees support */
trait ExistentialTypeTreeApi extends TypTreeApi { this: ExistentialTypeTree =>
@@ -530,7 +1687,29 @@ trait Trees extends base.Trees { self: Universe =>
val whereClauses: List[Tree]
}
- override type TypeTree >: Null <: TypTree with TypeTreeApi
+ /** A synthetic tree holding an arbitrary type. Not to be confused
+ * with TypTree, the trait for trees that are only used for type trees.
+ * TypeTrees are inserted in several places, but most notably in
+ * `RefCheck`, where the arbitrary type trees are all replaced by
+ * TypeTrees. */
+ type TypeTree >: Null <: TypTree with TypeTreeApi
+
+ /** A tag that preserves the identity of the `TypeTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val TypeTreeTag: ClassTag[TypeTree]
+
+ /** The constructor/deconstructor for `TypeTree` instances. */
+ val TypeTree: TypeTreeExtractor
+
+ /** An extractor class to create and pattern match with syntax `TypeTree()`.
+ * This AST node does not have direct correspondence to Scala code,
+ * and is emitted wherever we want to wrap a `Type` in a `Tree`.
+ */
+ abstract class TypeTreeExtractor {
+ def apply(): TypeTree
+ def unapply(typeTree: TypeTree): Boolean
+ }
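A short sketch, assuming the `TypeTree(tp)` factory declared further below in this trait:
{{{
import scala.reflect.runtime.universe._

val tt = TypeTree(typeOf[Int])          // wraps the type Int in a tree
tt match { case TypeTree() => tt.tpe }  // matches any TypeTree; yields Int
}}}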
/** The API that all type trees support */
trait TypeTreeApi extends TypTreeApi { this: TypeTree =>
@@ -544,6 +1723,83 @@ trait Trees extends base.Trees { self: Universe =>
*/
val emptyValDef: ValDef
+// ---------------------- factories ----------------------------------------------
+
+ /** @param sym the class symbol
+ * @param impl the implementation template
+ */
+ def ClassDef(sym: Symbol, impl: Template): ClassDef
+
+ /**
+ * @param sym the class symbol
+ * @param impl the implementation template
+ */
+ def ModuleDef(sym: Symbol, impl: Template): ModuleDef
+
+ def ValDef(sym: Symbol, rhs: Tree): ValDef
+
+ def ValDef(sym: Symbol): ValDef
+
+ def DefDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef
+
+ def DefDef(sym: Symbol, vparamss: List[List[ValDef]], rhs: Tree): DefDef
+
+ def DefDef(sym: Symbol, mods: Modifiers, rhs: Tree): DefDef
+
+ def DefDef(sym: Symbol, rhs: Tree): DefDef
+
+ def DefDef(sym: Symbol, rhs: List[List[Symbol]] => Tree): DefDef
+
+ /** A TypeDef node which defines the given `sym` with the given right-hand side `rhs`. */
+ def TypeDef(sym: Symbol, rhs: Tree): TypeDef
+
+ /** A TypeDef node which defines abstract type or type parameter for given `sym` */
+ def TypeDef(sym: Symbol): TypeDef
+
+ def LabelDef(sym: Symbol, params: List[Symbol], rhs: Tree): LabelDef
+
+ /** Block factory that flattens directly nested blocks.
+ */
+ def Block(stats: Tree*): Block
+
+ /** A `CaseDef` shorthand that omits the guard. */
+ def CaseDef(pat: Tree, body: Tree): CaseDef
+
+ def Bind(sym: Symbol, body: Tree): Bind
+
+ def Try(body: Tree, cases: (Tree, Tree)*): Try
+
+ def Throw(tpe: Type, args: Tree*): Throw
+
+ /** Factory method for object creation `new tpt(args_1)...(args_n)`
+ * A `New(t, as)` is expanded to: `(new t).<init>(as)`
+ */
+ def New(tpt: Tree, argss: List[List[Tree]]): Tree
+
+ /** A `new` with zero or one argument list, based on a type.
+ */
+ def New(tpe: Type, args: Tree*): Tree
+
+ def New(sym: Symbol, args: Tree*): Tree
+
+ def Apply(sym: Symbol, args: Tree*): Tree
+
+ def ApplyConstructor(tpt: Tree, args: List[Tree]): Tree
+
+ def Super(sym: Symbol, mix: TypeName): Tree
+
+ def This(sym: Symbol): Tree
+
+ def Select(qualifier: Tree, name: String): Select
+
+ def Select(qualifier: Tree, sym: Symbol): Select
+
+ def Ident(name: String): Ident
+
+ def Ident(sym: Symbol): Ident
+
+ def TypeTree(tp: Type): TypeTree
+
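As a sketch of the convenience factories above, the block `{ val x = 1; x }` could be assembled like this (runtime universe assumed):
{{{
import scala.reflect.runtime.universe._

val block = Block(
  ValDef(Modifiers(), newTermName("x"), TypeTree(), Literal(Constant(1))),
  Ident("x"))
}}}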
// ---------------------- copying ------------------------------------------------
/** The standard (lazy) tree copier
@@ -573,7 +1829,6 @@ trait Trees extends base.Trees { self: Universe =>
def Star(tree: Tree, elem: Tree): Star
def Bind(tree: Tree, name: Name, body: Tree): Bind
def UnApply(tree: Tree, fun: Tree, args: List[Tree]): UnApply
- def ArrayValue(tree: Tree, elemtpt: Tree, trees: List[Tree]): ArrayValue
def Function(tree: Tree, vparams: List[ValDef], body: Tree): Function
def Assign(tree: Tree, lhs: Tree, rhs: Tree): Assign
def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree): AssignOrNamedArg
@@ -586,7 +1841,6 @@ trait Trees extends base.Trees { self: Universe =>
def Typed(tree: Tree, expr: Tree, tpt: Tree): Typed
def TypeApply(tree: Tree, fun: Tree, args: List[Tree]): TypeApply
def Apply(tree: Tree, fun: Tree, args: List[Tree]): Apply
- def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]): ApplyDynamic
def Super(tree: Tree, qual: Tree, mix: TypeName): Super
def This(tree: Tree, qual: Name): This
def Select(tree: Tree, qualifier: Tree, selector: Name): Select
@@ -691,9 +1945,35 @@ trait Trees extends base.Trees { self: Universe =>
protected def xtransform(transformer: Transformer, tree: Tree): Tree = throw new MatchError(tree)
- type Modifiers >: Null <: ModifiersApi
- abstract class ModifiersApi extends ModifiersBase
+ /** ... */
+ type Modifiers >: Null <: AnyRef with ModifiersApi
-}
+ /** A tag that preserves the identity of the `Modifiers` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ModifiersTag: ClassTag[Modifiers]
+
+ /** ... */
+ abstract class ModifiersApi {
+ def flags: FlagSet // default: NoFlags
+ def hasFlag(flag: FlagSet): Boolean
+ def privateWithin: Name // default: EmptyTypeName
+ def annotations: List[Tree] // default: List()
+ def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers =
+ Modifiers(flags, privateWithin, f(annotations))
+ }
+
+ val Modifiers: ModifiersCreator
+ abstract class ModifiersCreator {
+ def apply(): Modifiers = Modifiers(NoFlags, EmptyTypeName, List())
+ def apply(flags: FlagSet, privateWithin: Name, annotations: List[Tree]): Modifiers
+ }
+
+ def Modifiers(flags: FlagSet, privateWithin: Name): Modifiers = Modifiers(flags, privateWithin, List())
+ def Modifiers(flags: FlagSet): Modifiers = Modifiers(flags, EmptyTypeName)
+
+ /** ... */
+ lazy val NoMods = Modifiers()
+}
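A small sketch of the `Modifiers` API with the runtime universe (flag names taken from `Flag`):
{{{
import scala.reflect.runtime.universe._

val mods = Modifiers(Flag.PRIVATE | Flag.MUTABLE)  // flags for a `private var`
mods.hasFlag(Flag.PRIVATE)                         // true
val none = NoMods                                  // same as Modifiers()
}}}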
diff --git a/src/reflect/scala/reflect/api/TypeCreator.scala b/src/reflect/scala/reflect/api/TypeCreator.scala
new file mode 100644
index 0000000000..2b3ef4320b
--- /dev/null
+++ b/src/reflect/scala/reflect/api/TypeCreator.scala
@@ -0,0 +1,26 @@
+package scala.reflect
+package api
+
+/** A mirror-aware factory for types.
+ *
+ * In the reflection API, artifacts are specific to universes and
+ * symbolic references used in artifacts (e.g. `scala.Int`) are resolved by mirrors.
+ *
+ * Therefore to build a type one needs to know a universe that the type is going to be bound to
+ * and a mirror that is going to resolve symbolic references (e.g. to determine that `scala.Int`
+ * points to a core class `Int` from scala-library.jar).
+ *
+ * `TypeCreator` implements this notion by providing a standalone type factory.
+ *
+ * This is immediately useful for type tags. When the compiler creates a type tag,
+ * the end result needs to make sense in any mirror. That's because the compiler knows
+ * the universe it's creating a type tag for (since `TypeTag` is path-dependent on a universe),
+ * but it cannot know in advance the mirror to instantiate the result in (e.g. on JVM
+ * it doesn't know what classloader to use to resolve symbolic names in the type tag).
+ *
+ * Due to a typechecker restriction (no eta-expansion for dependent method types),
+ * `TypeCreator` can't have a functional type, so it's implemented as class with an apply method.
+ */
+abstract class TypeCreator {
+ def apply[U <: Universe with Singleton](m: scala.reflect.api.Mirror[U]): U # Type
+}
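A hypothetical `TypeCreator` that always resolves to `Int` in whatever mirror it is handed, sketched against the API described above:
{{{
import scala.reflect.api.{Mirror, TypeCreator, Universe}

val intCreator = new TypeCreator {
  def apply[U <: Universe with Singleton](m: Mirror[U]): U # Type =
    m.universe.definitions.IntTpe   // taken from the given mirror's universe
}
}}}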
diff --git a/src/reflect/scala/reflect/api/TypeTags.scala b/src/reflect/scala/reflect/api/TypeTags.scala
new file mode 100644
index 0000000000..fc3f067a96
--- /dev/null
+++ b/src/reflect/scala/reflect/api/TypeTags.scala
@@ -0,0 +1,352 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala
+package reflect
+package api
+
+import java.lang.{ Class => jClass }
+import scala.language.implicitConversions
+
+/*
+ * TODO
+ * add @see to docs about universes
+ * [Eugene++] also mention sensitivity to prefixes, i.e. that rb.TypeTag is different from ru.TypeTag
+ * [Chris++] tag.in(some mirror) or expr.in(some mirror) (does not work for tag and exprs in macros)
+ * Backwards compat item1: [Eugene++] it might be useful, though, to guard against abstractness of the incoming type.
+ */
+/**
+ * A type tag encapsulates a representation of type T.
+ *
+ * Type tags replace the pre-2.10 concept of a [[scala.reflect.Manifest]] and are integrated with reflection.
+ *
+ * === Overview and examples ===
+ *
+ * Type tags are organized in a hierarchy of three classes:
+ * [[scala.reflect.ClassTag]], [[scala.reflect.api.Universe#TypeTag]] and [[scala.reflect.api.Universe#WeakTypeTag]].
+ *
+ * @see [[scala.reflect.ClassTag]], [[scala.reflect.api.Universe#TypeTag]], [[scala.reflect.api.Universe#WeakTypeTag]]
+ *
+ * Examples:
+ * {{{
+ * scala> class Person
+ * scala> class Container[T]
+ * scala> import scala.reflect.ClassTag
+ * scala> import scala.reflect.runtime.universe.TypeTag
+ * scala> import scala.reflect.runtime.universe.WeakTypeTag
+ * scala> def firstTypeArg( tag: WeakTypeTag[_] ) = (tag.tpe match {case TypeRef(_,_,typeArgs) => typeArgs})(0)
+ * }}}
+ * TypeTag contains concrete type arguments:
+ * {{{
+ * scala> firstTypeArg( implicitly[TypeTag[Container[Person]]] )
+ * res0: reflect.runtime.universe.Type = Person
+ * }}}
+ * TypeTag guarantees concrete type arguments (fails for references to unbound type arguments):
+ * {{{
+ * scala> def foo1[T] = implicitly[TypeTag[Container[T]]]
+ * <console>:11: error: No TypeTag available for Container[T]
+ * def foo1[T] = implicitly[TypeTag[Container[T]]]
+ * }}}
+ * WeakTypeTag allows references to unbound type arguments:
+ * {{{
+ * scala> def foo2[T] = firstTypeArg( implicitly[WeakTypeTag[Container[T]]] )
+ * foo2: [T]=> reflect.runtime.universe.Type
+ * scala> foo2[Person]
+ * res1: reflect.runtime.universe.Type = T
+ * }}}
+ * TypeTag allows unbound type arguments for which type tags are available:
+ * {{{
+ * scala> def foo3[T:TypeTag] = firstTypeArg( implicitly[TypeTag[Container[T]]] )
+ * foo3: [T](implicit evidence$1: reflect.runtime.universe.TypeTag[T])reflect.runtime.universe.Type
+ * scala> foo3[Person]
+ * res1: reflect.runtime.universe.Type = Person
+ * }}}
+ * WeakTypeTag contains concrete type arguments if available via existing tags:
+ * {{{
+ * scala> def foo4[T:WeakTypeTag] = firstTypeArg( implicitly[WeakTypeTag[Container[T]]] )
+ * foo4: [T](implicit evidence$1: reflect.runtime.universe.WeakTypeTag[T])reflect.runtime.universe.Type
+ * scala> foo4[Person]
+ * res1: reflect.runtime.universe.Type = Person
+ * }}}
+ *
+ *
+ * [[scala.reflect.api.Universe#TypeTag]] and [[scala.reflect.api.Universe#WeakTypeTag]] are path dependent on their universe.
+ *
+ * The default universe is [[scala.reflect.runtime.universe]]
+ *
+ * Type tags can be migrated to another universe given the corresponding mirror using
+ *
+ * {{{
+ * tag.in( other_mirror )
+ * }}}
+ *
+ * See [[scala.reflect.api.TypeTags#WeakTypeTag.in]]
+ *
+ * === WeakTypeTag vs TypeTag ===
+ *
+ * Be careful with WeakTypeTag, because it will reify types even if these types are abstract.
+ * This makes it easy to forget to tag one of the methods in the call chain and discover it much later at runtime
+ * by getting cryptic errors far away from their source. For example, consider the following snippet:
+ *
+ * {{{
+ * def bind[T: WeakTypeTag](name: String, value: T): IR.Result = bind((name, value))
+ * def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value)
+ * object NamedParam {
+ * implicit def namedValue[T: WeakTypeTag](name: String, x: T): NamedParam = apply(name, x)
+ * def apply[T: WeakTypeTag](name: String, x: T): NamedParam = new Typed[T](name, x)
+ * }
+ * }}}
+ *
+ * This fragment of the Scala REPL implementation defines a `bind` function that carries a named value along with its type
+ * into the heart of the REPL. Using a [[scala.reflect.api.Universe#WeakTypeTag]] here is reasonable, because it is desirable
+ * to work with all types, even if they are type parameters or abstract type members.
+ *
+ * However if any of the three `WeakTypeTag` context bounds is omitted, the resulting code will be incorrect,
+ * because the missing `WeakTypeTag` will be transparently generated by the compiler, carrying meaningless information.
+ * Most likely, this problem will manifest itself elsewhere, making debugging complicated.
+ * If `WeakTypeTag` context bounds were replaced with `TypeTag`, then such errors would be reported statically.
+ * But in that case we wouldn't be able to use `bind` in arbitrary contexts.
+ *
+ * === Backward compatibility with Manifests ===
+ *
+ * Type tags correspond loosely to manifests.
+ *
+ * More precisely:
+ * The previous notion of a [[scala.reflect.ClassManifest]] corresponds to a scala.reflect.ClassTag.
+ * The previous notion of a [[scala.reflect.Manifest]] corresponds to a scala.reflect.runtime.universe.TypeTag.
+ *
+ * In Scala 2.10, manifests are deprecated, so it's advisable to migrate them to tags,
+ * because manifests will probably be removed in the next major release.
+ *
+ * In most cases it will be enough to replace ClassManifest with ClassTag and Manifest with TypeTag.
+ * There are however a few caveats:
+ *
+ * 1) The notion of OptManifest is no longer supported. Tags can reify arbitrary types, so they are always available.
+ *
+ * 2) There's no equivalent for AnyValManifest. Consider comparing your tag with one of the base tags
+ * (defined in the corresponding companion objects) to find out whether it represents a primitive value class.
+ * You can also use `<tag>.tpe.typeSymbol.isPrimitiveValueClass` for that purpose (requires scala-reflect.jar).
+ *
+ * 3) There's no replacement for factory methods defined in `ClassManifest` and `Manifest` companion objects.
+ * Consider assembling corresponding types using the reflection APIs provided by Java (for classes) and Scala (for types).
+ *
+ * 4) Certain manifest functions (such as `<:<`, `>:>` and `typeArguments`) weren't included in the tag API.
+ * Consider using the reflection APIs provided by Java (for classes) and Scala (for types) instead.
+ */
+trait TypeTags { self: Universe =>
+
+ import definitions._
+
+ /**
+ * If an implicit value of type WeakTypeTag[T] is required, the compiler will create one.
+ * A reflective representation of T can be accessed via the tpe field.
+ * Components of T can be references to type parameters or abstract types. WeakTypeTag makes an effort to
+ * be as concrete as possible, i.e. if type tags are available for the referenced type arguments or abstract types,
+ * they are used to embed the concrete types into the WeakTypeTag. Otherwise the WeakTypeTag will contain a reference
+ * to an abstract type. This behavior can be useful when one expects T to be possibly partially abstract, but it
+ * requires special care to handle this case. If, however, T is expected to be fully known, use
+ * [[scala.reflect.api.Universe#TypeTag]] instead, which statically guarantees this property.
+ *
+ * @see [[scala.reflect.api.TypeTags]]
+ */
+ @annotation.implicitNotFound(msg = "No WeakTypeTag available for ${T}")
+ trait WeakTypeTag[T] extends Equals with Serializable {
+ /**
+ * Underlying mirror of this type tag.
+ */
+ val mirror: Mirror
+
+ /**
+ * Migrates the expression into another mirror, jumping into a different universe if necessary.
+ *
+ * Migration means that all symbolic references to classes/objects/packages in the expression
+ * will be re-resolved within the new mirror (typically using that mirror's classloader).
+ */
+ def in[U <: Universe with Singleton](otherMirror: scala.reflect.api.Mirror[U]): U # WeakTypeTag[T]
+
+ /**
+ * Reflective representation of type T.
+ */
+ def tpe: Type
+
+ // case class accessories
+ override def canEqual(x: Any) = x.isInstanceOf[WeakTypeTag[_]]
+ override def equals(x: Any) = x.isInstanceOf[WeakTypeTag[_]] && this.mirror == x.asInstanceOf[WeakTypeTag[_]].mirror && this.tpe == x.asInstanceOf[WeakTypeTag[_]].tpe
+ override def hashCode = mirror.hashCode * 31 + tpe.hashCode
+ override def toString = "WeakTypeTag[" + tpe + "]"
+ }
+
+ /**
+ * Type tags corresponding to primitive types and constructor/extractor for WeakTypeTags.
+ */
+ object WeakTypeTag {
+ val Byte : WeakTypeTag[scala.Byte] = TypeTag.Byte
+ val Short : WeakTypeTag[scala.Short] = TypeTag.Short
+ val Char : WeakTypeTag[scala.Char] = TypeTag.Char
+ val Int : WeakTypeTag[scala.Int] = TypeTag.Int
+ val Long : WeakTypeTag[scala.Long] = TypeTag.Long
+ val Float : WeakTypeTag[scala.Float] = TypeTag.Float
+ val Double : WeakTypeTag[scala.Double] = TypeTag.Double
+ val Boolean : WeakTypeTag[scala.Boolean] = TypeTag.Boolean
+ val Unit : WeakTypeTag[scala.Unit] = TypeTag.Unit
+ val Any : WeakTypeTag[scala.Any] = TypeTag.Any
+ val AnyVal : WeakTypeTag[scala.AnyVal] = TypeTag.AnyVal
+ val AnyRef : WeakTypeTag[scala.AnyRef] = TypeTag.AnyRef
+ val Object : WeakTypeTag[java.lang.Object] = TypeTag.Object
+ val Nothing : WeakTypeTag[scala.Nothing] = TypeTag.Nothing
+ val Null : WeakTypeTag[scala.Null] = TypeTag.Null
+
+
+ def apply[T](mirror1: scala.reflect.api.Mirror[self.type], tpec1: TypeCreator): WeakTypeTag[T] =
+ tpec1(mirror1) match {
+ case ByteTpe => WeakTypeTag.Byte.asInstanceOf[WeakTypeTag[T]]
+ case ShortTpe => WeakTypeTag.Short.asInstanceOf[WeakTypeTag[T]]
+ case CharTpe => WeakTypeTag.Char.asInstanceOf[WeakTypeTag[T]]
+ case IntTpe => WeakTypeTag.Int.asInstanceOf[WeakTypeTag[T]]
+ case LongTpe => WeakTypeTag.Long.asInstanceOf[WeakTypeTag[T]]
+ case FloatTpe => WeakTypeTag.Float.asInstanceOf[WeakTypeTag[T]]
+ case DoubleTpe => WeakTypeTag.Double.asInstanceOf[WeakTypeTag[T]]
+ case BooleanTpe => WeakTypeTag.Boolean.asInstanceOf[WeakTypeTag[T]]
+ case UnitTpe => WeakTypeTag.Unit.asInstanceOf[WeakTypeTag[T]]
+ case AnyTpe => WeakTypeTag.Any.asInstanceOf[WeakTypeTag[T]]
+ case AnyValTpe => WeakTypeTag.AnyVal.asInstanceOf[WeakTypeTag[T]]
+ case AnyRefTpe => WeakTypeTag.AnyRef.asInstanceOf[WeakTypeTag[T]]
+ case ObjectTpe => WeakTypeTag.Object.asInstanceOf[WeakTypeTag[T]]
+ case NothingTpe => WeakTypeTag.Nothing.asInstanceOf[WeakTypeTag[T]]
+ case NullTpe => WeakTypeTag.Null.asInstanceOf[WeakTypeTag[T]]
+ case _ => new WeakTypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
+ }
+
+ def unapply[T](ttag: WeakTypeTag[T]): Option[Type] = Some(ttag.tpe)
+ }
+
+ private class WeakTypeTagImpl[T](val mirror: Mirror, val tpec: TypeCreator) extends WeakTypeTag[T] {
+ lazy val tpe: Type = tpec(mirror)
+ def in[U <: Universe with Singleton](otherMirror: scala.reflect.api.Mirror[U]): U # WeakTypeTag[T] = {
+ val otherMirror1 = otherMirror.asInstanceOf[scala.reflect.api.Mirror[otherMirror.universe.type]]
+ otherMirror.universe.WeakTypeTag[T](otherMirror1, tpec)
+ }
+ private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = false)
+ }
+
+ /**
+ * A `TypeTag` is a [[scala.reflect.api.Universe#WeakTypeTag]] with the additional
+ * static guarantee that all type references are concrete, i.e. it does <b>not</b> contain any references to
+ * unresolved type parameters or abstract types.
+ *
+ * @see [[scala.reflect.api.TypeTags]]
+ */
+ @annotation.implicitNotFound(msg = "No TypeTag available for ${T}")
+ trait TypeTag[T] extends WeakTypeTag[T] with Equals with Serializable {
+ /**
+ * @inheritdoc
+ */
+ override def in[U <: Universe with Singleton](otherMirror: scala.reflect.api.Mirror[U]): U # TypeTag[T]
+
+ // case class accessories
+ override def canEqual(x: Any) = x.isInstanceOf[TypeTag[_]]
+ override def equals(x: Any) = x.isInstanceOf[TypeTag[_]] && this.mirror == x.asInstanceOf[TypeTag[_]].mirror && this.tpe == x.asInstanceOf[TypeTag[_]].tpe
+ override def hashCode = mirror.hashCode * 31 + tpe.hashCode
+ override def toString = "TypeTag[" + tpe + "]"
+ }
+
+ object TypeTag {
+ val Byte: TypeTag[scala.Byte] = new PredefTypeTag[scala.Byte] (ByteTpe, _.TypeTag.Byte)
+ val Short: TypeTag[scala.Short] = new PredefTypeTag[scala.Short] (ShortTpe, _.TypeTag.Short)
+ val Char: TypeTag[scala.Char] = new PredefTypeTag[scala.Char] (CharTpe, _.TypeTag.Char)
+ val Int: TypeTag[scala.Int] = new PredefTypeTag[scala.Int] (IntTpe, _.TypeTag.Int)
+ val Long: TypeTag[scala.Long] = new PredefTypeTag[scala.Long] (LongTpe, _.TypeTag.Long)
+ val Float: TypeTag[scala.Float] = new PredefTypeTag[scala.Float] (FloatTpe, _.TypeTag.Float)
+ val Double: TypeTag[scala.Double] = new PredefTypeTag[scala.Double] (DoubleTpe, _.TypeTag.Double)
+ val Boolean: TypeTag[scala.Boolean] = new PredefTypeTag[scala.Boolean] (BooleanTpe, _.TypeTag.Boolean)
+ val Unit: TypeTag[scala.Unit] = new PredefTypeTag[scala.Unit] (UnitTpe, _.TypeTag.Unit)
+ val Any: TypeTag[scala.Any] = new PredefTypeTag[scala.Any] (AnyTpe, _.TypeTag.Any)
+ val AnyVal: TypeTag[scala.AnyVal] = new PredefTypeTag[scala.AnyVal] (AnyValTpe, _.TypeTag.AnyVal)
+ val AnyRef: TypeTag[scala.AnyRef] = new PredefTypeTag[scala.AnyRef] (AnyRefTpe, _.TypeTag.AnyRef)
+ val Object: TypeTag[java.lang.Object] = new PredefTypeTag[java.lang.Object] (ObjectTpe, _.TypeTag.Object)
+ val Nothing: TypeTag[scala.Nothing] = new PredefTypeTag[scala.Nothing] (NothingTpe, _.TypeTag.Nothing)
+ val Null: TypeTag[scala.Null] = new PredefTypeTag[scala.Null] (NullTpe, _.TypeTag.Null)
+
+ def apply[T](mirror1: scala.reflect.api.Mirror[self.type], tpec1: TypeCreator): TypeTag[T] =
+ tpec1(mirror1) match {
+ case ByteTpe => TypeTag.Byte.asInstanceOf[TypeTag[T]]
+ case ShortTpe => TypeTag.Short.asInstanceOf[TypeTag[T]]
+ case CharTpe => TypeTag.Char.asInstanceOf[TypeTag[T]]
+ case IntTpe => TypeTag.Int.asInstanceOf[TypeTag[T]]
+ case LongTpe => TypeTag.Long.asInstanceOf[TypeTag[T]]
+ case FloatTpe => TypeTag.Float.asInstanceOf[TypeTag[T]]
+ case DoubleTpe => TypeTag.Double.asInstanceOf[TypeTag[T]]
+ case BooleanTpe => TypeTag.Boolean.asInstanceOf[TypeTag[T]]
+ case UnitTpe => TypeTag.Unit.asInstanceOf[TypeTag[T]]
+ case AnyTpe => TypeTag.Any.asInstanceOf[TypeTag[T]]
+ case AnyValTpe => TypeTag.AnyVal.asInstanceOf[TypeTag[T]]
+ case AnyRefTpe => TypeTag.AnyRef.asInstanceOf[TypeTag[T]]
+ case ObjectTpe => TypeTag.Object.asInstanceOf[TypeTag[T]]
+ case NothingTpe => TypeTag.Nothing.asInstanceOf[TypeTag[T]]
+ case NullTpe => TypeTag.Null.asInstanceOf[TypeTag[T]]
+ case _ => new TypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
+ }
+
+ def unapply[T](ttag: TypeTag[T]): Option[Type] = Some(ttag.tpe)
+ }
+
+ private class TypeTagImpl[T](mirror: Mirror, tpec: TypeCreator) extends WeakTypeTagImpl[T](mirror, tpec) with TypeTag[T] {
+ override def in[U <: Universe with Singleton](otherMirror: scala.reflect.api.Mirror[U]): U # TypeTag[T] = {
+ val otherMirror1 = otherMirror.asInstanceOf[scala.reflect.api.Mirror[otherMirror.universe.type]]
+ otherMirror.universe.TypeTag[T](otherMirror1, tpec)
+ }
+ private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = true)
+ }
+
+ private class PredefTypeCreator[T](copyIn: Universe => Universe#TypeTag[T]) extends TypeCreator {
+ def apply[U <: Universe with Singleton](m: scala.reflect.api.Mirror[U]): U # Type = {
+ copyIn(m.universe).asInstanceOf[U # TypeTag[T]].tpe
+ }
+ }
+
+ private class PredefTypeTag[T](_tpe: Type, copyIn: Universe => Universe#TypeTag[T]) extends TypeTagImpl[T](rootMirror, new PredefTypeCreator(copyIn)) {
+ override lazy val tpe: Type = _tpe
+ private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = true)
+ }
+
+ /**
+ * Shortcut for `implicitly[WeakTypeTag[T]]`
+ */
+ def weakTypeTag[T](implicit attag: WeakTypeTag[T]) = attag
+
+ /**
+ * Shortcut for `implicitly[TypeTag[T]]`
+ */
+ def typeTag[T](implicit ttag: TypeTag[T]) = ttag
+
+ // big thanks to Viktor Klang for this brilliant idea!
+ /**
+ * Shortcut for `implicitly[WeakTypeTag[T]].tpe`
+ */
+ def weakTypeOf[T](implicit attag: WeakTypeTag[T]): Type = attag.tpe
+
+ /**
+ * Shortcut for `implicitly[TypeTag[T]].tpe`
+ */
+ def typeOf[T](implicit ttag: TypeTag[T]): Type = ttag.tpe
+}
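The shortcuts in action, as a sketch with the runtime universe:
{{{
import scala.reflect.runtime.universe._

typeOf[List[Int]]                         // Type for List[Int]
def tagOf[T: WeakTypeTag] = weakTypeOf[T]
tagOf[Option[String]]                     // Option[String]
}}}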
+
+private[scala] class SerializedTypeTag(var tpec: TypeCreator, var concrete: Boolean) extends Serializable {
+ private def writeObject(out: java.io.ObjectOutputStream): Unit = {
+ out.writeObject(tpec)
+ out.writeBoolean(concrete)
+ }
+
+ private def readObject(in: java.io.ObjectInputStream): Unit = {
+ tpec = in.readObject().asInstanceOf[TypeCreator]
+ concrete = in.readBoolean()
+ }
+
+ private def readResolve(): AnyRef = {
+ import scala.reflect.runtime.universe._
+ if (concrete) TypeTag(rootMirror, tpec)
+ else WeakTypeTag(rootMirror, tpec)
+ }
+}
diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala
index 199cf9b9e5..af70c9e761 100644
--- a/src/reflect/scala/reflect/api/Types.scala
+++ b/src/reflect/scala/reflect/api/Types.scala
@@ -1,13 +1,39 @@
package scala.reflect
package api
-trait Types extends base.Types { self: Universe =>
+/**
+ * Defines the type hierarchy for types.
+ *
+ * Note: Because of implementation details, some type factories have return type `Type`
+ * instead of a more precise type.
+ *
+ * @see [[scala.reflect]] for a description on how the class hierarchy is encoded here.
+ */
+trait Types { self: Universe =>
+
+ /** The type of Scala types, and also Scala type signatures.
+ * (No difference is internally made between the two).
+ */
+ type Type >: Null <: TypeApi
+
+ /** A tag that preserves the identity of the `Type` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val TypeTagg: ClassTag[Type]
+
+ /** This constant is used as a special value that indicates that no meaningful type exists.
+ */
+ val NoType: Type
- override type Type >: Null <: TypeApi
+ /** This constant is used as a special value denoting the empty prefix in a path dependent type.
+ * For instance `x.type` is represented as `SingleType(NoPrefix, <x>)`, where `<x>` stands for
+ * the symbol for `x`.
+ */
+ val NoPrefix: Type
- /** The extended API of types
+ /** The API of types
*/
- abstract class TypeApi extends TypeBase {
+ abstract class TypeApi {
/** The term symbol associated with the type, or `NoSymbol` for types
* that do not refer to a term symbol.
*/
@@ -66,6 +92,10 @@ trait Types extends base.Types { self: Universe =>
/** Does this type conform to given type argument `that`? */
def <:< (that: Type): Boolean
+ /** Is this type a weak subtype of that type? True also for numeric types, i.e. Int weak_<:< Long.
+ */
+ def weak_<:<(that: Type): Boolean
+
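For instance, with the runtime universe:
{{{
import scala.reflect.runtime.universe._

typeOf[Int] weak_<:< typeOf[Long]   // true: numeric widening counts
typeOf[Int] <:< typeOf[Long]        // false: Int is not a subtype of Long
}}}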
/** Is this type equivalent to given type argument `that`? */
def =:= (that: Type): Boolean
@@ -117,15 +147,6 @@ trait Types extends base.Types { self: Universe =>
*/
def widen: Type
- /** Map to a singleton type which is a subtype of this type.
- * The fallback implemented here gives:
- * {{{
- * T.narrow = (T {}).this.type
- * }}}
- * Overridden where we know more about where types come from.
- */
- def narrow: Type
-
/******************* helpers *******************/
/** Substitute symbols in `to` for corresponding occurrences of references to
@@ -155,47 +176,179 @@ trait Types extends base.Types { self: Universe =>
/** Does this type contain a reference to given symbol? */
def contains(sym: Symbol): Boolean
-
- /** The string discriminator of this type; useful for debugging */
- def kind: String
}
- /** .. */
- override type ThisType >: Null <: SingletonType with ThisTypeApi
+ /** The type of Scala singleton types, i.e., types that are inhabited
+ * by only one non-null value. These include types of the forms
+ * {{{
+ * C.this.type
+ * C.super.type
+ * x.type
+ * }}}
+ * as well as [[ConstantType constant types]].
+ */
+ type SingletonType >: Null <: Type
+
+ /** A tag that preserves the identity of the `SingletonType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val SingletonTypeTag: ClassTag[SingletonType]
+
+ /** A singleton type that describes types of the form on the left with the
+ * corresponding `ThisType` representation to the right:
+ * {{{
+ * C.this.type ThisType(C)
+ * }}}
+ */
+ type ThisType >: Null <: AnyRef with SingletonType with ThisTypeApi
+
+ /** A tag that preserves the identity of the `ThisType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ThisTypeTag: ClassTag[ThisType]
+
+ /** The constructor/deconstructor for `ThisType` instances. */
+ val ThisType: ThisTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax `ThisType(sym)`
+ * where `sym` is the class prefix of the this type.
+ */
+ abstract class ThisTypeExtractor {
+ /**
+ * Creates a ThisType from the given class symbol.
+ */
+ def apply(sym: Symbol): Type
+ def unapply(tpe: ThisType): Option[Symbol]
+ }
/** The API that all this types support */
trait ThisTypeApi extends TypeApi { this: ThisType =>
val sym: Symbol
}
- /** .. */
- override type SingleType >: Null <: SingletonType with SingleTypeApi
+ /** The `SingleType` type describes types of any of the forms on the left,
+ * with their `SingleType` representations to the right.
+ * {{{
+ * (T # x).type SingleType(T, x)
+ * p.x.type SingleType(p.type, x)
+ * x.type SingleType(NoPrefix, x)
+ * }}}
+ */
+ type SingleType >: Null <: AnyRef with SingletonType with SingleTypeApi
+
+ /** A tag that preserves the identity of the `SingleType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val SingleTypeTag: ClassTag[SingleType]
+
+ /** The constructor/deconstructor for `SingleType` instances. */
+ val SingleType: SingleTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax `SingleType(pre, sym)`
+ * Here, `pre` is the prefix of the single-type, and `sym` is the stable value symbol
+ * referred to by the single-type.
+ */
+ abstract class SingleTypeExtractor {
+ def apply(pre: Type, sym: Symbol): Type // not SingleType because of implementation details
+ def unapply(tpe: SingleType): Option[(Type, Symbol)]
+ }
/** The API that all single types support */
trait SingleTypeApi extends TypeApi { this: SingleType =>
val pre: Type
val sym: Symbol
}
+ /** The `SuperType` type is not directly written, but arises when `C.super` is used
+ * as a prefix in a `TypeRef` or `SingleType`. Its internal representation is
+ * {{{
+ * SuperType(thistpe, supertpe)
+ * }}}
+ * Here, `thistpe` is the type of the corresponding this-type. For instance,
+ * in the type arising from C.super, the `thistpe` part would be `ThisType(C)`.
+ * `supertpe` is the type of the super class referred to by the `super`.
+ */
+ type SuperType >: Null <: AnyRef with SingletonType with SuperTypeApi
- /** .. */
- override type SuperType >: Null <: SingletonType with SuperTypeApi
+ /** A tag that preserves the identity of the `SuperType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val SuperTypeTag: ClassTag[SuperType]
+
+ /** The constructor/deconstructor for `SuperType` instances. */
+ val SuperType: SuperTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax `SuperType(thistpe, supertpe)`
+ */
+ abstract class SuperTypeExtractor {
+ def apply(thistpe: Type, supertpe: Type): Type // not SuperType because of implementation details
+ def unapply(tpe: SuperType): Option[(Type, Type)]
+ }
/** The API that all super types support */
trait SuperTypeApi extends TypeApi { this: SuperType =>
val thistpe: Type
val supertpe: Type
}
+ /** The `ConstantType` type is not directly written in user programs, but arises as the type of a constant.
+ * The REPL expresses constant types like `Int(11)`. Here are some constants with their types:
+ * {{{
+ * 1 ConstantType(Constant(1))
+ * "abc" ConstantType(Constant("abc"))
+ * }}}
+ */
+ type ConstantType >: Null <: AnyRef with SingletonType with ConstantTypeApi
+
+ /** A tag that preserves the identity of the `ConstantType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ConstantTypeTag: ClassTag[ConstantType]
- /** .. */
- override type ConstantType >: Null <: SingletonType with ConstantTypeApi
+ /** The constructor/deconstructor for `ConstantType` instances. */
+ val ConstantType: ConstantTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax `ConstantType(constant)`
+ * Here, `constant` is the constant value represented by the type.
+ */
+ abstract class ConstantTypeExtractor {
+ def apply(value: Constant): ConstantType
+ def unapply(tpe: ConstantType): Option[Constant]
+ }
/** The API that all constant types support */
trait ConstantTypeApi extends TypeApi { this: ConstantType =>
val value: Constant
}
- /** .. */
- override type TypeRef >: Null <: Type with TypeRefApi
+ /** The `TypeRef` type describes types of any of the forms on the left,
+ * with their TypeRef representations to the right.
+ * {{{
+ * T # C[T_1, ..., T_n] TypeRef(T, C, List(T_1, ..., T_n))
+ * p.C[T_1, ..., T_n] TypeRef(p.type, C, List(T_1, ..., T_n))
+ * C[T_1, ..., T_n] TypeRef(NoPrefix, C, List(T_1, ..., T_n))
+ * T # C TypeRef(T, C, Nil)
+ * p.C TypeRef(p.type, C, Nil)
+ * C TypeRef(NoPrefix, C, Nil)
+ * }}}
+ */
+ type TypeRef >: Null <: AnyRef with Type with TypeRefApi
+
+ /** A tag that preserves the identity of the `TypeRef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val TypeRefTag: ClassTag[TypeRef]
+
+ /** The constructor/deconstructor for `TypeRef` instances. */
+ val TypeRef: TypeRefExtractor
+
+ /** An extractor class to create and pattern match with syntax `TypeRef(pre, sym, args)`
+ * Here, `pre` is the prefix of the type reference, `sym` is the symbol
+ * referred to by the type reference, and `args` is a possibly empty list of
+ * type arguments.
+ */
+ abstract class TypeRefExtractor {
+ def apply(pre: Type, sym: Symbol, args: List[Type]): Type // not TypeRef because of implementation details
+ def unapply(tpe: TypeRef): Option[(Type, Symbol, List[Type])]
+ }
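A quick sketch of deconstructing a `TypeRef` with the runtime universe:
{{{
import scala.reflect.runtime.universe._

val TypeRef(pre, sym, args) = typeOf[List[Int]]
// sym is the class symbol of List, args == List(Int)
}}}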
/** The API that all type refs support */
trait TypeRefApi extends TypeApi { this: TypeRef =>
@@ -204,8 +357,46 @@ trait Types extends base.Types { self: Universe =>
val args: List[Type]
}
- /** .. */
- override type RefinedType >: Null <: CompoundType with RefinedTypeApi
+ /** A subtype of Type representing refined types as well as `ClassInfo` signatures.
+ */
+ type CompoundType >: Null <: AnyRef with Type
+
+ /** A tag that preserves the identity of the `CompoundType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val CompoundTypeTag: ClassTag[CompoundType]
+
+ /** The `RefinedType` type defines types of any of the forms on the left,
+ * with their RefinedType representations to the right.
+ * {{{
+ * P_1 with ... with P_m { D_1; ...; D_n} RefinedType(List(P_1, ..., P_m), Scope(D_1, ..., D_n))
+ * P_1 with ... with P_m RefinedType(List(P_1, ..., P_m), Scope())
+ * { D_1; ...; D_n} RefinedType(List(AnyRef), Scope(D_1, ..., D_n))
+ * }}}
+ */
+ type RefinedType >: Null <: AnyRef with CompoundType with RefinedTypeApi
+
+ /** A tag that preserves the identity of the `RefinedType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val RefinedTypeTag: ClassTag[RefinedType]
+
+ /** The constructor/deconstructor for `RefinedType` instances. */
+ val RefinedType: RefinedTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax `RefinedType(parents, decls)`
+ * Here, `parents` is the list of parent types of the class, and `decls` is the scope
+ * containing all declarations in the class.
+ */
+ abstract class RefinedTypeExtractor {
+ def apply(parents: List[Type], decls: Scope): RefinedType
+
+ /** An alternative constructor that passes in the synthetic class symbol
+ * that backs the refined type. (Normally, a fresh class symbol is created automatically).
+ */
+ def apply(parents: List[Type], decls: Scope, clazz: Symbol): RefinedType
+ def unapply(tpe: RefinedType): Option[(List[Type], Scope)]
+ }
/** The API that all refined types support */
trait RefinedTypeApi extends TypeApi { this: RefinedType =>
@@ -213,8 +404,35 @@ trait Types extends base.Types { self: Universe =>
val decls: Scope
}
- /** .. */
- override type ClassInfoType >: Null <: CompoundType with ClassInfoTypeApi
+ /** The `ClassInfo` type signature is used to define parents and declarations
+ * of classes, traits, and objects. If a class, trait, or object C is declared like this
+ * {{{
+ * C extends P_1 with ... with P_m { D_1; ...; D_n}
+ * }}}
+ * its `ClassInfo` type has the following form:
+ * {{{
+ * ClassInfo(List(P_1, ..., P_m), Scope(D_1, ..., D_n), C)
+ * }}}
+ */
+ type ClassInfoType >: Null <: AnyRef with CompoundType with ClassInfoTypeApi
+
+ /** A tag that preserves the identity of the `ClassInfoType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ClassInfoTypeTag: ClassTag[ClassInfoType]
+
+ /** The constructor/deconstructor for `ClassInfoType` instances. */
+ val ClassInfoType: ClassInfoTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax `ClassInfo(parents, decls, clazz)`
+ * Here, `parents` is the list of parent types of the class, `decls` is the scope
+ * containing all declarations in the class, and `clazz` is the symbol of the class
+ * itself.
+ */
+ abstract class ClassInfoTypeExtractor {
+ def apply(parents: List[Type], decls: Scope, typeSymbol: Symbol): ClassInfoType
+ def unapply(tpe: ClassInfoType): Option[(List[Type], Scope, Symbol)]
+ }
/** The API that all class info types support */
trait ClassInfoTypeApi extends TypeApi { this: ClassInfoType =>
@@ -223,8 +441,36 @@ trait Types extends base.Types { self: Universe =>
val typeSymbol: Symbol
}
- /** .. */
- override type MethodType >: Null <: Type with MethodTypeApi
+ /** The `MethodType` type signature is used to indicate parameters and result type of a method
+ */
+ type MethodType >: Null <: AnyRef with Type with MethodTypeApi
+
+ /** A tag that preserves the identity of the `MethodType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val MethodTypeTag: ClassTag[MethodType]
+
+ /** The constructor/deconstructor for `MethodType` instances. */
+ val MethodType: MethodTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax `MethodType(params, restpe)`
+ * Here, `params` is a potentially empty list of parameter symbols of the method,
+ * and `restpe` is the result type of the method. If the method is curried, `restpe` would
+ * be another `MethodType`.
+ * Note: `MethodType(Nil, Int)` would be the type of a method defined with an empty parameter list.
+ * {{{
+ * def f(): Int
+ * }}}
+ * If the method is completely parameterless, as in
+ * {{{
+ * def f: Int
+ * }}}
+ * its type is a `NullaryMethodType`.
+ */
+ abstract class MethodTypeExtractor {
+ def apply(params: List[Symbol], resultType: Type): MethodType
+ def unapply(tpe: MethodType): Option[(List[Symbol], Type)]
+ }
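A sketch of telling the two shapes apart by inspecting a method symbol's signature (runtime universe; `String.length` picked as an arbitrary example):
{{{
import scala.reflect.runtime.universe._

typeOf[String].member(newTermName("length")).typeSignature match {
  case MethodType(params, restpe) => (params.size, restpe)  // (0, Int) for `length()`
  case NullaryMethodType(restpe)  => (0, restpe)            // parameterless `def`
}
}}}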
/** The API that all method types support */
trait MethodTypeApi extends TypeApi { this: MethodType =>
@@ -232,16 +478,53 @@ trait Types extends base.Types { self: Universe =>
val resultType: Type
}
- /** .. */
- override type NullaryMethodType >: Null <: Type with NullaryMethodTypeApi
+ /** The `NullaryMethodType` type signature is used for parameterless methods
+ * with declarations of the form `def foo: T`
+ */
+ type NullaryMethodType >: Null <: AnyRef with Type with NullaryMethodTypeApi
+
+ /** A tag that preserves the identity of the `NullaryMethodType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val NullaryMethodTypeTag: ClassTag[NullaryMethodType]
+
+ /** The constructor/deconstructor for `NullaryMethodType` instances. */
+ val NullaryMethodType: NullaryMethodTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax `NullaryMethodType(resultType)`.
+ * Here, `resultType` is the result type of the parameterless method.
+ */
+ abstract class NullaryMethodTypeExtractor {
+ def apply(resultType: Type): NullaryMethodType
+ def unapply(tpe: NullaryMethodType): Option[(Type)]
+ }
/** The API that all nullary method types support */
trait NullaryMethodTypeApi extends TypeApi { this: NullaryMethodType =>
val resultType: Type
}
- /** .. */
- override type PolyType >: Null <: Type with PolyTypeApi
+ /** The `PolyType` type signature is used for polymorphic methods
+ * that have at least one type parameter.
+ */
+ type PolyType >: Null <: AnyRef with Type with PolyTypeApi
+
+ /** A tag that preserves the identity of the `PolyType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val PolyTypeTag: ClassTag[PolyType]
+
+ /** The constructor/deconstructor for `PolyType` instances. */
+ val PolyType: PolyTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax `PolyType(typeParams, resultType)`.
+ * Here, `typeParams` are the type parameters of the method and `resultType`
+ * is the type signature following the type parameters.
+ */
+ abstract class PolyTypeExtractor {
+ def apply(typeParams: List[Symbol], resultType: Type): PolyType
+ def unapply(tpe: PolyType): Option[(List[Symbol], Type)]
+ }
/** The API that all polymorphic types support */
trait PolyTypeApi extends TypeApi { this: PolyType =>
@@ -249,8 +532,28 @@ trait Types extends base.Types { self: Universe =>
val resultType: Type
}
- /** .. */
- override type ExistentialType >: Null <: Type with ExistentialTypeApi
+ /** The `ExistentialType` type signature is used for existential types and
+ * wildcard types.
+ */
+ type ExistentialType >: Null <: AnyRef with Type with ExistentialTypeApi
+
+ /** A tag that preserves the identity of the `ExistentialType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val ExistentialTypeTag: ClassTag[ExistentialType]
+
+ /** The constructor/deconstructor for `ExistentialType` instances. */
+ val ExistentialType: ExistentialTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax
+ * `ExistentialType(quantified, underlying)`.
+ * Here, `quantified` are the type variables bound by the existential type and `underlying`
+ * is the type that's existentially quantified.
+ */
+ abstract class ExistentialTypeExtractor {
+ def apply(quantified: List[Symbol], underlying: Type): ExistentialType
+ def unapply(tpe: ExistentialType): Option[(List[Symbol], Type)]
+ }
/** The API that all existential types support */
trait ExistentialTypeApi extends TypeApi { this: ExistentialType =>
@@ -258,18 +561,64 @@ trait Types extends base.Types { self: Universe =>
val underlying: Type
}
- /** .. */
- override type AnnotatedType >: Null <: Type with AnnotatedTypeApi
+ /** The `AnnotatedType` type signature is used for annotated types of the
+ * form `<type> @<annotation>`.
+ */
+ type AnnotatedType >: Null <: AnyRef with Type with AnnotatedTypeApi
+
+ /** A tag that preserves the identity of the `AnnotatedType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val AnnotatedTypeTag: ClassTag[AnnotatedType]
+
+ /** The constructor/deconstructor for `AnnotatedType` instances. */
+ val AnnotatedType: AnnotatedTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax
+ * `AnnotatedType(annotations, underlying, selfsym)`.
+ * Here, `annotations` are the annotations decorating the underlying type `underlying`.
+ * `selfsym` is a symbol representing the annotated type itself.
+ */
+ abstract class AnnotatedTypeExtractor {
+ def apply(annotations: List[Annotation], underlying: Type, selfsym: Symbol): AnnotatedType
+ def unapply(tpe: AnnotatedType): Option[(List[Annotation], Type, Symbol)]
+ }
/** The API that all annotated types support */
trait AnnotatedTypeApi extends TypeApi { this: AnnotatedType =>
- val annotations: List[AnnotationInfo]
+ val annotations: List[Annotation]
val underlying: Type
val selfsym: Symbol
}
- /** .. */
- override type TypeBounds >: Null <: Type with TypeBoundsApi
+ /** The `TypeBounds` type signature is used to indicate lower and upper type bounds
+ * of type parameters and abstract types. It is not a first-class type.
+ * If an abstract type or type parameter is declared with any of the forms
+ * on the left, its type signature is the TypeBounds type on the right.
+ * {{{
+ * T >: L <: U TypeBounds(L, U)
+ * T >: L TypeBounds(L, Any)
+ * T <: U TypeBounds(Nothing, U)
+ * }}}
+ */
+ type TypeBounds >: Null <: AnyRef with Type with TypeBoundsApi
+
+ /** A tag that preserves the identity of the `TypeBounds` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val TypeBoundsTag: ClassTag[TypeBounds]
+
+ /** The constructor/deconstructor for `TypeBounds` instances. */
+ val TypeBounds: TypeBoundsExtractor
+
+ /** An extractor class to create and pattern match with syntax `TypeBounds(lo, hi)`
+ * Here, `lo` is the lower bound of the `TypeBounds` pair, and `hi` is
+ * the upper bound.
+ */
+ abstract class TypeBoundsExtractor {
+ def apply(lo: Type, hi: Type): TypeBounds
+ def unapply(tpe: TypeBounds): Option[(Type, Type)]
+ }
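For instance, the bounds of a declaration `type T <: AnyRef`, sketched with the runtime universe:
{{{
import scala.reflect.runtime.universe._

val bounds = TypeBounds(typeOf[Nothing], typeOf[AnyRef])
val TypeBounds(lo, hi) = bounds   // lo == Nothing, hi == AnyRef
}}}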
/** The API that all type bounds support */
trait TypeBoundsApi extends TypeApi { this: TypeBounds =>
@@ -277,8 +626,38 @@ trait Types extends base.Types { self: Universe =>
val hi: Type
}
- /** .. */
- override type BoundedWildcardType >: Null <: Type with BoundedWildcardTypeApi
+ /** An object representing an unknown type, used during type inference.
+ * If you see WildcardType outside of inference it is almost certainly a bug.
+ */
+ val WildcardType: Type
+
+ /** BoundedWildcardTypes, used only during type inference, are created in
+ * two places:
+ *
+ * 1. If the expected type of an expression is an existential type,
+ * its hidden symbols are replaced with bounded wildcards.
+ * 2. When an implicit conversion is being sought based in part on
+ * the name of a method in the converted type, a HasMethodMatching
+ * type is created: a MethodType with parameters typed as
+ * BoundedWildcardTypes.
+ */
+ type BoundedWildcardType >: Null <: AnyRef with Type with BoundedWildcardTypeApi
+
+ /** A tag that preserves the identity of the `BoundedWildcardType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ */
+ implicit val BoundedWildcardTypeTag: ClassTag[BoundedWildcardType]
+
+ /** The constructor/deconstructor for `BoundedWildcardType` instances. */
+ val BoundedWildcardType: BoundedWildcardTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax `BoundedWildcardType(bounds)`
+ * with `bounds` denoting the type bounds.
+ */
+ abstract class BoundedWildcardTypeExtractor {
+ def apply(bounds: TypeBounds): BoundedWildcardType
+ def unapply(tpe: BoundedWildcardType): Option[TypeBounds]
+ }
/** The API that all bounded wildcard types support */
trait BoundedWildcardTypeApi extends TypeApi { this: BoundedWildcardType =>
@@ -293,7 +672,6 @@ trait Types extends base.Types { self: Universe =>
// Creators ---------------------------------------------------------------
// too useful and too non-trivial to be left out of public API
- // [Eugene to Paul] needs review!
/** The canonical creator for single-types */
def singleType(pre: Type, sym: Symbol): Type
diff --git a/src/reflect/scala/reflect/api/Universe.scala b/src/reflect/scala/reflect/api/Universe.scala
index 3dce0f218e..7d0f6cf0d6 100644
--- a/src/reflect/scala/reflect/api/Universe.scala
+++ b/src/reflect/scala/reflect/api/Universe.scala
@@ -1,17 +1,82 @@
package scala.reflect
package api
-abstract class Universe extends base.Universe
- with Symbols
+abstract class Universe extends Symbols
with Types
with FlagSets
+ with Scopes
with Names
with Trees
- with Printers
with Constants
+ with Annotations
with Positions
- with Mirrors
+ with Exprs
+ with TypeTags
+ with TagInterop
with StandardDefinitions
with StandardNames
+ with BuildUtils
+ with Mirrors
+ with Printers
with Importers
- with AnnotationInfos
+{
+ /** Produce the abstract syntax tree representing the given Scala expression.
+ *
+ * For example
+ *
+ * {{{
+ * val five = reify{ 5 } // Literal(Constant(5))
+ * reify{ 2 + 4 } // Apply( Select( Literal(Constant(2)), newTermName("$plus")), List( Literal(Constant(4)) ) )
+ * reify{ five.splice + 4 } // Apply( Select( Literal(Constant(5)), newTermName("$plus")), List( Literal(Constant(4)) ) )
+ * }}}
+ *
+ * The produced tree is path-dependent on the Universe that `reify` was called from.
+ *
+ * Use [[scala.reflect.api.Exprs#Expr.splice]] to embed an existing expression into a reify call. Use [[Expr]] to turn a [[Tree]] into an expression that can be spliced.
+ *
+ * == Further info and implementation details ==
+ *
+ * `reify` is implemented as a macro, which, given an expression, generates a tree that, when compiled and executed, produces the original tree.
+ *
+ * For instance, in `reify{ x + 1 }`, the macro `reify` receives the abstract syntax tree of `x + 1` as its argument, which is
+ *
+ * {{{
+ * Apply(Select(Ident("x"), "+"), List(Literal(Constant(1))))
+ * }}}
+ *
+ * and returns a tree which, when compiled and executed, produces the tree above. In other words, the reify call expands to something like
+ *
+ * {{{
+ * val $u: u.type = u // where u is a reference to the Universe that calls the reify
+ * $u.Expr[Int]($u.Apply($u.Select($u.Ident($u.newFreeVar("x", <Int>, x), "+"), List($u.Literal($u.Constant(1))))))
+ * }}}
+ *
+ * ------
+ *
+ * Reification performs expression splicing (when processing Expr.splice)
+ * and type splicing (for every type T that has a TypeTag[T] implicit in scope):
+ *
+ * {{{
+ * val two = mirror.reify(2) // Literal(Constant(2))
+ * val four = mirror.reify(two.splice + two.splice) // Apply(Select(two.tree, newTermName("$plus")), List(two.tree))
+ *
+ * def macroImpl[T](c: Context) = {
+ * ...
+ * // T here is just a type parameter, so the tree produced by reify won't be of much use in a macro expansion
+ * // however, if T were annotated with c.WeakTypeTag (which would declare an implicit parameter for macroImpl)
+ * // then reification would substitute T with the TypeTree that was used in a TypeApply of this particular macro invocation
+ * val factory = c.reify{ new Queryable[T] }
+ * ...
+ * }
+ * }}}
+ *
+ * The transformation looks mostly straightforward, but it has its tricky parts:
+ * - the Reifier retains symbols and types defined outside the reified tree; locally defined
+ * entities, however, get erased and replaced with their original trees
+ * - Free variables are detected and wrapped in symbols of the type `FreeTermSymbol` or `FreeTypeSymbol`
+ * - Mutable variables that are accessed from a local function are wrapped in refs
+ */
+ // implementation is hardwired to `scala.reflect.reify.Taggers`
+ // using the mechanism implemented in `scala.tools.reflect.FastTrack`
+ def reify[T](expr: T): Expr[T] = ??? // macro
+} \ No newline at end of file
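As a quick, self-contained check of the splicing behaviour documented above (assuming the runtime universe; `showRaw` merely prints the tree structure):

    import scala.reflect.runtime.universe._

    val five = reify { 5 }                // Expr[Int] wrapping Literal(Constant(5))
    val sum  = reify { five.splice + 4 }  // splices five's tree into the addition
    println(showRaw(sum.tree))            // Apply(Select(Literal(Constant(5)), ...), List(Literal(Constant(4))))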
diff --git a/src/reflect/scala/reflect/api/package.scala b/src/reflect/scala/reflect/api/package.scala
index d2fce7cf1d..0b2a43936e 100644
--- a/src/reflect/scala/reflect/api/package.scala
+++ b/src/reflect/scala/reflect/api/package.scala
@@ -1,12 +1,80 @@
package scala.reflect
-package object api {
+import scala.reflect.api.{Universe => ApiUniverse}
- // type and value aliases for slices of the base Universe cake that are not
- // repeated in api.Universe
- type Scopes = base.Scopes
- type BuildUtils = base.BuildUtils
- type Attachments = base.Attachments
+/**
+ * The main package of Scala's reflection library.
+ *
+ * The reflection library is structured according to the 'cake pattern'. The main layer
+ * resides in package [[scala.reflect.api]] and defines an interface to the following main types:
+ *
+ * - [[scala.reflect.api.Types#Type Types]] represent types
+ * - [[scala.reflect.api.Symbols#Symbol Symbols]] represent definitions
+ * - [[scala.reflect.api.Trees#Tree Trees]] represent abstract syntax trees
+ * - [[scala.reflect.api.Names#Name Names]] represent term and type names
+ * - [[scala.reflect.api.Annotations#Annotation Annotations]] represent annotations
+ * - [[scala.reflect.api.Positions#Position Positions]] represent source positions of tree nodes
+ * - [[scala.reflect.api.FlagSets#FlagSet FlagSet]] represents sets of flags that apply to symbols and
+ * definition trees
+ * - [[scala.reflect.api.Constants#Constant Constants]] represent compile-time constants.
+ *
+ * Each of these types is defined in its own enclosing trait, all of which are ultimately inherited by class
+ * [[scala.reflect.api.Universe Universe]]. The main universe defines a minimal interface to the above types.
+ * Universes that provide additional functionality, such as deeper introspection or runtime code generation,
+ * are defined in packages [[scala.reflect.api]] and `scala.tools.reflect`.
+ *
+ * The cake pattern employed here requires writing certain Scala idioms with more indirection than usual.
+ * What follows is a description of these indirections, which will help you navigate the Scaladocs more easily.
+ *
+ * For instance, consider the base type of all abstract syntax trees: [[scala.reflect.api.Trees#Tree]].
+ * This type is not a class but is abstract and has an upper bound of [[scala.reflect.api.Trees#TreeApi]],
+ * which is a class defining the minimal base interface for all trees.
+ *
+ * For a more interesting tree type, consider [[scala.reflect.api.Trees#If]] representing if-expressions.
+ * It is defined next to a value `If` of type [[scala.reflect.api.Trees#IfExtractor]].
+ * This value serves as the companion object defining a factory method `apply` and a corresponding `unapply`
+ * for pattern matching.
+ *
+ * {{{
+ * import scala.reflect.runtime.universe._
+ * val cond = reify{ condition }.tree // <- just some tree representing a condition
+ * val body = Literal(Constant(1))
+ * val other = Literal(Constant(2))
+ * val iftree = If(cond,body,other)
+ * }}}
+ *
+ * is equivalent to
+ *
+ * {{{
+ * import scala.reflect.runtime.universe._
+ * val iftree = reify{ if( condition ) 1 else 2 }.tree
+ * }}}
+ *
+ * and can be pattern matched as
+ *
+ * {{{
+ * iftree match { case If(cond,body,other) => ... }
+ * }}}
+ *
+ * Moreover, there is an implicit value [[scala.reflect.api.Trees#IfTag]] of type
+ * `ClassTag[If]` that is used by the Scala compiler so that we can indeed pattern match on `If`:
+ * {{{
+ * iftree match { case _:If => ... }
+ * }}}
+ * Without the given implicit value, this pattern match would raise an "unchecked" warning at compile time
+ * since `If` is an abstract type that gets erased at runtime. See [[scala.reflect.ClassTag]] for details.
+ *
+ * To summarize: each tree type `X` (and similarly for other types such as `Type` or `Symbol`) is represented
+ * by an abstract type `X`, optionally together with a class `XApi` that defines `X`'s interface.
+ * `X`'s companion object, if it exists, is represented by a value `X` that is of type `XExtractor`.
+ * Moreover, for each type `X`, there is a value `XTag` of type `ClassTag[X]` that makes it possible to pattern match on `X`.
+ */
+package object api {
- type MirrorOf[U <: base.Universe with Singleton] = base.MirrorOf[U]
-}
+ // anchors for materialization macros emitted during tag materialization in Implicits.scala
+ // implementation is hardwired into `scala.reflect.reify.Taggers`
+ // using the mechanism implemented in `scala.tools.reflect.FastTrack`
+ // todo. once we have implicit macros for tag generation, we can remove these anchors
+ private[scala] def materializeWeakTypeTag[T](u: ApiUniverse): u.WeakTypeTag[T] = ??? // macro
+ private[scala] def materializeTypeTag[T](u: ApiUniverse): u.TypeTag[T] = ??? // macro
+} \ No newline at end of file
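These anchors are not called directly; they back the implicit materialization that user code reaches through `typeTag` and `weakTypeTag`. A sketch of that user-facing side (`describe` is just an illustrative helper):

    import scala.reflect.runtime.universe._

    val tt = typeTag[List[Int]]   // the compiler materializes the tag via the anchor above
    println(tt.tpe)               // List[Int]

    // weak tags also work for abstract types, capturing only what is known statically
    def describe[T: WeakTypeTag](x: T): String = weakTypeOf[T].toString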
diff --git a/src/reflect/scala/reflect/internal/AbstractFileApi.scala b/src/reflect/scala/reflect/internal/AbstractFileApi.scala
deleted file mode 100644
index 9f37f4536f..0000000000
--- a/src/reflect/scala/reflect/internal/AbstractFileApi.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package scala.reflect
-package internal
-
-trait AbstractFileApi {
- def path: String
- def canonicalPath: String
-}
diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
index 229570dafd..3bd7f4f4fa 100644
--- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
@@ -8,9 +8,11 @@ package internal
import util._
import pickling.ByteCodecs
+import scala.annotation.tailrec
+import scala.collection.immutable.ListMap
/** AnnotationInfo and its helpers */
-trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
+trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
import definitions.{ ThrowsClass, StaticAnnotationClass, isMetaAnnotation }
// Common annotation code between Symbol and Type.
@@ -31,11 +33,27 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
case AnnotationInfo(tp, Literal(Constant(tpe: Type)) :: Nil, _) if tp.typeSymbol == ThrowsClass => tpe.typeSymbol
}
- /** Test for, get, or remove an annotation */
- def hasAnnotation(cls: Symbol) = annotations exists (_ matches cls)
- def getAnnotation(cls: Symbol) = annotations find (_ matches cls)
+ /** Tests for, gets, or removes an annotation */
+ def hasAnnotation(cls: Symbol): Boolean =
+ //OPT inlined from exists to save on #closures; was: annotations exists (_ matches cls)
+ dropOtherAnnotations(annotations, cls).nonEmpty
+
+ def getAnnotation(cls: Symbol): Option[AnnotationInfo] =
+ //OPT inlined from exists to save on #closures; was: annotations find (_ matches cls)
+ dropOtherAnnotations(annotations, cls) match {
+ case ann :: _ => Some(ann)
+ case _ => None
+ }
+
def removeAnnotation(cls: Symbol): Self = filterAnnotations(ann => !(ann matches cls))
+
final def withAnnotation(annot: AnnotationInfo): Self = withAnnotations(List(annot))
+
+ @tailrec private
+ def dropOtherAnnotations(anns: List[AnnotationInfo], cls: Symbol): List[AnnotationInfo] = anns match {
+ case ann :: rest => if (ann matches cls) anns else dropOtherAnnotations(rest, cls)
+ case Nil => Nil
+ }
}
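The rewrite above replaces `exists`/`find` with one shared tail-recursive loop; returning the suffix that starts at the first match lets both `hasAnnotation` and `getAnnotation` reuse it. Illustrative only, the same loop on a plain `List` (the `dropUntil` helper is not part of the patch):

    import scala.annotation.tailrec

    // drop elements until the predicate matches; the resulting suffix answers both
    // "is there a match?" (nonEmpty) and "what is the first match?" (headOption)
    @tailrec
    def dropUntil[A](xs: List[A])(p: A => Boolean): List[A] = xs match {
      case x :: rest => if (p(x)) xs else dropUntil(rest)(p)
      case Nil       => Nil
    }

    val hasMatch   = dropUntil(List(1, 2, 3))(_ > 1).nonEmpty     // like hasAnnotation
    val firstMatch = dropUntil(List(1, 2, 3))(_ > 1).headOption   // like getAnnotation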
/** Arguments to classfile annotations (which are written to
@@ -46,28 +64,47 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
* - or nested classfile annotations
*/
abstract class ClassfileAnnotArg extends Product
- implicit val ClassfileAnnotArgTag = ClassTag[ClassfileAnnotArg](classOf[ClassfileAnnotArg])
+ implicit val JavaArgumentTag = ClassTag[ClassfileAnnotArg](classOf[ClassfileAnnotArg])
+ case object UnmappableAnnotArg extends ClassfileAnnotArg
/** Represents a compile-time Constant (`Boolean`, `Byte`, `Short`,
* `Char`, `Int`, `Long`, `Float`, `Double`, `String`, `java.lang.Class` or
* an instance of a Java enumeration value).
*/
case class LiteralAnnotArg(const: Constant)
- extends ClassfileAnnotArg with LiteralAnnotArgApi {
+ extends ClassfileAnnotArg with LiteralArgumentApi {
+ def value = const
override def toString = const.escapedStringValue
}
- implicit val LiteralAnnotArgTag = ClassTag[LiteralAnnotArg](classOf[LiteralAnnotArg])
-
- object LiteralAnnotArg extends LiteralAnnotArgExtractor
+ object LiteralAnnotArg extends LiteralArgumentExtractor
/** Represents an array of classfile annotation arguments */
case class ArrayAnnotArg(args: Array[ClassfileAnnotArg])
- extends ClassfileAnnotArg with ArrayAnnotArgApi {
+ extends ClassfileAnnotArg with ArrayArgumentApi {
override def toString = args.mkString("[", ", ", "]")
}
- implicit val ArrayAnnotArgTag = ClassTag[ArrayAnnotArg](classOf[ArrayAnnotArg])
+ object ArrayAnnotArg extends ArrayArgumentExtractor
- object ArrayAnnotArg extends ArrayAnnotArgExtractor
+ /** Represents a nested classfile annotation */
+ case class NestedAnnotArg(annInfo: AnnotationInfo)
+ extends ClassfileAnnotArg with NestedArgumentApi {
+ // The nested annotation should not have any Scala annotation arguments
+ assert(annInfo.args.isEmpty, annInfo.args)
+ def annotation = annInfo
+ override def toString = annInfo.toString
+ }
+ object NestedAnnotArg extends NestedArgumentExtractor
+
+ type JavaArgument = ClassfileAnnotArg
+ type LiteralArgument = LiteralAnnotArg
+ val LiteralArgument = LiteralAnnotArg
+ implicit val LiteralArgumentTag = ClassTag[LiteralAnnotArg](classOf[LiteralAnnotArg])
+ type ArrayArgument = ArrayAnnotArg
+ val ArrayArgument = ArrayAnnotArg
+ implicit val ArrayArgumentTag = ClassTag[ArrayAnnotArg](classOf[ArrayAnnotArg])
+ type NestedArgument = NestedAnnotArg
+ val NestedArgument = NestedAnnotArg
+ implicit val NestedArgumentTag = ClassTag[NestedAnnotArg](classOf[NestedAnnotArg])
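Under the api-facing aliases introduced here, classfile annotation arguments can be built and matched directly; a hedged sketch against the runtime universe (the literal 42 is made up for illustration):

    import scala.reflect.runtime.universe._

    // a literal classfile-annotation argument wrapping the constant 42
    val arg: JavaArgument = LiteralArgument(Constant(42))
    val LiteralArgument(Constant(value)) = arg   // value == 42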
/** A specific annotation argument that encodes an array of bytes as an
* array of `Long`. The type of the argument declared in the annotation
@@ -104,20 +141,9 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
}
src
}
-
- }
-
- /** Represents a nested classfile annotation */
- case class NestedAnnotArg(annInfo: AnnotationInfo) extends ClassfileAnnotArg with NestedAnnotArgApi {
- // The nested annotation should not have any Scala annotation arguments
- assert(annInfo.args.isEmpty, annInfo.args)
- override def toString = annInfo.toString
}
- implicit val NestedAnnotArgTag = ClassTag[NestedAnnotArg](classOf[NestedAnnotArg])
-
- object NestedAnnotArg extends NestedAnnotArgExtractor
- object AnnotationInfo extends AnnotationInfoExtractor {
+ object AnnotationInfo {
def marker(atp: Type): AnnotationInfo =
apply(atp, Nil, Nil)
@@ -148,11 +174,14 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
this
}
- override def toString = (
- atp +
- (if (!args.isEmpty) args.mkString("(", ", ", ")") else "") +
- (if (!assocs.isEmpty) (assocs map { case (x, y) => x+" = "+y } mkString ("(", ", ", ")")) else "")
- )
+ override def toString = completeAnnotationToString(this)
+ }
+
+ private[scala] def completeAnnotationToString(annInfo: AnnotationInfo) = {
+ import annInfo._
+ val s_args = if (!args.isEmpty) args.mkString("(", ", ", ")") else ""
+ val s_assocs = if (!assocs.isEmpty) (assocs map { case (x, y) => x+" = "+y } mkString ("(", ", ", ")")) else ""
+ s"${atp}${s_args}${s_assocs}"
}
/** Symbol annotations parsed in `Namer` (typeCompleter of
@@ -190,11 +219,15 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
*
* `assocs` stores arguments to classfile annotations as name-value pairs.
*/
- sealed abstract class AnnotationInfo extends AnnotationInfoApi {
+ abstract class AnnotationInfo extends AnnotationApi {
def atp: Type
def args: List[Tree]
def assocs: List[(Name, ClassfileAnnotArg)]
+ def tpe = atp
+ def scalaArgs = args
+ def javaArgs = ListMap(assocs: _*)
+
// necessary for reification, see Reifiers.scala for more info
def original: Tree
def setOriginal(t: Tree): this.type
@@ -282,7 +315,14 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
}
}
- implicit val AnnotationInfoTag = ClassTag[AnnotationInfo](classOf[AnnotationInfo])
+ type Annotation = AnnotationInfo
+ object Annotation extends AnnotationExtractor {
+ def apply(tpe: Type, scalaArgs: List[Tree], javaArgs: ListMap[Name, ClassfileAnnotArg]): Annotation =
+ AnnotationInfo(tpe, scalaArgs, javaArgs.toList)
+ def unapply(annotation: Annotation): Option[(Type, List[Tree], ListMap[Name, ClassfileAnnotArg])] =
+ Some((annotation.tpe, annotation.scalaArgs, annotation.javaArgs))
+ }
+ implicit val AnnotationTag = ClassTag[AnnotationInfo](classOf[AnnotationInfo])
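With the `Annotation` extractor in place, annotations can be assembled and destructured at the api level; a minimal sketch (the `@deprecated` arguments are made up for illustration):

    import scala.reflect.runtime.universe._
    import scala.collection.immutable.ListMap

    // build `@deprecated("use newBox", "2.10.0")` as reflection data, then take it apart
    val ann = Annotation(
      typeOf[deprecated],
      List(Literal(Constant("use newBox")), Literal(Constant("2.10.0"))),
      ListMap.empty[Name, JavaArgument])
    val Annotation(tpe, scalaArgs, javaArgs) = ann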
object UnmappableAnnotation extends CompleteAnnotationInfo(NoType, Nil, Nil)
}
diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
index fbee906b7b..539984c67f 100644
--- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
@@ -7,7 +7,7 @@ package internal
// todo implement in terms of BitSet
import scala.collection.{ mutable, immutable }
-import math.max
+import scala.math.max
import util.Statistics
/** A base type sequence (BaseTypeSeq) is an ordered sequence spanning all the base types
@@ -39,8 +39,8 @@ trait BaseTypeSeqs {
*/
class BaseTypeSeq protected[BaseTypeSeqs] (private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) {
self =>
- Statistics.incCounter(baseTypeSeqCount)
- Statistics.incCounter(baseTypeSeqLenTotal, elems.length)
+ if (Statistics.canEnable) Statistics.incCounter(baseTypeSeqCount)
+ if (Statistics.canEnable) Statistics.incCounter(baseTypeSeqLenTotal, elems.length)
/** The number of types in the sequence */
def length: Int = elems.length
@@ -99,7 +99,7 @@ trait BaseTypeSeqs {
def copy(head: Type, offset: Int): BaseTypeSeq = {
val arr = new Array[Type](elems.length + offset)
- compat.Platform.arraycopy(elems, 0, arr, offset, elems.length)
+ scala.compat.Platform.arraycopy(elems, 0, arr, offset, elems.length)
arr(0) = head
newBaseTypeSeq(parents, arr)
}
diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala
index 74b9442076..9f41f0336e 100644
--- a/src/reflect/scala/reflect/internal/BuildUtils.scala
+++ b/src/reflect/scala/reflect/internal/BuildUtils.scala
@@ -3,9 +3,9 @@ package internal
import Flags._
-trait BuildUtils extends base.BuildUtils { self: SymbolTable =>
+trait BuildUtils { self: SymbolTable =>
- class BuildImpl extends BuildBase {
+ class BuildImpl extends BuildApi {
def selectType(owner: Symbol, name: String): TypeSymbol =
select(owner, newTypeName(name)).asType
@@ -30,14 +30,11 @@ trait BuildUtils extends base.BuildUtils { self: SymbolTable =>
else MissingRequirementError.notFound("overloaded method %s #%d in %s".format(name, index, owner.fullName))
}
- def newFreeTerm(name: String, info: Type, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
- newFreeTermSymbol(newTermName(name), info, value, flags, origin)
+ def newFreeTerm(name: String, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
+ newFreeTermSymbol(newTermName(name), value, flags, origin)
- def newFreeType(name: String, info: Type, value: => Any, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
- newFreeTypeSymbol(newTypeName(name), info, value, (if (flags == 0L) PARAM else flags) | DEFERRED, origin)
-
- def newFreeExistential(name: String, info: Type, value: => Any, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
- newFreeTypeSymbol(newTypeName(name), info, value, (if (flags == 0L) EXISTENTIAL else flags) | DEFERRED, origin)
+ def newFreeType(name: String, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
+ newFreeTypeSymbol(newTypeName(name), flags, origin)
def newNestedSymbol(owner: Symbol, name: Name, pos: Position, flags: Long, isClass: Boolean): Symbol =
owner.newNestedSymbol(name, pos, flags, isClass)
@@ -67,5 +64,5 @@ trait BuildUtils extends base.BuildUtils { self: SymbolTable =>
def setSymbol[T <: Tree](tree: T, sym: Symbol): T = { tree.setSymbol(sym); tree }
}
- val build: BuildBase = new BuildImpl
+ val build: BuildApi = new BuildImpl
}
diff --git a/src/reflect/scala/reflect/internal/Chars.scala b/src/reflect/scala/reflect/internal/Chars.scala
index 6ece733b06..b1ae105e56 100644
--- a/src/reflect/scala/reflect/internal/Chars.scala
+++ b/src/reflect/scala/reflect/internal/Chars.scala
@@ -5,9 +5,9 @@
package scala.reflect
package internal
-import annotation.{ tailrec, switch }
+import scala.annotation.{ tailrec, switch }
import java.lang.{ Character => JCharacter }
-import language.postfixOps
+import scala.language.postfixOps
/** Contains constants and classifier methods for characters */
trait Chars {
@@ -51,7 +51,7 @@ trait Chars {
}
/** Is character a line break? */
- @inline def isLineBreakChar(c: Char) = (c: @switch) match {
+ def isLineBreakChar(c: Char) = (c: @switch) match {
case LF|FF|CR|SU => true
case _ => false
}
diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
index 124f1f881d..62ed130232 100644
--- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala
+++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
@@ -6,7 +6,7 @@
package scala.reflect
package internal
-import annotation.switch
+import scala.annotation.switch
object ClassfileConstants {
diff --git a/src/reflect/scala/reflect/internal/Constants.scala b/src/reflect/scala/reflect/internal/Constants.scala
index e5a543da46..4e232e486b 100644
--- a/src/reflect/scala/reflect/internal/Constants.scala
+++ b/src/reflect/scala/reflect/internal/Constants.scala
@@ -7,7 +7,7 @@ package scala.reflect
package internal
import java.lang.Integer.toOctalString
-import annotation.switch
+import scala.annotation.switch
trait Constants extends api.Constants {
self: SymbolTable =>
@@ -31,6 +31,9 @@ trait Constants extends api.Constants {
final val EnumTag = 13
case class Constant(value: Any) extends ConstantApi {
+ import java.lang.Double.doubleToRawLongBits
+ import java.lang.Float.floatToRawIntBits
+
val tag: Int = value match {
case null => NullTag
case x: Unit => UnitTag
@@ -70,21 +73,16 @@ trait Constants extends api.Constants {
case DoubleTag => DoubleClass.tpe
case StringTag => StringClass.tpe
case NullTag => NullClass.tpe
- case ClazzTag => ClassType(value.asInstanceOf[Type])
- case EnumTag =>
- // given (in java): "class A { enum E { VAL1 } }"
- // - symbolValue: the symbol of the actual enumeration value (VAL1)
- // - .owner: the ModuleClasSymbol of the enumeration (object E)
- // - .linkedClassOfClass: the ClassSymbol of the enumeration (class E)
- symbolValue.owner.linkedClassOfClass.tpe
+ case ClazzTag => ClassType(typeValue)
+ case EnumTag => EnumType(symbolValue)
}
/** We need the equals method to take account of tags as well as values.
*/
+ // !!! In what circumstance could `equalHashValue == that.equalHashValue && tag != that.tag` be true?
override def equals(other: Any): Boolean = other match {
case that: Constant =>
- this.tag == that.tag &&
- (this.value == that.value || this.isNaN && that.isNaN)
+ this.tag == that.tag && equalHashValue == that.equalHashValue
case _ => false
}
@@ -236,7 +234,30 @@ trait Constants extends api.Constants {
def typeValue: Type = value.asInstanceOf[Type]
def symbolValue: Symbol = value.asInstanceOf[Symbol]
- override def hashCode: Int = value.## * 41 + 17
+ /**
+ * Consider two `NaN`s to be identical, despite non-equality.
+ * Consider -0d to be distinct from 0d, despite equality.
+ *
+ * We use the raw versions (i.e. `floatToRawIntBits` rather than `floatToIntBits`)
+ * to avoid treating different encodings of `NaN` as the same constant.
+ * You probably can't express different `NaN` varieties as compile time
+ * constants in regular Scala code, but it is conceivable that you could
+ * conjure them with a macro.
+ */
+ private def equalHashValue: Any = value match {
+ case f: Float => floatToRawIntBits(f)
+ case d: Double => doubleToRawLongBits(d)
+ case v => v
+ }
+
+ override def hashCode: Int = {
+ import scala.util.hashing.MurmurHash3._
+ val seed = 17
+ var h = seed
+ h = mix(h, tag.##) // include tag in the hash, otherwise 0, 0d, 0L, 0f collide.
+ h = mix(h, equalHashValue.##)
+ finalizeHash(h, length = 2)
+ }
}
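The two ingredients of the new hash, raw bit patterns for floating-point values and a per-type tag mixed in via MurmurHash3, can be checked in isolation. An illustrative sketch (`taggedHash` and the tag values 6 and 7 are made up for illustration):

    import java.lang.Float.floatToRawIntBits
    import scala.util.hashing.MurmurHash3._

    // NaN != NaN, but its raw bit pattern is stable, so bits give a usable identity
    val nan = Float.NaN
    assert(nan != nan && floatToRawIntBits(nan) == floatToRawIntBits(nan))

    // 0, 0L, 0f and 0d all have value.## == 0; mixing in a per-type tag keeps them apart
    def taggedHash(tag: Int, equalHashValue: Any): Int =
      finalizeHash(mix(mix(17, tag.##), equalHashValue.##), length = 2)
    assert(taggedHash(6, 0) != taggedHash(7, 0L))   // distinct tags break the collision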
object Constant extends ConstantExtractor
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index c6815d10c3..6cdca3d7f8 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -6,11 +6,11 @@
package scala.reflect
package internal
-import annotation.{ switch, meta }
+import scala.annotation.{ switch, meta }
import scala.collection.{ mutable, immutable }
import Flags._
import PartialFunction._
-import scala.reflect.base.{Universe => BaseUniverse}
+import scala.reflect.api.{Universe => ApiUniverse}
trait Definitions extends api.StandardDefinitions {
self: SymbolTable =>
@@ -19,23 +19,6 @@ trait Definitions extends api.StandardDefinitions {
object definitions extends DefinitionsClass
- // [Eugene] find a way to make these non-lazy
- lazy val ByteTpe = definitions.ByteClass.toTypeConstructor
- lazy val ShortTpe = definitions.ShortClass.toTypeConstructor
- lazy val CharTpe = definitions.CharClass.toTypeConstructor
- lazy val IntTpe = definitions.IntClass.toTypeConstructor
- lazy val LongTpe = definitions.LongClass.toTypeConstructor
- lazy val FloatTpe = definitions.FloatClass.toTypeConstructor
- lazy val DoubleTpe = definitions.DoubleClass.toTypeConstructor
- lazy val BooleanTpe = definitions.BooleanClass.toTypeConstructor
- lazy val UnitTpe = definitions.UnitClass.toTypeConstructor
- lazy val AnyTpe = definitions.AnyClass.toTypeConstructor
- lazy val ObjectTpe = definitions.ObjectClass.toTypeConstructor
- lazy val AnyValTpe = definitions.AnyValClass.toTypeConstructor
- lazy val AnyRefTpe = definitions.AnyRefClass.toTypeConstructor
- lazy val NothingTpe = definitions.NothingClass.toTypeConstructor
- lazy val NullTpe = definitions.NullClass.toTypeConstructor
-
/** Since both the value parameter types and the result type may
* require access to the type parameter symbols, we model polymorphic
* creation as a function from those symbols to (formal types, result type).
@@ -143,10 +126,19 @@ trait Definitions extends api.StandardDefinitions {
lazy val Boolean_or = getMemberMethod(BooleanClass, nme.ZOR)
lazy val Boolean_not = getMemberMethod(BooleanClass, nme.UNARY_!)
- lazy val ScalaNumericValueClasses = ScalaValueClasses filterNot Set[Symbol](UnitClass, BooleanClass)
+ lazy val UnitTpe = UnitClass.toTypeConstructor
+ lazy val ByteTpe = ByteClass.toTypeConstructor
+ lazy val ShortTpe = ShortClass.toTypeConstructor
+ lazy val CharTpe = CharClass.toTypeConstructor
+ lazy val IntTpe = IntClass.toTypeConstructor
+ lazy val LongTpe = LongClass.toTypeConstructor
+ lazy val FloatTpe = FloatClass.toTypeConstructor
+ lazy val DoubleTpe = DoubleClass.toTypeConstructor
+ lazy val BooleanTpe = BooleanClass.toTypeConstructor
- def ScalaValueClassesNoUnit = ScalaValueClasses filterNot (_ eq UnitClass)
- def ScalaValueClasses: List[ClassSymbol] = List(
+ lazy val ScalaNumericValueClasses = ScalaValueClasses filterNot Set[Symbol](UnitClass, BooleanClass)
+ lazy val ScalaValueClassesNoUnit = ScalaValueClasses filterNot (_ eq UnitClass)
+ lazy val ScalaValueClasses: List[ClassSymbol] = List(
UnitClass,
BooleanClass,
ByteClass,
@@ -226,6 +218,32 @@ trait Definitions extends api.StandardDefinitions {
case _ => null
}
+ /** Fully initialize the symbol, type, or scope.
+ */
+ def fullyInitializeSymbol(sym: Symbol): Symbol = {
+ sym.initialize
+ fullyInitializeType(sym.info)
+ fullyInitializeType(sym.tpe)
+ sym
+ }
+ def fullyInitializeType(tp: Type): Type = {
+ tp.typeParams foreach fullyInitializeSymbol
+ tp.paramss.flatten foreach fullyInitializeSymbol
+ tp
+ }
+ def fullyInitializeScope(scope: Scope): Scope = {
+ scope.sorted foreach fullyInitializeSymbol
+ scope
+ }
+ /** Is this type equivalent to Any, AnyVal, or AnyRef? */
+ def isTrivialTopType(tp: Type) = (
+ tp =:= AnyClass.tpe
+ || tp =:= AnyValClass.tpe
+ || tp =:= AnyRefClass.tpe
+ )
+ /** Does this type have a parent which is none of Any, AnyVal, or AnyRef? */
+ def hasNonTrivialParent(tp: Type) = tp.parents exists (t => !isTrivialTopType(t))
+
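The same test, phrased against the public runtime universe for illustration (`isTrivialTop` is a made-up name mirroring the internal helper):

    import scala.reflect.runtime.universe._

    def isTrivialTop(tp: Type): Boolean =
      tp =:= typeOf[Any] || tp =:= typeOf[AnyVal] || tp =:= typeOf[AnyRef]

    println(isTrivialTop(typeOf[AnyRef]))   // true
    println(isTrivialTop(typeOf[String]))   // false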
private def fixupAsAnyTrait(tpe: Type): Type = tpe match {
case ClassInfoType(parents, decls, clazz) =>
if (parents.head.typeSymbol == AnyClass) tpe
@@ -242,6 +260,9 @@ trait Definitions extends api.StandardDefinitions {
lazy val AnyClass = enterNewClass(ScalaPackageClass, tpnme.Any, Nil, ABSTRACT)
lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectClass.tpe)
lazy val ObjectClass = getRequiredClass(sn.Object.toString)
+ lazy val AnyTpe = definitions.AnyClass.toTypeConstructor
+ lazy val AnyRefTpe = definitions.AnyRefClass.toTypeConstructor
+ lazy val ObjectTpe = definitions.ObjectClass.toTypeConstructor
// Note: this is not the type alias AnyRef, it's a companion-like
// object used by the @specialize annotation.
@@ -255,6 +276,8 @@ trait Definitions extends api.StandardDefinitions {
anyval.info.decls enter av_constr
anyval
}).asInstanceOf[ClassSymbol]
+ lazy val AnyValTpe = definitions.AnyValClass.toTypeConstructor
+ def AnyVal_getClass = getMemberMethod(AnyValClass, nme.getClass_)
// bottom types
lazy val RuntimeNothingClass = getClassByName(fulltpnme.RuntimeNothing)
@@ -276,6 +299,8 @@ trait Definitions extends api.StandardDefinitions {
|| (that ne NothingClass) && (that isSubClass ObjectClass)
)
}
+ lazy val NothingTpe = definitions.NothingClass.toTypeConstructor
+ lazy val NullTpe = definitions.NullClass.toTypeConstructor
// exceptions and other throwables
lazy val ClassCastExceptionClass = requiredClass[ClassCastException]
@@ -302,7 +327,7 @@ trait Definitions extends api.StandardDefinitions {
def Sys_error = getMemberMethod(SysPackage, nme.error)
// Modules whose members are in the default namespace
- // [Eugene++] ScalaPackage and JavaLangPackage are never ever shared between mirrors
+ // SI-5941: ScalaPackage and JavaLangPackage are never ever shared between mirrors
// as a result, `Int` becomes `scala.Int` and `String` becomes `java.lang.String`
// I could just change `isOmittablePrefix`, but there's more to it, so I'm leaving this as a todo for now
lazy val UnqualifiedModules = List(PredefModule, ScalaPackage, JavaLangPackage)
@@ -338,7 +363,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val SymbolModule = requiredModule[scala.Symbol.type]
lazy val Symbol_apply = getMemberMethod(SymbolModule, nme.apply)
- def SeqFactory = getMember(ScalaRunTimeModule, nme.Seq) // [Eugene++] obsolete?
def arrayApplyMethod = getMemberMethod(ScalaRunTimeModule, nme.array_apply)
def arrayUpdateMethod = getMemberMethod(ScalaRunTimeModule, nme.array_update)
def arrayLengthMethod = getMemberMethod(ScalaRunTimeModule, nme.array_length)
@@ -383,9 +407,10 @@ trait Definitions extends api.StandardDefinitions {
def isScalaRepeatedParamType(tp: Type) = tp.typeSymbol == RepeatedParamClass
def isJavaRepeatedParamType(tp: Type) = tp.typeSymbol == JavaRepeatedParamClass
def isRepeatedParamType(tp: Type) = isScalaRepeatedParamType(tp) || isJavaRepeatedParamType(tp)
+ def isRepeated(param: Symbol) = isRepeatedParamType(param.tpe)
def isCastSymbol(sym: Symbol) = sym == Any_asInstanceOf || sym == Object_asInstanceOf
- def isJavaVarArgsMethod(m: Symbol) = m.isMethod && isJavaVarArgs(m.info.params)
+ def isJavaVarArgsMethod(m: Symbol) = m.isMethod && isJavaVarArgs(m.info.params)
def isJavaVarArgs(params: Seq[Symbol]) = params.nonEmpty && isJavaRepeatedParamType(params.last.tpe)
def isScalaVarArgs(params: Seq[Symbol]) = params.nonEmpty && isScalaRepeatedParamType(params.last.tpe)
def isVarArgsList(params: Seq[Symbol]) = params.nonEmpty && isRepeatedParamType(params.last.tpe)
@@ -459,51 +484,50 @@ trait Definitions extends api.StandardDefinitions {
// scala.reflect
lazy val ReflectPackage = requiredModule[scala.reflect.`package`.type]
- def ReflectBasis = getMemberValue(ReflectPackage, nme.basis)
+ lazy val ReflectApiPackage = getPackageObjectIfDefined("scala.reflect.api") // defined in scala-reflect.jar, so we need to be careful
lazy val ReflectRuntimePackage = getPackageObjectIfDefined("scala.reflect.runtime") // defined in scala-reflect.jar, so we need to be careful
def ReflectRuntimeUniverse = if (ReflectRuntimePackage != NoSymbol) getMemberValue(ReflectRuntimePackage, nme.universe) else NoSymbol
def ReflectRuntimeCurrentMirror = if (ReflectRuntimePackage != NoSymbol) getMemberMethod(ReflectRuntimePackage, nme.currentMirror) else NoSymbol
- lazy val PartialManifestClass = getMemberType(ReflectPackage, tpnme.ClassManifest)
+ lazy val PartialManifestClass = getTypeMember(ReflectPackage, tpnme.ClassManifest)
lazy val PartialManifestModule = requiredModule[scala.reflect.ClassManifestFactory.type]
lazy val FullManifestClass = requiredClass[scala.reflect.Manifest[_]]
lazy val FullManifestModule = requiredModule[scala.reflect.ManifestFactory.type]
lazy val OptManifestClass = requiredClass[scala.reflect.OptManifest[_]]
lazy val NoManifest = requiredModule[scala.reflect.NoManifest.type]
- lazy val ExprsClass = requiredClass[scala.reflect.base.Exprs]
- lazy val ExprClass = getMemberClass(ExprsClass, tpnme.Expr)
- def ExprSplice = getMemberMethod(ExprClass, nme.splice)
- def ExprValue = getMemberMethod(ExprClass, nme.value)
- lazy val ExprModule = getMemberModule(ExprsClass, nme.Expr)
-
- lazy val ClassTagModule = requiredModule[scala.reflect.ClassTag[_]]
- lazy val ClassTagClass = requiredClass[scala.reflect.ClassTag[_]]
- lazy val TypeTagsClass = requiredClass[scala.reflect.base.TypeTags]
- lazy val AbsTypeTagClass = getMemberClass(TypeTagsClass, tpnme.AbsTypeTag)
- lazy val AbsTypeTagModule = getMemberModule(TypeTagsClass, nme.AbsTypeTag)
- lazy val TypeTagClass = getMemberClass(TypeTagsClass, tpnme.TypeTag)
- lazy val TypeTagModule = getMemberModule(TypeTagsClass, nme.TypeTag)
-
- lazy val BaseUniverseClass = requiredClass[scala.reflect.base.Universe]
- def BaseUniverseReify = getMemberMethod(BaseUniverseClass, nme.reify)
+ lazy val ExprsClass = getClassIfDefined("scala.reflect.api.Exprs") // defined in scala-reflect.jar, so we need to be careful
+ lazy val ExprClass = if (ExprsClass != NoSymbol) getMemberClass(ExprsClass, tpnme.Expr) else NoSymbol
+ def ExprSplice = if (ExprsClass != NoSymbol) getMemberMethod(ExprClass, nme.splice) else NoSymbol
+ def ExprValue = if (ExprsClass != NoSymbol) getMemberMethod(ExprClass, nme.value) else NoSymbol
+ lazy val ExprModule = if (ExprsClass != NoSymbol) getMemberModule(ExprsClass, nme.Expr) else NoSymbol
+
+ lazy val ClassTagModule = requiredModule[scala.reflect.ClassTag[_]]
+ lazy val ClassTagClass = requiredClass[scala.reflect.ClassTag[_]]
+ lazy val TypeTagsClass = getClassIfDefined("scala.reflect.api.TypeTags") // defined in scala-reflect.jar, so we need to be careful
+ lazy val WeakTypeTagClass = if (TypeTagsClass != NoSymbol) getMemberClass(TypeTagsClass, tpnme.WeakTypeTag) else NoSymbol
+ lazy val WeakTypeTagModule = if (TypeTagsClass != NoSymbol) getMemberModule(TypeTagsClass, nme.WeakTypeTag) else NoSymbol
+ lazy val TypeTagClass = if (TypeTagsClass != NoSymbol) getMemberClass(TypeTagsClass, tpnme.TypeTag) else NoSymbol
+ lazy val TypeTagModule = if (TypeTagsClass != NoSymbol) getMemberModule(TypeTagsClass, nme.TypeTag) else NoSymbol
+ def materializeClassTag = getMemberMethod(ReflectPackage, nme.materializeClassTag)
+ def materializeWeakTypeTag = if (ReflectApiPackage != NoSymbol) getMemberMethod(ReflectApiPackage, nme.materializeWeakTypeTag) else NoSymbol
+ def materializeTypeTag = if (ReflectApiPackage != NoSymbol) getMemberMethod(ReflectApiPackage, nme.materializeTypeTag) else NoSymbol
+
+ lazy val ApiUniverseClass = getClassIfDefined("scala.reflect.api.Universe") // defined in scala-reflect.jar, so we need to be careful
+ def ApiUniverseReify = if (ApiUniverseClass != NoSymbol) getMemberMethod(ApiUniverseClass, nme.reify) else NoSymbol
lazy val JavaUniverseClass = getClassIfDefined("scala.reflect.api.JavaUniverse") // defined in scala-reflect.jar, so we need to be careful
- lazy val MirrorOfClass = requiredClass[scala.reflect.base.MirrorOf[_]]
+ lazy val MirrorClass = getClassIfDefined("scala.reflect.api.Mirror") // defined in scala-reflect.jar, so we need to be careful
- lazy val TypeCreatorClass = requiredClass[scala.reflect.base.TypeCreator]
- lazy val TreeCreatorClass = requiredClass[scala.reflect.base.TreeCreator]
+ lazy val TypeCreatorClass = getClassIfDefined("scala.reflect.api.TypeCreator") // defined in scala-reflect.jar, so we need to be careful
+ lazy val TreeCreatorClass = getClassIfDefined("scala.reflect.api.TreeCreator") // defined in scala-reflect.jar, so we need to be careful
lazy val MacroContextClass = getClassIfDefined("scala.reflect.macros.Context") // defined in scala-reflect.jar, so we need to be careful
def MacroContextPrefix = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.prefix) else NoSymbol
- def MacroContextPrefixType = if (MacroContextClass != NoSymbol) getMemberType(MacroContextClass, tpnme.PrefixType) else NoSymbol
+ def MacroContextPrefixType = if (MacroContextClass != NoSymbol) getTypeMember(MacroContextClass, tpnme.PrefixType) else NoSymbol
def MacroContextUniverse = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.universe) else NoSymbol
def MacroContextMirror = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.mirror) else NoSymbol
lazy val MacroImplAnnotation = requiredClass[scala.reflect.macros.internal.macroImpl]
- lazy val MacroInternalPackage = getPackageObject("scala.reflect.macros.internal")
- def MacroInternal_materializeClassTag = getMemberMethod(MacroInternalPackage, nme.materializeClassTag)
- def MacroInternal_materializeAbsTypeTag = getMemberMethod(MacroInternalPackage, nme.materializeAbsTypeTag)
- def MacroInternal_materializeTypeTag = getMemberMethod(MacroInternalPackage, nme.materializeTypeTag)
lazy val StringContextClass = requiredClass[scala.StringContext]
def StringContext_f = getMemberMethod(StringContextClass, nme.f)
@@ -517,8 +541,8 @@ trait Definitions extends api.StandardDefinitions {
lazy val NoneModule: ModuleSymbol = requiredModule[scala.None.type]
lazy val SomeModule: ModuleSymbol = requiredModule[scala.Some.type]
- def compilerTypeFromTag(tt: BaseUniverse # AbsTypeTag[_]): Type = tt.in(rootMirror).tpe
- def compilerSymbolFromTag(tt: BaseUniverse # AbsTypeTag[_]): Symbol = tt.in(rootMirror).tpe.typeSymbol
+ def compilerTypeFromTag(tt: ApiUniverse # WeakTypeTag[_]): Type = tt.in(rootMirror).tpe
+ def compilerSymbolFromTag(tt: ApiUniverse # WeakTypeTag[_]): Symbol = tt.in(rootMirror).tpe.typeSymbol
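`compilerTypeFromTag` relies on `WeakTypeTag#in` to re-resolve a tag's type in the compiler's `rootMirror`. From user code the same migration looks roughly like this (a sketch that reuses the runtime mirror on both sides):

    import scala.reflect.runtime.{universe => ru}
    import scala.reflect.runtime.currentMirror

    val tag      = ru.typeTag[Option[Int]]
    val migrated = tag.in(currentMirror)   // re-evaluate the tag's type in another mirror
    println(migrated.tpe)                  // Option[Int]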
// The given symbol represents either String.+ or StringAdd.+
def isStringAddition(sym: Symbol) = sym == String_+ || sym == StringAdd_+
@@ -689,6 +713,13 @@ trait Definitions extends api.StandardDefinitions {
if (phase.erasedTypes || forMSIL) ClassClass.tpe
else appliedType(ClassClass, arg)
+ def EnumType(sym: Symbol) =
+ // given (in java): "class A { enum E { VAL1 } }"
+ // - sym: the symbol of the actual enumeration value (VAL1)
+ // - .owner: the ModuleClassSymbol of the enumeration (object E)
+ // - .linkedClassOfClass: the ClassSymbol of the enumeration (class E)
+ sym.owner.linkedClassOfClass.tpe
+
def vmClassType(arg: Type): Type = ClassType(arg)
def vmSignature(sym: Symbol, info: Type): String = signature(info) // !!!
@@ -907,7 +938,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val SwitchClass = requiredClass[scala.annotation.switch]
lazy val TailrecClass = requiredClass[scala.annotation.tailrec]
lazy val VarargsClass = requiredClass[scala.annotation.varargs]
- lazy val StaticClass = requiredClass[scala.annotation.static]
lazy val uncheckedStableClass = requiredClass[scala.annotation.unchecked.uncheckedStable]
lazy val uncheckedVarianceClass = requiredClass[scala.annotation.unchecked.uncheckedVariance]
@@ -916,6 +946,8 @@ trait Definitions extends api.StandardDefinitions {
lazy val CloneableAttr = requiredClass[scala.annotation.cloneable]
lazy val DeprecatedAttr = requiredClass[scala.deprecated]
lazy val DeprecatedNameAttr = requiredClass[scala.deprecatedName]
+ lazy val DeprecatedInheritanceAttr = requiredClass[scala.deprecatedInheritance]
+ lazy val DeprecatedOverridingAttr = requiredClass[scala.deprecatedOverriding]
lazy val NativeAttr = requiredClass[scala.native]
lazy val RemoteAttr = requiredClass[scala.remote]
lazy val ScalaInlineClass = requiredClass[scala.inline]
@@ -978,12 +1010,7 @@ trait Definitions extends api.StandardDefinitions {
throw new FatalError(owner + " does not have a " + what + " " + name)
}
- def getLanguageFeature(name: String, owner: Symbol = languageFeatureModule): Symbol =
- // [Eugene++] `getMemberClass` leads to crashes in mixin:
- // "object languageFeature does not have a member class implicitConversions"
- // that's because by that time `implicitConversions` becomes a module
- // getMemberClass(owner, newTypeName(name))
- getMember(owner, newTypeName(name))
+ def getLanguageFeature(name: String, owner: Symbol = languageFeatureModule): Symbol = getMember(owner, newTypeName(name))
def termMember(owner: Symbol, name: String): Symbol = owner.info.member(newTermName(name))
def typeMember(owner: Symbol, name: String): Symbol = owner.info.member(newTypeName(name))
@@ -1008,28 +1035,24 @@ trait Definitions extends api.StandardDefinitions {
}
}
def getMemberValue(owner: Symbol, name: Name): TermSymbol = {
- // [Eugene++] should be a ClassCastException instead?
getMember(owner, name.toTermName) match {
case x: TermSymbol => x
case _ => fatalMissingSymbol(owner, name, "member value")
}
}
def getMemberModule(owner: Symbol, name: Name): ModuleSymbol = {
- // [Eugene++] should be a ClassCastException instead?
getMember(owner, name.toTermName) match {
case x: ModuleSymbol => x
case _ => fatalMissingSymbol(owner, name, "member object")
}
}
- def getMemberType(owner: Symbol, name: Name): TypeSymbol = {
- // [Eugene++] should be a ClassCastException instead?
+ def getTypeMember(owner: Symbol, name: Name): TypeSymbol = {
getMember(owner, name.toTypeName) match {
case x: TypeSymbol => x
- case _ => fatalMissingSymbol(owner, name, "member type")
+ case _ => fatalMissingSymbol(owner, name, "type member")
}
}
def getMemberClass(owner: Symbol, name: Name): ClassSymbol = {
- // [Eugene++] should be a ClassCastException instead?
val y = getMember(owner, name.toTypeName)
getMember(owner, name.toTypeName) match {
case x: ClassSymbol => x
@@ -1037,48 +1060,8 @@ trait Definitions extends api.StandardDefinitions {
}
}
def getMemberMethod(owner: Symbol, name: Name): TermSymbol = {
- // [Eugene++] is this a bug?
- //
- // System.err.println(result.getClass)
- // System.err.println(result.flags)
- // System.err.println("isMethod = " + result.isMethod)
- // System.err.println("isTerm = " + result.isTerm)
- // System.err.println("isValue = " + result.isValue)
- // result.asMethod
- //
- // prints this:
- //
- // quick.lib:
- // [javac] Compiling 1 source file to C:\Projects\KeplerUnderRefactoring\build\quick\classes\library
- // [scalacfork] Compiling 769 files to C:\Projects\KeplerUnderRefactoring\build\quick\classes\library
- // [scalacfork] class scala.reflect.internal.Symbols$TermSymbol
- // [scalacfork] 8589934592
- // [scalacfork] isMethod = false
- // [scalacfork] isTerm = true
- // [scalacfork] isValue = true
- // [scalacfork]
- // [scalacfork] while compiling: C:\Projects\KeplerUnderRefactoring\src\library\scala\LowPriorityImplicits.scala
- // [scalacfork] current phase: cleanup
- // [scalacfork] library version: version 2.10.0-20120507-185519-665d1d9127
- // [scalacfork] compiler version: version 2.10.0-20120507-185519-665d1d9127
- // [scalacfork] reconstructed args: -Xmacros -classpath C:\\Projects\\KeplerUnderRefactoring\\build\\quick\\classes\\library;C:\\Projects\\KeplerUnderRefactoring\\lib\\forkjoin.jar -d C:\\Projects\\KeplerUnderRefactoring\\build\\quick\\classes\\library -sourcepath C:\\Projects\\KeplerUnderRefactoring\\src\\library
- // [scalacfork]
- // [scalacfork] unhandled exception while transforming LowPriorityImplicits.scala
- // [scalacfork] error:
- // [scalacfork] while compiling: C:\Projects\KeplerUnderRefactoring\src\library\scala\LowPriorityImplicits.scala
- // [scalacfork] current phase: cleanup
- // [scalacfork] library version: version 2.10.0-20120507-185519-665d1d9127
- // [scalacfork] compiler version: version 2.10.0-20120507-185519-665d1d9127
- // [scalacfork] reconstructed args: -Xmacros -classpath C:\\Projects\\KeplerUnderRefactoring\\build\\quick\\classes\\library;C:\\Projects\\KeplerUnderRefactoring\\lib\\forkjoin.jar -d C:\\Projects\\KeplerUnderRefactoring\\build\\quick\\classes\\library -sourcepath C:\\Projects\\KeplerUnderRefactoring\\src\\library
- // [scalacfork]
- // [scalacfork] uncaught exception during compilation: java.lang.ClassCastException
- // [scalacfork] error: java.lang.ClassCastException: value apply
- // [scalacfork] at scala.reflect.base.Symbols$SymbolBase$class.asMethod(Symbols.scala:118)
- // [scalacfork] at scala.reflect.internal.Symbols$SymbolContextApiImpl.asMethod(Symbols.scala:63)
- // [scalacfork] at scala.reflect.internal.Definitions$DefinitionsClass.Symbol_apply(Definitions.scala:381)
-
- // [Eugene++] should be a ClassCastException instead?
getMember(owner, name.toTermName) match {
+ // todo. member symbol becomes a term symbol in cleanup. is this a bug?
// case x: MethodSymbol => x
case x: TermSymbol => x
case _ => fatalMissingSymbol(owner, name, "method")
@@ -1138,7 +1121,8 @@ trait Definitions extends api.StandardDefinitions {
/** Is symbol a phantom class for which no runtime representation exists? */
lazy val isPhantomClass = Set[Symbol](AnyClass, AnyValClass, NullClass, NothingClass)
- lazy val magicSymbols = List(
+ /** Lists core classes that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */
+ lazy val syntheticCoreClasses = List(
AnnotationDefaultAttr, // #2264
RepeatedParamClass,
JavaRepeatedParamClass,
@@ -1149,7 +1133,10 @@ trait Definitions extends api.StandardDefinitions {
NullClass,
NothingClass,
SingletonClass,
- EqualsPatternClass,
+ EqualsPatternClass
+ )
+ /** Lists core methods that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */
+ lazy val syntheticCoreMethods = List(
Any_==,
Any_!=,
Any_equals,
@@ -1167,10 +1154,19 @@ trait Definitions extends api.StandardDefinitions {
Object_synchronized,
Object_isInstanceOf,
Object_asInstanceOf,
- String_+,
+ String_+
+ )
+ /** Lists core classes that do have underlying bytecode, but are adjusted on-the-fly in every reflection universe */
+ lazy val hijackedCoreClasses = List(
ComparableClass,
JavaSerializableClass
)
+ /** Lists symbols that are synthesized or hijacked by the compiler.
+ *
+ * Such symbols either don't have any underlying bytecode at all ("synthesized")
+ * or get loaded from bytecode but have their metadata adjusted ("hijacked").
+ */
+ lazy val symbolsNotPresentInBytecode = syntheticCoreClasses ++ syntheticCoreMethods ++ hijackedCoreClasses
/** Is the symbol that of a parent which is added during parsing? */
lazy val isPossibleSyntheticParent = ProductClass.toSet[Symbol] + ProductRootClass + SerializableClass
@@ -1234,7 +1230,8 @@ trait Definitions extends api.StandardDefinitions {
def init() {
if (isInitialized) return
- val forced = magicSymbols // force initialization of every symbol that is entered as a side effect
+ // force initialization of every symbol that is synthesized or hijacked by the compiler
+ val forced = symbolsNotPresentInBytecode
isInitialized = true
} //init
diff --git a/src/reflect/scala/reflect/internal/FlagSets.scala b/src/reflect/scala/reflect/internal/FlagSets.scala
index 6e77741355..6270416d4f 100644
--- a/src/reflect/scala/reflect/internal/FlagSets.scala
+++ b/src/reflect/scala/reflect/internal/FlagSets.scala
@@ -1,7 +1,7 @@
package scala.reflect
package internal
-import language.implicitConversions
+import scala.language.implicitConversions
trait FlagSets extends api.FlagSets { self: SymbolTable =>
@@ -13,7 +13,6 @@ trait FlagSets extends api.FlagSets { self: SymbolTable =>
private class FlagOpsImpl(left: Long) extends FlagOps {
def | (right: Long): Long = left | right
- def hasFlag(right: Long): Boolean = (left & right) != 0
}
val NoFlags: FlagSet = 0L
@@ -22,10 +21,8 @@ trait FlagSets extends api.FlagSets { self: SymbolTable =>
object Flag extends FlagValues {
val TRAIT : FlagSet = Flags.TRAIT
- val MODULE : FlagSet = Flags.MODULE
+ val INTERFACE : FlagSet = Flags.INTERFACE
val MUTABLE : FlagSet = Flags.MUTABLE
- val PACKAGE : FlagSet = Flags.PACKAGE
- val METHOD : FlagSet = Flags.METHOD
val MACRO : FlagSet = Flags.MACRO
val DEFERRED : FlagSet = Flags.DEFERRED
val ABSTRACT : FlagSet = Flags.ABSTRACT
@@ -36,15 +33,15 @@ trait FlagSets extends api.FlagSets { self: SymbolTable =>
val OVERRIDE : FlagSet = Flags.OVERRIDE
val PRIVATE : FlagSet = Flags.PRIVATE
val PROTECTED : FlagSet = Flags.PROTECTED
+ val LOCAL : FlagSet = Flags.LOCAL
val CASE : FlagSet = Flags.CASE
val ABSOVERRIDE : FlagSet = Flags.ABSOVERRIDE
val BYNAMEPARAM : FlagSet = Flags.BYNAMEPARAM
val PARAM : FlagSet = Flags.PARAM
- val PARAMACCESSOR : FlagSet = Flags.PARAMACCESSOR
- val CASEACCESSOR : FlagSet = Flags.CASEACCESSOR
val COVARIANT : FlagSet = Flags.COVARIANT
val CONTRAVARIANT : FlagSet = Flags.CONTRAVARIANT
val DEFAULTPARAM : FlagSet = Flags.DEFAULTPARAM
- val INTERFACE : FlagSet = Flags.INTERFACE
+ val PRESUPER : FlagSet = Flags.PRESUPER
+ val DEFAULTINIT : FlagSet = Flags.DEFAULTINIT
}
}
diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala
index f3f3bf5ce8..bb454b1df7 100644
--- a/src/reflect/scala/reflect/internal/Flags.scala
+++ b/src/reflect/scala/reflect/internal/Flags.scala
@@ -135,7 +135,7 @@ class Flags extends ModifierFlags {
final val CAPTURED = 1 << 16 // variable is accessed from nested function. Set by LambdaLift.
final val LABEL = 1 << 17 // method symbol is a label. Set by TailCall
final val INCONSTRUCTOR = 1 << 17 // class symbol is defined in this/superclass constructor.
- final val SYNTHETIC = 1 << 21 // symbol is compiler-generated (compare with HIDDEN)
+ final val SYNTHETIC = 1 << 21 // symbol is compiler-generated (compare with ARTIFACT)
final val STABLE = 1 << 22 // functions that are assumed to be stable
// (typically, access methods for valdefs)
// or classes that do not contain abstract types.
@@ -165,7 +165,7 @@ class Flags extends ModifierFlags {
// A Java method's type is ``cooked'' by transforming raw types to existentials
final val SYNCHRONIZED = 1L << 45 // symbol is a method which should be marked ACC_SYNCHRONIZED
- final val HIDDEN = 1L << 46 // symbol should be ignored when typechecking; will be marked ACC_SYNTHETIC in bytecode
+ final val ARTIFACT = 1L << 46 // symbol should be ignored when typechecking; will be marked ACC_SYNTHETIC in bytecode
// ------- shift definitions -------------------------------------------------------
@@ -218,7 +218,7 @@ class Flags extends ModifierFlags {
/** To be a little clearer to people who aren't habitual bit twiddlers.
*/
final val AllFlags = -1L
-
+
/** These flags can be set when class or module symbol is first created.
* They are the only flags to survive a call to resetFlags().
*/
@@ -288,14 +288,20 @@ class Flags extends ModifierFlags {
/** These flags are not pickled */
final val FlagsNotPickled = IS_ERROR | OVERLOADED | LIFTED | TRANS_FLAG | LOCKED | TRIEDCOOKING
-
+
// A precaution against future additions to FlagsNotPickled turning out
// to be overloaded flags thus not-pickling more than intended.
assert((OverloadedFlagsMask & FlagsNotPickled) == 0, flagsToString(OverloadedFlagsMask & FlagsNotPickled))
-
+
/** These flags are pickled */
final val PickledFlags = InitialFlags & ~FlagsNotPickled
+ /** If we have a top-level class or module
+ * and someone asks us for a flag not in TopLevelPickledFlags,
+ * then we don't need unpickling to give a definite answer.
+ */
+ final val TopLevelPickledFlags = PickledFlags & ~(MODULE | METHOD | PACKAGE | PARAM | EXISTENTIAL)
+
def getterFlags(fieldFlags: Long): Long = ACCESSOR + (
if ((fieldFlags & MUTABLE) != 0) fieldFlags & ~MUTABLE & ~PRESUPER
else fieldFlags & ~PRESUPER | STABLE
@@ -339,13 +345,13 @@ class Flags extends ModifierFlags {
(SEALED, SEALED_PKL),
(ABSTRACT, ABSTRACT_PKL)
)
-
+
private val mappedRawFlags = rawPickledCorrespondence map (_._1)
private val mappedPickledFlags = rawPickledCorrespondence map (_._2)
-
+
private class MapFlags(from: Array[Long], to: Array[Long]) extends (Long => Long) {
val fromSet = (0L /: from) (_ | _)
-
+
def apply(flags: Long): Long = {
var result = flags & ~fromSet
var tobeMapped = flags & fromSet
@@ -360,7 +366,7 @@ class Flags extends ModifierFlags {
result
}
}
-
+
val rawToPickledFlags: Long => Long = new MapFlags(mappedRawFlags, mappedPickledFlags)
val pickledToRawFlags: Long => Long = new MapFlags(mappedPickledFlags, mappedRawFlags)
@@ -434,7 +440,7 @@ class Flags extends ModifierFlags {
case 0x8000000000000000L => "" // (1L << 63)
case _ => ""
}
-
+
private def accessString(flags: Long, privateWithin: String)= (
if (privateWithin == "") {
if ((flags & PrivateLocal) == PrivateLocal) "private[this]"
@@ -446,7 +452,7 @@ class Flags extends ModifierFlags {
else if ((flags & PROTECTED) != 0) "protected[" + privateWithin + "]"
else "private[" + privateWithin + "]"
)
-
+
@deprecated("Use flagString on the flag-carrying member", "2.10.0")
def flagsToString(flags: Long, privateWithin: String): String = {
val access = accessString(flags, privateWithin)
diff --git a/src/reflect/scala/reflect/internal/HasFlags.scala b/src/reflect/scala/reflect/internal/HasFlags.scala
index 7ead9d6a1b..4a3663b8ea 100644
--- a/src/reflect/scala/reflect/internal/HasFlags.scala
+++ b/src/reflect/scala/reflect/internal/HasFlags.scala
@@ -66,7 +66,7 @@ trait HasFlags {
*/
def flagString: String = flagString(flagMask)
def flagString(mask: Long): String = calculateFlagString(flags & mask)
-
+
/** The default mask determining which flags to display.
*/
def flagMask: Long = AllFlags
@@ -92,13 +92,14 @@ trait HasFlags {
def isCaseAccessor = hasFlag(CASEACCESSOR)
def isDeferred = hasFlag(DEFERRED)
def isFinal = hasFlag(FINAL)
- def isHidden = hasFlag(HIDDEN)
+ def isArtifact = hasFlag(ARTIFACT)
def isImplicit = hasFlag(IMPLICIT)
def isInterface = hasFlag(INTERFACE)
def isJavaDefined = hasFlag(JAVA)
def isLabel = hasAllFlags(LABEL | METHOD) && !hasAccessorFlag
def isLazy = hasFlag(LAZY)
def isLifted = hasFlag(LIFTED)
+ def isMacro = hasFlag(MACRO)
def isMutable = hasFlag(MUTABLE)
def isOverride = hasFlag(OVERRIDE)
def isParamAccessor = hasFlag(PARAMACCESSOR)
@@ -109,6 +110,7 @@ trait HasFlags {
def isProtectedLocal = hasAllFlags(ProtectedLocal)
def isPublic = hasNoFlags(PRIVATE | PROTECTED) && !hasAccessBoundary
def isSealed = hasFlag(SEALED)
+ def isSpecialized = hasFlag(SPECIALIZED)
def isSuperAccessor = hasFlag(SUPERACCESSOR)
def isSynthetic = hasFlag(SYNTHETIC)
def isTrait = hasFlag(TRAIT) && !hasFlag(PARAM)
@@ -136,7 +138,7 @@ trait HasFlags {
def accessString: String = {
val pw = if (hasAccessBoundary) privateWithin.toString else ""
-
+
if (pw == "") {
if (hasAllFlags(PrivateLocal)) "private[this]"
else if (hasAllFlags(ProtectedLocal)) "protected[this]"
@@ -150,7 +152,7 @@ trait HasFlags {
protected def calculateFlagString(basis: Long): String = {
val access = accessString
val nonAccess = flagBitsToString(basis & ~AccessFlags)
-
+
if (access == "") nonAccess
else if (nonAccess == "") access
else nonAccess + " " + access
diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala
index 00017e087a..43902c1930 100644
--- a/src/reflect/scala/reflect/internal/Importers.scala
+++ b/src/reflect/scala/reflect/internal/Importers.scala
@@ -1,11 +1,12 @@
package scala.reflect
package internal
+
import scala.collection.mutable.WeakHashMap
+import scala.ref.WeakReference
-// todo: move importers to a mirror
-trait Importers { self: SymbolTable =>
+// SI-6241: move importers to a mirror
+trait Importers extends api.Importers { self: SymbolTable =>
- // [Eugene] possible to make this less cast-heavy?
def mkImporter(from0: api.Universe): Importer { val from: from0.type } = (
if (self eq from0) {
new Importer {
@@ -27,13 +28,17 @@ trait Importers { self: SymbolTable =>
val from: SymbolTable
- lazy val symMap: WeakHashMap[from.Symbol, Symbol] = new WeakHashMap
- lazy val tpeMap: WeakHashMap[from.Type, Type] = new WeakHashMap
+ protected lazy val symMap = new Cache[from.Symbol, Symbol]()
+ protected lazy val tpeMap = new Cache[from.Type, Type]()
+ protected class Cache[K <: AnyRef, V <: AnyRef] extends WeakHashMap[K, WeakReference[V]] {
+ def weakGet(key: K): Option[V] = this get key flatMap WeakReference.unapply
+ def weakUpdate(key: K, value: V) = this.update(key, WeakReference(value))
+ }
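The cache now holds both keys and values weakly, so imported symbols and types can be garbage-collected once nothing else references them. A stand-alone sketch of the same wrapper (`WeakCache` is an illustrative name):

    import scala.collection.mutable.WeakHashMap
    import scala.ref.WeakReference

    // keys are weakly held by WeakHashMap, values are weakly held via WeakReference
    class WeakCache[K <: AnyRef, V <: AnyRef] extends WeakHashMap[K, WeakReference[V]] {
      def weakGet(key: K): Option[V]         = this get key flatMap WeakReference.unapply
      def weakUpdate(key: K, value: V): Unit = this.update(key, WeakReference(value))
    }

    val cache = new WeakCache[String, String]
    cache.weakUpdate("key", "value")
    println(cache.weakGet("key"))   // Some(value), while "value" stays strongly reachable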
// fixups and maps prevent stackoverflows in importer
var pendingSyms = 0
var pendingTpes = 0
- lazy val fixups = collection.mutable.MutableList[Function0[Unit]]()
+ lazy val fixups = scala.collection.mutable.MutableList[Function0[Unit]]()
def addFixup(fixup: => Unit): Unit = fixups += (() => fixup)
def tryFixup(): Unit = {
if (pendingSyms == 0 && pendingTpes == 0) {
@@ -45,8 +50,10 @@ trait Importers { self: SymbolTable =>
object reverse extends from.StandardImporter {
val from: self.type = self
- for ((fromsym, mysym) <- StandardImporter.this.symMap) symMap += ((mysym, fromsym))
- for ((fromtpe, mytpe) <- StandardImporter.this.tpeMap) tpeMap += ((mytpe, fromtpe))
+ // FIXME this and reverse should be constantly kept in sync
+ // not just synced once upon the first usage of reverse
+ for ((fromsym, WeakReference(mysym)) <- StandardImporter.this.symMap) symMap += ((mysym, WeakReference(fromsym)))
+ for ((fromtpe, WeakReference(mytpe)) <- StandardImporter.this.tpeMap) tpeMap += ((mytpe, WeakReference(fromtpe)))
}
// todo. careful import of positions
@@ -54,70 +61,70 @@ trait Importers { self: SymbolTable =>
pos.asInstanceOf[Position]
def importSymbol(sym0: from.Symbol): Symbol = {
- def doImport(sym: from.Symbol): Symbol = {
- if (symMap.contains(sym))
- return symMap(sym)
-
- val myowner = importSymbol(sym.owner)
- val mypos = importPosition(sym.pos)
- val myname = importName(sym.name).toTermName
- val myflags = sym.flags
- def linkReferenced(mysym: TermSymbol, x: from.TermSymbol, op: from.Symbol => Symbol): Symbol = {
- symMap(x) = mysym
- mysym.referenced = op(x.referenced)
- mysym
- }
- val mysym = sym match {
- case x: from.MethodSymbol =>
- linkReferenced(myowner.newMethod(myname, mypos, myflags), x, importSymbol)
- case x: from.ModuleSymbol =>
- linkReferenced(myowner.newModuleSymbol(myname, mypos, myflags), x, importSymbol)
- case x: from.FreeTermSymbol =>
- newFreeTermSymbol(importName(x.name).toTermName, importType(x.info), x.value, x.flags, x.origin)
- case x: from.FreeTypeSymbol =>
- newFreeTypeSymbol(importName(x.name).toTypeName, importType(x.info), x.value, x.flags, x.origin)
- case x: from.TermSymbol =>
- linkReferenced(myowner.newValue(myname, mypos, myflags), x, importSymbol)
- case x: from.TypeSkolem =>
- val origin = x.unpackLocation match {
- case null => null
- case y: from.Tree => importTree(y)
- case y: from.Symbol => importSymbol(y)
+ def doImport(sym: from.Symbol): Symbol =
+ symMap weakGet sym match {
+ case Some(result) => result
+ case _ =>
+ val myowner = importSymbol(sym.owner)
+ val mypos = importPosition(sym.pos)
+ val myname = importName(sym.name).toTermName
+ val myflags = sym.flags
+ def linkReferenced(mysym: TermSymbol, x: from.TermSymbol, op: from.Symbol => Symbol): Symbol = {
+ symMap.weakUpdate(x, mysym)
+ mysym.referenced = op(x.referenced)
+ mysym
}
- myowner.newTypeSkolemSymbol(myname.toTypeName, origin, mypos, myflags)
- case x: from.ModuleClassSymbol =>
- val mysym = myowner.newModuleClass(myname.toTypeName, mypos, myflags)
- symMap(x) = mysym
- mysym.sourceModule = importSymbol(x.sourceModule)
- mysym
- case x: from.ClassSymbol =>
- val mysym = myowner.newClassSymbol(myname.toTypeName, mypos, myflags)
- symMap(x) = mysym
- if (sym.thisSym != sym) {
- mysym.typeOfThis = importType(sym.typeOfThis)
- mysym.thisSym setName importName(sym.thisSym.name)
+ val mysym = sym match {
+ case x: from.MethodSymbol =>
+ linkReferenced(myowner.newMethod(myname, mypos, myflags), x, importSymbol)
+ case x: from.ModuleSymbol =>
+ linkReferenced(myowner.newModuleSymbol(myname, mypos, myflags), x, importSymbol)
+ case x: from.FreeTermSymbol =>
+ newFreeTermSymbol(importName(x.name).toTermName, x.value, x.flags, x.origin) setInfo importType(x.info)
+ case x: from.FreeTypeSymbol =>
+ newFreeTypeSymbol(importName(x.name).toTypeName, x.flags, x.origin)
+ case x: from.TermSymbol =>
+ linkReferenced(myowner.newValue(myname, mypos, myflags), x, importSymbol)
+ case x: from.TypeSkolem =>
+ val origin = x.unpackLocation match {
+ case null => null
+ case y: from.Tree => importTree(y)
+ case y: from.Symbol => importSymbol(y)
+ }
+ myowner.newTypeSkolemSymbol(myname.toTypeName, origin, mypos, myflags)
+ case x: from.ModuleClassSymbol =>
+ val mysym = myowner.newModuleClass(myname.toTypeName, mypos, myflags)
+ symMap.weakUpdate(x, mysym)
+ mysym.sourceModule = importSymbol(x.sourceModule)
+ mysym
+ case x: from.ClassSymbol =>
+ val mysym = myowner.newClassSymbol(myname.toTypeName, mypos, myflags)
+ symMap.weakUpdate(x, mysym)
+ if (sym.thisSym != sym) {
+ mysym.typeOfThis = importType(sym.typeOfThis)
+ mysym.thisSym setName importName(sym.thisSym.name)
+ }
+ mysym
+ case x: from.TypeSymbol =>
+ myowner.newTypeSymbol(myname.toTypeName, mypos, myflags)
}
- mysym
- case x: from.TypeSymbol =>
- myowner.newTypeSymbol(myname.toTypeName, mypos, myflags)
- }
- symMap(sym) = mysym
- mysym setFlag Flags.LOCKED
- mysym setInfo {
- val mytypeParams = sym.typeParams map importSymbol
- new LazyPolyType(mytypeParams) {
- override def complete(s: Symbol) {
- val result = sym.info match {
- case from.PolyType(_, res) => res
- case result => result
+ symMap.weakUpdate(sym, mysym)
+ mysym setFlag Flags.LOCKED
+ mysym setInfo {
+ val mytypeParams = sym.typeParams map importSymbol
+ new LazyPolyType(mytypeParams) with FlagAgnosticCompleter {
+ override def complete(s: Symbol) {
+ val result = sym.info match {
+ case from.PolyType(_, res) => res
+ case result => result
+ }
+ s setInfo GenPolyType(mytypeParams, importType(result))
+ s setAnnotations (sym.annotations map importAnnotationInfo)
+ }
}
- s setInfo GenPolyType(mytypeParams, importType(result))
- s setAnnotations (sym.annotations map importAnnotationInfo)
}
- }
- }
- mysym resetFlag Flags.LOCKED
- } // end doImport
+ mysym resetFlag Flags.LOCKED
+ } // end doImport
def importOrRelink: Symbol = {
val sym = sym0 // makes sym visible in the debugger
@@ -187,17 +194,18 @@ trait Importers { self: SymbolTable =>
} // end importOrRelink
val sym = sym0
- if (symMap contains sym) {
- symMap(sym)
- } else {
- pendingSyms += 1
-
- try {
- symMap getOrElseUpdate (sym, importOrRelink)
- } finally {
- pendingSyms -= 1
- tryFixup()
- }
+ symMap.weakGet(sym) match {
+ case Some(result) => result
+ case None =>
+ pendingSyms += 1
+ try {
+ val result = importOrRelink
+ symMap.weakUpdate(sym, result)
+ result
+ } finally {
+ pendingSyms -= 1
+ tryFixup()
+ }
}
}
@@ -239,7 +247,7 @@ trait Importers { self: SymbolTable =>
case from.AntiPolyType(pre, targs) =>
AntiPolyType(importType(pre), targs map importType)
case x: from.TypeVar =>
- TypeVar(importType(x.origin), importTypeConstraint(x.constr0), x.typeArgs map importType, x.params map importSymbol)
+ TypeVar(importType(x.origin), importTypeConstraint(x.constr), x.typeArgs map importType, x.params map importSymbol)
case from.NotNullType(tpe) =>
NotNullType(importType(tpe))
case from.AnnotatedType(annots, tpe, selfsym) =>
@@ -259,17 +267,18 @@ trait Importers { self: SymbolTable =>
def importOrRelink: Type =
doImport(tpe)
- if (tpeMap contains tpe) {
- tpeMap(tpe)
- } else {
- pendingTpes += 1
-
- try {
- tpeMap getOrElseUpdate (tpe, importOrRelink)
- } finally {
- pendingTpes -= 1
- tryFixup()
- }
+ tpeMap.weakGet(tpe) match {
+ case Some(result) => result
+ case None =>
+ pendingTpes += 1
+ try {
+ val result = importOrRelink
+ tpeMap.weakUpdate(tpe, result)
+ result
+ } finally {
+ pendingTpes -= 1
+ tryFixup()
+ }
}
}
@@ -449,4 +458,4 @@ trait Importers { self: SymbolTable =>
case _ => constant.value
})
}
-}
\ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala
index 2e2e63a4b4..019cf7f908 100644
--- a/src/reflect/scala/reflect/internal/Mirrors.scala
+++ b/src/reflect/scala/reflect/internal/Mirrors.scala
@@ -10,11 +10,16 @@ package internal
import Flags._
trait Mirrors extends api.Mirrors {
- self: SymbolTable =>
+ thisUniverse: SymbolTable =>
override type Mirror >: Null <: RootsBase
- abstract class RootsBase(rootOwner: Symbol) extends MirrorOf[Mirrors.this.type] { thisMirror =>
+ // root symbols hold a strong reference to the enclosing mirror
+ // this prevents the mirror from being collected
+ // if there are any symbols created by that mirror
+ trait RootSymbol extends Symbol { def mirror: Mirror }
+
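A tiny illustration of the reachability pattern the comment above describes; none of these names exist in the patch. An inner-class instance keeps its enclosing instance strongly reachable, so the enclosing object cannot be garbage-collected while any inner instance is alive.

// Illustrative only: Workshop plays the role of the mirror, Widget the role
// of a root symbol. Every Widget captures Workshop.this, so the Workshop
// stays reachable for as long as any Widget is.
class Workshop {
  class Widget { def home: Workshop = Workshop.this }
  def build(): Widget = new Widget
}

object ReachabilityDemo extends App {
  val w = (new Workshop).build()
  println(w.home != null) // the enclosing Workshop is still strongly reachable
}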
+ abstract class RootsBase(rootOwner: Symbol) extends scala.reflect.api.Mirror[Mirrors.this.type] { thisMirror =>
protected[scala] def rootLoader: LazyType
@@ -70,7 +75,7 @@ trait Mirrors extends api.Mirrors {
protected def mirrorMissingHook(owner: Symbol, name: Name): Symbol = NoSymbol
- protected def universeMissingHook(owner: Symbol, name: Name): Symbol = self.missingHook(owner, name)
+ protected def universeMissingHook(owner: Symbol, name: Name): Symbol = thisUniverse.missingHook(owner, name)
private[scala] def missingHook(owner: Symbol, name: Name): Symbol = mirrorMissingHook(owner, name) orElse universeMissingHook(owner, name)
@@ -247,14 +252,15 @@ trait Mirrors extends api.Mirrors {
// is very beneficial for a handful of bootstrap symbols to have
// first class identities
sealed trait WellKnownSymbol extends Symbol {
- this initFlags TopLevelCreationFlags
+ this initFlags (TopLevelCreationFlags | STATIC)
}
// Features common to RootClass and RootPackage, the roots of all
// type and term symbols respectively.
- sealed trait RootSymbol extends WellKnownSymbol {
+ sealed trait RootSymbol extends WellKnownSymbol with thisUniverse.RootSymbol {
final override def isRootSymbol = true
override def owner = rootOwner
override def typeOfThis = thisSym.tpe
+ def mirror = thisMirror.asInstanceOf[Mirror]
}
// This is the package _root_. The actual root cannot be referenced at
@@ -276,7 +282,6 @@ trait Mirrors extends api.Mirrors {
override def isRoot = true
override def isEffectiveRoot = true
- override def isStatic = true
override def isNestedClass = false
}
// The empty package, which holds all top level types without given packages.
diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala
index 835a46f05d..0114fb037c 100644
--- a/src/reflect/scala/reflect/internal/Names.scala
+++ b/src/reflect/scala/reflect/internal/Names.scala
@@ -8,7 +8,7 @@ package internal
import scala.io.Codec
import java.security.MessageDigest
-import language.implicitConversions
+import scala.language.implicitConversions
trait LowPriorityNames {
self: Names =>
@@ -67,7 +67,7 @@ trait Names extends api.Names with LowPriorityNames {
while (i < len) {
if (nc + i == chrs.length) {
val newchrs = new Array[Char](chrs.length * 2)
- compat.Platform.arraycopy(chrs, 0, newchrs, 0, chrs.length)
+ scala.compat.Platform.arraycopy(chrs, 0, newchrs, 0, chrs.length)
chrs = newchrs
}
chrs(nc + i) = cs(offset + i)
@@ -149,11 +149,15 @@ trait Names extends api.Names with LowPriorityNames {
type ThisNameType >: Null <: Name
protected[this] def thisName: ThisNameType
+ // Note that "Name with ThisNameType" should be redundant
+ // because ThisNameType <: Name, but due to SI-6161 the
+ // compiler loses track of this fact.
+
/** Index into name table */
def start: Int = index
/** The next name in the same hash bucket. */
- def next: ThisNameType
+ def next: Name with ThisNameType
/** The length of this name. */
final def length: Int = len
@@ -169,17 +173,17 @@ trait Names extends api.Names with LowPriorityNames {
def bothNames: List[Name] = List(toTermName, toTypeName)
/** Return the subname with characters from from to to-1. */
- def subName(from: Int, to: Int): ThisNameType
+ def subName(from: Int, to: Int): Name with ThisNameType
/** Return a new name of the same variety. */
- def newName(str: String): ThisNameType
+ def newName(str: String): Name with ThisNameType
/** Return a new name based on string transformation. */
- def mapName(f: String => String): ThisNameType = newName(f(toString))
+ def mapName(f: String => String): Name with ThisNameType = newName(f(toString))
/** Copy bytes of this name to buffer cs, starting at position `offset`. */
final def copyChars(cs: Array[Char], offset: Int) =
- compat.Platform.arraycopy(chrs, index, cs, offset, len)
+ scala.compat.Platform.arraycopy(chrs, index, cs, offset, len)
/** @return the ascii representation of this name */
final def toChars: Array[Char] = {
@@ -195,7 +199,7 @@ trait Names extends api.Names with LowPriorityNames {
*/
final def copyUTF8(bs: Array[Byte], offset: Int): Int = {
val bytes = Codec.toUTF8(chrs, index, len)
- compat.Platform.arraycopy(bytes, 0, bs, offset, bytes.length)
+ scala.compat.Platform.arraycopy(bytes, 0, bs, offset, bytes.length)
offset + bytes.length
}
@@ -387,7 +391,7 @@ trait Names extends api.Names with LowPriorityNames {
newTermName(cs, 0, len)
}
- /** TODO - reconcile/fix that encode returns a Name but
+ /* TODO - reconcile/fix that encode returns a Name but
* decode returns a String.
*/
@@ -415,9 +419,6 @@ trait Names extends api.Names with LowPriorityNames {
}
else toString
}
-
- @inline
- final def fingerPrint: Long = (1L << start)
/** TODO - find some efficiency. */
def append(ch: Char) = newName("" + this + ch)
diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala
index e3e2063b05..cb8dc4b197 100644
--- a/src/reflect/scala/reflect/internal/Printers.scala
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -3,14 +3,14 @@
* @author Martin Odersky
*/
-// [Eugene++ to Martin] we need to unify this prettyprinter with NodePrinters
+// todo. we need to unify this prettyprinter with NodePrinters
package scala.reflect
package internal
import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
import Flags._
-import compat.Platform.EOL
+import scala.compat.Platform.EOL
trait Printers extends api.Printers { self: SymbolTable =>
@@ -174,12 +174,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
}
def printAnnotations(tree: Tree) {
- if (!isCompilerUniverse && tree.symbol != null && tree.symbol != NoSymbol)
- // [Eugene++] todo. this is not 100% correct, but is necessary for sane printing
- // the problem is that getting annotations doesn't automatically initialize the symbol
- // so we might easily print something as if it doesn't have annotations, whereas it does
- tree.symbol.initialize
-
+ // SI-5885: by default this won't print annotations of not yet initialized symbols
val annots = tree.symbol.annotations match {
case Nil => tree.asInstanceOf[MemberDef].mods.annotations
case anns => anns
@@ -585,7 +580,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
else print(sym.name)
if (printIds) print("#", sym.id)
if (printKinds) print("#", sym.abbreviatedKindString)
- if (printMirrors) print("%M", footnotes.put[MirrorOf[_]](mirrorThatLoaded(sym)))
+ if (printMirrors) print("%M", footnotes.put[scala.reflect.api.Mirror[_]](mirrorThatLoaded(sym)))
case NoType =>
print("NoType")
case NoPrefix =>
@@ -614,7 +609,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
if (depth == 0 && !printingFootnotes) {
printingFootnotes = true
footnotes.print[Type](this)
- footnotes.print[MirrorOf[_]](this)
+ footnotes.print[scala.reflect.api.Mirror[_]](this)
printingFootnotes = false
}
}
@@ -669,8 +664,9 @@ trait Printers extends api.Printers { self: SymbolTable =>
def show(flags: FlagSet): String = {
if (flags == NoFlags) nme.NoFlags.toString
else {
- val s_flags = new collection.mutable.ListBuffer[String]
- for (i <- 0 to 63 if (flags hasFlag (1L << i)))
+ val s_flags = new scala.collection.mutable.ListBuffer[String]
+ def hasFlag(left: Long, right: Long): Boolean = (left & right) != 0
+ for (i <- 0 to 63 if hasFlag(flags, 1L << i))
s_flags += flagToString(1L << i).replace("<", "").replace(">", "").toUpperCase
s_flags mkString " | "
}
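A standalone sketch of the bit-walk used by `show(flags)` above; the generated `BIT_i` names stand in for the real flag-to-string table and are not part of the patch.

object FlagBits extends App {
  def hasFlag(left: Long, right: Long): Boolean = (left & right) != 0
  def show(flags: Long): String =
    (0 to 63).collect {
      case i if hasFlag(flags, 1L << i) => s"BIT_$i" // stand-in for flagToString(1L << i)
    }.mkString(" | ")
  println(show((1L << 5) | (1L << 62))) // BIT_5 | BIT_62
}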
diff --git a/src/reflect/scala/reflect/internal/Required.scala b/src/reflect/scala/reflect/internal/Required.scala
index abbe8fbfb7..842491d56d 100644
--- a/src/reflect/scala/reflect/internal/Required.scala
+++ b/src/reflect/scala/reflect/internal/Required.scala
@@ -5,8 +5,6 @@ import settings.MutableSettings
trait Required { self: SymbolTable =>
- type AbstractFileType >: Null <: AbstractFileApi
-
def picklerPhase: Phase
def settings: MutableSettings
diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala
index ed75b5d855..89332d0ae5 100644
--- a/src/reflect/scala/reflect/internal/Scopes.scala
+++ b/src/reflect/scala/reflect/internal/Scopes.scala
@@ -41,15 +41,10 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
* This is necessary because when run from reflection every scope needs to have a
* SynchronizedScope as mixin.
*/
- class Scope protected[Scopes] (initElems: ScopeEntry = null, initFingerPrints: Long = 0L) extends ScopeBase with MemberScopeBase {
-
- /** A bitset containing the last 6 bits of the start value of every name
- * stored in this scope.
- */
- var fingerPrints: Long = initFingerPrints
+ class Scope protected[Scopes] (initElems: ScopeEntry = null, initFingerPrints: Long = 0L) extends ScopeApi with MemberScopeApi {
protected[Scopes] def this(base: Scope) = {
- this(base.elems, base.fingerPrints)
+ this(base.elems)
nestinglevel = base.nestinglevel + 1
}
@@ -119,7 +114,6 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
* @param sym ...
*/
def enter[T <: Symbol](sym: T): T = {
- fingerPrints |= sym.name.fingerPrint
enterEntry(newScopeEntry(sym, this))
sym
}
@@ -156,7 +150,6 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
}
def rehash(sym: Symbol, newname: Name) {
- fingerPrints |= newname.fingerPrint
if (hashtable ne null) {
val index = sym.name.start & HASHMASK
var e1 = hashtable(index)
diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala
index 60b3a6f436..5c4d1f7e28 100644
--- a/src/reflect/scala/reflect/internal/StdAttachments.scala
+++ b/src/reflect/scala/reflect/internal/StdAttachments.scala
@@ -8,9 +8,9 @@ trait StdAttachments {
* Common code between reflect-internal Symbol and Tree related to Attachments.
*/
trait Attachable {
- protected var rawatt: base.Attachments { type Pos = Position } = NoPosition
+ protected var rawatt: scala.reflect.api.Attachments { type Pos = Position } = NoPosition
def attachments = rawatt
- def addAttachment(attachment: Any): this.type = { rawatt = rawatt.add(attachment); this }
+ def updateAttachment[T: ClassTag](attachment: T): this.type = { rawatt = rawatt.update(attachment); this }
def removeAttachment[T: ClassTag]: this.type = { rawatt = rawatt.remove[T]; this }
// cannot be final due to SynchronizedSymbols
diff --git a/src/reflect/scala/reflect/internal/StdCreators.scala b/src/reflect/scala/reflect/internal/StdCreators.scala
index 3e6b7c1ab4..5e5e4f9043 100644
--- a/src/reflect/scala/reflect/internal/StdCreators.scala
+++ b/src/reflect/scala/reflect/internal/StdCreators.scala
@@ -1,20 +1,20 @@
package scala.reflect
package internal
-import scala.reflect.base.{TreeCreator, TypeCreator}
-import scala.reflect.base.{Universe => BaseUniverse}
+import scala.reflect.api.{TreeCreator, TypeCreator}
+import scala.reflect.api.{Universe => ApiUniverse}
trait StdCreators {
self: SymbolTable =>
- case class FixedMirrorTreeCreator(mirror: MirrorOf[StdCreators.this.type], tree: Tree) extends TreeCreator {
- def apply[U <: BaseUniverse with Singleton](m: MirrorOf[U]): U # Tree =
+ case class FixedMirrorTreeCreator(mirror: scala.reflect.api.Mirror[StdCreators.this.type], tree: Tree) extends TreeCreator {
+ def apply[U <: ApiUniverse with Singleton](m: scala.reflect.api.Mirror[U]): U # Tree =
if (m eq mirror) tree.asInstanceOf[U # Tree]
else throw new IllegalArgumentException(s"Expr defined in $mirror cannot be migrated to other mirrors.")
}
- case class FixedMirrorTypeCreator(mirror: MirrorOf[StdCreators.this.type], tpe: Type) extends TypeCreator {
- def apply[U <: BaseUniverse with Singleton](m: MirrorOf[U]): U # Type =
+ case class FixedMirrorTypeCreator(mirror: scala.reflect.api.Mirror[StdCreators.this.type], tpe: Type) extends TypeCreator {
+ def apply[U <: ApiUniverse with Singleton](m: scala.reflect.api.Mirror[U]): U # Type =
if (m eq mirror) tpe.asInstanceOf[U # Type]
else throw new IllegalArgumentException(s"Type tag defined in $mirror cannot be migrated to other mirrors.")
}
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index c1e5f78d50..2cdfb05e77 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -8,8 +8,8 @@ package internal
import java.security.MessageDigest
import Chars.isOperatorPart
-import annotation.switch
-import language.implicitConversions
+import scala.annotation.switch
+import scala.language.implicitConversions
import scala.collection.immutable
import scala.io.Codec
@@ -131,7 +131,7 @@ trait StdNames {
final val Seq: NameType = "Seq"
final val Symbol: NameType = "Symbol"
final val ClassTag: NameType = "ClassTag"
- final val AbsTypeTag: NameType = "AbsTypeTag"
+ final val WeakTypeTag: NameType = "WeakTypeTag"
final val TypeTag : NameType = "TypeTag"
final val Expr: NameType = "Expr"
final val String: NameType = "String"
@@ -206,7 +206,6 @@ trait StdNames {
}
abstract class TypeNames extends Keywords with TypeNamesApi {
- type NameType = TypeName
protected implicit def createNameType(name: String): TypeName = newTypeNameCached(name)
final val BYNAME_PARAM_CLASS_NAME: NameType = "<byname>"
@@ -246,7 +245,6 @@ trait StdNames {
final val BeanPropertyAnnot: NameType = "BeanProperty"
final val BooleanBeanPropertyAnnot: NameType = "BooleanBeanProperty"
final val bridgeAnnot: NameType = "bridge"
- final val staticAnnot: NameType = "static"
// Classfile Attributes
final val AnnotationDefaultATTR: NameType = "AnnotationDefault"
@@ -274,7 +272,6 @@ trait StdNames {
}
abstract class TermNames extends Keywords with TermNamesApi {
- type NameType = TermName
protected implicit def createNameType(name: String): TermName = newTermNameCached(name)
/** Base strings from which synthetic names are derived. */
@@ -571,7 +568,7 @@ trait StdNames {
// Compiler utilized names
val AnnotatedType: NameType = "AnnotatedType"
- val AnnotationInfo: NameType = "AnnotationInfo"
+ val Annotation: NameType = "Annotation"
val Any: NameType = "Any"
val AnyVal: NameType = "AnyVal"
val AppliedTypeTree: NameType = "AppliedTypeTree"
@@ -636,7 +633,6 @@ trait StdNames {
val asInstanceOf_Ob : NameType = "$asInstanceOf"
val assert_ : NameType = "assert"
val assume_ : NameType = "assume"
- val basis : NameType = "basis"
val box: NameType = "box"
val build : NameType = "build"
val bytes: NameType = "bytes"
@@ -704,14 +700,13 @@ trait StdNames {
val manifestToTypeTag: NameType = "manifestToTypeTag"
val map: NameType = "map"
val materializeClassTag: NameType = "materializeClassTag"
- val materializeAbsTypeTag: NameType = "materializeAbsTypeTag"
+ val materializeWeakTypeTag: NameType = "materializeWeakTypeTag"
val materializeTypeTag: NameType = "materializeTypeTag"
val mirror : NameType = "mirror"
val moduleClass : NameType = "moduleClass"
val name: NameType = "name"
val ne: NameType = "ne"
val newArray: NameType = "newArray"
- val newFreeExistential: NameType = "newFreeExistential"
val newFreeTerm: NameType = "newFreeTerm"
val newFreeType: NameType = "newFreeType"
val newNestedSymbol: NameType = "newNestedSymbol"
@@ -1008,8 +1003,6 @@ trait StdNames {
val javanme = nme.javaKeywords
- // [Eugene++ to Martin] had to move a lot of stuff from here to TermNames to satisfy the contract
- // why do we even have stuff in object nme? cf. object tpnme
object nme extends TermNames {
def isModuleVarName(name: Name): Boolean =
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index c564a93b62..2424e75949 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -43,7 +43,11 @@ abstract class SymbolTable extends macros.Universe
lazy val treeBuild = gen
def log(msg: => AnyRef): Unit
- def abort(msg: String): Nothing = throw new FatalError(supplementErrorMessage(msg))
+ def warning(msg: String): Unit = Console.err.println(msg)
+ def globalError(msg: String): Unit = abort(msg)
+ def abort(msg: String): Nothing = throw new FatalError(supplementErrorMessage(msg))
+
+ def shouldLogAtThisPhase = false
@deprecated("Give us a reason", "2.10.0")
def abort(): Nothing = abort("unknown error")
@@ -70,11 +74,13 @@ abstract class SymbolTable extends macros.Universe
Console.err.println(msg + ": " + result)
result
}
- private[scala] def logResult[T](msg: String)(result: T): T = {
+ @inline
+ final private[scala] def logResult[T](msg: => String)(result: T): T = {
log(msg + ": " + result)
result
}
- private[scala] def logResultIf[T](msg: String, cond: T => Boolean)(result: T): T = {
+ @inline
+ final private[scala] def logResultIf[T](msg: => String, cond: T => Boolean)(result: T): T = {
if (cond(result))
log(msg + ": " + result)
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 2a306d7c6e..21506a498d 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -10,7 +10,8 @@ import scala.collection.{ mutable, immutable }
import scala.collection.mutable.ListBuffer
import util.Statistics
import Flags._
-import base.Attachments
+import scala.annotation.tailrec
+import scala.reflect.io.AbstractFile
trait Symbols extends api.Symbols { self: SymbolTable =>
import definitions._
@@ -26,7 +27,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
//protected var activeLocks = 0
/** Used for debugging only */
- //protected var lockedSyms = collection.immutable.Set[Symbol]()
+ //protected var lockedSyms = scala.collection.immutable.Set[Symbol]()
/** Used to keep track of the recursion depth on locked symbols */
private var recursionTable = immutable.Map.empty[Symbol, Int]
@@ -46,13 +47,23 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Create a new free term. Its owner is NoSymbol.
*/
- def newFreeTermSymbol(name: TermName, info: Type, value: => Any, flags: Long = 0L, origin: String): FreeTermSymbol =
- new FreeTermSymbol(name, value, origin) initFlags flags setInfo info
+ def newFreeTermSymbol(name: TermName, value: => Any, flags: Long = 0L, origin: String): FreeTermSymbol =
+ new FreeTermSymbol(name, value, origin) initFlags flags
/** Create a new free type. Its owner is NoSymbol.
*/
- def newFreeTypeSymbol(name: TypeName, info: Type, value: => Any, flags: Long = 0L, origin: String): FreeTypeSymbol =
- new FreeTypeSymbol(name, value, origin) initFlags flags setInfo info
+ def newFreeTypeSymbol(name: TypeName, flags: Long = 0L, origin: String): FreeTypeSymbol =
+ new FreeTypeSymbol(name, origin) initFlags flags
+
+ /** Determines whether the given information request should trigger the given symbol's completer.
+ * See comments to `Symbol.needsInitialize` for details.
+ */
+ protected def shouldTriggerCompleter(symbol: Symbol, completer: Type, isFlagRelated: Boolean, mask: Long) =
+ completer match {
+ case null => false
+ case _: FlagAgnosticCompleter => !isFlagRelated
+ case _ => abort(s"unsupported completer: $completer of class ${if (completer != null) completer.getClass else null} for symbol ${symbol.fullName}")
+ }
/** The original owner of a class. Used by the backend to generate
* EnclosingMethod attributes.
@@ -62,16 +73,22 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
abstract class SymbolContextApiImpl extends SymbolContextApi {
this: Symbol =>
- def kind: String = kindString
def isExistential: Boolean = this.isExistentiallyBound
def isParamWithDefault: Boolean = this.hasDefault
def isByNameParam: Boolean = this.isValueParameter && (this hasFlag BYNAMEPARAM)
+ def isImplementationArtifact: Boolean = (this hasFlag BRIDGE) || (this hasFlag VBRIDGE) || (this hasFlag ARTIFACT)
+ def isJava: Boolean = isJavaDefined
+ def isVal: Boolean = isTerm && !isModule && !isMethod && !isMutable
+ def isVar: Boolean = isTerm && !isModule && !isMethod && isMutable
def newNestedSymbol(name: Name, pos: Position, newFlags: Long, isClass: Boolean): Symbol = name match {
case n: TermName => newTermSymbol(n, pos, newFlags)
case n: TypeName => if (isClass) newClassSymbol(n, pos, newFlags) else newNonClassSymbol(n, pos, newFlags)
}
+ def knownDirectSubclasses = children
+ def baseClasses = info.baseClasses
+ def module = sourceModule
def thisPrefix: Type = thisType
def selfType: Type = typeOfThis
def typeSignature: Type = info
@@ -80,10 +97,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def toType: Type = tpe
def toTypeIn(site: Type): Type = site.memberType(this)
def toTypeConstructor: Type = typeConstructor
- def setFlags(flags: FlagSet): this.type = setInternalFlags(flags)
- def setInternalFlags(flag: Long): this.type = { setFlag(flag); this }
def setTypeSignature(tpe: Type): this.type = { setInfo(tpe); this }
- def getAnnotations: List[AnnotationInfo] = { initialize; annotations }
def setAnnotations(annots: AnnotationInfo*): this.type = { setAnnotations(annots.toList); this }
def getter: Symbol = getter(owner)
@@ -108,7 +122,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// with the proper specific type.
def rawname: NameType
def name: NameType
- def name_=(n: Name): Unit
+ def name_=(n: Name): Unit = {
+ if (shouldLogAtThisPhase) {
+ val msg = s"Renaming $fullLocationString to $n"
+ if (isSpecialized) debuglog(msg) else log(msg)
+ }
+ }
def asNameType(n: Name): NameType
private[this] var _rawowner = initOwner // Syncnote: need not be protected, as only assignment happens in owner_=, which is not exposed to api
@@ -208,9 +227,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
val m = newModuleSymbol(clazz.name.toTermName, clazz.pos, MODULE | newFlags)
connectModuleToClass(m, clazz.asInstanceOf[ClassSymbol])
}
- final def newModule(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol = {
- val m = newModuleSymbol(name, pos, newFlags | MODULE)
- val clazz = newModuleClass(name.toTypeName, pos, m getFlag ModuleToClassFlags)
+ final def newModule(name: TermName, pos: Position = NoPosition, newFlags0: Long = 0L): ModuleSymbol = {
+ val newFlags = newFlags0 | MODULE
+ val m = newModuleSymbol(name, pos, newFlags)
+ val clazz = newModuleClass(name.toTypeName, pos, newFlags & ModuleToClassFlags)
connectModuleToClass(m, clazz)
}
@@ -228,9 +248,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def newModuleSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol =
newTermSymbol(name, pos, newFlags).asInstanceOf[ModuleSymbol]
- final def newModuleAndClassSymbol(name: Name, pos: Position, flags: FlagSet): (ModuleSymbol, ClassSymbol) = {
- val m = newModuleSymbol(name, pos, flags | MODULE)
- val c = newModuleClass(name.toTypeName, pos, m getFlag ModuleToClassFlags)
+ final def newModuleAndClassSymbol(name: Name, pos: Position, flags0: FlagSet): (ModuleSymbol, ClassSymbol) = {
+ val flags = flags0 | MODULE
+ val m = newModuleSymbol(name, pos, flags)
+ val c = newModuleClass(name.toTypeName, pos, flags & ModuleToClassFlags)
connectModuleToClass(m, c)
(m, c)
}
@@ -295,14 +316,31 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def newExistential(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): TypeSymbol =
newAbstractType(name, pos, EXISTENTIAL | newFlags)
- /** Synthetic value parameters when parameter symbols are not available
- */
- final def newSyntheticValueParamss(argtypess: List[List[Type]]): List[List[TermSymbol]] = {
+ private def freshNamer: () => TermName = {
var cnt = 0
- def freshName() = { cnt += 1; nme.syntheticParamName(cnt) }
- mmap(argtypess)(tp => newValueParameter(freshName(), owner.pos.focus, SYNTHETIC) setInfo tp)
+ () => { cnt += 1; nme.syntheticParamName(cnt) }
}
+ /** Synthetic value parameters when parameter symbols are not available
+ */
+ final def newSyntheticValueParamss(argtypess: List[List[Type]]): List[List[TermSymbol]] =
+ argtypess map (xs => newSyntheticValueParams(xs, freshNamer))
+
+ /** Synthetic value parameters when parameter symbols are not available.
+ * Calling this method multiple times will re-use the same parameter names.
+ */
+ final def newSyntheticValueParams(argtypes: List[Type]): List[TermSymbol] =
+ newSyntheticValueParams(argtypes, freshNamer)
+
+ final def newSyntheticValueParams(argtypes: List[Type], freshName: () => TermName): List[TermSymbol] =
+ argtypes map (tp => newSyntheticValueParam(tp, freshName()))
+
+ /** Synthetic value parameter when parameter symbol is not available.
+ * Calling this method multiple times will re-use the same parameter name.
+ */
+ final def newSyntheticValueParam(argtype: Type, name: TermName = nme.syntheticParamName(1)): TermSymbol =
+ newValueParameter(name, owner.pos.focus, SYNTHETIC) setInfo argtype
+
def newSyntheticTypeParam(): TypeSymbol = newSyntheticTypeParam("T0", 0L)
def newSyntheticTypeParam(name: String, newFlags: Long): TypeSymbol = newTypeParameter(newTypeName(name), NoPosition, newFlags) setInfo TypeBounds.empty
def newSyntheticTypeParams(num: Int): List[TypeSymbol] = (0 until num).toList map (n => newSyntheticTypeParam("T" + n, 0L))
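A tiny standalone sketch of the fresh-name counter that `freshNamer` wraps above; `syntheticName` and the demo object are made-up names, and the real method draws names from `nme.syntheticParamName` instead.

// Each call to freshNamer returns an independent counter; invoking the
// resulting function yields x$1, x$2, ... in order.
object FreshNamerDemo extends App {
  def freshNamer(syntheticName: Int => String): () => String = {
    var cnt = 0
    () => { cnt += 1; syntheticName(cnt) }
  }
  val fresh = freshNamer(i => s"x$$$i")
  println(List.fill(3)(fresh())) // List(x$1, x$2, x$3)
}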
@@ -318,7 +356,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// don't test directly -- use isGADTSkolem
// used to single out a gadt skolem symbol in deskolemizeGADT
// gadtskolems are created in adaptConstrPattern and removed at the end of typedCase
- @inline final protected[Symbols] def GADT_SKOLEM_FLAGS = CASEACCESSOR | SYNTHETIC
+ final protected[Symbols] def GADT_SKOLEM_FLAGS = CASEACCESSOR | SYNTHETIC
// flags set up to maintain TypeSkolem's invariant: origin.isInstanceOf[Symbol] == !hasFlag(EXISTENTIAL)
// GADT_SKOLEM_FLAGS (== CASEACCESSOR | SYNTHETIC) used to single this symbol out in deskolemizeGADT
@@ -329,18 +367,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def freshExistential(suffix: String): TypeSymbol =
newExistential(freshExistentialName(suffix), pos)
- /** Synthetic value parameters when parameter symbols are not available.
- * Calling this method multiple times will re-use the same parameter names.
- */
- final def newSyntheticValueParams(argtypes: List[Type]): List[TermSymbol] =
- newSyntheticValueParamss(List(argtypes)).head
-
- /** Synthetic value parameter when parameter symbol is not available.
- * Calling this method multiple times will re-use the same parameter name.
- */
- final def newSyntheticValueParam(argtype: Type): Symbol =
- newSyntheticValueParams(List(argtype)).head
-
/** Type skolems are type parameters ''seen from the inside''
* Assuming a polymorphic method m[T], its type is a PolyType which has a TypeParameter
* with name `T` in its typeParams list. While type checking the parameters, result type and
@@ -392,6 +418,16 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
case x: TermName => newErrorValue(x)
}
+ /** Creates a placeholder symbol for when a name is encountered during
+ * unpickling for which there is no corresponding classfile. This defers
+ * failure to the point when that name is used for something, which is
+ * often never.
+ */
+ def newStubSymbol(name: Name): Symbol = name match {
+ case n: TypeName => new StubClassSymbol(this, n)
+ case _ => new StubTermSymbol(this, name.toTermName)
+ }
+
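A toy model of the "defer failure until use" idea described in the comment above; `Entry`, `RealEntry` and `StubEntry` are illustrative names and not part of the compiler.

object StubDemo extends App {
  trait Entry { def resolve(): String }
  class RealEntry(payload: String) extends Entry { def resolve() = payload }
  // The stub is created eagerly but only fails if someone actually uses it,
  // mirroring how newStubSymbol defers the missing-classfile error.
  class StubEntry(name: String) extends Entry {
    def resolve() = sys.error(s"bad symbolic reference to $name (a classfile may be missing)")
  }
  val entries = Map[String, Entry]("Present" -> new RealEntry("ok"), "Missing" -> new StubEntry("Missing"))
  println(entries("Present").resolve()) // "ok"; the stub for "Missing" is never touched
}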
@deprecated("Use the other signature", "2.10.0")
def newClass(pos: Position, name: TypeName): Symbol = newClass(name, pos)
@deprecated("Use the other signature", "2.10.0")
@@ -464,14 +500,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def isAliasType = false
def isAbstractType = false
def isSkolem = false
- def isMacro = this hasFlag MACRO
/** A Type, but not a Class. */
def isNonClassType = false
/** The bottom classes are Nothing and Null, found in Definitions. */
def isBottomClass = false
- def isSpecialized = this hasFlag SPECIALIZED
/** These are all tests for varieties of ClassSymbol, which has these subclasses:
* - ModuleClassSymbol
@@ -502,6 +536,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def isTypeParameterOrSkolem = false
def isTypeSkolem = false
def isTypeMacro = false
+ def isInvariant = !isCovariant && !isContravariant
/** Qualities of Terms, always false for TypeSymbols.
*/
@@ -559,11 +594,20 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
&& owner.isPackageClass
&& nme.isReplWrapperName(name)
)
- @inline final def getFlag(mask: Long): Long = flags & mask
+ final def getFlag(mask: Long): Long = {
+ if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize
+ flags & mask
+ }
/** Does symbol have ANY flag in `mask` set? */
- @inline final def hasFlag(mask: Long): Boolean = (flags & mask) != 0
+ final def hasFlag(mask: Long): Boolean = {
+ if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize
+ (flags & mask) != 0
+ }
/** Does symbol have ALL the flags in `mask` set? */
- @inline final def hasAllFlags(mask: Long): Boolean = (flags & mask) == mask
+ final def hasAllFlags(mask: Long): Boolean = {
+ if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize
+ (flags & mask) == mask
+ }
def setFlag(mask: Long): this.type = { _rawflags |= mask ; this }
def resetFlag(mask: Long): this.type = { _rawflags &= ~mask ; this }
@@ -670,11 +714,22 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
|| hasAnnotation(SerializableAttr) // last part can be removed, @serializable annotation is deprecated
)
def hasBridgeAnnotation = hasAnnotation(BridgeClass)
- def hasStaticAnnotation = hasAnnotation(StaticClass)
def isDeprecated = hasAnnotation(DeprecatedAttr)
def deprecationMessage = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 0)
def deprecationVersion = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 1)
def deprecatedParamName = getAnnotation(DeprecatedNameAttr) flatMap (_ symbolArg 0)
+ def hasDeprecatedInheritanceAnnotation
+ = hasAnnotation(DeprecatedInheritanceAttr)
+ def deprecatedInheritanceMessage
+ = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 0)
+ def deprecatedInheritanceVersion
+ = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 1)
+ def hasDeprecatedOverridingAnnotation
+ = hasAnnotation(DeprecatedOverridingAttr)
+ def deprecatedOverridingMessage
+ = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 0)
+ def deprecatedOverridingVersion
+ = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 1)
// !!! when annotation arguments are not literal strings, but any sort of
// assembly of strings, there is a fair chance they will turn up here not as
@@ -690,13 +745,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Is this symbol an accessor method for outer? */
final def isOuterAccessor = {
- hasFlag(STABLE | HIDDEN) &&
+ hasFlag(STABLE | ARTIFACT) &&
originalName == nme.OUTER
}
/** Is this symbol an accessor method for outer? */
final def isOuterField = {
- hasFlag(HIDDEN) &&
+ hasFlag(ARTIFACT) &&
originalName == nme.OUTER_LOCAL
}
@@ -831,8 +886,16 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isInitialized: Boolean =
validTo != NoPeriod
- // [Eugene] todo. needs to be reviewed and [only then] rewritten without explicit returns
- /** Determines whether this symbol can be loaded by subsequent reflective compilation */
+ /** Can this symbol be loaded by a reflective mirror?
+ *
+ * Scalac relies on the `ScalaSignature' annotation to retain symbols across compilation runs.
+ * Such annotations (also called "pickles") are applied on top-level classes and include information
+ * about all symbols reachable from the annotee. However, local symbols (e.g. classes or definitions local to a block)
+ * are typically unreachable and information about them gets lost.
+ *
+ * This method is useful for macro writers who wish to save certain ASTs to be used at runtime.
+ * With `isLocatable' it's possible to check whether a tree can be retained as is, or whether it needs special treatment.
+ */
final def isLocatable: Boolean = {
if (this == NoSymbol) return false
if (isRoot || isRootPackage) return true
@@ -876,7 +939,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// ------ owner attribute --------------------------------------------------------------
def owner: Symbol = {
- Statistics.incCounter(ownerCount)
+ if (Statistics.hotEnabled) Statistics.incCounter(ownerCount)
rawowner
}
@@ -924,7 +987,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** If this symbol has an expanded name, its original name, otherwise its name itself.
* @see expandName
*/
- def originalName: Name = nme.originalName(name)
+ def originalName: Name = nme.originalName(nme.dropLocalSuffix(name))
/** The name of the symbol before decoding, e.g. `\$eq\$eq` instead of `==`.
*/
@@ -969,7 +1032,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private def fullNameInternal(separator: Char): Name = (
if (isRoot || isRootPackage || this == NoSymbol) name
else if (owner.isEffectiveRoot) name
- else effectiveOwner.enclClass.fullNameAsName(separator) append separator append name
+ else ((effectiveOwner.enclClass.fullNameAsName(separator) append separator): Name) append name
)
def fullNameAsName(separator: Char): Name = nme.dropLocalSuffix(fullNameInternal(separator))
@@ -1093,7 +1156,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** See comment in HasFlags for how privateWithin combines with flags.
*/
private[this] var _privateWithin: Symbol = _
- def privateWithin = _privateWithin
+ def privateWithin = {
+ if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize
+ _privateWithin
+ }
def privateWithin_=(sym: Symbol) { _privateWithin = sym }
def setPrivateWithin(sym: Symbol): this.type = { privateWithin_=(sym) ; this }
@@ -1284,6 +1350,46 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
this
}
+ /** Called when the programmer requests information that might require initialization of the underlying symbol.
+ *
+ * `isFlagRelated` and `mask` describe the nature of this information.
+ * isFlagRelated = true means that the programmer needs particular bits in flags.
+ * isFlagRelated = false means that the request is unrelated to flags (annotations or privateWithin).
+ *
+ * In our current architecture, symbols for top-level classes and modules
+ * are created as dummies. Package symbols just call newClass(name) or newModule(name) and
+ * consider their job done.
+ *
+ * In order for such a dummy to provide meaningful info (e.g. a list of its members),
+ * it needs to go through unpickling. Unpickling is a process of reading Scala metadata
+ * from ScalaSignature annotations and assigning it to symbols and types.
+ *
+ * A single unpickling session takes a top-level class or module, parses the ScalaSignature annotation
+ * and then reads metadata for the unpicklee, its companion (if any) and all their members recursively
+ * (i.e. the pickle not only contains info about directly nested classes/modules, but also about
+ * classes/modules nested into those and so on).
+ *
+ * Unpickling is triggered automatically whenever typeSignature (info in compiler parlance) is called.
+ * This happens because package symbols assign completer thunks to the dummies they create.
+ * Therefore metadata loading happens lazily and transparently.
+ *
+ * Almost transparently. Unfortunately metadata isn't limited to just signatures (i.e. lists of members).
+ * It also includes flags (which determine e.g. whether a class is sealed or not), annotations and privateWithin.
+ * This gives rise to unpleasant effects like SI-6277, where a flag test called on an uninitialized symbol
+ * produces incorrect results.
+ *
+ * One might think that the solution is simple: automatically call the completer whenever one needs
+ * flags, annotations and privateWithin - just like it's done for typeSignature. Unfortunately, this
+ * leads to weird crashes in scalac, and currently we can't attempt to fix the core of the compiler
+ * and risk stability a few weeks before the final release.
+ *
+ * However we do need to fix this for runtime reflection, since it's not something we'd like to
+ * expose to reflection users. Therefore a proposed solution is to check whether we're in a
+ * runtime reflection universe and, if so, to commence initialization.
+ */
+ protected def needsInitialize(isFlagRelated: Boolean, mask: Long) =
+ !isInitialized && (flags & LOCKED) == 0 && shouldTriggerCompleter(this, if (infos ne null) infos.info else null, isFlagRelated, mask)
+
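To make the mechanism above concrete, a toy model of the "dummy symbol plus lazy completer" scheme; all names here (`Sym`, `Info`, `SEALED`) are illustrative and do not exist in the compiler. Flag queries force the completer, which is what the `needsInitialize` check enables for the runtime reflection universe.

object CompleterModel extends App {
  final case class Info(flags: Long, members: List[String])

  // A symbol starts out as a dummy; its Info is computed on first demand.
  class Sym(completer: () => Info) {
    private var _info: Info = null
    def isInitialized: Boolean = _info != null
    private def initialize(): Unit = if (!isInitialized) _info = completer()
    def hasFlag(mask: Long): Boolean = { initialize(); (_info.flags & mask) != 0 }
    def members: List[String]        = { initialize(); _info.members }
  }

  val SEALED = 1L << 10
  val sym = new Sym(() => { println("unpickling..."); Info(SEALED, List("A", "B")) })
  println(sym.isInitialized)   // false: nothing has forced the completer yet
  println(sym.hasFlag(SEALED)) // prints "unpickling..." then true
}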
/** Was symbol's type updated during given phase? */
final def isUpdatedAt(pid: Phase#Id): Boolean = {
assert(isCompilerUniverse)
@@ -1365,7 +1471,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** The value parameter sections of this symbol.
*/
def paramss: List[List[Symbol]] = info.paramss
- def hasParamWhich(cond: Symbol => Boolean) = mexists(paramss)(cond)
/** The least proper supertype of a class; includes all parent types
* and refinement where needed. You need to compute that in a situation like this:
@@ -1425,7 +1530,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def makeSerializable() {
info match {
case ci @ ClassInfoType(_, _, _) =>
- updateInfo(ci.copy(parents = ci.parents :+ SerializableClass.tpe))
+ setInfo(ci.copy(parents = ci.parents :+ SerializableClass.tpe))
case i =>
abort("Only ClassInfoTypes can be made serializable: "+ i)
}
@@ -1447,8 +1552,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** After the typer phase (before, look at the definition's Modifiers), contains
* the annotations attached to member a definition (class, method, type, field).
*/
- def annotations: List[AnnotationInfo] =
+ def annotations: List[AnnotationInfo] = {
+ if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize
_annotations
+ }
def setAnnotations(annots: List[AnnotationInfo]): this.type = {
_annotations = annots
@@ -1514,7 +1621,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def alternatives: List[Symbol] =
if (isOverloaded) info.asInstanceOf[OverloadedType].alternatives
- else List(this)
+ else this :: Nil
def filter(cond: Symbol => Boolean): Symbol =
if (isOverloaded) {
@@ -1554,7 +1661,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
setInfo (this.info cloneInfo clone)
setAnnotations this.annotations
)
- this.attachments.all.foreach(clone.addAttachment)
+ this.attachments.all.foreach(clone.updateAttachment)
if (clone.thisSym != clone)
clone.typeOfThis = (clone.typeOfThis cloneInfo clone)
@@ -1618,7 +1725,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
(info.decls filter (_.isCaseAccessorMethod)).toList
final def constrParamAccessors: List[Symbol] =
- info.decls.toList filter (sym => !sym.isMethod && sym.isParamAccessor)
+ info.decls.filter(sym => !sym.isMethod && sym.isParamAccessor).toList
/** The symbol accessed by this accessor (getter or setter) function. */
final def accessed: Symbol = accessed(owner.info)
@@ -1770,26 +1877,16 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
} else owner.enclosingTopLevelClass
/** Is this symbol defined in the same scope and compilation unit as `that` symbol? */
- def isCoDefinedWith(that: Symbol) = {
- (this.rawInfo ne NoType) &&
- (this.effectiveOwner == that.effectiveOwner) && {
- !this.effectiveOwner.isPackageClass ||
- (this.sourceFile eq null) ||
- (that.sourceFile eq null) ||
- (this.sourceFile == that.sourceFile) || {
- // recognize companion object in separate file and fail, else compilation
- // appears to succeed but highly opaque errors come later: see bug #1286
- if (this.sourceFile.path != that.sourceFile.path) {
- // The cheaper check can be wrong: do the expensive normalization
- // before failing.
- if (this.sourceFile.canonicalPath != that.sourceFile.canonicalPath)
- throw InvalidCompanions(this, that)
- }
-
- false
- }
- }
- }
+ def isCoDefinedWith(that: Symbol) = (
+ (this.rawInfo ne NoType)
+ && (this.effectiveOwner == that.effectiveOwner)
+ && ( !this.effectiveOwner.isPackageClass
+ || (this.sourceFile eq null)
+ || (that.sourceFile eq null)
+ || (this.sourceFile.path == that.sourceFile.path) // Cheap possibly wrong check, then expensive normalization
+ || (this.sourceFile.canonicalPath == that.sourceFile.canonicalPath)
+ )
+ )
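A minimal illustration of the "cheap check first, canonicalize only if needed" idiom used in `isCoDefinedWith`; the file names are made up.

import java.io.File

object SameFileDemo extends App {
  // getPath is cheap but may disagree for equivalent paths; getCanonicalPath
  // normalizes (and may touch the file system), so it is tried second.
  def sameFile(a: File, b: File): Boolean =
    a.getPath == b.getPath || a.getCanonicalPath == b.getCanonicalPath

  println(sameFile(new File("src/A.scala"), new File("./src/A.scala"))) // true, via canonical paths
}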
/** The internal representation of classes and objects:
*
@@ -1884,9 +1981,15 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* @param ofclazz The class containing the symbol's definition
* @param site The base type from which member types are computed
*/
- final def matchingSymbol(ofclazz: Symbol, site: Type): Symbol =
- ofclazz.info.nonPrivateDecl(name).filter(sym =>
- !sym.isTerm || (site.memberType(this) matches site.memberType(sym)))
+ final def matchingSymbol(ofclazz: Symbol, site: Type): Symbol = {
+ //OPT cut down on #closures by special casing non-overloaded case
+ // was: ofclazz.info.nonPrivateDecl(name) filter (sym =>
+ // !sym.isTerm || (site.memberType(this) matches site.memberType(sym)))
+ val result = ofclazz.info.nonPrivateDecl(name)
+ def qualifies(sym: Symbol) = !sym.isTerm || (site.memberType(this) matches site.memberType(sym))
+ if ((result eq NoSymbol) || !result.isOverloaded && qualifies(result)) result
+ else result filter qualifies
+ }
/** The non-private member of `site` whose type and name match the type of this symbol. */
final def matchingSymbol(site: Type, admit: Long = 0L): Symbol =
@@ -2039,21 +2142,21 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* of sourceFile (which is expected at least in the IDE only to
* return actual source code.) So sourceFile has classfiles filtered out.
*/
- private def sourceFileOnly(file: AbstractFileType): AbstractFileType =
+ private def sourceFileOnly(file: AbstractFile): AbstractFile =
if ((file eq null) || (file.path endsWith ".class")) null else file
- private def binaryFileOnly(file: AbstractFileType): AbstractFileType =
+ private def binaryFileOnly(file: AbstractFile): AbstractFile =
if ((file eq null) || !(file.path endsWith ".class")) null else file
- final def binaryFile: AbstractFileType = binaryFileOnly(associatedFile)
- final def sourceFile: AbstractFileType = sourceFileOnly(associatedFile)
+ final def binaryFile: AbstractFile = binaryFileOnly(associatedFile)
+ final def sourceFile: AbstractFile = sourceFileOnly(associatedFile)
/** Overridden in ModuleSymbols to delegate to the module class. */
- def associatedFile: AbstractFileType = enclosingTopLevelClass.associatedFile
- def associatedFile_=(f: AbstractFileType) { abort("associatedFile_= inapplicable for " + this) }
+ def associatedFile: AbstractFile = enclosingTopLevelClass.associatedFile
+ def associatedFile_=(f: AbstractFile) { abort("associatedFile_= inapplicable for " + this) }
@deprecated("Use associatedFile_= instead", "2.10.0")
- def sourceFile_=(f: AbstractFileType): Unit = associatedFile_=(f)
+ def sourceFile_=(f: AbstractFile): Unit = associatedFile_=(f)
/** If this is a sealed class, its known direct subclasses.
* Otherwise, the empty set.
@@ -2162,10 +2265,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* If settings.Yshowsymkinds, adds abbreviated symbol kind.
*/
def nameString: String = (
- if (!settings.uniqid.value && !settings.Yshowsymkinds.value) "" + decodedName
- else if (settings.uniqid.value && !settings.Yshowsymkinds.value) decodedName + "#" + id
- else if (!settings.uniqid.value && settings.Yshowsymkinds.value) decodedName + "#" + abbreviatedKindString
- else decodedName + "#" + id + "#" + abbreviatedKindString
+ if (!settings.uniqid.value && !settings.Yshowsymkinds.value) "" + originalName.decode
+ else if (settings.uniqid.value && !settings.Yshowsymkinds.value) originalName.decode + "#" + id
+ else if (!settings.uniqid.value && settings.Yshowsymkinds.value) originalName.decode + "#" + abbreviatedKindString
+ else originalName.decode + "#" + id + "#" + abbreviatedKindString
)
def fullNameString: String = {
@@ -2272,12 +2375,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private[this] var _rawname: TermName = initName
def rawname = _rawname
def name = {
- Statistics.incCounter(nameCount)
+ if (Statistics.hotEnabled) Statistics.incCounter(nameCount)
_rawname
}
- def name_=(name: Name) {
+ override def name_=(name: Name) {
if (name != rawname) {
- log("Renaming %s %s %s to %s".format(shortSymbolClass, debugFlagString, rawname, name))
+ super.name_=(name) // logging
changeNameInOwners(name)
_rawname = name.toTermName
}
@@ -2437,19 +2540,19 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private var flatname: TermName = null
override def associatedFile = moduleClass.associatedFile
- override def associatedFile_=(f: AbstractFileType) { moduleClass.associatedFile = f }
+ override def associatedFile_=(f: AbstractFile) { moduleClass.associatedFile = f }
override def moduleClass = referenced
override def companionClass =
flatOwnerInfo.decl(name.toTypeName).suchThat(sym => sym.isClass && (sym isCoDefinedWith this))
override def owner = {
- Statistics.incCounter(ownerCount)
+ if (Statistics.hotEnabled) Statistics.incCounter(ownerCount)
if (!isMethod && needsFlatClasses) rawowner.owner
else rawowner
}
override def name: TermName = {
- Statistics.incCounter(nameCount)
+ if (Statistics.hotEnabled) Statistics.incCounter(nameCount)
if (!isMethod && needsFlatClasses) {
if (flatname eq null)
flatname = nme.flattenedName(rawowner.name, rawname)
@@ -2543,7 +2646,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def rawname = _rawname
def name = {
- Statistics.incCounter(nameCount)
+ if (Statistics.hotEnabled) Statistics.incCounter(nameCount)
_rawname
}
final def asNameType(n: Name) = n.toTypeName
@@ -2578,9 +2681,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// TODO - don't allow names to be renamed in this unstructured a fashion.
// Rename as little as possible. Enforce invariants on all renames.
- def name_=(name: Name) {
+ override def name_=(name: Name) {
if (name != rawname) {
- log("Renaming %s %s %s to %s".format(shortSymbolClass, debugFlagString, rawname, name))
+ super.name_=(name) // logging
changeNameInOwners(name)
_rawname = name.toTypeName
}
@@ -2681,7 +2784,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
}
- Statistics.incCounter(typeSymbolCount)
+ if (Statistics.hotEnabled) Statistics.incCounter(typeSymbolCount)
}
implicit val TypeSymbolTag = ClassTag[TypeSymbol](classOf[TypeSymbol])
@@ -2741,9 +2844,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
extends TypeSymbol(initOwner, initPos, initName) with ClassSymbolApi {
type TypeOfClonedSymbol = ClassSymbol
- private[this] var flatname: TypeName = _
- private[this] var _associatedFile: AbstractFileType = _
- private[this] var thissym: Symbol = this
+ private[this] var flatname: TypeName = _
+ private[this] var _associatedFile: AbstractFile = _
+ private[this] var thissym: Symbol = this
private[this] var thisTypeCache: Type = _
private[this] var thisTypePeriod = NoPeriod
@@ -2841,7 +2944,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
override def associatedFile = if (owner.isPackageClass) _associatedFile else super.associatedFile
- override def associatedFile_=(f: AbstractFileType) { _associatedFile = f }
+ override def associatedFile_=(f: AbstractFile) { _associatedFile = f }
override def reset(completer: Type): this.type = {
super.reset(completer)
@@ -2860,12 +2963,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
override def owner: Symbol = {
- Statistics.incCounter(ownerCount)
+ if (Statistics.hotEnabled) Statistics.incCounter(ownerCount)
if (needsFlatClasses) rawowner.owner else rawowner
}
override def name: TypeName = {
- Statistics.incCounter(nameCount)
+ if (Statistics.canEnable) Statistics.incCounter(nameCount)
if (needsFlatClasses) {
if (flatname eq null)
flatname = nme.flattenedName(rawowner.name, rawname).toTypeName
@@ -2904,7 +3007,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def children = childSet
override def addChild(sym: Symbol) { childSet = childSet + sym }
- Statistics.incCounter(classSymbolCount)
+ if (Statistics.hotEnabled) Statistics.incCounter(classSymbolCount)
}
implicit val ClassSymbolTag = ClassTag[ClassSymbol](classOf[ClassSymbol])
@@ -2998,6 +3101,37 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
|| info.parents.exists(_.typeSymbol hasTransOwner sym)
)
}
+ trait StubSymbol extends Symbol {
+ protected def stubWarning = {
+ val from = if (associatedFile == null) "" else s" - referenced from ${associatedFile.canonicalPath}"
+ s"$kindString $nameString$locationString$from (a classfile may be missing)"
+ }
+ private def fail[T](alt: T): T = {
+ // Avoid issuing lots of redundant errors
+ if (!hasFlag(IS_ERROR)) {
+ globalError(s"bad symbolic reference to " + stubWarning)
+ if (settings.debug.value)
+ (new Throwable).printStackTrace
+
+ this setFlag IS_ERROR
+ }
+ alt
+ }
+ // This one doesn't call fail because SpecializeTypes winds up causing
+ // isMonomorphicType to be called, which calls this, which would fail us
+ // in all the scenarios we're trying to keep from failing.
+ override def originalInfo = NoType
+ override def associatedFile = owner.associatedFile
+ override def info = fail(NoType)
+ override def rawInfo = fail(NoType)
+ override def companionSymbol = fail(NoSymbol)
+
+ locally {
+ debugwarn("creating stub symbol for " + stubWarning)
+ }
+ }
+ class StubClassSymbol(owner0: Symbol, name0: TypeName) extends ClassSymbol(owner0, owner0.pos, name0) with StubSymbol
+ class StubTermSymbol(owner0: Symbol, name0: TermName) extends TermSymbol(owner0, owner0.pos, name0) with StubSymbol
trait FreeSymbol extends Symbol {
def origin: String
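
The StubSymbol trait added above stands in for a symbol whose classfile could not be loaded: `info`, `rawInfo` and `companionSymbol` report a single "bad symbolic reference" error (the IS_ERROR flag keeps it from repeating) and then return a harmless placeholder so compilation can continue. A self-contained sketch of the same report-once-and-degrade shape; the class and names below are illustrative, not the compiler's API:

    // Illustrative class, not the compiler's API: report the broken reference
    // once, then keep returning harmless placeholders.
    class Stub(val name: String) {
      private var errorReported = false

      private def fail[T](alt: T): T = {
        if (!errorReported) {            // same role as the IS_ERROR flag above
          Console.err.println(s"bad symbolic reference to $name (a classfile may be missing)")
          errorReported = true
        }
        alt                              // degrade gracefully instead of crashing
      }

      def info: String            = fail("<no info>")
      def companion: Option[Stub] = fail(None)
    }
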
@@ -3007,9 +3141,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
implicit val FreeTermSymbolTag = ClassTag[FreeTermSymbol](classOf[FreeTermSymbol])
- class FreeTypeSymbol(name0: TypeName, value0: => Any, val origin: String) extends TypeSkolem(NoSymbol, NoPosition, name0, NoSymbol) with FreeSymbol with FreeTypeSymbolApi {
- def value = value0
- }
+ class FreeTypeSymbol(name0: TypeName, val origin: String) extends TypeSkolem(NoSymbol, NoPosition, name0, NoSymbol) with FreeSymbol with FreeTypeSymbolApi
implicit val FreeTypeSymbolTag = ClassTag[FreeTypeSymbol](classOf[FreeTypeSymbol])
/** An object representing a missing symbol */
@@ -3020,7 +3152,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def asNameType(n: Name) = n.toTermName
def rawname = nme.NO_NAME
def name = nme.NO_NAME
- def name_=(n: Name) = abort("Cannot set NoSymbol's name to " + n)
+ override def name_=(n: Name) = abort("Cannot set NoSymbol's name to " + n)
synchronized {
setInfo(NoType)
@@ -3145,13 +3277,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (settings.debug.value) printStackTrace()
}
- case class InvalidCompanions(sym1: Symbol, sym2: Symbol) extends Throwable({
- "Companions '" + sym1 + "' and '" + sym2 + "' must be defined in same file:\n" +
- " Found in " + sym1.sourceFile.canonicalPath + " and " + sym2.sourceFile.canonicalPath
- }) {
- override def toString = getMessage
- }
-
/** A class for type histories */
private sealed case class TypeHistory(var validFrom: Period, info: Type, prev: TypeHistory) {
assert((prev eq null) || phaseId(validFrom) > phaseId(prev.validFrom), this)
@@ -3163,7 +3288,22 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def toList: List[TypeHistory] = this :: ( if (prev eq null) Nil else prev.toList )
}
+// ----- Hoisted closures and convenience methods, for compile time reductions -------
+
+ private[scala] final val symbolIsPossibleInRefinement = (sym: Symbol) => sym.isPossibleInRefinement
+ private[scala] final val symbolIsNonVariant = (sym: Symbol) => sym.variance == 0
+
+ @tailrec private[scala] final
+ def allSymbolsHaveOwner(syms: List[Symbol], owner: Symbol): Boolean = syms match {
+ case sym :: rest => sym.owner == owner && allSymbolsHaveOwner(rest, owner)
+ case _ => true
+ }
+
+
+// -------------- Statistics --------------------------------------------------------
+
Statistics.newView("#symbols")(ids)
+
}
object SymbolsStats {
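
The hoisted closures and `allSymbolsHaveOwner` added at the bottom of SymbolTable are pure allocation savers: call sites such as `decls exists symbolIsPossibleInRefinement` now reuse one shared Function1 value instead of building a fresh closure on every call, and the tail-recursive owner check replaces a `forall` that would capture `owner` in a new closure each time. A toy illustration of the same trick (the Sym type here is invented for the sketch):

    object HoistedDemo {
      final case class Sym(owner: String, variance: Int)

      // one shared Function1 instance instead of a fresh closure at every call site,
      // e.g. syms filter symIsNonVariant
      final val symIsNonVariant = (s: Sym) => s.variance == 0

      // replaces `syms forall (_.owner == owner)` without allocating the owner-capturing closure
      @annotation.tailrec
      def allHaveOwner(syms: List[Sym], owner: String): Boolean = syms match {
        case s :: rest => s.owner == owner && allHaveOwner(rest, owner)
        case _         => true
      }
    }
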
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index d160695e67..ebf0998573 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -194,7 +194,7 @@ abstract class TreeGen extends macros.TreeBuilder {
mkTypeApply(mkAttributedSelect(target, method), targs map TypeTree)
private def mkSingleTypeApply(value: Tree, tpe: Type, what: Symbol, wrapInApply: Boolean) = {
- val tapp = mkAttributedTypeApply(value, what, List(tpe.normalize))
+ val tapp = mkAttributedTypeApply(value, what, tpe.normalize :: Nil)
if (wrapInApply) Apply(tapp, Nil) else tapp
}
private def typeTestSymbol(any: Boolean) = if (any) Any_isInstanceOf else Object_isInstanceOf
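
Replacing `List(tpe.normalize)` with `tpe.normalize :: Nil` above is one instance of a micro-optimization that recurs in this diff (see also `refinedType(this :: Nil, ...)` and `alts = sym :: Nil` in Types.scala): `List(x)` goes through the varargs `List.apply`, which first wraps its argument in a varargs sequence, whereas `x :: Nil` allocates exactly one cons cell. A trivial illustration:

    val x = 42
    val viaApply = List(x)   // List.apply(xs: Int*): argument wrapped in a varargs sequence first
    val viaCons  = x :: Nil  // allocates a single :: cell directly
    assert(viaApply == viaCons)
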
@@ -272,9 +272,6 @@ abstract class TreeGen extends macros.TreeBuilder {
def mkOr(tree1: Tree, tree2: Tree): Tree =
Apply(Select(tree1, Boolean_or), List(tree2))
- def mkBasisUniverseRef: Tree =
- mkAttributedRef(ReflectBasis) setType singleType(ReflectBasis.owner.thisPrefix, ReflectBasis)
-
def mkRuntimeUniverseRef: Tree = {
assert(ReflectRuntimeUniverse != NoSymbol)
mkAttributedRef(ReflectRuntimeUniverse) setType singleType(ReflectRuntimeUniverse.owner.thisPrefix, ReflectRuntimeUniverse)
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index 1b4c1b2877..6ef4c3f660 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -17,7 +17,7 @@ abstract class TreeInfo {
val global: SymbolTable
import global._
- import definitions.{ isVarArgsList, isCastSymbol, ThrowableClass, TupleClass }
+ import definitions.{ isVarArgsList, isCastSymbol, ThrowableClass, TupleClass, MacroContextClass, MacroContextPrefixType }
/* Does not seem to be used. Not sure what it does anyway.
def isOwnerDefinition(tree: Tree): Boolean = tree match {
@@ -67,7 +67,7 @@ abstract class TreeInfo {
/** Is tree an expression which can be inlined without affecting program semantics?
*
- * Note that this is not called "isExprSafeToInline" since purity (lack of side-effects)
+ * Note that this is not called "isExprPure" since purity (lack of side-effects)
* is not the litmus test. References to modules and lazy vals are side-effecting,
* both because side-effecting code may be executed and because the first reference
* takes a different code path than all to follow; but they are safe to inline
@@ -236,7 +236,7 @@ abstract class TreeInfo {
case _ =>
tree
}
-
+
/** Is tree a self or super constructor call? */
def isSelfOrSuperConstrCall(tree: Tree) = {
// stripNamedApply for SI-3584: adaptToImplicitMethod in Typers creates a special context
@@ -372,6 +372,13 @@ abstract class TreeInfo {
case _ => EmptyTree
}
+ /** If this tree represents a type application, the type arguments. Otherwise Nil.
+ */
+ def typeArguments(tree: Tree): List[Tree] = tree match {
+ case TypeApply(_, targs) => targs
+ case _ => Nil
+ }
+
/** If this tree has type parameters, those. Otherwise Nil.
*/
def typeParameters(tree: Tree): List[TypeDef] = tree match {
@@ -514,20 +521,18 @@ abstract class TreeInfo {
*/
def noPredefImportForUnit(body: Tree) = {
// Top-level definition whose leading imports include Predef.
- def containsLeadingPredefImport(defs: List[Tree]): Boolean = defs match {
- case PackageDef(_, defs1) :: _ => containsLeadingPredefImport(defs1)
- case Import(expr, _) :: rest => isReferenceToPredef(expr) || containsLeadingPredefImport(rest)
- case _ => false
+ def isLeadingPredefImport(defn: Tree): Boolean = defn match {
+ case PackageDef(_, defs1) => defs1 exists isLeadingPredefImport
+ case Import(expr, _) => isReferenceToPredef(expr)
+ case _ => false
}
-
// Compilation unit is class or object 'name' in package 'scala'
def isUnitInScala(tree: Tree, name: Name) = tree match {
case PackageDef(Ident(nme.scala_), defs) => firstDefinesClassOrObject(defs, name)
case _ => false
}
- ( isUnitInScala(body, nme.Predef)
- || containsLeadingPredefImport(List(body)))
+ isUnitInScala(body, nme.Predef) || isLeadingPredefImport(body)
}
def isAbsTypeDef(tree: Tree) = tree match {
@@ -584,4 +589,24 @@ abstract class TreeInfo {
object DynamicUpdate extends DynamicApplicationExtractor(_ == nme.updateDynamic)
object DynamicApplication extends DynamicApplicationExtractor(isApplyDynamicName)
object DynamicApplicationNamed extends DynamicApplicationExtractor(_ == nme.applyDynamicNamed)
+
+ object MacroImplReference {
+ private def refPart(tree: Tree): Tree = tree match {
+ case TypeApply(fun, _) => refPart(fun)
+ case ref: RefTree => ref
+ case _ => EmptyTree
+ }
+
+ def unapply(tree: Tree) = refPart(tree) match {
+ case ref: RefTree => Some((ref.qualifier.symbol, ref.symbol, typeArguments(tree)))
+ case _ => None
+ }
+ }
+
+ def isNullaryInvocation(tree: Tree): Boolean =
+ tree.symbol != null && tree.symbol.isMethod && (tree match {
+ case TypeApply(fun, _) => isNullaryInvocation(fun)
+ case tree: RefTree => true
+ case _ => false
+ })
}
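
The TreeInfo helpers added above (`typeArguments`, `MacroImplReference`, `isNullaryInvocation`) all share one shape: peel off an optional `TypeApply` wrapper and look at the reference underneath. A self-contained toy model of that shape; the case classes below are a stand-in, not scala.reflect's Trees:

    object TreeShapeDemo extends App {
      sealed trait Tree
      case class Ident(name: String)                     extends Tree
      case class Select(qual: Tree, name: String)        extends Tree
      case class TypeApply(fun: Tree, targs: List[Tree]) extends Tree

      // If the tree is a type application, its type arguments; otherwise Nil.
      def typeArguments(tree: Tree): List[Tree] = tree match {
        case TypeApply(_, targs) => targs
        case _                   => Nil
      }

      // Peel off the TypeApply wrapper (if any) to reach the reference underneath.
      def refPart(tree: Tree): Tree = tree match {
        case TypeApply(fun, _) => refPart(fun)
        case ref               => ref
      }

      // Macros.impl[Int] ~ TypeApply(Select(Ident("Macros"), "impl"), List(Ident("Int")))
      val t = TypeApply(Select(Ident("Macros"), "impl"), List(Ident("Int")))
      assert(typeArguments(t) == List(Ident("Int")))
      assert(refPart(t) == Select(Ident("Macros"), "impl"))
    }
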
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 220869e4d2..7ec9f7086d 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -7,8 +7,7 @@ package scala.reflect
package internal
import Flags._
-import base.Attachments
-import collection.mutable.{ListBuffer, LinkedHashSet}
+import scala.collection.mutable.{ListBuffer, LinkedHashSet}
import util.Statistics
trait Trees extends api.Trees { self: SymbolTable =>
@@ -19,17 +18,17 @@ trait Trees extends api.Trees { self: SymbolTable =>
val id = nodeCount // TODO: add to attachment?
nodeCount += 1
- Statistics.incCounter(TreesStats.nodeByType, getClass)
+ if (Statistics.canEnable) Statistics.incCounter(TreesStats.nodeByType, getClass)
- @inline final override def pos: Position = rawatt.pos
+ final override def pos: Position = rawatt.pos
private[this] var rawtpe: Type = _
- @inline final def tpe = rawtpe
+ final def tpe = rawtpe
def tpe_=(t: Type) = rawtpe = t
def setType(tp: Type): this.type = { rawtpe = tp; this }
def defineType(tp: Type): this.type = setType(tp)
- def symbol: Symbol = null
+ def symbol: Symbol = null //!!!OPT!!! symbol is about 3% of hot compile times -- megamorphic dispatch?
def symbol_=(sym: Symbol) { throw new UnsupportedOperationException("symbol_= inapplicable for " + this) }
def setSymbol(sym: Symbol): this.type = { symbol = sym; this }
def hasSymbol = false
@@ -137,7 +136,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
override def freeTypes: List[FreeTypeSymbol] = freeSyms[FreeTypeSymbol](_.isFreeType, _.typeSymbol)
private def freeSyms[S <: Symbol](isFree: Symbol => Boolean, symOfType: Type => Symbol): List[S] = {
- val s = collection.mutable.LinkedHashSet[S]()
+ val s = scala.collection.mutable.LinkedHashSet[S]()
def addIfFree(sym: Symbol): Unit = if (sym != null && isFree(sym)) s += sym.asInstanceOf[S]
for (t <- this) {
addIfFree(t.symbol)
@@ -160,7 +159,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
new ThisSubstituter(clazz, to) transform this
def hasSymbolWhich(f: Symbol => Boolean) =
- hasSymbol && symbol != null && f(symbol)
+ (symbol ne null) && (symbol ne NoSymbol) && f(symbol)
def isErroneous = (tpe ne null) && tpe.isErroneous
def isTyped = (tpe ne null) && !tpe.isErroneous
@@ -210,7 +209,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
trait TypTree extends Tree with TypTreeApi
- trait SymTree extends Tree with SymTreeContextApi {
+ abstract class SymTree extends Tree with SymTreeContextApi {
override def hasSymbol = true
override var symbol: Symbol = NoSymbol
}
@@ -230,6 +229,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
}
case object EmptyTree extends TermTree {
+ val asList = List(this)
super.tpe_=(NoType)
override def tpe_=(t: Type) =
if (t != NoType) throw new UnsupportedOperationException("tpe_=("+t+") inapplicable for <empty>")
@@ -290,7 +290,10 @@ trait Trees extends api.Trees { self: SymbolTable =>
object LabelDef extends LabelDefExtractor
case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int) extends ImportSelectorApi
- object ImportSelector extends ImportSelectorExtractor
+ object ImportSelector extends ImportSelectorExtractor {
+ val wild = ImportSelector(nme.WILDCARD, -1, null, -1)
+ val wildList = List(wild)
+ }
case class Import(expr: Tree, selectors: List[ImportSelector])
extends SymTree with ImportApi
@@ -324,12 +327,26 @@ trait Trees extends api.Trees { self: SymbolTable =>
extends TermTree with UnApplyApi
object UnApply extends UnApplyExtractor
- case class ArrayValue(elemtpt: Tree, elems: List[Tree])
- extends TermTree with ArrayValueApi
- object ArrayValue extends ArrayValueExtractor
+ /** An array of expressions. This AST node needs to be translated in the backend.
+ * It has no direct correspondence to Scala source code: it is introduced by
+ * the compiler phase uncurry and is used to pass arguments to vararg parameters.
+ * For instance:
+ *
+ * printf("%s%d", foo, 42)
+ *
+ * is translated after the uncurry phase to:
+ *
+ * Apply(
+ * Ident("printf"),
+ * Literal("%s%d"),
+ * ArrayValue(<Any>, List(Ident("foo"), Literal(42))))
+ */
+ case class ArrayValue(elemtpt: Tree, elems: List[Tree]) extends TermTree
case class Function(vparams: List[ValDef], body: Tree)
- extends TermTree with SymTree with FunctionApi
+ extends SymTree with TermTree with FunctionApi
object Function extends FunctionExtractor
case class Assign(lhs: Tree, rhs: Tree)
@@ -349,7 +366,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
object Match extends MatchExtractor
case class Return(expr: Tree)
- extends TermTree with SymTree with ReturnApi
+ extends SymTree with TermTree with ReturnApi
object Return extends ReturnExtractor
case class Try(block: Tree, catches: List[CaseDef], finalizer: Tree)
@@ -396,9 +413,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
def ApplyConstructor(tpt: Tree, args: List[Tree]) = Apply(Select(New(tpt), nme.CONSTRUCTOR), args)
- case class ApplyDynamic(qual: Tree, args: List[Tree])
- extends TermTree with SymTree with ApplyDynamicApi
- object ApplyDynamic extends ApplyDynamicExtractor
+ case class ApplyDynamic(qual: Tree, args: List[Tree]) extends SymTree with TermTree
case class Super(qual: Tree, mix: TypeName) extends TermTree with SuperApi {
override def symbol: Symbol = qual.symbol
@@ -407,7 +422,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
object Super extends SuperExtractor
case class This(qual: TypeName)
- extends TermTree with SymTree with ThisApi
+ extends SymTree with TermTree with ThisApi
object This extends ThisExtractor
case class Select(qualifier: Tree, name: Name)
@@ -443,7 +458,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
object SingletonTypeTree extends SingletonTypeTreeExtractor
case class SelectFromTypeTree(qualifier: Tree, name: TypeName)
- extends TypTree with RefTree with SelectFromTypeTreeApi
+ extends RefTree with TypTree with SelectFromTypeTreeApi
object SelectFromTypeTree extends SelectFromTypeTreeExtractor
case class CompoundTypeTree(templ: Template)
@@ -492,7 +507,13 @@ trait Trees extends api.Trees { self: SymbolTable =>
def TypeTree(tp: Type): TypeTree = TypeTree() setType tp
- class StrictTreeCopier extends TreeCopierOps {
+ override type TreeCopier <: InternalTreeCopierOps
+ abstract class InternalTreeCopierOps extends TreeCopierOps {
+ def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]): ApplyDynamic
+ def ArrayValue(tree: Tree, elemtpt: Tree, trees: List[Tree]): ArrayValue
+ }
+
+ class StrictTreeCopier extends InternalTreeCopierOps {
def ClassDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], impl: Template) =
new ClassDef(mods, name.toTypeName, tparams, impl).copyAttrs(tree)
def PackageDef(tree: Tree, pid: RefTree, stats: List[Tree]) =
@@ -586,7 +607,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
new ExistentialTypeTree(tpt, whereClauses).copyAttrs(tree)
}
- class LazyTreeCopier extends TreeCopierOps {
+ class LazyTreeCopier extends InternalTreeCopierOps {
val treeCopy: TreeCopier = newStrictTreeCopier
def ClassDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], impl: Template) = tree match {
case t @ ClassDef(mods0, name0, tparams0, impl0)
@@ -979,12 +1000,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
*/
def New(tpt: Tree, argss: List[List[Tree]]): Tree = argss match {
case Nil => ApplyConstructor(tpt, Nil)
- case xs :: rest => {
- def mkApply(fun: Tree, args: List[Tree]) = Apply(fun, args)
- rest.foldLeft(ApplyConstructor(tpt, xs): Tree)(mkApply)
- // [Eugene++] no longer compiles after I moved the `Apply` case class here
- // rest.foldLeft(ApplyConstructor(tpt, xs): Tree)(Apply)
- }
+ case xs :: rest => rest.foldLeft(ApplyConstructor(tpt, xs): Tree)(Apply.apply)
}
/** 0-1 argument list new, based on a type.
@@ -1146,29 +1162,25 @@ trait Trees extends api.Trees { self: SymbolTable =>
}
}
+ //OPT ordered according to frequency to speed it up.
override protected def itransform(transformer: Transformer, tree: Tree): Tree = {
import transformer._
val treeCopy = transformer.treeCopy
+
+ // begin itransform
tree match {
- case EmptyTree =>
- tree
- case PackageDef(pid, stats) =>
- treeCopy.PackageDef(
- tree, transform(pid).asInstanceOf[RefTree],
- atOwner(mclass(tree.symbol)) {
- transformStats(stats, currentOwner)
- }
- )
- case ClassDef(mods, name, tparams, impl) =>
- atOwner(tree.symbol) {
- treeCopy.ClassDef(tree, transformModifiers(mods), name,
- transformTypeDefs(tparams), transformTemplate(impl))
- }
- case ModuleDef(mods, name, impl) =>
- atOwner(mclass(tree.symbol)) {
- treeCopy.ModuleDef(tree, transformModifiers(mods),
- name, transformTemplate(impl))
- }
+ case Ident(name) =>
+ treeCopy.Ident(tree, name)
+ case Select(qualifier, selector) =>
+ treeCopy.Select(tree, transform(qualifier), selector)
+ case Apply(fun, args) =>
+ treeCopy.Apply(tree, transform(fun), transformTrees(args))
+ case TypeTree() =>
+ treeCopy.TypeTree(tree)
+ case Literal(value) =>
+ treeCopy.Literal(tree, value)
+ case This(qual) =>
+ treeCopy.This(tree, qual)
case ValDef(mods, name, tpt, rhs) =>
atOwner(tree.symbol) {
treeCopy.ValDef(tree, transformModifiers(mods),
@@ -1180,73 +1192,70 @@ trait Trees extends api.Trees { self: SymbolTable =>
transformTypeDefs(tparams), transformValDefss(vparamss),
transform(tpt), transform(rhs))
}
- case TypeDef(mods, name, tparams, rhs) =>
- atOwner(tree.symbol) {
- treeCopy.TypeDef(tree, transformModifiers(mods), name,
- transformTypeDefs(tparams), transform(rhs))
- }
- case LabelDef(name, params, rhs) =>
- treeCopy.LabelDef(tree, name, transformIdents(params), transform(rhs)) //bq: Martin, once, atOwner(...) works, also change `LamdaLifter.proxy'
- case Import(expr, selectors) =>
- treeCopy.Import(tree, transform(expr), selectors)
- case Template(parents, self, body) =>
- treeCopy.Template(tree, transformTrees(parents), transformValDef(self), transformStats(body, tree.symbol))
case Block(stats, expr) =>
treeCopy.Block(tree, transformStats(stats, currentOwner), transform(expr))
+ case If(cond, thenp, elsep) =>
+ treeCopy.If(tree, transform(cond), transform(thenp), transform(elsep))
case CaseDef(pat, guard, body) =>
treeCopy.CaseDef(tree, transform(pat), transform(guard), transform(body))
- case Alternative(trees) =>
- treeCopy.Alternative(tree, transformTrees(trees))
- case Star(elem) =>
- treeCopy.Star(tree, transform(elem))
+ case TypeApply(fun, args) =>
+ treeCopy.TypeApply(tree, transform(fun), transformTrees(args))
+ case AppliedTypeTree(tpt, args) =>
+ treeCopy.AppliedTypeTree(tree, transform(tpt), transformTrees(args))
case Bind(name, body) =>
treeCopy.Bind(tree, name, transform(body))
- case UnApply(fun, args) =>
- treeCopy.UnApply(tree, fun, transformTrees(args)) // bq: see test/.../unapplyContexts2.scala
- case ArrayValue(elemtpt, trees) =>
- treeCopy.ArrayValue(tree, transform(elemtpt), transformTrees(trees))
case Function(vparams, body) =>
atOwner(tree.symbol) {
treeCopy.Function(tree, transformValDefs(vparams), transform(body))
}
+ case Match(selector, cases) =>
+ treeCopy.Match(tree, transform(selector), transformCaseDefs(cases))
+ case New(tpt) =>
+ treeCopy.New(tree, transform(tpt))
case Assign(lhs, rhs) =>
treeCopy.Assign(tree, transform(lhs), transform(rhs))
case AssignOrNamedArg(lhs, rhs) =>
treeCopy.AssignOrNamedArg(tree, transform(lhs), transform(rhs))
- case If(cond, thenp, elsep) =>
- treeCopy.If(tree, transform(cond), transform(thenp), transform(elsep))
- case Match(selector, cases) =>
- treeCopy.Match(tree, transform(selector), transformCaseDefs(cases))
- case Return(expr) =>
- treeCopy.Return(tree, transform(expr))
case Try(block, catches, finalizer) =>
treeCopy.Try(tree, transform(block), transformCaseDefs(catches), transform(finalizer))
+ case EmptyTree =>
+ tree
case Throw(expr) =>
treeCopy.Throw(tree, transform(expr))
- case New(tpt) =>
- treeCopy.New(tree, transform(tpt))
- case Typed(expr, tpt) =>
- treeCopy.Typed(tree, transform(expr), transform(tpt))
- case TypeApply(fun, args) =>
- treeCopy.TypeApply(tree, transform(fun), transformTrees(args))
- case Apply(fun, args) =>
- treeCopy.Apply(tree, transform(fun), transformTrees(args))
- case ApplyDynamic(qual, args) =>
- treeCopy.ApplyDynamic(tree, transform(qual), transformTrees(args))
case Super(qual, mix) =>
treeCopy.Super(tree, transform(qual), mix)
- case This(qual) =>
- treeCopy.This(tree, qual)
- case Select(qualifier, selector) =>
- treeCopy.Select(tree, transform(qualifier), selector)
- case Ident(name) =>
- treeCopy.Ident(tree, name)
- case ReferenceToBoxed(idt) =>
- treeCopy.ReferenceToBoxed(tree, transform(idt) match { case idt1: Ident => idt1 })
- case Literal(value) =>
- treeCopy.Literal(tree, value)
- case TypeTree() =>
- treeCopy.TypeTree(tree)
+ case TypeBoundsTree(lo, hi) =>
+ treeCopy.TypeBoundsTree(tree, transform(lo), transform(hi))
+ case Typed(expr, tpt) =>
+ treeCopy.Typed(tree, transform(expr), transform(tpt))
+ case Import(expr, selectors) =>
+ treeCopy.Import(tree, transform(expr), selectors)
+ case Template(parents, self, body) =>
+ treeCopy.Template(tree, transformTrees(parents), transformValDef(self), transformStats(body, tree.symbol))
+ case ClassDef(mods, name, tparams, impl) =>
+ atOwner(tree.symbol) {
+ treeCopy.ClassDef(tree, transformModifiers(mods), name,
+ transformTypeDefs(tparams), transformTemplate(impl))
+ }
+ case ModuleDef(mods, name, impl) =>
+ atOwner(mclass(tree.symbol)) {
+ treeCopy.ModuleDef(tree, transformModifiers(mods),
+ name, transformTemplate(impl))
+ }
+ case TypeDef(mods, name, tparams, rhs) =>
+ atOwner(tree.symbol) {
+ treeCopy.TypeDef(tree, transformModifiers(mods), name,
+ transformTypeDefs(tparams), transform(rhs))
+ }
+ case LabelDef(name, params, rhs) =>
+ treeCopy.LabelDef(tree, name, transformIdents(params), transform(rhs)) //bq: Martin, once, atOwner(...) works, also change `LamdaLifter.proxy'
+ case PackageDef(pid, stats) =>
+ treeCopy.PackageDef(
+ tree, transform(pid).asInstanceOf[RefTree],
+ atOwner(mclass(tree.symbol)) {
+ transformStats(stats, currentOwner)
+ }
+ )
case Annotated(annot, arg) =>
treeCopy.Annotated(tree, transform(annot), transform(arg))
case SingletonTypeTree(ref) =>
@@ -1255,12 +1264,22 @@ trait Trees extends api.Trees { self: SymbolTable =>
treeCopy.SelectFromTypeTree(tree, transform(qualifier), selector)
case CompoundTypeTree(templ) =>
treeCopy.CompoundTypeTree(tree, transformTemplate(templ))
- case AppliedTypeTree(tpt, args) =>
- treeCopy.AppliedTypeTree(tree, transform(tpt), transformTrees(args))
- case TypeBoundsTree(lo, hi) =>
- treeCopy.TypeBoundsTree(tree, transform(lo), transform(hi))
case ExistentialTypeTree(tpt, whereClauses) =>
treeCopy.ExistentialTypeTree(tree, transform(tpt), transformTrees(whereClauses))
+ case Return(expr) =>
+ treeCopy.Return(tree, transform(expr))
+ case Alternative(trees) =>
+ treeCopy.Alternative(tree, transformTrees(trees))
+ case Star(elem) =>
+ treeCopy.Star(tree, transform(elem))
+ case UnApply(fun, args) =>
+ treeCopy.UnApply(tree, fun, transformTrees(args)) // bq: see test/.../unapplyContexts2.scala
+ case ArrayValue(elemtpt, trees) =>
+ treeCopy.ArrayValue(tree, transform(elemtpt), transformTrees(trees))
+ case ApplyDynamic(qual, args) =>
+ treeCopy.ApplyDynamic(tree, transform(qual), transformTrees(args))
+ case ReferenceToBoxed(idt) =>
+ treeCopy.ReferenceToBoxed(tree, transform(idt) match { case idt1: Ident => idt1 })
case _ =>
xtransform(transformer, tree)
}
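
The large reshuffling of `itransform` above only reorders the cases, as announced by the `//OPT ordered according to frequency` comment: class patterns compile to successive type tests, so putting `Ident`, `Select`, `Apply`, `TypeTree` and `Literal` first lets the most common trees match after one or two tests instead of twenty. A toy sketch of the idea (the node classes and their assumed frequencies are invented):

    object MatchOrderDemo extends App {
      sealed trait Node
      case class Leaf(n: Int)             extends Node   // assumed: by far the most frequent
      case class Branch(l: Node, r: Node) extends Node
      case class Note(text: String)       extends Node   // assumed: rare

      // Cases ordered by (assumed) frequency; because class patterns lower to
      // successive isInstanceOf tests, the common Leaf case pays for one test only.
      def size(n: Node): Int = n match {
        case Leaf(_)      => 1
        case Branch(l, r) => size(l) + size(r)
        case Note(_)      => 1
      }

      assert(size(Branch(Leaf(1), Branch(Leaf(2), Note("x")))) == 3)
    }
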
@@ -1541,6 +1560,8 @@ trait Trees extends api.Trees { self: SymbolTable =>
sys.error("Not a LabelDef: " + t + "/" + t.getClass)
}
+// -------------- Classtags --------------------------------------------------------
+
implicit val TreeTag = ClassTag[Tree](classOf[Tree])
implicit val TermTreeTag = ClassTag[TermTree](classOf[TermTree])
implicit val TypTreeTag = ClassTag[TypTree](classOf[TypTree])
@@ -1567,7 +1588,6 @@ trait Trees extends api.Trees { self: SymbolTable =>
implicit val StarTag = ClassTag[Star](classOf[Star])
implicit val BindTag = ClassTag[Bind](classOf[Bind])
implicit val UnApplyTag = ClassTag[UnApply](classOf[UnApply])
- implicit val ArrayValueTag = ClassTag[ArrayValue](classOf[ArrayValue])
implicit val FunctionTag = ClassTag[Function](classOf[Function])
implicit val AssignTag = ClassTag[Assign](classOf[Assign])
implicit val AssignOrNamedArgTag = ClassTag[AssignOrNamedArg](classOf[AssignOrNamedArg])
@@ -1581,7 +1601,6 @@ trait Trees extends api.Trees { self: SymbolTable =>
implicit val GenericApplyTag = ClassTag[GenericApply](classOf[GenericApply])
implicit val TypeApplyTag = ClassTag[TypeApply](classOf[TypeApply])
implicit val ApplyTag = ClassTag[Apply](classOf[Apply])
- implicit val ApplyDynamicTag = ClassTag[ApplyDynamic](classOf[ApplyDynamic])
implicit val SuperTag = ClassTag[Super](classOf[Super])
implicit val ThisTag = ClassTag[This](classOf[This])
implicit val SelectTag = ClassTag[Select](classOf[Select])
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index e9a9ce1aa0..fd5694b599 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -15,6 +15,7 @@ import scala.util.control.ControlThrowable
import scala.annotation.tailrec
import util.Statistics
import scala.runtime.ObjectRef
+import util.ThreeValues._
/* A standard type pattern match:
case ErrorType =>
@@ -117,13 +118,34 @@ trait Types extends api.Types { self: SymbolTable =>
class UndoLog extends Clearable {
private type UndoPairs = List[(TypeVar, TypeConstraint)]
- private var log: UndoPairs = List()
+ //OPT this field is public so we can do `manual inlining`
+ var log: UndoPairs = List()
+
+ /*
+ * These two methods provide the explicit locking mechanism that is overridden in SynchronizedUndoLog.
+ *
+ * The idea behind the explicit locking mechanism is that all public methods that access mutable state
+ * have to obtain the lock for their entire execution, so that both reads and writes are kept in the
+ * right order. Originally this was achieved by overriding those public methods in
+ * `SynchronizedUndoLog`, which was fine but expensive: those public methods take a
+ * thunk as an argument, and as long as they are non-final there is no way to inline them, so the
+ * thunk allocations cannot go away.
+ *
+ * Explicit locking lets us get that inlining.
+ *
+ * NOTE: They are made public for now so we can apply 'manual inlining' (copy&pasting the
+ * implementation of `undo` or `undoUnless` into hot places). This should be changed back to protected
+ * once the inliner is fixed.
+ */
+ def lock(): Unit = ()
+ def unlock(): Unit = ()
// register with the auto-clearing cache manager
perRunCaches.recordCache(this)
/** Undo all changes to constraints to type variables upto `limit`. */
- private def undoTo(limit: UndoPairs) {
+ //OPT this method is public so we can do `manual inlining`
+ def undoTo(limit: UndoPairs) {
while ((log ne limit) && log.nonEmpty) {
val (tv, constr) = log.head
tv.constr = constr
@@ -140,30 +162,41 @@ trait Types extends api.Types { self: SymbolTable =>
}
def clear() {
- if (settings.debug.value)
- self.log("Clearing " + log.size + " entries from the undoLog.")
-
- log = Nil
+ lock()
+ try {
+ if (settings.debug.value)
+ self.log("Clearing " + log.size + " entries from the undoLog.")
+ log = Nil
+ } finally unlock()
+ }
+ def size = {
+ lock()
+ try log.size finally unlock()
}
- def size = log.size
// `block` should not affect constraints on typevars
def undo[T](block: => T): T = {
- val before = log
+ lock()
+ try {
+ val before = log
- try block
- finally undoTo(before)
+ try block
+ finally undoTo(before)
+ } finally unlock()
}
// if `block` evaluates to false, it should not affect constraints on typevars
def undoUnless(block: => Boolean): Boolean = {
- val before = log
- var result = false
+ lock()
+ try {
+ val before = log
+ var result = false
- try result = block
- finally if (!result) undoTo(before)
+ try result = block
+ finally if (!result) undoTo(before)
- result
+ result
+ } finally unlock()
}
}
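
Because `lock()`/`unlock()` default to no-ops, the plain UndoLog keeps its single-threaded fast path, and a thread-safe variant only has to override those two hooks (for instance with a ReentrantLock) rather than wrap every public method in `synchronized`. A minimal standalone sketch of that shape, not the compiler's classes:

    import java.util.concurrent.locks.ReentrantLock

    // Toy log, not the compiler's UndoLog: the base class is lock-free by default.
    class Log[A] {
      protected var entries: List[A] = Nil

      def lock(): Unit   = ()            // no-ops here ...
      def unlock(): Unit = ()

      def record(a: A): Unit = {
        lock()
        try entries ::= a
        finally unlock()
      }

      def size: Int = { lock(); try entries.length finally unlock() }
    }

    // ... overridden in the thread-safe variant (a ReentrantLock is one possible choice).
    class SynchronizedLog[A] extends Log[A] {
      private[this] val l = new ReentrantLock
      override def lock(): Unit   = l.lock()
      override def unlock(): Unit = l.unlock()
    }
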
@@ -265,14 +298,14 @@ trait Types extends api.Types { self: SymbolTable =>
def declarations = decls
def typeArguments = typeArgs
def erasure = this match {
- case ConstantType(value) => widen.erasure // [Eugene to Martin] constant types are unaffected by erasure. weird.
+ case ConstantType(value) => widen.erasure
case _ =>
var result: Type = transformedType(this)
result = result.normalize match { // necessary to deal with erasures of HK types, typeConstructor won't work
case PolyType(undets, underlying) => existentialAbstraction(undets, underlying) // we don't want undets in the result
case _ => result
}
- // [Eugene] erasure screws up all ThisTypes for modules into PackageTypeRefs
+ // erasure screws up all ThisTypes for modules into PackageTypeRefs
// we need to unscrew them, or certain typechecks will fail mysteriously
// http://groups.google.com/group/scala-internals/browse_thread/thread/6d3277ae21b6d581
result = result.map(tpe => tpe match {
@@ -284,31 +317,8 @@ trait Types extends api.Types { self: SymbolTable =>
def substituteSymbols(from: List[Symbol], to: List[Symbol]): Type = substSym(from, to)
def substituteTypes(from: List[Symbol], to: List[Type]): Type = subst(from, to)
- // [Eugene] to be discussed and refactored
- def isConcrete = {
- def notConcreteSym(sym: Symbol) =
- sym.isAbstractType && !sym.isExistential
-
- def notConcreteTpe(tpe: Type): Boolean = tpe match {
- case ThisType(_) => false
- case SuperType(_, _) => false
- case SingleType(pre, sym) => notConcreteSym(sym)
- case ConstantType(_) => false
- case TypeRef(_, sym, args) => notConcreteSym(sym) || (args exists (arg => notConcreteTpe(arg)))
- case RefinedType(_, _) => false
- case ExistentialType(_, _) => false
- case AnnotatedType(_, tp, _) => notConcreteTpe(tp)
- case _ => true
- }
-
- !notConcreteTpe(this)
- }
-
- // [Eugene] is this comprehensive?
- // the only thingies that we want to splice are: 1) type parameters, 2) type members
+ // the only thingies that we want to splice are: 1) type parameters, 2) abstract type members
// the thingies that we don't want to splice are: 1) concrete types (obviously), 2) existential skolems
- // this check seems to cover them all, right?
- // todo. after we discuss this, move the check to subclasses
def isSpliceable = {
this.isInstanceOf[TypeRef] && typeSymbol.isAbstractType && !typeSymbol.isExistential
}
@@ -347,8 +357,8 @@ trait Types extends api.Types { self: SymbolTable =>
*/
def isImmediatelyDependent: Boolean = false
- /** Does this depend on an enclosing method parameter? */
- def isDependent: Boolean = IsDependentCollector.collect(this)
+ /** Is this type a dependent method type? */
+ def isDependentMethodType: Boolean = false
/** True for WildcardType or BoundedWildcardType. */
def isWildcard = false
@@ -367,7 +377,7 @@ trait Types extends api.Types { self: SymbolTable =>
* and all type parameters (if any) are invariant.
*/
def isFinalType =
- typeSymbol.isFinal && (typeSymbol.typeParams forall (_.variance == 0))
+ typeSymbol.isFinal && (typeSymbol.typeParams forall symbolIsNonVariant)
/** Is this type completed (i.e. not a lazy type)? */
def isComplete: Boolean = true
@@ -446,7 +456,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (phase.erasedTypes) this
else {
val cowner = commonOwner(this)
- refinedType(List(this), cowner, EmptyScope, cowner.pos).narrow
+ refinedType(this :: Nil, cowner, EmptyScope, cowner.pos).narrow
}
/** For a TypeBounds type, itself;
@@ -696,7 +706,8 @@ trait Types extends api.Types { self: SymbolTable =>
* = Int
*/
def asSeenFrom(pre: Type, clazz: Symbol): Type = {
- TypesStats.timedTypeOp(asSeenFromNanos) {
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, asSeenFromNanos) else null
+ try {
val trivial = (
this.isTrivial
|| phase.erasedTypes && pre.typeSymbol != ArrayClass
@@ -711,7 +722,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (m.capturedSkolems.isEmpty) tp1
else deriveType(m.capturedSkolems, _.cloneSymbol setFlag CAPTURED)(tp1)
}
- }
+ } finally if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
}
/** The info of `sym`, seen as a member of this type.
@@ -819,7 +830,7 @@ trait Types extends api.Types { self: SymbolTable =>
/** Is this type a subtype of that type? */
def <:<(that: Type): Boolean = {
- if (util.Statistics.enabled) stat_<:<(that)
+ if (Statistics.canEnable) stat_<:<(that)
else {
(this eq that) ||
(if (explainSwitch) explain("<:", isSubType, this, that)
@@ -847,26 +858,26 @@ trait Types extends api.Types { self: SymbolTable =>
}
def stat_<:<(that: Type): Boolean = {
- Statistics.incCounter(subtypeCount)
- val start = Statistics.pushTimer(typeOpsStack, subtypeNanos)
+ if (Statistics.canEnable) Statistics.incCounter(subtypeCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, subtypeNanos) else null
val result =
(this eq that) ||
(if (explainSwitch) explain("<:", isSubType, this, that)
else isSubType(this, that, AnyDepth))
- Statistics.popTimer(typeOpsStack, start)
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
result
}
/** Is this type a weak subtype of that type? True also for numeric types, i.e. Int weak_<:< Long.
*/
def weak_<:<(that: Type): Boolean = {
- Statistics.incCounter(subtypeCount)
- val start = Statistics.pushTimer(typeOpsStack, subtypeNanos)
+ if (Statistics.canEnable) Statistics.incCounter(subtypeCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, subtypeNanos) else null
val result =
((this eq that) ||
(if (explainSwitch) explain("weak_<:", isWeakSubType, this, that)
else isWeakSubType(this, that)))
- Statistics.popTimer(typeOpsStack, start)
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
result
}
@@ -980,7 +991,11 @@ trait Types extends api.Types { self: SymbolTable =>
def toLongString = {
val str = toString
if (str == "type") widen.toString
- else if ((str endsWith ".type") && !typeSymbol.isModuleClass) str + " (with underlying type " + widen + ")"
+ else if ((str endsWith ".type") && !typeSymbol.isModuleClass)
+ widen match {
+ case RefinedType(_, _) => "" + widen
+ case _ => s"$str (with underlying type $widen)"
+ }
else str
}
@@ -1011,7 +1026,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (!e.sym.hasFlag(excludedFlags)) {
if (sym == NoSymbol) sym = e.sym
else {
- if (alts.isEmpty) alts = List(sym)
+ if (alts.isEmpty) alts = sym :: Nil
alts = e.sym :: alts
}
}
@@ -1029,8 +1044,8 @@ trait Types extends api.Types { self: SymbolTable =>
// See (t0851) for a situation where this happens.
val suspension: List[TypeVar] = if (this.isGround) null else suspendTypeVarsInType(this)
- Statistics.incCounter(findMembersCount)
- val start = Statistics.pushTimer(typeOpsStack, findMembersNanos)
+ if (Statistics.canEnable) Statistics.incCounter(findMembersCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, findMembersNanos) else null
//Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
var members: Scope = null
@@ -1082,7 +1097,7 @@ trait Types extends api.Types { self: SymbolTable =>
required |= DEFERRED
excluded &= ~(DEFERRED.toLong)
} // while (continue)
- Statistics.popTimer(typeOpsStack, start)
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
if (suspension ne null) suspension foreach (_.suspended = false)
if (members eq null) EmptyScope else members
}
@@ -1105,8 +1120,8 @@ trait Types extends api.Types { self: SymbolTable =>
// See (t0851) for a situation where this happens.
val suspension: List[TypeVar] = if (this.isGround) null else suspendTypeVarsInType(this)
- Statistics.incCounter(findMemberCount)
- val start = Statistics.pushTimer(typeOpsStack, findMemberNanos)
+ if (Statistics.canEnable) Statistics.incCounter(findMemberCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, findMemberNanos) else null
//Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
var member: Symbol = NoSymbol
@@ -1117,7 +1132,6 @@ trait Types extends api.Types { self: SymbolTable =>
var excluded = excludedFlags | DEFERRED
var continue = true
var self: Type = null
- val fingerPrint: Long = name.fingerPrint
while (continue) {
continue = false
@@ -1125,75 +1139,73 @@ trait Types extends api.Types { self: SymbolTable =>
var bcs = bcs0
while (!bcs.isEmpty) {
val decls = bcs.head.info.decls
- if ((fingerPrint & decls.fingerPrints) != 0) {
- var entry = decls.lookupEntry(name)
- while (entry ne null) {
- val sym = entry.sym
- val flags = sym.flags
- if ((flags & required) == required) {
- val excl = flags & excluded
- if (excl == 0L &&
+ var entry = decls.lookupEntry(name)
+ while (entry ne null) {
+ val sym = entry.sym
+ val flags = sym.flags
+ if ((flags & required) == required) {
+ val excl = flags & excluded
+ if (excl == 0L &&
(// omit PRIVATE LOCALS unless selector class is contained in class owning the def.
- (bcs eq bcs0) ||
- (flags & PrivateLocal) != PrivateLocal ||
- (bcs0.head.hasTransOwner(bcs.head)))) {
- if (name.isTypeName || stableOnly && sym.isStable) {
- Statistics.popTimer(typeOpsStack, start)
- if (suspension ne null) suspension foreach (_.suspended = false)
- return sym
- } else if (member eq NoSymbol) {
- member = sym
- } else if (members eq null) {
- if ((member ne sym) &&
- ((member.owner eq sym.owner) ||
- (flags & PRIVATE) != 0 || {
- if (self eq null) self = this.narrow
- if (membertpe eq null) membertpe = self.memberType(member)
- !(membertpe matches self.memberType(sym))
- })) {
- lastM = new ::(sym, null)
- members = member :: lastM
- }
- } else {
- var others: List[Symbol] = members
- var symtpe: Type = null
- while ((others ne null) && {
- val other = others.head
- (other ne sym) &&
- ((other.owner eq sym.owner) ||
- (flags & PRIVATE) != 0 || {
- if (self eq null) self = this.narrow
- if (symtpe eq null) symtpe = self.memberType(sym)
- !(self.memberType(other) matches symtpe)
+ (bcs eq bcs0) ||
+ (flags & PrivateLocal) != PrivateLocal ||
+ (bcs0.head.hasTransOwner(bcs.head)))) {
+ if (name.isTypeName || stableOnly && sym.isStable) {
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ if (suspension ne null) suspension foreach (_.suspended = false)
+ return sym
+ } else if (member eq NoSymbol) {
+ member = sym
+ } else if (members eq null) {
+ if ((member ne sym) &&
+ ((member.owner eq sym.owner) ||
+ (flags & PRIVATE) != 0 || {
+ if (self eq null) self = this.narrow
+ if (membertpe eq null) membertpe = self.memberType(member)
+ !(membertpe matches self.memberType(sym))
+ })) {
+ lastM = new ::(sym, null)
+ members = member :: lastM
+ }
+ } else {
+ var others: List[Symbol] = members
+ var symtpe: Type = null
+ while ((others ne null) && {
+ val other = others.head
+ (other ne sym) &&
+ ((other.owner eq sym.owner) ||
+ (flags & PRIVATE) != 0 || {
+ if (self eq null) self = this.narrow
+ if (symtpe eq null) symtpe = self.memberType(sym)
+ !(self.memberType(other) matches symtpe)
})}) {
- others = others.tail
- }
- if (others eq null) {
- val lastM1 = new ::(sym, null)
- lastM.tl = lastM1
- lastM = lastM1
- }
+ others = others.tail
+ }
+ if (others eq null) {
+ val lastM1 = new ::(sym, null)
+ lastM.tl = lastM1
+ lastM = lastM1
}
- } else if (excl == DEFERRED) {
- continue = true
}
+ } else if (excl == DEFERRED) {
+ continue = true
}
- entry = decls lookupNextEntry entry
- } // while (entry ne null)
- } // if (fingerPrint matches)
+ }
+ entry = decls lookupNextEntry entry
+ } // while (entry ne null)
// excluded = excluded | LOCAL
bcs = if (name == nme.CONSTRUCTOR) Nil else bcs.tail
} // while (!bcs.isEmpty)
required |= DEFERRED
excluded &= ~(DEFERRED.toLong)
} // while (continue)
- Statistics.popTimer(typeOpsStack, start)
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
if (suspension ne null) suspension foreach (_.suspended = false)
if (members eq null) {
- if (member == NoSymbol) Statistics.incCounter(noMemberCount)
+ if (member == NoSymbol) if (Statistics.canEnable) Statistics.incCounter(noMemberCount)
member
} else {
- Statistics.incCounter(multMemberCount)
+ if (Statistics.canEnable) Statistics.incCounter(multMemberCount)
lastM.tl = Nil
baseClasses.head.newOverloaded(this, members)
}
@@ -1242,14 +1254,18 @@ trait Types extends api.Types { self: SymbolTable =>
// Subclasses ------------------------------------------------------------
- trait UniqueType extends Product {
- final override val hashCode = scala.runtime.ScalaRunTime._hashCode(this)
+ /**
+ * A type that can be passed to unique(..) and be stored in the uniques map.
+ */
+ abstract class UniqueType extends Type with Product {
+ final override val hashCode = computeHashCode
+ protected def computeHashCode = scala.runtime.ScalaRunTime._hashCode(this)
}
/** A base class for types that defer some operations
* to their immediate supertype.
*/
- abstract class SubType extends Type {
+ abstract class SubType extends UniqueType {
def supertype: Type
override def parents: List[Type] = supertype.parents
override def decls: Scope = supertype.decls
@@ -1280,7 +1296,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def isVolatile = underlying.isVolatile
override def widen: Type = underlying.widen
override def baseTypeSeq: BaseTypeSeq = {
- Statistics.incCounter(singletonBaseTypeSeqCount)
+ if (Statistics.canEnable) Statistics.incCounter(singletonBaseTypeSeqCount)
underlying.baseTypeSeq prepend this
}
override def isHigherKinded = false // singleton type classifies objects, thus must be kind *
@@ -1390,7 +1406,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def kind = "ThisType"
}
- final class UniqueThisType(sym: Symbol) extends ThisType(sym) with UniqueType { }
+ final class UniqueThisType(sym: Symbol) extends ThisType(sym) { }
object ThisType extends ThisTypeExtractor {
def apply(sym: Symbol): Type =
@@ -1402,7 +1418,11 @@ trait Types extends api.Types { self: SymbolTable =>
* Cannot be created directly; one should always use `singleType` for creation.
*/
abstract case class SingleType(pre: Type, sym: Symbol) extends SingletonType with SingleTypeApi {
- override val isTrivial: Boolean = pre.isTrivial
+ private var trivial: ThreeValue = UNKNOWN
+ override def isTrivial: Boolean = {
+ if (trivial == UNKNOWN) trivial = fromBoolean(pre.isTrivial)
+ toBoolean(trivial)
+ }
override def isGround = sym.isPackageClass || pre.isGround
// override def isNullable = underlying.isNullable
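
Here, and in SuperType, TypeRef and MethodType below, the eager `override val isTrivial` becomes a lazily computed, cached answer: the field starts out UNKNOWN and is filled in on first use via `fromBoolean`/`toBoolean` from `util.ThreeValues` (imported at the top of this file). A minimal stand-in for the idiom; the ThreeValues object below only assumes the names the diff uses, not the real implementation:

    // Minimal stand-in for util.ThreeValues; only the names used in the diff are assumed.
    object ThreeValues {
      type ThreeValue = Byte
      final val UNKNOWN: ThreeValue = 0
      final val YES: ThreeValue     = 1
      final val NO: ThreeValue      = -1
      def fromBoolean(b: Boolean): ThreeValue = if (b) YES else NO
      def toBoolean(x: ThreeValue): Boolean   = x == YES
    }
    import ThreeValues._

    // Caches a Boolean that is computed at most once, without a separate
    // "already computed" flag and without the synchronization of a lazy val.
    class Cached(compute: () => Boolean) {
      private var trivial: ThreeValue = UNKNOWN
      def isTrivial: Boolean = {
        if (trivial == UNKNOWN) trivial = fromBoolean(compute())
        toBoolean(trivial)
      }
    }
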
@@ -1447,7 +1467,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def kind = "SingleType"
}
- final class UniqueSingleType(pre: Type, sym: Symbol) extends SingleType(pre, sym) with UniqueType { }
+ final class UniqueSingleType(pre: Type, sym: Symbol) extends SingleType(pre, sym)
object SingleType extends SingleTypeExtractor {
def apply(pre: Type, sym: Symbol): Type = {
@@ -1468,7 +1488,11 @@ trait Types extends api.Types { self: SymbolTable =>
}
abstract case class SuperType(thistpe: Type, supertpe: Type) extends SingletonType with SuperTypeApi {
- override val isTrivial: Boolean = thistpe.isTrivial && supertpe.isTrivial
+ private var trivial: ThreeValue = UNKNOWN
+ override def isTrivial: Boolean = {
+ if (trivial == UNKNOWN) trivial = fromBoolean(thistpe.isTrivial && supertpe.isTrivial)
+ toBoolean(trivial)
+ }
override def isNotNull = true;
override def typeSymbol = thistpe.typeSymbol
override def underlying = supertpe
@@ -1478,7 +1502,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def kind = "SuperType"
}
- final class UniqueSuperType(thistp: Type, supertp: Type) extends SuperType(thistp, supertp) with UniqueType { }
+ final class UniqueSuperType(thistp: Type, supertp: Type) extends SuperType(thistp, supertp)
object SuperType extends SuperTypeExtractor {
def apply(thistp: Type, supertp: Type): Type = {
@@ -1491,7 +1515,7 @@ trait Types extends api.Types { self: SymbolTable =>
*/
abstract case class TypeBounds(lo: Type, hi: Type) extends SubType with TypeBoundsApi {
def supertype = hi
- override val isTrivial: Boolean = lo.isTrivial && hi.isTrivial
+ override def isTrivial: Boolean = lo.isTrivial && hi.isTrivial
override def bounds: TypeBounds = this
def containsType(that: Type) = that match {
case TypeBounds(_, _) => that <:< this
@@ -1499,8 +1523,8 @@ trait Types extends api.Types { self: SymbolTable =>
}
private def lowerString = if (emptyLowerBound) "" else " >: " + lo
private def upperString = if (emptyUpperBound) "" else " <: " + hi
- private def emptyLowerBound = lo.typeSymbolDirect eq NothingClass
- private def emptyUpperBound = hi.typeSymbolDirect eq AnyClass
+ private def emptyLowerBound = typeIsNothing(lo)
+ private def emptyUpperBound = typeIsAny(hi)
def isEmptyBounds = emptyLowerBound && emptyUpperBound
// override def isNullable: Boolean = NullClass.tpe <:< lo;
@@ -1508,7 +1532,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def kind = "TypeBoundsType"
}
- final class UniqueTypeBounds(lo: Type, hi: Type) extends TypeBounds(lo, hi) with UniqueType { }
+ final class UniqueTypeBounds(lo: Type, hi: Type) extends TypeBounds(lo, hi)
object TypeBounds extends TypeBoundsExtractor {
def empty: TypeBounds = apply(NothingClass.tpe, AnyClass.tpe)
@@ -1583,17 +1607,17 @@ trait Types extends api.Types { self: SymbolTable =>
}
override def narrow: Type = typeSymbol.thisType
- override def isNotNull: Boolean = parents exists (_.isNotNull)
+ override def isNotNull: Boolean = parents exists typeIsNotNull
override def isStructuralRefinement: Boolean =
- typeSymbol.isAnonOrRefinementClass && decls.exists(_.isPossibleInRefinement)
+ typeSymbol.isAnonOrRefinementClass && (decls exists symbolIsPossibleInRefinement)
// override def isNullable: Boolean =
// parents forall (p => p.isNullable && !p.typeSymbol.isAbstractType);
override def safeToString: String = parentsString(parents) + (
(if (settings.debug.value || parents.isEmpty || (decls.elems ne null))
- decls.mkString("{", "; ", "}") else "")
+ fullyInitializeScope(decls).mkString("{", "; ", "}") else "")
)
}
@@ -1602,7 +1626,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (period != currentPeriod) {
tpe.baseTypeSeqPeriod = currentPeriod
if (!isValidForBaseClasses(period)) {
- if (tpe.parents.exists(_.exists(_.isInstanceOf[TypeVar]))) {
+ if (tpe.parents exists typeContainsTypeVar) {
// rename type vars to fresh type params, take base type sequence of
// resulting type, and rename back all the entries in that sequence
var tvs = Set[TypeVar]()
@@ -1629,8 +1653,8 @@ trait Types extends api.Types { self: SymbolTable =>
val bts = copyRefinedType(tpe.asInstanceOf[RefinedType], tpe.parents map varToParam, varToParam mapOver tpe.decls).baseTypeSeq
tpe.baseTypeSeqCache = bts lateMap paramToVar
} else {
- Statistics.incCounter(compoundBaseTypeSeqCount)
- val start = Statistics.pushTimer(typeOpsStack, baseTypeSeqNanos)
+ if (Statistics.canEnable) Statistics.incCounter(compoundBaseTypeSeqCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null
try {
tpe.baseTypeSeqCache = undetBaseTypeSeq
tpe.baseTypeSeqCache =
@@ -1639,7 +1663,7 @@ trait Types extends api.Types { self: SymbolTable =>
else
compoundBaseTypeSeq(tpe)
} finally {
- Statistics.popTimer(typeOpsStack, start)
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
}
// [Martin] suppressing memo-ization solves the problem with "same type after erasure" errors
// when compiling with
@@ -1687,12 +1711,12 @@ trait Types extends api.Types { self: SymbolTable =>
if (period != currentPeriod) {
tpe.baseClassesPeriod = currentPeriod
if (!isValidForBaseClasses(period)) {
- val start = Statistics.pushTimer(typeOpsStack, baseClassesNanos)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, baseClassesNanos) else null
try {
tpe.baseClassesCache = null
tpe.baseClassesCache = tpe.memo(computeBaseClasses)(tpe.typeSymbol :: _.baseClasses.tail)
} finally {
- Statistics.popTimer(typeOpsStack, start)
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
}
}
}
@@ -1710,7 +1734,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def isHigherKinded = (
parents.nonEmpty &&
- (parents forall (_.isHigherKinded)) &&
+ (parents forall typeIsHigherKinded) &&
!phase.erasedTypes
)
@@ -1780,7 +1804,6 @@ trait Types extends api.Types { self: SymbolTable =>
false
}))
}
-
override def kind = "RefinedType"
}
@@ -1966,9 +1989,11 @@ trait Types extends api.Types { self: SymbolTable =>
/** A nicely formatted string with newlines and such.
*/
def formattedToString: String =
- parents.mkString("\n with ") +
- (if (settings.debug.value || parents.isEmpty || (decls.elems ne null))
- decls.mkString(" {\n ", "\n ", "\n}") else "")
+ parents.mkString("\n with ") + (
+ if (settings.debug.value || parents.isEmpty || (decls.elems ne null))
+ fullyInitializeScope(decls).mkString(" {\n ", "\n ", "\n}")
+ else ""
+ )
}
object ClassInfoType extends ClassInfoTypeExtractor
@@ -1993,45 +2018,10 @@ trait Types extends api.Types { self: SymbolTable =>
override def kind = "ConstantType"
}
- final class UniqueConstantType(value: Constant) extends ConstantType(value) with UniqueType {
- /** Save the type of `value`. For Java enums, it depends on finding the linked class,
- * which might not be found after `flatten`. */
- private lazy val _tpe: Type = value.tpe
- override def underlying: Type = _tpe
- }
+ final class UniqueConstantType(value: Constant) extends ConstantType(value)
object ConstantType extends ConstantTypeExtractor {
- def apply(value: Constant): ConstantType = {
- val tpe = new UniqueConstantType(value)
- if (value.tag == ClazzTag) {
- // if we carry a classOf, we might be in trouble
- // http://groups.google.com/group/scala-internals/browse_thread/thread/45185b341aeb6a30
- // I don't have time for a thorough fix, so I put a hacky workaround here
- val alreadyThere = uniques findEntry tpe
- if ((alreadyThere ne null) && (alreadyThere ne tpe) && (alreadyThere.toString != tpe.toString)) {
- // we need to remove a stale type that has the same hashcode as we do
- // HashSet doesn't support removal, and this makes our task non-trivial
- // also we cannot simply recreate it, because that'd skew hashcodes (that change over time, omg!)
- // the only solution I can see is getting into the underlying array and sneakily manipulating it
- val ftable = uniques.getClass.getDeclaredFields().find(f => f.getName endsWith "table").get
- ftable.setAccessible(true)
- val table = ftable.get(uniques).asInstanceOf[Array[AnyRef]]
- def overwrite(hc: Int, x: Type) {
- def index(x: Int): Int = math.abs(x % table.length)
- var h = index(hc)
- var entry = table(h)
- while (entry ne null) {
- if (x == entry)
- table(h) = x
- h = index(h + 1)
- entry = table(h)
- }
- }
- overwrite(tpe.##, tpe)
- }
- }
- unique(tpe).asInstanceOf[ConstantType]
- }
+ def apply(value: Constant) = unique(new UniqueConstantType(value))
}
/* Syncnote: The `volatile` var and `pendingVolatiles` mutable set need not be protected
@@ -2041,7 +2031,7 @@ trait Types extends api.Types { self: SymbolTable =>
private var volatileRecursions: Int = 0
private val pendingVolatiles = new mutable.HashSet[Symbol]
- class ArgsTypeRef(pre0: Type, sym0: Symbol, args0: List[Type]) extends TypeRef(pre0, sym0, args0) with UniqueType {
+ class ArgsTypeRef(pre0: Type, sym0: Symbol, args0: List[Type]) extends TypeRef(pre0, sym0, args0) {
require(args0.nonEmpty, this)
/** No unapplied type params size it has (should have) equally as many args. */
@@ -2094,7 +2084,7 @@ trait Types extends api.Types { self: SymbolTable =>
override protected def finishPrefix(rest: String) = "" + thisInfo
}
- class NoArgsTypeRef(pre0: Type, sym0: Symbol) extends TypeRef(pre0, sym0, Nil) with UniqueType {
+ class NoArgsTypeRef(pre0: Type, sym0: Symbol) extends TypeRef(pre0, sym0, Nil) {
// A reference (in a Scala program) to a type that has type parameters, but where the reference
// does not include type arguments. Note that it doesn't matter whether the symbol refers
// to a java or scala symbol, but it does matter whether it occurs in java or scala code.
@@ -2202,7 +2192,15 @@ trait Types extends api.Types { self: SymbolTable =>
override protected def normalizeImpl =
if (typeParamsMatchArgs) betaReduce.normalize
else if (isHigherKinded) super.normalizeImpl
- else ErrorType
+ else {
+ // if we are overriding a type alias in an erroneous way, don't just
+ // return an ErrorType, since that results in a useless error msg.
+ // Instead, try to recover and rely on refchecks reporting the correct error;
+ // if that fails, fall back to the old behaviour.
+ val overriddenSym = sym.nextOverriddenSymbol
+ if (overriddenSym != NoSymbol) pre.memberType(overriddenSym).normalize
+ else ErrorType
+ }
// isHKSubType0 introduces synthetic type params so that
// betaReduce can first apply sym.info to typeArgs before calling
@@ -2290,15 +2288,32 @@ trait Types extends api.Types { self: SymbolTable =>
*
* @M: a higher-kinded type is represented as a TypeRef with sym.typeParams.nonEmpty, but args.isEmpty
*/
- abstract case class TypeRef(pre: Type, sym: Symbol, args: List[Type]) extends Type with TypeRefApi {
- override val isTrivial: Boolean = !sym.isTypeParameter && pre.isTrivial && args.forall(_.isTrivial)
-
+ abstract case class TypeRef(pre: Type, sym: Symbol, args: List[Type]) extends UniqueType with TypeRefApi {
+ private var trivial: ThreeValue = UNKNOWN
+ override def isTrivial: Boolean = {
+ if (trivial == UNKNOWN)
+ trivial = fromBoolean(!sym.isTypeParameter && pre.isTrivial && areTrivialTypes(args))
+ toBoolean(trivial)
+ }
private[reflect] var parentsCache: List[Type] = _
private[reflect] var parentsPeriod = NoPeriod
private[reflect] var baseTypeSeqCache: BaseTypeSeq = _
private[reflect] var baseTypeSeqPeriod = NoPeriod
private var normalized: Type = _
+ //OPT specialize hashCode
+ override final def computeHashCode = {
+ import scala.util.hashing.MurmurHash3._
+ val hasArgs = args.nonEmpty
+ var h = productSeed
+ h = mix(h, pre.hashCode)
+ h = mix(h, sym.hashCode)
+ if (hasArgs)
+ finalizeHash(mix(h, args.hashCode), 3)
+ else
+ finalizeHash(h, 2)
+ }
+
// @M: propagate actual type params (args) to `tp`, by replacing
// formal type parameters with actual ones. If tp is higher kinded,
// the "actual" type arguments are types that simply reference the
@@ -2395,14 +2410,14 @@ trait Types extends api.Types { self: SymbolTable =>
private def needsPreString = (
settings.debug.value
|| !shorthands(sym.fullName)
- || sym.ownerChain.exists(s => !s.isClass)
+ || (sym.ownersIterator exists (s => !s.isClass))
)
private def preString = if (needsPreString) pre.prefixString else ""
private def argsString = if (args.isEmpty) "" else args.mkString("[", ",", "]")
def refinementString = (
if (sym.isStructuralRefinement) (
- decls filter (sym => sym.isPossibleInRefinement && sym.isPublic)
+ fullyInitializeScope(decls) filter (sym => sym.isPossibleInRefinement && sym.isPublic)
map (_.defString)
mkString("{", "; ", "}")
)
@@ -2496,13 +2511,13 @@ trait Types extends api.Types { self: SymbolTable =>
if (period != currentPeriod) {
tpe.baseTypeSeqPeriod = currentPeriod
if (!isValidForBaseClasses(period)) {
- Statistics.incCounter(typerefBaseTypeSeqCount)
- val start = Statistics.pushTimer(typeOpsStack, baseTypeSeqNanos)
+ if (Statistics.canEnable) Statistics.incCounter(typerefBaseTypeSeqCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null
try {
tpe.baseTypeSeqCache = undetBaseTypeSeq
tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl
} finally {
- Statistics.popTimer(typeOpsStack, start)
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
}
}
}
@@ -2519,14 +2534,22 @@ trait Types extends api.Types { self: SymbolTable =>
case class MethodType(override val params: List[Symbol],
override val resultType: Type) extends Type with MethodTypeApi {
- override lazy val isTrivial: Boolean =
- isTrivialResult && (params forall isTrivialParam)
+ private var trivial: ThreeValue = UNKNOWN
+ override def isTrivial: Boolean = {
+ if (trivial == UNKNOWN) trivial = fromBoolean(isTrivialResult && areTrivialParams(params))
+ toBoolean(trivial)
+ }
private def isTrivialResult =
resultType.isTrivial && (resultType eq resultType.withoutAnnotations)
- private def isTrivialParam(p: Symbol) =
- p.tpe.isTrivial && !(params.exists(_.tpe contains p) || (resultType contains p))
+ private def areTrivialParams(ps: List[Symbol]): Boolean = ps match {
+ case p :: rest =>
+ p.tpe.isTrivial && !typesContain(paramTypes, p) && !(resultType contains p) &&
+ areTrivialParams(rest)
+ case _ =>
+ true
+ }
def isImplicit = params.nonEmpty && params.head.isImplicit
def isJava = false // can we do something like for implicits? I.e. do Java methods without parameters need to be recognized?
@@ -2542,13 +2565,19 @@ trait Types extends api.Types { self: SymbolTable =>
override def resultType(actuals: List[Type]) =
if (isTrivial || phase.erasedTypes) resultType
- else if (sameLength(actuals, params)) {
+ else if (/*isDependentMethodType &&*/ sameLength(actuals, params)) {
val idm = new InstantiateDependentMap(params, actuals)
val res = idm(resultType)
existentialAbstraction(idm.existentialsNeeded, res)
}
else existentialAbstraction(params, resultType)
+ private var isdepmeth: ThreeValue = UNKNOWN
+ override def isDependentMethodType: Boolean = {
+ if (isdepmeth == UNKNOWN) isdepmeth = fromBoolean(IsDependentCollector.collect(resultType))
+ toBoolean(isdepmeth)
+ }
+
// implicit args can only be depended on in result type:
//TODO this may be generalised so that the only constraint is dependencies are acyclic
def approximate: MethodType = MethodType(params, resultApprox)
@@ -2563,7 +2592,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
override def atOwner(owner: Symbol) =
- if ((params exists (_.owner != owner)) || (resultType.atOwner(owner) ne resultType))
+ if (!allSymbolsHaveOwner(params, owner) || (resultType.atOwner(owner) ne resultType))
cloneInfo(owner)
else
this
@@ -2651,7 +2680,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
override def atOwner(owner: Symbol) =
- if ((typeParams exists (_.owner != owner)) || (resultType.atOwner(owner) ne resultType))
+ if (!allSymbolsHaveOwner(typeParams, owner) || (resultType.atOwner(owner) ne resultType))
cloneInfo(owner)
else
this
@@ -2755,7 +2784,7 @@ trait Types extends api.Types { self: SymbolTable =>
createFromClonedSymbolsAtOwner(quantified, owner, underlying)(newExistentialType)
override def atOwner(owner: Symbol) =
- if (quantified exists (_.owner != owner)) cloneInfo(owner) else this
+ if (!allSymbolsHaveOwner(quantified, owner)) cloneInfo(owner) else this
override def kind = "ExistentialType"
@@ -2866,14 +2895,13 @@ trait Types extends api.Types { self: SymbolTable =>
* any results.
*/
if (propagateParameterBoundsToTypeVars) {
- val exclude = bounds.isEmptyBounds || bounds.exists(_.typeSymbolDirect.isNonClassType)
+ val exclude = bounds.isEmptyBounds || (bounds exists typeIsNonClassType)
if (exclude) new TypeConstraint
else TypeVar.trace("constraint", "For " + tparam.fullLocationString)(new TypeConstraint(bounds))
}
else new TypeConstraint
}
- def unapply(tv: TypeVar): Some[(Type, TypeConstraint)] = Some((tv.origin, tv.constr))
def untouchable(tparam: Symbol): TypeVar = createTypeVar(tparam, untouchable = true)
def apply(tparam: Symbol): TypeVar = createTypeVar(tparam, untouchable = false)
def apply(origin: Type, constr: TypeConstraint): TypeVar = apply(origin, constr, Nil, Nil)
@@ -2886,7 +2914,7 @@ trait Types extends api.Types { self: SymbolTable =>
val tv = (
if (args.isEmpty && params.isEmpty) {
if (untouchable) new TypeVar(origin, constr) with UntouchableTypeVar
- else new TypeVar(origin, constr)
+ else new TypeVar(origin, constr) {}
}
else if (args.size == params.size) {
if (untouchable) new AppliedTypeVar(origin, constr, params zip args) with UntouchableTypeVar
@@ -2912,13 +2940,12 @@ trait Types extends api.Types { self: SymbolTable =>
if (tp == NoType) tp
else existentialAbstraction(existentialsInType(tp), tp)
)
+
def containsExistential(tpe: Type) =
- tpe exists (_.typeSymbol.isExistentiallyBound)
+ tpe exists typeIsExistentiallyBound
- def existentialsInType(tpe: Type) = (
- for (tp <- tpe ; if tp.typeSymbol.isExistentiallyBound) yield
- tp.typeSymbol
- )
+ def existentialsInType(tpe: Type) =
+ tpe withFilter typeIsExistentiallyBound map (_.typeSymbol)
/** Precondition: params.nonEmpty. (args.nonEmpty enforced structurally.)
*/
@@ -2977,9 +3004,9 @@ trait Types extends api.Types { self: SymbolTable =>
*
* Precondition for this class, enforced structurally: args.isEmpty && params.isEmpty.
*/
- class TypeVar(
+ abstract case class TypeVar(
val origin: Type,
- val constr0: TypeConstraint
+ var constr: TypeConstraint
) extends Type {
def untouchable = false // by other typevars
override def params: List[Symbol] = Nil
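Making TypeVar an abstract case class is what allows the hand-written `unapply` above to be dropped (the synthetic extractor takes over) and is why the factory now has to instantiate it through an anonymous subclass, `new TypeVar(origin, constr) {}`. A tiny illustration of that idiom with a made-up class:

    // An abstract case class still gets structural equals/hashCode and an unapply,
    // but no synthetic apply, and it cannot be instantiated directly.
    abstract case class Box(value: Int)

    object Box {
      def apply(value: Int): Box = new Box(value) {}   // anonymous subclass does the work
    }

    // Box(1) == Box(1) holds, and `case Box(v) => ...` still matches.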
@@ -2992,7 +3019,7 @@ trait Types extends api.Types { self: SymbolTable =>
* in operations that are exposed from types. Hence, no syncing of `constr`
* or `encounteredHigherLevel` or `suspended` accesses should be necessary.
*/
- var constr = constr0
+// var constr = constr0
def instValid = constr.instValid
override def isGround = instValid && constr.inst.isGround
@@ -3391,16 +3418,16 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
- /** A temporary type representing the reasure of a user-defined value type.
- * Created during phase reasure, elimintaed again in posterasure.
- * @param sym The value class symbol
- * @param underlying The underlying type before erasure
+ /** A temporary type representing the erasure of a user-defined value type.
+ * Created during phase erasure, eliminated again in posterasure.
+ *
+ * @param original The underlying type before erasure
*/
- abstract case class ErasedValueType(original: TypeRef) extends Type {
+ abstract case class ErasedValueType(original: TypeRef) extends UniqueType {
override def safeToString = "ErasedValueType("+original+")"
}
- final class UniqueErasedValueType(original: TypeRef) extends ErasedValueType(original) with UniqueType
+ final class UniqueErasedValueType(original: TypeRef) extends ErasedValueType(original)
object ErasedValueType {
def apply(original: TypeRef): Type = {
@@ -3418,6 +3445,16 @@ trait Types extends api.Types { self: SymbolTable =>
override def kind = "LazyType"
}
+ /** A marker trait representing an as-yet unevaluated type
+ * which doesn't assign flags to the underlying symbol.
+ */
+ trait FlagAgnosticCompleter extends LazyType
+
+ /** A marker trait representing an as-yet unevaluated type
+ * which assigns flags to the underlying symbol.
+ */
+ trait FlagAssigningCompleter extends LazyType
+
abstract class LazyPolyType(override val typeParams: List[Symbol]) extends LazyType {
override def safeToString =
(if (typeParams.isEmpty) "" else typeParamsString(this)) + super.safeToString
@@ -3636,10 +3673,15 @@ trait Types extends api.Types { self: SymbolTable =>
* may or may not be poly? (It filched the standard "canonical creator" name.)
*/
object GenPolyType {
- def apply(tparams: List[Symbol], tpe: Type): Type = (
+ def apply(tparams: List[Symbol], tpe: Type): Type = {
+ tpe match {
+ case MethodType(_, _) =>
+ assert(tparams forall (_.isInvariant), "Trying to create a method with variant type parameters: " + ((tparams, tpe)))
+ case _ =>
+ }
if (tparams.nonEmpty) typeFun(tparams, tpe)
else tpe // it's okay to be forgiving here
- )
+ }
def unapply(tpe: Type): Option[(List[Symbol], Type)] = tpe match {
case PolyType(tparams, restpe) => Some((tparams, restpe))
case _ => Some((Nil, tpe))
@@ -3665,12 +3707,12 @@ trait Types extends api.Types { self: SymbolTable =>
*
* tpe1 where { tparams }
*
- * where `tpe1` is the result of extrapolating `tpe` wrt to `tparams`.
+ * where `tpe1` is the result of extrapolating `tpe` with respect to `tparams`.
* Extrapolating means that type variables in `tparams` occurring
* in covariant positions are replaced by upper bounds, (minus any
* SingletonClass markers), type variables in `tparams` occurring in
* contravariant positions are replaced by upper bounds, provided the
- * resulting type is legal wrt to stability, and does not contain any type
+ * resulting type is legal with regard to stability, and does not contain any type
* variable in `tparams`.
*
* The abstraction drops all type parameters that are not directly or
@@ -3711,10 +3753,11 @@ trait Types extends api.Types { self: SymbolTable =>
case TypeRef(_, SingletonClass, _) =>
AnyClass.tpe
case tp1 @ RefinedType(parents, decls) =>
- var parents1 = parents filter (_.typeSymbol != SingletonClass)
- if (parents1.isEmpty) parents1 = List(AnyClass.tpe)
- if (parents1.tail.isEmpty && decls.isEmpty) mapOver(parents1.head)
- else mapOver(copyRefinedType(tp1, parents1, decls))
+ parents filter (_.typeSymbol != SingletonClass) match {
+ case Nil => AnyClass.tpe
+ case p :: Nil if decls.isEmpty => mapOver(p)
+ case ps => mapOver(copyRefinedType(tp1, ps, decls))
+ }
case tp1 =>
mapOver(tp1)
}
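The RefinedType case is reworked from a mutable `parents1` variable into a single filter-then-match expression, which covers the empty, singleton, and general results in one place. The same idiom in isolation (the example values are made up):

    def describe(xs: List[Int]): String = xs.filter(_ != 0) match {
      case Nil       => "nothing left"
      case x :: Nil  => s"single survivor: $x"
      case survivors => s"several: ${survivors.mkString(", ")}"
    }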
@@ -3819,7 +3862,7 @@ trait Types extends api.Types { self: SymbolTable =>
private var uniqueRunId = NoRunId
protected def unique[T <: Type](tp: T): T = {
- Statistics.incCounter(rawTypeCount)
+ if (Statistics.canEnable) Statistics.incCounter(rawTypeCount)
if (uniqueRunId != currentRunId) {
uniques = util.HashSet[Type]("uniques", initialUniquesCapacity)
perRunCaches.recordCache(uniques)
@@ -3856,8 +3899,8 @@ trait Types extends api.Types { self: SymbolTable =>
* guarding addLoBound/addHiBound somehow broke raw types so it
* only guards against being created with them.]
*/
- private var lobounds = lo0 filterNot (_.typeSymbolDirect eq NothingClass)
- private var hibounds = hi0 filterNot (_.typeSymbolDirect eq AnyClass)
+ private var lobounds = lo0 filterNot typeIsNothing
+ private var hibounds = hi0 filterNot typeIsAny
private var numlo = numlo0
private var numhi = numhi0
private var avoidWidening = avoidWidening0
@@ -3867,13 +3910,15 @@ trait Types extends api.Types { self: SymbolTable =>
def avoidWiden: Boolean = avoidWidening
def addLoBound(tp: Type, isNumericBound: Boolean = false) {
- if (isNumericBound && isNumericValueType(tp)) {
- if (numlo == NoType || isNumericSubType(numlo, tp))
- numlo = tp
- else if (!isNumericSubType(tp, numlo))
- numlo = numericLoBound
+ if (!lobounds.contains(tp)) {
+ if (isNumericBound && isNumericValueType(tp)) {
+ if (numlo == NoType || isNumericSubType(numlo, tp))
+ numlo = tp
+ else if (!isNumericSubType(tp, numlo))
+ numlo = numericLoBound
+ }
+ else lobounds ::= tp
}
- else lobounds ::= tp
}
def checkWidening(tp: Type) {
@@ -3885,14 +3930,16 @@ trait Types extends api.Types { self: SymbolTable =>
}
def addHiBound(tp: Type, isNumericBound: Boolean = false) {
- checkWidening(tp)
- if (isNumericBound && isNumericValueType(tp)) {
- if (numhi == NoType || isNumericSubType(tp, numhi))
- numhi = tp
- else if (!isNumericSubType(numhi, tp))
- numhi = numericHiBound
+ if (!hibounds.contains(tp)) {
+ checkWidening(tp)
+ if (isNumericBound && isNumericValueType(tp)) {
+ if (numhi == NoType || isNumericSubType(tp, numhi))
+ numhi = tp
+ else if (!isNumericSubType(numhi, tp))
+ numhi = numericHiBound
+ }
+ else hibounds ::= tp
}
- else hibounds ::= tp
}
def isWithinBounds(tp: Type): Boolean =
@@ -3913,8 +3960,8 @@ trait Types extends api.Types { self: SymbolTable =>
override def toString = {
val boundsStr = {
- val lo = loBounds filterNot (_.typeSymbolDirect eq NothingClass)
- val hi = hiBounds filterNot (_.typeSymbolDirect eq AnyClass)
+ val lo = loBounds filterNot typeIsNothing
+ val hi = hiBounds filterNot typeIsAny
val lostr = if (lo.isEmpty) Nil else List(lo.mkString(" >: (", ", ", ")"))
val histr = if (hi.isEmpty) Nil else List(hi.mkString(" <: (", ", ", ")"))
@@ -3961,15 +4008,18 @@ trait Types extends api.Types { self: SymbolTable =>
override def variance = _variance
def variance_=(x: Int) = _variance = x
- override protected def noChangeToSymbols(origSyms: List[Symbol]) = {
- origSyms forall { sym =>
- val v = variance
- if (sym.isAliasType) variance = 0
- val result = this(sym.info)
- variance = v
- result eq sym.info
+ override protected def noChangeToSymbols(origSyms: List[Symbol]) =
+ //OPT inline from forall to save on #closures
+ origSyms match {
+ case sym :: rest =>
+ val v = variance
+ if (sym.isAliasType) variance = 0
+ val result = this(sym.info)
+ variance = v
+ (result eq sym.info) && noChangeToSymbols(rest)
+ case _ =>
+ true
}
- }
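As the //OPT note says, `origSyms forall { ... }` is unrolled into an explicit recursion so the predicate body sits directly in the match arm and no function object is allocated per call. The generic shape of that rewrite, sketched as a standalone helper (the compiler goes one step further and inlines the predicate itself):

    import scala.annotation.tailrec

    // xs.forall(p) as a hand-rolled, tail-recursive loop.
    @tailrec
    def forallLoop[A](xs: List[A])(p: A => Boolean): Boolean = xs match {
      case x :: rest => p(x) && forallLoop(rest)(p)
      case _         => true
    }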
override protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] =
map2Conserve(args, tparams) { (arg, tparam) =>
@@ -4354,7 +4404,7 @@ trait Types extends api.Types { self: SymbolTable =>
var capturedSkolems: List[Symbol] = List()
var capturedParams: List[Symbol] = List()
- @inline private def skipPrefixOf(pre: Type, clazz: Symbol) = (
+ private def skipPrefixOf(pre: Type, clazz: Symbol) = (
(pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz)
)
override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
@@ -4474,7 +4524,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (sameLength(basesym.typeParams, baseargs))
instParam(basesym.typeParams, baseargs)
else
- if (symclazz.tpe.parents.exists(_.isErroneous))
+ if (symclazz.tpe.parents exists typeIsErroneous)
            ErrorType // don't be too overzealous with throwing exceptions, see #2641
else
throw new Error(
@@ -4525,7 +4575,7 @@ trait Types extends api.Types { self: SymbolTable =>
else subst(tp, sym, from.tail, to.tail)
val boundSyms = tp0.boundSyms
- val tp1 = if (boundSyms exists from.contains) renameBoundSyms(tp0) else tp0
+ val tp1 = if (boundSyms.nonEmpty && (boundSyms exists from.contains)) renameBoundSyms(tp0) else tp0
val tp = mapOver(tp1)
tp match {
@@ -4565,10 +4615,12 @@ trait Types extends api.Types { self: SymbolTable =>
tp match {
case TypeRef(pre, sym, args) if pre ne NoPrefix =>
val newSym = subst(sym, from, to)
+ // mapOver takes care of subst'ing in args
+ mapOver ( if (sym eq newSym) tp else copyTypeRef(tp, pre, newSym, args) )
// assert(newSym.typeParams.length == sym.typeParams.length, "typars mismatch in SubstSymMap: "+(sym, sym.typeParams, newSym, newSym.typeParams))
- mapOver(copyTypeRef(tp, pre, newSym, args)) // mapOver takes care of subst'ing in args
case SingleType(pre, sym) if pre ne NoPrefix =>
- mapOver(singleType(pre, subst(sym, from, to)))
+ val newSym = subst(sym, from, to)
+ mapOver( if (sym eq newSym) tp else singleType(pre, newSym) )
case _ =>
super.apply(tp)
}
@@ -4664,6 +4716,8 @@ trait Types extends api.Types { self: SymbolTable =>
else mapOver(tp)
}
+ /** Note: This map is needed even for non-dependent method types, despite what the name might imply.
+ */
class InstantiateDependentMap(params: List[Symbol], actuals0: List[Type]) extends TypeMap with KeepOnlyTypeConstraints {
private val actuals = actuals0.toIndexedSeq
private val existentials = new Array[Symbol](actuals.size)
@@ -5038,9 +5092,9 @@ trait Types extends api.Types { self: SymbolTable =>
// in addition to making subtyping "more correct" for type vars,
// it should avoid the stackoverflow that's been plaguing us (https://groups.google.com/d/topic/scala-internals/2gHzNjtB4xA/discussion)
// this method is only called when subtyping hits a recursion threshold (subsametypeRecursions >= LogPendingSubTypesThreshold)
- @inline def suspend(tp: Type) =
+ def suspend(tp: Type) =
if (tp.isGround) null else suspendTypeVarsInType(tp)
- @inline def revive(suspension: List[TypeVar]) =
+ def revive(suspension: List[TypeVar]) =
if (suspension ne null) suspension foreach (_.suspended = false)
val suspensions = Array(tp1, stp.tp1, tp2, stp.tp2) map suspend
@@ -5097,14 +5151,21 @@ trait Types extends api.Types { self: SymbolTable =>
1
}
- private def maxDepth(tps: Seq[Type], by: Type => Int): Int = {
- var d = 0
- for (tp <- tps) d = d max by(tp)
- d
+ private def maxDepth(tps: List[Type], by: Type => Int): Int = {
+ //OPT replaced with tailrecursive function to save on #closures
+ // was:
+ // var d = 0
+ // for (tp <- tps) d = d max by(tp) //!!!OPT!!!
+ // d
+ def loop(tps: List[Type], acc: Int): Int = tps match {
+ case tp :: rest => loop(rest, acc max by(tp))
+ case _ => acc
+ }
+ loop(tps, 0)
}
- private def typeDepth(tps: Seq[Type]): Int = maxDepth(tps, typeDepth)
- private def baseTypeSeqDepth(tps: Seq[Type]): Int = maxDepth(tps, _.baseTypeSeqDepth)
+ private def typeDepth(tps: List[Type]): Int = maxDepth(tps, typeDepth)
+ private def baseTypeSeqDepth(tps: List[Type]): Int = maxDepth(tps, _.baseTypeSeqDepth)
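For reference, the tail-recursive `loop` above is just a running maximum; it computes the same result as a fold, and the hand-written version exists only to avoid the extra closure. A one-line equivalent under that reading (Int stands in for Type here):

    def maxDepthViaFold(tps: List[Int], by: Int => Int): Int =
      tps.foldLeft(0)((acc, tp) => acc max by(tp))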
/** Is intersection of given types populated? That is,
* for all types tp1, tp2 in intersection
@@ -5213,11 +5274,24 @@ trait Types extends api.Types { self: SymbolTable =>
/** Do `tp1` and `tp2` denote equivalent types? */
def isSameType(tp1: Type, tp2: Type): Boolean = try {
- Statistics.incCounter(sametypeCount)
+ if (Statistics.canEnable) Statistics.incCounter(sametypeCount)
subsametypeRecursions += 1
- undoLog undoUnless {
- isSameType1(tp1, tp2)
- }
+ //OPT cutdown on Function0 allocation
+ //was:
+// undoLog undoUnless {
+// isSameType1(tp1, tp2)
+// }
+
+ undoLog.lock()
+ try {
+ val before = undoLog.log
+ var result = false
+
+ try result = {
+ isSameType1(tp1, tp2)
+ } finally if (!result) undoLog.undoTo(before)
+ result
+ } finally undoLog.unlock()
} finally {
subsametypeRecursions -= 1
// XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
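Both isSameType here and isSubType further down hand-inline what `undoLog undoUnless { ... }` used to do: lock the log, snapshot it, run the test, and roll back unless it succeeded. The control-flow skeleton with a simplified stand-in log (locking omitted; `Entry` is opaque and not the compiler's undo record):

    final class MiniUndoLog[Entry] {
      private var _log: List[Entry] = Nil
      def log: List[Entry] = _log
      def record(e: Entry): Unit = _log ::= e
      def undoTo(snapshot: List[Entry]): Unit = _log = snapshot

      // What `undoUnless { body }` does, spelled out. The compiler inlines this by
      // hand because the by-name `body` is exactly the Function0 it wants to avoid.
      def undoUnless(body: => Boolean): Boolean = {
        val before = log
        var result = false
        try { result = body; result }
        finally if (!result) undoTo(before)
      }
    }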
@@ -5522,7 +5596,7 @@ trait Types extends api.Types { self: SymbolTable =>
case _: SingletonType =>
tp2 match {
case _: SingletonType =>
- @inline def chaseDealiasedUnderlying(tp: Type): Type = {
+ def chaseDealiasedUnderlying(tp: Type): Type = {
var origin = tp
var next = origin.underlying.dealias
while (next.isInstanceOf[SingletonType]) {
@@ -5568,22 +5642,49 @@ trait Types extends api.Types { self: SymbolTable =>
def isSubType(tp1: Type, tp2: Type, depth: Int): Boolean = try {
subsametypeRecursions += 1
- undoLog undoUnless { // if subtype test fails, it should not affect constraints on typevars
- if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
- val p = new SubTypePair(tp1, tp2)
- if (pendingSubTypes(p))
- false
- else
- try {
- pendingSubTypes += p
- isSubType2(tp1, tp2, depth)
- } finally {
- pendingSubTypes -= p
- }
- } else {
- isSubType2(tp1, tp2, depth)
- }
- }
+ //OPT cutdown on Function0 allocation
+ //was:
+// undoLog undoUnless { // if subtype test fails, it should not affect constraints on typevars
+// if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
+// val p = new SubTypePair(tp1, tp2)
+// if (pendingSubTypes(p))
+// false
+// else
+// try {
+// pendingSubTypes += p
+// isSubType2(tp1, tp2, depth)
+// } finally {
+// pendingSubTypes -= p
+// }
+// } else {
+// isSubType2(tp1, tp2, depth)
+// }
+// }
+
+ undoLog.lock()
+ try {
+ val before = undoLog.log
+ var result = false
+
+ try result = { // if subtype test fails, it should not affect constraints on typevars
+ if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
+ val p = new SubTypePair(tp1, tp2)
+ if (pendingSubTypes(p))
+ false
+ else
+ try {
+ pendingSubTypes += p
+ isSubType2(tp1, tp2, depth)
+ } finally {
+ pendingSubTypes -= p
+ }
+ } else {
+ isSubType2(tp1, tp2, depth)
+ }
+ } finally if (!result) undoLog.undoTo(before)
+
+ result
+ } finally undoLog.unlock()
} finally {
subsametypeRecursions -= 1
// XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
@@ -5593,8 +5694,8 @@ trait Types extends api.Types { self: SymbolTable =>
/** Does this type have a prefix that begins with a type variable,
* or is it a refinement type? For type prefixes that fulfil this condition,
- * type selections with the same name of equal (wrt) =:= prefixes are
- * considered equal wrt =:=
+ * type selections with the same name of equal (as determined by `=:=`) prefixes are
+ * considered equal in regard to `=:=`.
*/
def beginsWithTypeVarOrIsRefined(tp: Type): Boolean = tp match {
case SingleType(pre, sym) =>
@@ -5607,6 +5708,7 @@ trait Types extends api.Types { self: SymbolTable =>
false
}
+ @deprecated("The compiler doesn't use this so you shouldn't either - it will be removed", "2.10.0")
def instTypeVar(tp: Type): Type = tp match {
case TypeRef(pre, sym, args) =>
copyTypeRef(tp, instTypeVar(pre), sym, args)
@@ -5889,9 +5991,17 @@ trait Types extends api.Types { self: SymbolTable =>
def specializesSym(tp: Type, sym: Symbol, depth: Int): Boolean =
tp.typeSymbol == NothingClass ||
- tp.typeSymbol == NullClass && containsNull(sym.owner) ||
- (tp.nonPrivateMember(sym.name).alternatives exists
- (alt => sym == alt || specializesSym(tp.narrow, alt, sym.owner.thisType, sym, depth)))
+ tp.typeSymbol == NullClass && containsNull(sym.owner) || {
+ def specializedBy(membr: Symbol): Boolean =
+ membr == sym || specializesSym(tp.narrow, membr, sym.owner.thisType, sym, depth)
+ val member = tp.nonPrivateMember(sym.name)
+ if (member eq NoSymbol) false
+ else if (member.isOverloaded) member.alternatives exists specializedBy
+ else specializedBy(member)
+ // was
+ // (tp.nonPrivateMember(sym.name).alternatives exists
+ // (alt => sym == alt || specializesSym(tp.narrow, alt, sym.owner.thisType, sym, depth)))
+ }
/** Does member `sym1` of `tp1` have a stronger type
* than member `sym2` of `tp2`?
@@ -6137,9 +6247,9 @@ trait Types extends api.Types { self: SymbolTable =>
*/
def isWithinBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): Boolean = {
var bounds = instantiatedBounds(pre, owner, tparams, targs)
- if (targs.exists(_.annotations.nonEmpty))
+ if (targs exists typeHasAnnotations)
bounds = adaptBoundsToAnnotations(bounds, tparams, targs)
- (bounds corresponds targs)(_ containsType _)
+ (bounds corresponds targs)(boundsContainType)
}
def instantiatedBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): List[TypeBounds] =
@@ -6207,7 +6317,7 @@ trait Types extends api.Types { self: SymbolTable =>
private def lubList(ts: List[Type], depth: Int): List[Type] = {
// Matching the type params of one of the initial types means dummies.
val initialTypeParams = ts map (_.typeParams)
- def isHotForTs(xs: List[Type]) = initialTypeParams contains xs.map(_.typeSymbol)
+ def isHotForTs(xs: List[Type]) = initialTypeParams contains (xs map (_.typeSymbol))
def elimHigherOrderTypeParam(tp: Type) = tp match {
case TypeRef(pre, sym, args) if args.nonEmpty && isHotForTs(args) => tp.typeConstructor
@@ -6217,7 +6327,7 @@ trait Types extends api.Types { self: SymbolTable =>
def loop(tsBts: List[List[Type]]): List[Type] = {
lubListDepth += 1
- if (tsBts.isEmpty || tsBts.exists(_.isEmpty)) Nil
+ if (tsBts.isEmpty || (tsBts exists typeListIsEmpty)) Nil
else if (tsBts.tail.isEmpty) tsBts.head
else {
// ts0 is the 1-dimensional frontier of symbols cutting through 2-dimensional tsBts.
@@ -6232,7 +6342,7 @@ trait Types extends api.Types { self: SymbolTable =>
// Produce a single type for this frontier by merging the prefixes and arguments of those
// typerefs that share the same symbol: that symbol is the current maximal symbol for which
- // the invariant holds, i.e., the one that conveys most information wrt subtyping. Before
+ // the invariant holds, i.e., the one that conveys most information regarding subtyping. Before
// merging, strip targs that refer to bound tparams (when we're computing the lub of type
// constructors.) Also filter out all types that are a subtype of some other type.
if (isUniformFrontier) {
@@ -6281,7 +6391,7 @@ trait Types extends api.Types { self: SymbolTable =>
loop(initialBTSes)
}
- /** The minimal symbol (wrt Symbol.isLess) of a list of types */
+ /** The minimal symbol of a list of types (as determined by `Symbol.isLess`). */
private def minSym(tps: List[Type]): Symbol =
(tps.head.typeSymbol /: tps.tail) {
(sym1, tp2) => if (tp2.typeSymbol isLess sym1) tp2.typeSymbol else sym1
@@ -6299,10 +6409,12 @@ trait Types extends api.Types { self: SymbolTable =>
* of some other element of the list. */
private def elimSuper(ts: List[Type]): List[Type] = ts match {
case List() => List()
+ case List(t) => List(t)
case t :: ts1 =>
val rest = elimSuper(ts1 filter (t1 => !(t <:< t1)))
if (rest exists (t1 => t1 <:< t)) rest else t :: rest
}
+
def elimAnonymousClass(t: Type) = t match {
case TypeRef(pre, clazz, Nil) if clazz.isAnonymousClass =>
clazz.classBound.asSeenFrom(pre, clazz.owner)
@@ -6319,6 +6431,7 @@ trait Types extends api.Types { self: SymbolTable =>
private def elimSub(ts: List[Type], depth: Int): List[Type] = {
def elimSub0(ts: List[Type]): List[Type] = ts match {
case List() => List()
+ case List(t) => List(t)
case t :: ts1 =>
val rest = elimSub0(ts1 filter (t1 => !isSubType(t1, t, decr(depth))))
if (rest exists (t1 => isSubType(t, t1, decr(depth)))) rest else t :: rest
@@ -6352,7 +6465,7 @@ trait Types extends api.Types { self: SymbolTable =>
def weakLub(ts: List[Type]) =
if (ts.nonEmpty && (ts forall isNumericValueType)) (numericLub(ts), true)
- else if (ts.nonEmpty && (ts exists (_.annotations.nonEmpty)))
+ else if (ts exists typeHasAnnotations)
(annotationsLub(lub(ts map (_.withoutAnnotations)), ts), true)
else (lub(ts), false)
@@ -6361,7 +6474,7 @@ trait Types extends api.Types { self: SymbolTable =>
val nglb = numericGlb(ts)
if (nglb != NoType) (nglb, true)
else (glb(ts), false)
- } else if (ts.nonEmpty && (ts exists (_.annotations.nonEmpty))) {
+ } else if (ts exists typeHasAnnotations) {
(annotationsGlb(glb(ts map (_.withoutAnnotations)), ts), true)
} else (glb(ts), false)
}
@@ -6404,7 +6517,7 @@ trait Types extends api.Types { self: SymbolTable =>
* test/continuations-neg/function3.scala goes into an infinite loop.
* (Even if the calls are to typeSymbolDirect.)
*/
- def isNumericSubType(tp1: Type, tp2: Type) = (
+ def isNumericSubType(tp1: Type, tp2: Type): Boolean = (
isNumericValueType(tp1)
&& isNumericValueType(tp2)
&& isNumericSubClass(tp1.typeSymbol, tp2.typeSymbol)
@@ -6417,14 +6530,14 @@ trait Types extends api.Types { self: SymbolTable =>
case List() => NothingClass.tpe
case List(t) => t
case _ =>
- Statistics.incCounter(lubCount)
- val start = Statistics.pushTimer(typeOpsStack, lubNanos)
+ if (Statistics.canEnable) Statistics.incCounter(lubCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null
try {
lub(ts, lubDepth(ts))
} finally {
lubResults.clear()
glbResults.clear()
- Statistics.popTimer(typeOpsStack, start)
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
}
}
@@ -6437,8 +6550,8 @@ trait Types extends api.Types { self: SymbolTable =>
val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
tparam.cloneSymbol.setInfo(glb(bounds, depth)))
PolyType(tparams1, lub0(matchingInstTypes(ts, tparams1)))
- case ts @ MethodType(params, _) :: rest =>
- MethodType(params, lub0(matchingRestypes(ts, params map (_.tpe))))
+ case ts @ (mt @ MethodType(params, _)) :: rest =>
+ MethodType(params, lub0(matchingRestypes(ts, mt.paramTypes)))
case ts @ NullaryMethodType(_) :: rest =>
NullaryMethodType(lub0(matchingRestypes(ts, Nil)))
case ts @ TypeBounds(_, _) :: rest =>
@@ -6540,13 +6653,13 @@ trait Types extends api.Types { self: SymbolTable =>
indent = indent + " "
assert(indent.length <= 100)
}
- Statistics.incCounter(nestedLubCount)
+ if (Statistics.canEnable) Statistics.incCounter(nestedLubCount)
val res = lub0(ts)
if (printLubs) {
indent = indent stripSuffix " "
println(indent + "lub of " + ts + " is " + res)//debug
}
- if (ts forall (_.isNotNull)) res.notNull else res
+ if (ts forall typeIsNotNull) res.notNull else res
}
val GlbFailure = new Throwable
@@ -6560,19 +6673,19 @@ trait Types extends api.Types { self: SymbolTable =>
private var globalGlbDepth = 0
private final val globalGlbLimit = 2
- /** The greatest lower bound wrt <:< of a list of types */
+ /** The greatest lower bound of a list of types (as determined by `<:<`). */
def glb(ts: List[Type]): Type = elimSuper(ts) match {
case List() => AnyClass.tpe
case List(t) => t
case ts0 =>
- Statistics.incCounter(lubCount)
- val start = Statistics.pushTimer(typeOpsStack, lubNanos)
+ if (Statistics.canEnable) Statistics.incCounter(lubCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null
try {
glbNorm(ts0, lubDepth(ts0))
} finally {
lubResults.clear()
glbResults.clear()
- Statistics.popTimer(typeOpsStack, start)
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
}
}
@@ -6582,8 +6695,8 @@ trait Types extends api.Types { self: SymbolTable =>
case ts0 => glbNorm(ts0, depth)
}
- /** The greatest lower bound wrt <:< of a list of types, which have been normalized
- * wrt elimSuper */
+ /** The greatest lower bound of a list of types (as determined by `<:<`), which have been normalized
+ * with regard to `elimSuper`. */
protected def glbNorm(ts: List[Type], depth: Int): Type = {
def glb0(ts0: List[Type]): Type = ts0 match {
case List() => AnyClass.tpe
@@ -6592,8 +6705,8 @@ trait Types extends api.Types { self: SymbolTable =>
val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
tparam.cloneSymbol.setInfo(lub(bounds, depth)))
PolyType(tparams1, glbNorm(matchingInstTypes(ts, tparams1), depth))
- case ts @ MethodType(params, _) :: rest =>
- MethodType(params, glbNorm(matchingRestypes(ts, params map (_.tpe)), depth))
+ case ts @ (mt @ MethodType(params, _)) :: rest =>
+ MethodType(params, glbNorm(matchingRestypes(ts, mt.paramTypes), depth))
case ts @ NullaryMethodType(_) :: rest =>
NullaryMethodType(glbNorm(matchingRestypes(ts, Nil), depth))
case ts @ TypeBounds(_, _) :: rest =>
@@ -6687,12 +6800,12 @@ trait Types extends api.Types { self: SymbolTable =>
}
// if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG
- Statistics.incCounter(nestedLubCount)
+ if (Statistics.canEnable) Statistics.incCounter(nestedLubCount)
val res = glb0(ts)
// if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG
- if (ts exists (_.isNotNull)) res.notNull else res
+ if (ts exists typeIsNotNull) res.notNull else res
}
/** A list of the typevars in a type. */
@@ -6734,7 +6847,7 @@ trait Types extends api.Types { self: SymbolTable =>
// special treatment for lubs of array types after erasure:
// if argss contain one value type and some other type, the lub is Object
// if argss contain several reference types, the lub is an array over lub of argtypes
- if (argss exists (_.isEmpty)) {
+ if (argss exists typeListIsEmpty) {
None // something is wrong: an array without a type arg.
} else {
val args = argss map (_.head)
@@ -6856,7 +6969,7 @@ trait Types extends api.Types { self: SymbolTable =>
*/
private def matchingRestypes(tps: List[Type], pts: List[Type]): List[Type] =
tps map {
- case MethodType(params1, res) if (isSameTypes(params1 map (_.tpe), pts)) =>
+ case mt @ MethodType(params1, res) if isSameTypes(mt.paramTypes, pts) =>
res
case NullaryMethodType(res) if pts.isEmpty =>
res
@@ -6927,7 +7040,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
// Add serializable to a list of parents, unless one of them already is
def addSerializable(ps: Type*): List[Type] = (
- if (ps exists (_ <:< SerializableClass.tpe)) ps.toList
+ if (ps exists typeIsSubTypeOfSerializable) ps.toList
else (ps :+ SerializableClass.tpe).toList
)
@@ -6968,6 +7081,35 @@ trait Types extends api.Types { self: SymbolTable =>
tostringRecursions -= 1
}
+// ----- Hoisted closures and convenience methods, for compile time reductions -------
+
+ private[scala] val typeIsNotNull = (tp: Type) => tp.isNotNull
+ private[scala] val isTypeVar = (tp: Type) => tp.isInstanceOf[TypeVar]
+ private[scala] val typeContainsTypeVar = (tp: Type) => tp exists isTypeVar
+ private[scala] val typeIsNonClassType = (tp: Type) => tp.typeSymbolDirect.isNonClassType
+ private[scala] val typeIsExistentiallyBound = (tp: Type) => tp.typeSymbol.isExistentiallyBound
+ private[scala] val typeIsErroneous = (tp: Type) => tp.isErroneous
+ private[scala] val typeIsError = (tp: Type) => tp.isError
+ private[scala] val typeHasAnnotations = (tp: Type) => tp.annotations.nonEmpty
+ private[scala] val boundsContainType = (bounds: TypeBounds, tp: Type) => bounds containsType tp
+ private[scala] val typeListIsEmpty = (ts: List[Type]) => ts.isEmpty
+ private[scala] val typeIsSubTypeOfSerializable = (tp: Type) => tp <:< SerializableClass.tpe
+ private[scala] val typeIsNothing = (tp: Type) => tp.typeSymbolDirect eq NothingClass
+ private[scala] val typeIsAny = (tp: Type) => tp.typeSymbolDirect eq AnyClass
+ private[scala] val typeIsHigherKinded = (tp: Type) => tp.isHigherKinded
+
+ @tailrec private def typesContain(tps: List[Type], sym: Symbol): Boolean = tps match {
+ case tp :: rest => (tp contains sym) || typesContain(rest, sym)
+ case _ => false
+ }
+
+ @tailrec private def areTrivialTypes(tps: List[Type]): Boolean = tps match {
+ case tp :: rest => tp.isTrivial && areTrivialTypes(rest)
+ case _ => true
+ }
+
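Hoisting these predicates into vals means each function object is allocated once per SymbolTable rather than at every `exists`/`filterNot` call site; under the Scala 2.10-era lambda encoding, even a non-capturing literal typically allocates a fresh object each time it is evaluated. A small before/after sketch with a made-up predicate:

    class Example {
      private val isZero = (n: Int) => n == 0                         // one allocation, reused

      def hasZeroHoisted(xs: List[Int]): Boolean = xs exists isZero
      def hasZeroInline(xs: List[Int]): Boolean  = xs exists (_ == 0) // fresh closure per call
    }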
+// -------------- Classtags --------------------------------------------------------
+
implicit val AnnotatedTypeTag = ClassTag[AnnotatedType](classOf[AnnotatedType])
implicit val BoundedWildcardTypeTag = ClassTag[BoundedWildcardType](classOf[BoundedWildcardType])
implicit val ClassInfoTypeTag = ClassTag[ClassInfoType](classOf[ClassInfoType])
@@ -6986,7 +7128,10 @@ trait Types extends api.Types { self: SymbolTable =>
implicit val TypeRefTag = ClassTag[TypeRef](classOf[TypeRef])
implicit val TypeTagg = ClassTag[Type](classOf[Type])
+// -------------- Statistics --------------------------------------------------------
+
Statistics.newView("#unique types") { if (uniques == null) 0 else uniques.size }
+
}
object TypesStats {
@@ -7014,9 +7159,11 @@ object TypesStats {
val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount)
val typeOpsStack = Statistics.newTimerStack()
+  /** Commented out, because right now this does not inline, so it creates a closure which will distort statistics
@inline final def timedTypeOp[T](c: Statistics.StackableTimer)(op: => T): T = {
val start = Statistics.pushTimer(typeOpsStack, c)
try op
- finally Statistics.popTimer(typeOpsStack, start)
+    finally Statistics.popTimer(typeOpsStack, start)
}
+ */
}
diff --git a/src/reflect/scala/reflect/internal/package.scala b/src/reflect/scala/reflect/internal/package.scala
deleted file mode 100644
index 99b837152d..0000000000
--- a/src/reflect/scala/reflect/internal/package.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-package scala.reflect
-
-package object internal {
-
- type MirrorOf[U <: base.Universe with Singleton] = base.MirrorOf[U]
-}
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
index 55746f414b..b158a1ac26 100644
--- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -14,13 +14,13 @@ import java.lang.Double.longBitsToDouble
import Flags._
import PickleFormat._
import scala.collection.{ mutable, immutable }
-import collection.mutable.ListBuffer
-import annotation.switch
+import scala.collection.mutable.ListBuffer
+import scala.annotation.switch
/** @author Martin Odersky
* @version 1.0
*/
-abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
+abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ {
val global: SymbolTable
import global._
@@ -230,9 +230,11 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
fromName(nme.expandedName(name.toTermName, owner)) orElse {
// (3) Try as a nested object symbol.
nestedObjectSymbol orElse {
- // (4) Otherwise, fail.
- //System.err.println("missing "+name+" in "+owner+"/"+owner.id+" "+owner.info.decls)
- adjust(errorMissingRequirement(name, owner))
+ // (4) Call the mirror's "missing" hook.
+ adjust(mirrorThatLoaded(owner).missingHook(owner, name)) orElse {
+ // (5) Create a stub symbol to defer hard failure a little longer.
+ owner.newStubSymbol(name)
+ }
}
}
}
@@ -444,7 +446,7 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
private def readArrayAnnot() = {
readByte() // skip the `annotargarray` tag
val end = readNat() + readIndex
- until(end, () => readClassfileAnnotArg(readNat())).toArray(ClassfileAnnotArgTag)
+ until(end, () => readClassfileAnnotArg(readNat())).toArray(JavaArgumentTag)
}
protected def readClassfileAnnotArg(i: Int): ClassfileAnnotArg = bytes(index(i)) match {
case ANNOTINFO => NestedAnnotArg(at(i, readAnnotation))
@@ -767,8 +769,21 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
}
/* Read a reference to a pickled item */
+  protected def readSymbolRef(): Symbol = { //OPT inlined from: at(readNat(), readSymbol) to save on closure creation
+ val i = readNat()
+ var r = entries(i)
+ if (r eq null) {
+ val savedIndex = readIndex
+ readIndex = index(i)
+ r = readSymbol()
+ assert(entries(i) eq null, entries(i))
+ entries(i) = r
+ readIndex = savedIndex
+ }
+ r.asInstanceOf[Symbol]
+ }
+
protected def readNameRef(): Name = at(readNat(), readName)
- protected def readSymbolRef(): Symbol = at(readNat(), readSymbol)
protected def readTypeRef(): Type = at(readNat(), () => readType()) // after the NMT_TRANSITION period, we can leave off the () => ... ()
protected def readConstantRef(): Constant = at(readNat(), readConstant)
protected def readAnnotationRef(): AnnotationInfo = at(readNat(), readAnnotation)
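readSymbolRef now spells out what `at(readNat(), readSymbol)` did behind a closure: consult the entry table, and only on a miss jump to the entry's offset, parse it, memoize it, and restore the read cursor. A simplified sketch of that memoized random-access read (all names are stand-ins, not the pickler's fields):

    // index(i) is the offset of entry i; readEntryAt stands in for the real parser.
    final class MiniUnpickler(index: Array[Int]) {
      private val entries = new Array[AnyRef](index.length)
      private var readIndex = 0

      private def readEntryAt(offset: Int): AnyRef = s"entry@$offset"

      def readRef(i: Int): AnyRef = {
        var r = entries(i)
        if (r eq null) {            // first reference to entry i: parse and memoize it
          val saved = readIndex
          readIndex = index(i)
          r = readEntryAt(readIndex)
          entries(i) = r
          readIndex = saved         // put the cursor back where it was
        }
        r
      }
    }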
@@ -833,7 +848,7 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
}
/** A lazy type which when completed returns type at index `i`. */
- private class LazyTypeRef(i: Int) extends LazyType {
+ private class LazyTypeRef(i: Int) extends LazyType with FlagAgnosticCompleter {
private val definedAtRunId = currentRunId
private val p = phase
override def complete(sym: Symbol) : Unit = try {
diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala
index 368d55a59c..977398909f 100644
--- a/src/reflect/scala/reflect/internal/transform/Erasure.scala
+++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala
@@ -141,7 +141,7 @@ trait Erasure {
if (restpe.typeSymbol == UnitClass) erasedTypeRef(UnitClass)
// this replaces each typeref that refers to an argument
// by the type `p.tpe` of the actual argument p (p in params)
- else apply(mt.resultType(params map (_.tpe))))
+ else apply(mt.resultType(mt.paramTypes)))
case RefinedType(parents, decls) =>
apply(mergeParents(parents))
case AnnotatedType(_, atp, _) =>
@@ -203,28 +203,26 @@ trait Erasure {
def specialErasure(sym: Symbol)(tp: Type): Type =
if (sym != NoSymbol && sym.enclClass.isJavaDefined)
erasure(sym)(tp)
- else if (sym.isTerm && sym.owner.isDerivedValueClass)
- specialErasureAvoiding(sym.owner, tp)
- else if (sym.isValue && sym.owner.isMethodWithExtension)
- specialErasureAvoiding(sym.owner.owner, tp)
+ else if (sym.isClassConstructor)
+ specialConstructorErasure(sym.owner, tp)
else
specialScalaErasure(tp)
- def specialErasureAvoiding(clazz: Symbol, tpe: Type): Type = {
+ def specialConstructorErasure(clazz: Symbol, tpe: Type): Type = {
tpe match {
case PolyType(tparams, restpe) =>
- specialErasureAvoiding(clazz, restpe)
+ specialConstructorErasure(clazz, restpe)
case ExistentialType(tparams, restpe) =>
- specialErasureAvoiding(clazz, restpe)
+ specialConstructorErasure(clazz, restpe)
case mt @ MethodType(params, restpe) =>
MethodType(
- cloneSymbolsAndModify(params, specialErasureAvoiding(clazz, _)),
- if (restpe.typeSymbol == UnitClass) erasedTypeRef(UnitClass)
- else specialErasureAvoiding(clazz, (mt.resultType(params map (_.tpe)))))
+ cloneSymbolsAndModify(params, specialScalaErasure),
+ specialConstructorErasure(clazz, restpe))
case TypeRef(pre, `clazz`, args) =>
typeRef(pre, clazz, List())
- case _ =>
- specialScalaErasure(tpe)
+ case tp =>
+ assert(clazz == ArrayClass || tp.isError, s"unexpected constructor erasure $tp for $clazz")
+ specialScalaErasure(tp)
}
}
diff --git a/src/reflect/scala/reflect/internal/transform/Transforms.scala b/src/reflect/scala/reflect/internal/transform/Transforms.scala
index c4c5dc3a1c..71cc80895d 100644
--- a/src/reflect/scala/reflect/internal/transform/Transforms.scala
+++ b/src/reflect/scala/reflect/internal/transform/Transforms.scala
@@ -2,7 +2,7 @@ package scala.reflect
package internal
package transform
-import language.existentials
+import scala.language.existentials
trait Transforms { self: SymbolTable =>
@@ -38,4 +38,4 @@ trait Transforms { self: SymbolTable =>
def transformedType(tpe: Type) =
erasure.scalaErasure(uncurry.uncurry(tpe))
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala
index 2ac0e64edd..14b5d3003d 100644
--- a/src/reflect/scala/reflect/internal/util/Collections.scala
+++ b/src/reflect/scala/reflect/internal/util/Collections.scala
@@ -69,7 +69,7 @@ trait Collections {
}
lb.toList
}
-
+
final def flatCollect[A, B](elems: List[A])(pf: PartialFunction[A, Traversable[B]]): List[B] = {
val lb = new ListBuffer[B]
for (x <- elems ; if pf isDefinedAt x)
@@ -104,7 +104,7 @@ trait Collections {
index += 1
}
}
-
+
// @inline
final def findOrElse[A](xs: TraversableOnce[A])(p: A => Boolean)(orElse: => A): A = {
xs find p getOrElse orElse
diff --git a/src/reflect/scala/reflect/internal/util/HashSet.scala b/src/reflect/scala/reflect/internal/util/HashSet.scala
index 51e540e235..0d0f16372c 100644
--- a/src/reflect/scala/reflect/internal/util/HashSet.scala
+++ b/src/reflect/scala/reflect/internal/util/HashSet.scala
@@ -13,7 +13,7 @@ object HashSet {
new HashSet[T](label, initialCapacity)
}
-class HashSet[T >: Null <: AnyRef](val label: String, initialCapacity: Int) extends Set[T] with collection.generic.Clearable {
+class HashSet[T >: Null <: AnyRef](val label: String, initialCapacity: Int) extends Set[T] with scala.collection.generic.Clearable {
private var used = 0
private var table = new Array[AnyRef](initialCapacity)
private def index(x: Int): Int = math.abs(x % table.length)
diff --git a/src/reflect/scala/reflect/internal/util/Position.scala b/src/reflect/scala/reflect/internal/util/Position.scala
index e4c6e4aca1..151a64daff 100644
--- a/src/reflect/scala/reflect/internal/util/Position.scala
+++ b/src/reflect/scala/reflect/internal/util/Position.scala
@@ -6,9 +6,8 @@
package scala.reflect.internal.util
-import reflect.ClassTag
-import reflect.base.Attachments
-import reflect.api.PositionApi
+import scala.reflect.ClassTag
+import scala.reflect.api.Attachments
object Position {
val tabInc = 8
@@ -36,7 +35,7 @@ object Position {
}
}
-abstract class Position extends PositionApi { self =>
+abstract class Position extends scala.reflect.api.Position { self =>
type Pos = Position
@@ -44,16 +43,6 @@ abstract class Position extends PositionApi { self =>
def withPos(newPos: Position): Attachments { type Pos = self.Pos } = newPos
- /** Java file corresponding to the source file of this position.
- */
- // necessary for conformance with scala.reflect.api.Position
- def fileInfo: java.io.File = source.file.file
-
- /** Contents of the source file that contains this position.
- */
- // necessary for conformance with scala.reflect.api.Position
- def fileContent: Array[Char] = source.content
-
/** An optional value containing the source file referred to by this position, or
* None if not defined.
*/
@@ -274,4 +263,4 @@ class TransparentPosition(source: SourceFile, start: Int, point: Int, end: Int)
override def isTransparent = true
override def makeTransparent = this
override def show = "<"+start+":"+end+">"
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/internal/util/SourceFile.scala b/src/reflect/scala/reflect/internal/util/SourceFile.scala
index 747c1ad298..788c7532d1 100644
--- a/src/reflect/scala/reflect/internal/util/SourceFile.scala
+++ b/src/reflect/scala/reflect/internal/util/SourceFile.scala
@@ -6,9 +6,9 @@
package scala.reflect.internal.util
-import scala.tools.nsc.io.{ AbstractFile, VirtualFile }
+import scala.reflect.io.{ AbstractFile, VirtualFile }
import scala.collection.mutable.ArrayBuffer
-import annotation.tailrec
+import scala.annotation.tailrec
import java.util.regex.Pattern
import java.io.IOException
import scala.reflect.internal.Chars._
@@ -107,15 +107,15 @@ class BatchSourceFile(val file : AbstractFile, val content0: Array[Char]) extend
def this(sourceName: String, cs: Seq[Char]) = this(new VirtualFile(sourceName), cs.toArray)
def this(file: AbstractFile, cs: Seq[Char]) = this(file, cs.toArray)
- // If non-whitespace tokens run all the way up to EOF,
- // positions go wrong because the correct end of the last
- // token cannot be used as an index into the char array.
- // The least painful way to address this was to add a
- // newline to the array.
- val content = (
- if (content0.length == 0 || !content0.last.isWhitespace)
- content0 :+ '\n'
- else content0
+ // If non-whitespace tokens run all the way up to EOF,
+ // positions go wrong because the correct end of the last
+ // token cannot be used as an index into the char array.
+ // The least painful way to address this was to add a
+ // newline to the array.
+ val content = (
+ if (content0.length == 0 || !content0.last.isWhitespace)
+ content0 :+ '\n'
+ else content0
)
val length = content.length
def start = 0
diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala
index e503d812e6..2c90d2d525 100644
--- a/src/reflect/scala/reflect/internal/util/Statistics.scala
+++ b/src/reflect/scala/reflect/internal/util/Statistics.scala
@@ -1,6 +1,6 @@
package scala.reflect.internal.util
-import collection.mutable
+import scala.collection.mutable
object Statistics {
@@ -237,6 +237,23 @@ quant)
private var _enabled = false
private val qs = new mutable.HashMap[String, Quantity]
+ /** replace with
+ *
+ * final val canEnable = false
+ *
+ * to remove all Statistics code from build
+ */
+ final val canEnable = _enabled
+
+ /** replace with
+ *
+ * final def hotEnabled = _enabled
+ *
+ * and rebuild, to also count tiny but super-hot methods
+ * such as phase, flags, owner, name.
+ */
+ final val hotEnabled = false
+
def enabled = _enabled
def enabled_=(cond: Boolean) = {
if (cond && !_enabled) {
@@ -253,9 +270,4 @@ quant)
_enabled = true
}
}
-
- /** replace rhs with enabled and rebuild to also count tiny but super-hot methods
- * such as phase, flags, owner, name.
- */
- final val hotEnabled = false
}
diff --git a/src/reflect/scala/reflect/internal/util/TableDef.scala b/src/reflect/scala/reflect/internal/util/TableDef.scala
index 2e60ce3bcc..8e2bcc2ff7 100644
--- a/src/reflect/scala/reflect/internal/util/TableDef.scala
+++ b/src/reflect/scala/reflect/internal/util/TableDef.scala
@@ -1,7 +1,7 @@
package scala.reflect.internal.util
import TableDef._
-import language.postfixOps
+import scala.language.postfixOps
/** A class for representing tabular data in a way that preserves
* its inner beauty. See Exceptional for an example usage.
diff --git a/src/reflect/scala/reflect/internal/util/ThreeValues.scala b/src/reflect/scala/reflect/internal/util/ThreeValues.scala
new file mode 100644
index 0000000000..f89bd9e199
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/ThreeValues.scala
@@ -0,0 +1,14 @@
+package scala.reflect.internal.util
+
+/** A simple three-value type for booleans with an unknown value */
+object ThreeValues {
+
+ type ThreeValue = Byte
+
+ final val YES = 1
+ final val NO = -1
+ final val UNKNOWN = 0
+
+ def fromBoolean(b: Boolean): ThreeValue = if (b) YES else NO
+ def toBoolean(x: ThreeValue): Boolean = x == YES
+}
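This new utility is what backs the cached `isTrivial` and `isDependentMethodType` flags in Types.scala earlier in the patch: one byte memoizes an expensive Boolean without the bitmap field and synchronization a `lazy val` would bring. A usage sketch (Node and its check are illustrative only):

    import scala.reflect.internal.util.ThreeValues._

    class Node {
      private var trivialCache: ThreeValue = UNKNOWN

      private def expensiveCheck(): Boolean = true    // stands in for the real computation

      def isTrivialNode: Boolean = {
        if (trivialCache == UNKNOWN) trivialCache = fromBoolean(expensiveCheck())
        toBoolean(trivialCache)
      }
    }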
diff --git a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
index cecf8e4658..fa83f70f3a 100644
--- a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
+++ b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
@@ -2,7 +2,7 @@ package scala.reflect.internal
package util
import scala.collection.{ mutable, immutable }
-import language.postfixOps
+import scala.language.postfixOps
trait TraceSymbolActivity {
val global: SymbolTable
@@ -108,12 +108,12 @@ trait TraceSymbolActivity {
sym.name.decode + "#" + sym.id
}
- private def freq[T, U](xs: collection.Traversable[T])(fn: T => U): List[(U, Int)] = {
+ private def freq[T, U](xs: scala.collection.Traversable[T])(fn: T => U): List[(U, Int)] = {
val ys = xs groupBy fn mapValues (_.size)
ys.toList sortBy (-_._2)
}
- private def showMapFreq[T](xs: collection.Map[T, Traversable[_]])(showFn: T => String) {
+ private def showMapFreq[T](xs: scala.collection.Map[T, Traversable[_]])(showFn: T => String) {
xs.mapValues(_.size).toList.sortBy(-_._2) take 100 foreach { case (k, size) =>
show(size, showFn(k))
}
diff --git a/src/reflect/scala/tools/nsc/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala
index 8d55b708b1..e32207c58c 100644
--- a/src/reflect/scala/tools/nsc/io/AbstractFile.scala
+++ b/src/reflect/scala/reflect/io/AbstractFile.scala
@@ -4,7 +4,7 @@
*/
-package scala.tools.nsc
+package scala.reflect
package io
import java.io.{ FileOutputStream, IOException, InputStream, OutputStream, BufferedOutputStream }
@@ -13,6 +13,10 @@ import java.net.URL
import scala.collection.mutable.ArrayBuffer
/**
+ * An abstraction over files for use in the reflection/compiler libraries.
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
+ *
* @author Philippe Altherr
* @version 1.0, 23/03/2004
*/
@@ -81,8 +85,10 @@ object AbstractFile {
* all other cases, the class <code>SourceFile</code> is used, which honors
* <code>global.settings.encoding.value</code>.
* </p>
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
-abstract class AbstractFile extends reflect.internal.AbstractFileApi with Iterable[AbstractFile] {
+abstract class AbstractFile extends Iterable[AbstractFile] {
/** Returns the name of this abstract file. */
def name: String
diff --git a/src/reflect/scala/tools/nsc/io/Directory.scala b/src/reflect/scala/reflect/io/Directory.scala
index ebd6edc8d8..a24534137d 100644
--- a/src/reflect/scala/tools/nsc/io/Directory.scala
+++ b/src/reflect/scala/reflect/io/Directory.scala
@@ -6,11 +6,13 @@
** |/ **
\* */
-package scala.tools.nsc
+package scala.reflect
package io
import java.io.{ File => JFile }
-
+/**
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
+ */
object Directory {
import scala.util.Properties.{ tmpDir, userHome, userDir }
@@ -34,6 +36,8 @@ import Path._
*
* @author Paul Phillips
* @since 2.8
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
class Directory(jfile: JFile) extends Path(jfile) {
override def toAbsolute: Directory = if (isAbsolute) this else super.toAbsolute.toDirectory
diff --git a/src/reflect/scala/tools/nsc/io/File.scala b/src/reflect/scala/reflect/io/File.scala
index 1f3cac7ee1..9e306371f7 100644
--- a/src/reflect/scala/tools/nsc/io/File.scala
+++ b/src/reflect/scala/reflect/io/File.scala
@@ -7,7 +7,7 @@
\* */
-package scala.tools.nsc
+package scala.reflect
package io
import java.io.{
@@ -16,8 +16,10 @@ import java.io.{
import java.io.{ File => JFile }
import java.nio.channels.{ Channel, FileChannel }
import scala.io.Codec
-import language.{reflectiveCalls, implicitConversions}
-
+import scala.language.{reflectiveCalls, implicitConversions}
+/**
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
+ */
object File {
def pathSeparator = java.io.File.pathSeparator
def separator = java.io.File.separator
@@ -74,6 +76,8 @@ import Path._
*
* @author Paul Phillips
* @since 2.8
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) with Streamable.Chars {
override val creationCodec = constructorCodec
diff --git a/src/reflect/scala/tools/nsc/io/FileOperationException.scala b/src/reflect/scala/reflect/io/FileOperationException.scala
index f23658efbc..6bce799cea 100644
--- a/src/reflect/scala/tools/nsc/io/FileOperationException.scala
+++ b/src/reflect/scala/reflect/io/FileOperationException.scala
@@ -7,7 +7,7 @@
\* */
-package scala.tools.nsc
+package scala.reflect
package io
-
+/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
case class FileOperationException(msg: String) extends RuntimeException(msg)
diff --git a/src/reflect/scala/tools/nsc/io/NoAbstractFile.scala b/src/reflect/scala/reflect/io/NoAbstractFile.scala
index 2af933c27b..d503328a37 100644
--- a/src/reflect/scala/tools/nsc/io/NoAbstractFile.scala
+++ b/src/reflect/scala/reflect/io/NoAbstractFile.scala
@@ -3,7 +3,7 @@
* @author Paul Phillips
*/
-package scala.tools.nsc
+package scala.reflect
package io
import java.io.InputStream
@@ -11,6 +11,8 @@ import java.io.{ File => JFile }
/** A distinguished object so you can avoid both null
* and Option.
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
object NoAbstractFile extends AbstractFile {
def absolute: AbstractFile = this
diff --git a/src/reflect/scala/tools/nsc/io/Path.scala b/src/reflect/scala/reflect/io/Path.scala
index bfad4b93c5..9a1ff395a3 100644
--- a/src/reflect/scala/tools/nsc/io/Path.scala
+++ b/src/reflect/scala/reflect/io/Path.scala
@@ -3,7 +3,7 @@
* @author Paul Phillips
*/
-package scala.tools.nsc
+package scala.reflect
package io
import java.io.{
@@ -12,7 +12,7 @@ import java.io.{
import java.io.{ File => JFile }
import java.net.{ URI, URL }
import scala.util.Random.alphanumeric
-import language.implicitConversions
+import scala.language.implicitConversions
/** An abstraction for filesystem paths. The differences between
* Path, File, and Directory are primarily to communicate intent.
@@ -27,8 +27,9 @@ import language.implicitConversions
*
* @author Paul Phillips
* @since 2.8
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
-
object Path {
def isExtensionJarOrZip(jfile: JFile): Boolean = isExtensionJarOrZip(jfile.getName)
def isExtensionJarOrZip(name: String): Boolean = {
@@ -43,8 +44,6 @@ object Path {
if (i < 0) ""
else name.substring(i + 1).toLowerCase
}
- // [Eugene++] I hope that noone relied on this method
-// def isJarOrZip(f: Path, examineFile: Boolean = true) = Jar.isJarOrZip(f, examineFile)
// not certain these won't be problematic, but looks good so far
implicit def string2path(s: String): Path = apply(s)
@@ -85,6 +84,8 @@ import Path._
/** The Path constructor is private so we can enforce some
* semantics regarding how a Path might relate to the world.
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
class Path private[io] (val jfile: JFile) {
val separator = java.io.File.separatorChar
diff --git a/src/reflect/scala/tools/nsc/io/PlainFile.scala b/src/reflect/scala/reflect/io/PlainFile.scala
index a4f378ad5e..14cb09317c 100644
--- a/src/reflect/scala/tools/nsc/io/PlainFile.scala
+++ b/src/reflect/scala/reflect/io/PlainFile.scala
@@ -4,12 +4,12 @@
*/
-package scala.tools.nsc
+package scala.reflect
package io
import java.io.{ FileInputStream, FileOutputStream, IOException }
import PartialFunction._
-
+/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
object PlainFile {
/**
* If the specified File exists, returns an abstract file backed
@@ -20,7 +20,7 @@ object PlainFile {
else if (file.isFile) new PlainFile(file)
else null
}
-
+/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) {
override def isDirectory = true
override def iterator = givenPath.list filter (_.exists) map (x => new PlainFile(x))
@@ -28,6 +28,8 @@ class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) {
}
/** This class implements an abstract file backed by a File.
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
class PlainFile(val givenPath: Path) extends AbstractFile {
assert(path ne null)
diff --git a/src/reflect/scala/tools/nsc/io/Streamable.scala b/src/reflect/scala/reflect/io/Streamable.scala
index ff770bd396..a083890e09 100644
--- a/src/reflect/scala/tools/nsc/io/Streamable.scala
+++ b/src/reflect/scala/reflect/io/Streamable.scala
@@ -3,26 +3,29 @@
* @author Paul Phillips
*/
-package scala.tools.nsc
+package scala.reflect
package io
import java.net.{ URI, URL }
import java.io.{ BufferedInputStream, InputStream, PrintStream }
import java.io.{ BufferedReader, InputStreamReader, Closeable => JCloseable }
import scala.io.{ Codec, BufferedSource, Source }
-import collection.mutable.ArrayBuffer
+import scala.collection.mutable.ArrayBuffer
import Path.fail
/** Traits for objects which can be represented as Streams.
*
* @author Paul Phillips
* @since 2.8
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
-
object Streamable {
/** Traits which can be viewed as a sequence of bytes. Source types
* which know their length should override def length: Long for more
* efficient method implementations.
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
trait Bytes {
def inputStream(): InputStream
@@ -66,6 +69,8 @@ object Streamable {
}
/** For objects which can be viewed as Chars.
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
trait Chars extends Bytes {
/** Calls to methods requiring byte<->char transformations should be offered
diff --git a/src/reflect/scala/tools/nsc/io/VirtualDirectory.scala b/src/reflect/scala/reflect/io/VirtualDirectory.scala
index fa016f86f4..e71c5cbb6b 100644
--- a/src/reflect/scala/tools/nsc/io/VirtualDirectory.scala
+++ b/src/reflect/scala/reflect/io/VirtualDirectory.scala
@@ -2,7 +2,7 @@
* Copyright 2005-2012 LAMP/EPFL
*/
-package scala.tools.nsc
+package scala.reflect
package io
import scala.collection.mutable
@@ -11,6 +11,8 @@ import scala.collection.mutable
* An in-memory directory.
*
* @author Lex Spoon
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
class VirtualDirectory(val name: String, maybeContainer: Option[VirtualDirectory])
extends AbstractFile {
diff --git a/src/reflect/scala/tools/nsc/io/VirtualFile.scala b/src/reflect/scala/reflect/io/VirtualFile.scala
index be888e92e6..4884561f4e 100644
--- a/src/reflect/scala/tools/nsc/io/VirtualFile.scala
+++ b/src/reflect/scala/reflect/io/VirtualFile.scala
@@ -4,7 +4,7 @@
*/
-package scala.tools.nsc
+package scala.reflect
package io
import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, InputStream, OutputStream }
@@ -14,6 +14,8 @@ import java.io.{ File => JFile }
*
* @author Philippe Altherr
* @version 1.0, 23/03/2004
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
class VirtualFile(val name: String, override val path: String) extends AbstractFile {
/**
@@ -33,7 +35,7 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF
//########################################################################
// Private data
- private var content = new Array[Byte](0)
+ private var content = Array.emptyByteArray
//########################################################################
// Public Methods
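Editor's note: besides the package move, the VirtualFile hunk swaps `new Array[Byte](0)` for the preallocated `Array.emptyByteArray`. A tiny sketch of the same allocation-avoidance idiom, assuming callers never mutate the shared empty result; `EmptyArraySketch` is illustrative:

    object EmptyArraySketch {
      // reuse the shared empty array instead of allocating a fresh zero-length one
      def readOrEmpty(len: Int): Array[Byte] =
        if (len == 0) Array.emptyByteArray
        else new Array[Byte](len)
    }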
diff --git a/src/reflect/scala/tools/nsc/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala
index 852dba9ec8..2512c4d92f 100644
--- a/src/reflect/scala/tools/nsc/io/ZipArchive.scala
+++ b/src/reflect/scala/reflect/io/ZipArchive.scala
@@ -3,7 +3,7 @@
* @author Paul Phillips
*/
-package scala.tools.nsc
+package scala.reflect
package io
import java.net.URL
@@ -11,7 +11,7 @@ import java.io.{ IOException, InputStream, ByteArrayInputStream }
import java.io.{ File => JFile }
import java.util.zip.{ ZipEntry, ZipFile, ZipInputStream }
import scala.collection.{ immutable, mutable }
-import annotation.tailrec
+import scala.annotation.tailrec
/** An abstraction for zip files and streams. Everything is written the way
* it is for performance: we come through here a lot on every run. Be careful
@@ -20,6 +20,8 @@ import annotation.tailrec
* @author Philippe Altherr (original version)
* @author Paul Phillips (this one)
* @version 2.0,
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
object ZipArchive {
def fromPath(path: String): FileZipArchive = fromFile(new JFile(path))
@@ -57,7 +59,7 @@ object ZipArchive {
}
}
import ZipArchive._
-
+/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
abstract class ZipArchive(override val file: JFile) extends AbstractFile with Equals {
self =>
@@ -78,13 +80,14 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq
}
}
def deepIterator = walkIterator(iterator)
-
+ /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
sealed abstract class Entry(path: String) extends VirtualFile(baseName(path), path) {
// have to keep this name for compat with sbt's compiler-interface
def getArchive: ZipFile = null
override def underlyingSource = Some(self)
override def toString = self.path + "(" + path + ")"
}
+ /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
class DirEntry(path: String) extends Entry(path) {
val entries = mutable.HashMap[String, Entry]()
@@ -96,20 +99,31 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq
}
}
- private def ensureDir(dirs: mutable.Map[String, DirEntry], path: String, zipEntry: ZipEntry): DirEntry = {
- dirs.getOrElseUpdate(path, {
- val parent = ensureDir(dirs, dirName(path), null)
- val dir = new DirEntry(path)
- parent.entries(baseName(path)) = dir
- dir
- })
- }
+ private def ensureDir(dirs: mutable.Map[String, DirEntry], path: String, zipEntry: ZipEntry): DirEntry =
+ //OPT inlined from getOrElseUpdate; saves ~50K closures on test run.
+ // was:
+ // dirs.getOrElseUpdate(path, {
+ // val parent = ensureDir(dirs, dirName(path), null)
+ // val dir = new DirEntry(path)
+ // parent.entries(baseName(path)) = dir
+ // dir
+ // })
+ dirs get path match {
+ case Some(v) => v
+ case None =>
+ val parent = ensureDir(dirs, dirName(path), null)
+ val dir = new DirEntry(path)
+ parent.entries(baseName(path)) = dir
+ dirs(path) = dir
+ dir
+ }
+
protected def getDir(dirs: mutable.Map[String, DirEntry], entry: ZipEntry): DirEntry = {
if (entry.isDirectory) ensureDir(dirs, entry.getName, entry)
else ensureDir(dirs, dirName(entry.getName), null)
}
}
-
+/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
final class FileZipArchive(file: JFile) extends ZipArchive(file) {
def iterator: Iterator[Entry] = {
val zipFile = new ZipFile(file)
@@ -150,7 +164,7 @@ final class FileZipArchive(file: JFile) extends ZipArchive(file) {
case _ => false
}
}
-
+/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
final class URLZipArchive(val url: URL) extends ZipArchive(null) {
def iterator: Iterator[Entry] = {
val root = new DirEntry("/")
@@ -166,7 +180,7 @@ final class URLZipArchive(val url: URL) extends ZipArchive(null) {
class FileEntry() extends Entry(zipEntry.getName) {
override val toByteArray: Array[Byte] = {
val len = zipEntry.getSize().toInt
- val arr = new Array[Byte](len)
+ val arr = if (len == 0) Array.emptyByteArray else new Array[Byte](len)
var offset = 0
def loop() {
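Editor's note: the `ensureDir` rewrite above replaces `getOrElseUpdate` with an explicit lookup-then-update because the by-name default allocates a closure on every call; the OPT comment reports roughly 50K closures saved on a test run. A minimal sketch of the same pattern on a hypothetical cache (not the real `DirEntry` structure):

    import scala.collection.mutable

    object HotPathCacheSketch {
      // explicit get/update: no closure is allocated when the key is already present
      def cached(cache: mutable.Map[String, Int], key: String): Int =
        cache get key match {
          case Some(v) => v
          case None =>
            val v = key.length        // stand-in for the expensive construction
            cache(key) = v
            v
        }
    }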
diff --git a/src/reflect/scala/reflect/macros/Aliases.scala b/src/reflect/scala/reflect/macros/Aliases.scala
index 46b7066902..754335d50d 100644
--- a/src/reflect/scala/reflect/macros/Aliases.scala
+++ b/src/reflect/scala/reflect/macros/Aliases.scala
@@ -10,17 +10,24 @@ trait Aliases {
type TermName = universe.TermName
type TypeName = universe.TypeName
type Tree = universe.Tree
- // type Position = universe.Position
+ type Position = universe.Position
type Scope = universe.Scope
type Modifiers = universe.Modifiers
+ type Run = universe.Run
+ type CompilationUnit = universe.CompilationUnit
type Expr[+T] = universe.Expr[T]
val Expr = universe.Expr
+ def Expr[T: WeakTypeTag](tree: Tree): Expr[T]
- type AbsTypeTag[T] = universe.AbsTypeTag[T]
+ type WeakTypeTag[T] = universe.WeakTypeTag[T]
type TypeTag[T] = universe.TypeTag[T]
- val AbsTypeTag = universe.AbsTypeTag
+ val WeakTypeTag = universe.WeakTypeTag
val TypeTag = universe.TypeTag
+ def WeakTypeTag[T](tpe: Type): WeakTypeTag[T]
+ def TypeTag[T](tpe: Type): TypeTag[T]
+ def weakTypeTag[T](implicit attag: WeakTypeTag[T]) = attag
def typeTag[T](implicit ttag: TypeTag[T]) = ttag
+ def weakTypeOf[T](implicit attag: WeakTypeTag[T]): Type = attag.tpe
def typeOf[T](implicit ttag: TypeTag[T]): Type = ttag.tpe
}
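Editor's note: with `AbsTypeTag` renamed to `WeakTypeTag` and `weakTypeOf`/`WeakTypeTag` surfaced on the context, a macro implementation can summon the (possibly incomplete) type of its type parameter directly. A minimal sketch against the renamed aliases; `TypeNameMacro` and `nameOf` are illustrative names, not part of this patch:

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object TypeNameMacro {
      def nameOf[T]: String = macro nameOfImpl[T]

      def nameOfImpl[T: c.WeakTypeTag](c: Context): c.Expr[String] = {
        import c.universe._
        val tpe = c.weakTypeOf[T]   // was AbsTypeTag-based before this rename
        c.Expr[String](Literal(Constant(tpe.toString)))
      }
    }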
diff --git a/src/reflect/scala/reflect/macros/CapturedVariables.scala b/src/reflect/scala/reflect/macros/CapturedVariables.scala
deleted file mode 100644
index 60ed6f5e7b..0000000000
--- a/src/reflect/scala/reflect/macros/CapturedVariables.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-package scala.reflect
-package macros
-
-trait CapturedVariables {
- self: Context =>
-
- import mirror._
-
- /** Mark a variable as captured; i.e. force boxing in a *Ref type.
- */
- def captureVariable(vble: Symbol): Unit
-
- /** Mark given identifier as a reference to a captured variable itself
- * suppressing dereferencing with the `elem` field.
- */
- def referenceCapturedVariable(vble: Symbol): Tree
-
- /** Convert type of a captured variable to *Ref type.
- */
- def capturedVariableType(vble: Symbol): Type
-} \ No newline at end of file
diff --git a/src/reflect/scala/reflect/macros/Context.scala b/src/reflect/scala/reflect/macros/Context.scala
index 37c8f9057e..7a365ed37b 100644
--- a/src/reflect/scala/reflect/macros/Context.scala
+++ b/src/reflect/scala/reflect/macros/Context.scala
@@ -6,17 +6,13 @@ package macros
// the full context should include all traits from scala.reflect.macros (and probably reside in scala-compiler.jar)
trait Context extends Aliases
- with CapturedVariables
with Enclosures
- with Infrastructure
with Names
with Reifiers
with FrontEnds
- with Settings
+ with Infrastructure
with Typers
with Parsers
- with Exprs
- with TypeTags
with Evals
with ExprUtils {
@@ -24,7 +20,7 @@ trait Context extends Aliases
val universe: Universe
/** The mirror of the compile-time universe */
- val mirror: MirrorOf[universe.type]
+ val mirror: universe.Mirror
/** The type of the prefix tree from which the macro is selected */
type PrefixType
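Editor's note: the context now mixes in the slimmed-down traits above and types its compile-time mirror as `universe.Mirror`. For orientation, a minimal def-macro skeleton wired against this Context; `HelloMacro` and `hello` are illustrative names:

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object HelloMacro {
      def hello: String = macro helloImpl

      def helloImpl(c: Context): c.Expr[String] = {
        import c.universe._            // the compile-time universe exposed by the context
        c.Expr[String](Literal(Constant("hello from the macro context")))
      }
    }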
diff --git a/src/reflect/scala/reflect/macros/Enclosures.scala b/src/reflect/scala/reflect/macros/Enclosures.scala
index a07ff85a08..218cf6ebb3 100644
--- a/src/reflect/scala/reflect/macros/Enclosures.scala
+++ b/src/reflect/scala/reflect/macros/Enclosures.scala
@@ -47,4 +47,8 @@ trait Enclosures {
/** Compilation unit that contains this macro application.
*/
val enclosingUnit: CompilationUnit
+
+ /** Compilation run that contains this macro application.
+ */
+ val enclosingRun: Run
} \ No newline at end of file
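Editor's note: the new `enclosingRun` gives macro authors a handle that is stable for exactly one compiler invocation, which is what the `cache`/`globalCache` members removed from Infrastructure (below) approximated. A minimal, unsynchronized sketch of a hand-rolled per-run cache keyed on it; `MacroState` and the weak map are illustrative, not part of this patch:

    import scala.collection.mutable
    import scala.reflect.macros.Context

    object MacroState {
      // one small cache per compilation run; entries become collectable once the run is gone
      private val perRun = mutable.WeakHashMap[Any, mutable.Map[String, Int]]()

      def cacheFor(c: Context): mutable.Map[String, Int] =
        perRun.getOrElseUpdate(c.enclosingRun, mutable.Map.empty[String, Int])
    }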
diff --git a/src/reflect/scala/reflect/macros/Exprs.scala b/src/reflect/scala/reflect/macros/Exprs.scala
deleted file mode 100644
index ceaab06d12..0000000000
--- a/src/reflect/scala/reflect/macros/Exprs.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package scala.reflect
-package macros
-
-trait Exprs {
- self: Context =>
-
- def Expr[T: AbsTypeTag](tree: Tree): Expr[T]
-}
diff --git a/src/reflect/scala/reflect/macros/FrontEnds.scala b/src/reflect/scala/reflect/macros/FrontEnds.scala
index d15db0725f..e6b67cfc87 100644
--- a/src/reflect/scala/reflect/macros/FrontEnds.scala
+++ b/src/reflect/scala/reflect/macros/FrontEnds.scala
@@ -1,18 +1,9 @@
package scala.reflect
package macros
-trait FrontEnds extends scala.reflect.api.FrontEnds {
+trait FrontEnds {
self: Context =>
- import mirror._
-
- type Position = universe.Position
-
- /** Exposes means to control the compiler UI */
- def frontEnd: FrontEnd
- def setFrontEnd(frontEnd: FrontEnd): this.type
- def withFrontEnd[T](frontEnd: FrontEnd)(op: => T): T
-
/** For sending a message which should not be labeled as a warning/error,
* but also shouldn't require -verbose to be visible.
* Use ``enclosingPosition'' if you're in doubt what position to pass to ``pos''.
@@ -36,7 +27,4 @@ trait FrontEnds extends scala.reflect.api.FrontEnds {
* Use ``enclosingPosition'' if you're in doubt what position to pass to ``pos''.
*/
def abort(pos: Position, msg: String): Nothing
-
- /** Drops into interactive mode if supported by the compiler UI */
- def interactive(): Unit
} \ No newline at end of file
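Editor's note: FrontEnds no longer extends the compiler-facing `scala.reflect.api.FrontEnds`; what remains is the reporting surface against the `Position` alias, with `abort` visible in the hunk above. A minimal sketch of the usual validate-or-abort pattern; `RequirePositive` and the literal check are made up for illustration:

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object RequirePositive {
      def requirePositive(n: Int): Int = macro impl

      def impl(c: Context)(n: c.Expr[Int]): c.Expr[Int] = {
        import c.universe._
        n.tree match {
          case Literal(Constant(v: Int)) if v <= 0 =>
            // abort reports at the given position and stops this macro expansion
            c.abort(n.tree.pos, s"expected a positive literal, got $v")
          case _ => // non-literal arguments are left for runtime checks in this sketch
        }
        n
      }
    }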
diff --git a/src/reflect/scala/reflect/macros/Infrastructure.scala b/src/reflect/scala/reflect/macros/Infrastructure.scala
index 5ae2c08265..a1ef1c87a3 100644
--- a/src/reflect/scala/reflect/macros/Infrastructure.scala
+++ b/src/reflect/scala/reflect/macros/Infrastructure.scala
@@ -4,100 +4,16 @@ package macros
trait Infrastructure {
self: Context =>
- /** Determines whether the compiler expanding a macro targets JVM.
+ /** Exposes macro-specific settings as a list of strings.
+ * These settings are passed to the compiler via the "-Xmacro-settings:setting1,setting2...,settingN" command-line option.
*/
- val forJVM: Boolean
+ def settings: List[String]
- /** Determines whether the compiler expanding a macro targets CLR.
+ /** Exposes current compiler settings as a list of options.
+ * Use `scalac -help`, `scalac -X` and `scalac -Y` to learn about currently supported options.
*/
- val forMSIL: Boolean
+ def compilerSettings: List[String]
- /** Determines whether the compiler expanding a macro is a presentation compiler.
- */
- val forInteractive: Boolean
-
- /** Determines whether the compiler expanding a macro is a Scaladoc compiler.
- */
- val forScaladoc: Boolean
-
- /** Exposes current compilation run.
- */
- val currentRun: Run
-
- /** Exposes library classpath.
- */
- val libraryClassPath: List[java.net.URL]
-
- /** Exposes a classloader that corresponds to the library classpath.
- *
- * With this classloader you can perform on-the-fly evaluation of macro arguments.
- * For example, consider this code snippet:
- *
- * def staticEval[T](x: T) = macro staticEval[T]
- *
- * def staticEval[T](c: Context)(x: c.Expr[T]) = {
- * import scala.reflect.runtime.{universe => ru}
- * val mirror = ru.runtimeMirror(c.libraryClassLoader)
- * import scala.tools.reflect.ToolBox
- * val toolBox = mirror.mkToolBox()
- * val importer = ru.mkImporter(c.universe).asInstanceOf[ru.Importer { val from: c.universe.type }]
- * val tree = c.resetAllAttrs(x.tree.duplicate)
- * val imported = importer.importTree(tree)
- * val valueOfX = toolBox.runExpr(imported).asInstanceOf[T]
- * ...
- * }
- *
- * // [Eugene++] using this guy will tremendously slow down the compilation
- * // https://twitter.com/xeno_by/status/201248317831774208
- * // todo. we need to address this somehow
- */
- def libraryClassLoader: ClassLoader
-
- /** As seen by macro API, compilation run is an opaque type that can be deconstructed into:
- * 1) Current compilation unit
- * 2) List of all compilation units that comprise the run
- */
- type Run
-
- val Run: RunExtractor
-
- abstract class RunExtractor {
- def unapply(run: Run): Option[(CompilationUnit, List[CompilationUnit])]
- }
-
- /** As seen by macro API, compilation unit is an opaque type that can be deconstructed into:
- * 1) File that corresponds to the unit (if not applicable, null)
- * 2) Content of the file (if not applicable, empty array)
- * 3) Body, i.e. the AST that represents the compilation unit
- */
- type CompilationUnit
-
- val CompilationUnit: CompilationUnitExtractor
-
- abstract class CompilationUnitExtractor {
- def unapply(compilationUnit: CompilationUnit): Option[(java.io.File, Array[Char], Tree)]
- }
-
- /** Returns a macro definition which triggered this macro expansion.
- */
- val currentMacro: Symbol
-
- // todo. redo caches as discussed on Reflecting Meeting 2012/03/29
- // https://docs.google.com/document/d/1oUZGQpdt2qwioTlJcSt8ZFQwVLTvpxn8xa67P8OGVpU/edit
-
- /** A cache shared by all invocations of all macros across all compilation runs.
- *
- * Needs to be used with extreme care, since memory leaks here will swiftly crash the presentation compiler.
- * For example, Scala IDE typically launches a compiler run on every edit action so there might be hundreds of runs per minute.
- */
- val globalCache: collection.mutable.Map[Any, Any]
-
- /** A cache shared by all invocations of the same macro within a single compilation run.
- *
- * This cache is cleared automatically after a compilation run is completed or abandoned.
- * It is also specific to a particular macro definition.
- *
- * To share data between different macros and/or different compilation runs, use ``globalCache''.
- */
- val cache: collection.mutable.Map[Any, Any]
-}
+ /** Exposes current classpath. */
+ def classPath: List[java.net.URL]
+} \ No newline at end of file
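Editor's note: Infrastructure shrinks to the three queries kept above: `settings` (the `-Xmacro-settings:` values), `compilerSettings`, and `classPath`. A minimal sketch of reading a key out of `-Xmacro-settings`; the `verbose` key and the `traced` macro are made up for illustration:

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object SettingsMacro {
      def traced[T](body: T): T = macro impl[T]

      def impl[T](c: Context)(body: c.Expr[T]): c.Expr[T] = {
        // e.g. scalac -Xmacro-settings:verbose=true yields List("verbose=true") here
        val verbose = c.settings exists (_ == "verbose=true")
        if (verbose) println("expanding at " + c.enclosingPosition)
        body
      }
    }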
diff --git a/src/reflect/scala/reflect/macros/Parsers.scala b/src/reflect/scala/reflect/macros/Parsers.scala
index ea87c5842e..c2d4d8a3ab 100644
--- a/src/reflect/scala/reflect/macros/Parsers.scala
+++ b/src/reflect/scala/reflect/macros/Parsers.scala
@@ -5,14 +5,10 @@ trait Parsers {
self: Context =>
/** .. */
- // todo. distinguish between `parseExpr` and `parse`
+ // todo. distinguish between parsing an expression and parsing arbitrary code
+ // for example, parsing in expression mode will fail on packages
def parse(code: String): Tree
+}
- /** Represents an error during parsing
- */
- type ParseError <: Throwable
- val ParseError: ParseErrorExtractor
- abstract class ParseErrorExtractor {
- def unapply(error: ParseError): Option[(Position, String)]
- }
-} \ No newline at end of file
+// should be path-dependent, otherwise exception handling becomes a mess
+case class ParseError(val pos: scala.reflect.api.Position, val msg: String) extends Throwable(msg)
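Editor's note: `parse` stays, while the extractor-based `ParseError` becomes a plain (non-path-dependent) case class so it can be caught without juggling dependent types; the trade-off is spelled out in the comments above. A minimal sketch that parses a literal string and surfaces failures as macro errors; `ParseMacro` and `compileChecked` are illustrative names:

    import scala.language.experimental.macros
    import scala.reflect.macros.{Context, ParseError}

    object ParseMacro {
      def compileChecked(code: String): Unit = macro impl

      def impl(c: Context)(code: c.Expr[String]): c.Expr[Unit] = {
        import c.universe._
        code.tree match {
          case Literal(Constant(src: String)) =>
            // the new ParseError is an ordinary case class, so a plain catch works
            try c.parse(src)
            catch { case ParseError(_, msg) => c.abort(c.enclosingPosition, "not parseable: " + msg) }
          case _ =>
            c.abort(c.enclosingPosition, "expected a literal string")
        }
        c.Expr[Unit](Literal(Constant(())))
      }
    }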
diff --git a/src/reflect/scala/reflect/macros/Reifiers.scala b/src/reflect/scala/reflect/macros/Reifiers.scala
index 1bee17d548..ed31663c68 100644
--- a/src/reflect/scala/reflect/macros/Reifiers.scala
+++ b/src/reflect/scala/reflect/macros/Reifiers.scala
@@ -4,21 +4,11 @@ package macros
trait Reifiers {
self: Context =>
- /** Reification prefix that refers to the base reflexive universe, ``scala.reflect.basis''.
- * Providing it for the ``prefix'' parameter of ``reifyTree'' or ``reifyType'' will create a tree that can be inspected at runtime.
- */
- val basisUniverse: Tree
-
- /** Reification prefix that refers to the runtime reflexive universe, ``scala.reflect.runtime.universe''.
- * Providing it for the ``prefix'' parameter of ``reifyTree'' or ``reifyType'' will create a full-fledged tree that can be inspected at runtime.
- */
- val runtimeUniverse: Tree
-
/** Given a tree, generate a tree that when compiled and executed produces the original tree.
* For more information and examples see the documentation for ``Universe.reify''.
*
* The produced tree will be bound to the specified ``universe'' and ``mirror''.
- * Possible values for ``universe'' include ``basisUniverse'' and ``runtimeUniverse''.
+ * Possible values for ``universe'' include ``universe.treeBuild.mkRuntimeUniverseRef''.
* Possible values for ``mirror'' include ``EmptyTree'' (in that case the reifier will automatically pick an appropriate mirror).
*
* This function is deeply connected to ``Universe.reify'', a macro that reifies arbitrary expressions into runtime trees.
@@ -86,6 +76,6 @@ trait Reifiers {
// made these guys non path-dependent, otherwise exception handling quickly becomes a mess
-case class ReificationError(val pos: reflect.api.PositionApi, val msg: String) extends Throwable(msg)
+case class ReificationError(val pos: scala.reflect.api.Position, val msg: String) extends Throwable(msg)
-case class UnexpectedReificationError(val pos: reflect.api.PositionApi, val msg: String, val cause: Throwable = null) extends Throwable(msg, cause)
+case class UnexpectedReificationError(val pos: scala.reflect.api.Position, val msg: String, val cause: Throwable = null) extends Throwable(msg, cause)
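Editor's note: with the `basisUniverse`/`runtimeUniverse` prefixes gone, reified trees are bound through `universe.treeBuild.mkRuntimeUniverseRef` instead. From user code the usual entry point remains `Universe.reify`; a minimal runtime sketch (the `ReifySketch` object is illustrative):

    import scala.reflect.runtime.universe._

    object ReifySketch extends App {
      // reify produces an Expr whose tree describes the enclosed code
      val expr = reify { List(1, 2, 3) map (_ + 1) }
      println(expr.tree)   // the reified tree, rendered as code
    }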
diff --git a/src/reflect/scala/reflect/macros/Settings.scala b/src/reflect/scala/reflect/macros/Settings.scala
deleted file mode 100644
index 8d166056c3..0000000000
--- a/src/reflect/scala/reflect/macros/Settings.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-package scala.reflect
-package macros
-
-trait Settings {
- self: Context =>
-
- /** Exposes macro-specific settings as a list of strings.
- * These settings are passed to the compiler via the "-Xmacro-settings:setting1,setting2...,settingN" command-line option.
- */
- def settings: List[String]
-
- /** Exposes current compiler settings as a list of options.
- * Use `scalac -help`, `scalac -X` and `scalac -Y` to learn about currently supported options.
- */
- // [Eugene] ugly? yes, but I don't really fancy copy/pasting all our settings here and keep it synchronized at all times
- // why all settings? because macros need to be in full control of the stuff going on
- // maybe later we can implement a gettable/settable list of important settings, but for now let's leave it like that
- def compilerSettings: List[String]
-
- /** Updates current compiler settings with an option string.
- * Use `scalac -help`, `scalac -X` and `scalac -Y` to learn about currently supported options.
- * todo. http://groups.google.com/group/scala-internals/browse_thread/thread/07c18cff41f59203
- */
- def setCompilerSettings(options: String): this.type
-
- /** Updates current compiler settings with a list of options.
- * Use `scalac -help`, `scalac -X` and `scalac -Y` to learn about currently supported options.
- */
- def setCompilerSettings(options: List[String]): this.type
-
- /** Temporary sets compiler settings to a given option string and executes a given closure.
- * Use `scalac -help`, `scalac -X` and `scalac -Y` to learn about currently supported options.
- */
- def withCompilerSettings[T](options: String)(op: => T): T
-
- /** Temporary sets compiler settings to a given list of options and executes a given closure.
- * Use `scalac -help`, `scalac -X` and `scalac -Y` to learn about currently supported options.
- */
- def withCompilerSettings[T](options: List[String])(op: => T): T
-} \ No newline at end of file
diff --git a/src/reflect/scala/reflect/macros/TreeBuilder.scala b/src/reflect/scala/reflect/macros/TreeBuilder.scala
index 06f5caf68b..5f18ab9ee8 100644
--- a/src/reflect/scala/reflect/macros/TreeBuilder.scala
+++ b/src/reflect/scala/reflect/macros/TreeBuilder.scala
@@ -1,10 +1,6 @@
package scala.reflect
package macros
-// [Eugene] I added some stuff that was necessary for typetag materialization macros
-// but we should think it over and pick other generally useful stuff
-// same goes for tree traversers/transformers, type maps, etc
-// and once we expose all that, there's another question: how do we stay in sync?
abstract class TreeBuilder {
val global: Universe
@@ -57,4 +53,7 @@ abstract class TreeBuilder {
def mkMethodCall(receiver: Tree, method: Symbol, targs: List[Type], args: List[Tree]): Tree
def mkMethodCall(target: Tree, targs: List[Type], args: List[Tree]): Tree
def mkNullaryCall(method: Symbol, targs: List[Type]): Tree
+
+ /** A tree that refers to the runtime reflexive universe, ``scala.reflect.runtime.universe''. */
+ def mkRuntimeUniverseRef: Tree
}
diff --git a/src/reflect/scala/reflect/macros/TypeTags.scala b/src/reflect/scala/reflect/macros/TypeTags.scala
deleted file mode 100644
index 8f590d1de4..0000000000
--- a/src/reflect/scala/reflect/macros/TypeTags.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-package scala.reflect
-package macros
-
-trait TypeTags {
- self: Context =>
-
- def AbsTypeTag[T](tpe: Type): AbsTypeTag[T]
- def TypeTag[T](tpe: Type): TypeTag[T]
-}
diff --git a/src/reflect/scala/reflect/macros/Universe.scala b/src/reflect/scala/reflect/macros/Universe.scala
index 4074dd9e93..97d0a8d98a 100644
--- a/src/reflect/scala/reflect/macros/Universe.scala
+++ b/src/reflect/scala/reflect/macros/Universe.scala
@@ -7,10 +7,10 @@ abstract class Universe extends scala.reflect.api.Universe {
trait AttachableApi {
/** ... */
- def attachments: base.Attachments { type Pos = Position }
+ def attachments: scala.reflect.api.Attachments { type Pos = Position }
/** ... */
- def addAttachment(attachment: Any): AttachableApi.this.type
+ def updateAttachment[T: ClassTag](attachment: T): AttachableApi.this.type
/** ... */
def removeAttachment[T: ClassTag]: AttachableApi.this.type
@@ -22,17 +22,21 @@ abstract class Universe extends scala.reflect.api.Universe {
/** The extended API of symbols that's supported in macro context universes
*/
- trait SymbolContextApi extends SymbolApi with AttachableApi { this: Symbol =>
+ trait SymbolContextApi extends SymbolApi with AttachableApi { self: Symbol =>
- def setFlags(flags: FlagSet): this.type
+ def deSkolemize: Symbol
- def setTypeSignature(tpe: Type): this.type
+ /** The position of this symbol
+ */
+ def pos: Position
+
+ def setTypeSignature(tpe: Type): Symbol
- def setAnnotations(annots: AnnotationInfo*): this.type
+ def setAnnotations(annots: Annotation*): Symbol
- def setName(name: Name): this.type
+ def setName(name: Name): Symbol
- def setPrivateWithin(sym: Symbol): this.type
+ def setPrivateWithin(sym: Symbol): Symbol
}
// Tree extensions ---------------------------------------------------------------
@@ -41,20 +45,20 @@ abstract class Universe extends scala.reflect.api.Universe {
/** The extended API of trees that's supported in macro context universes
*/
- trait TreeContextApi extends TreeApi with AttachableApi { this: Tree =>
+ trait TreeContextApi extends TreeApi with AttachableApi { self: Tree =>
/** ... */
def pos_=(pos: Position): Unit
/** ... */
- def setPos(newpos: Position): this.type
+ def setPos(newpos: Position): Tree
/** ... */
def tpe_=(t: Type): Unit
/** Set tpe to give `tp` and return this.
*/
- def setType(tp: Type): this.type
+ def setType(tp: Type): Tree
/** Like `setType`, but if this is a previously empty TypeTree that
* fact is remembered so that resetAllAttrs will snap back.
@@ -73,13 +77,13 @@ abstract class Universe extends scala.reflect.api.Universe {
* and therefore should be abandoned if the current line of type
* inquiry doesn't work out.
*/
- def defineType(tp: Type): this.type
+ def defineType(tp: Type): Tree
/** ... */
def symbol_=(sym: Symbol): Unit
/** ... */
- def setSymbol(sym: Symbol): this.type
+ def setSymbol(sym: Symbol): Tree
}
override type SymTree >: Null <: Tree with SymTreeContextApi
@@ -105,4 +109,52 @@ abstract class Universe extends scala.reflect.api.Universe {
trait IdentContextApi extends IdentApi { this: Ident =>
def isBackquoted: Boolean
}
+
+ /** Mark a variable as captured; i.e. force boxing in a *Ref type.
+ */
+ def captureVariable(vble: Symbol): Unit
+
+ /** Mark given identifier as a reference to a captured variable itself
+ * suppressing dereferencing with the `elem` field.
+ */
+ def referenceCapturedVariable(vble: Symbol): Tree
+
+ /** Convert type of a captured variable to *Ref type.
+ */
+ def capturedVariableType(vble: Symbol): Type
+
+ type Run <: RunContextApi
+
+ /** A compilation run uniquely identifies the current invocation of the compiler
+ * (e.g. it can be used to implement per-run caches for macros) and provides access to the units of work
+ * of that invocation (the currently processed unit and the list of all units).
+ */
+ trait RunContextApi {
+ /** Currently processed unit of work (a real or a virtual file). */
+ def currentUnit: CompilationUnit
+
+ /** All units of work comprising this compilation run. */
+ def units: Iterator[CompilationUnit]
+ }
+
+ type CompilationUnit <: CompilationUnitContextApi
+
+ /** A compilation unit describes a unit of work of the compilation run.
+ * It provides information such as the file name, the textual representation of the unit and the underlying AST.
+ */
+ trait CompilationUnitContextApi {
+ /** Source file corresponding to this compilation unit.
+ *
+ * Exposes information about the file as a part of a real or virtual file system
+ * along with the contents of that file.
+ *
+ * The return type is [[scala.reflect.io.AbstractFile]], which belongs to an experimental part of Scala reflection.
+ * It should not be used unless you know what you are doing. In subsequent releases, this API will be refined
+ * and exposed as a part of scala.reflect.api.
+ */
+ def source: scala.reflect.internal.util.SourceFile
+
+ /** The AST that corresponds to this compilation unit. */
+ def body: Tree
+ }
} \ No newline at end of file
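Editor's note: the macro universe now also hosts the captured-variable helpers and the opaque `Run`/`CompilationUnit` types with the small `RunContextApi`/`CompilationUnitContextApi` views added above. A minimal sketch that walks the units of the enclosing run and reports their sources; `RunInfo` is illustrative, and `SourceFile.path` is assumed from `scala.reflect.internal.util.SourceFile`:

    import scala.reflect.macros.Context

    object RunInfo {
      // list the units of work in the current compiler invocation (a sketch, not an API)
      def summarize(c: Context): String = {
        val units = c.enclosingRun.units.toList        // Iterator[CompilationUnit] per the API above
        val paths = units map (u => u.source.path)     // CompilationUnitContextApi.source
        s"${units.length} unit(s): ${paths.mkString(", ")}"
      }
    }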
diff --git a/src/reflect/scala/reflect/macros/package.scala b/src/reflect/scala/reflect/macros/package.scala
deleted file mode 100644
index 06ce0b3244..0000000000
--- a/src/reflect/scala/reflect/macros/package.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-package scala.reflect
-
-package object macros {
-
- type MirrorOf[U <: base.Universe with Singleton] = base.MirrorOf[U]
-}
diff --git a/src/reflect/scala/reflect/runtime/AbstractFile.scala b/src/reflect/scala/reflect/runtime/AbstractFile.scala
deleted file mode 100644
index 0f88af1b0a..0000000000
--- a/src/reflect/scala/reflect/runtime/AbstractFile.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package scala.reflect
-package runtime
-
-class AbstractFile(val jfile: java.io.File) extends internal.AbstractFileApi {
- def path: String = jfile.getPath()
- def canonicalPath: String = jfile.getCanonicalPath()
-}
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index 4ce2cda04a..b6b2537dc4 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -9,21 +9,22 @@ import java.lang.reflect.{
Method => jMethod, Constructor => jConstructor, Modifier => jModifier, Field => jField,
Member => jMember, Type => jType, TypeVariable => jTypeVariable, Array => jArray,
GenericDeclaration, GenericArrayType, ParameterizedType, WildcardType, AnnotatedElement }
+import java.lang.annotation.{Annotation => jAnnotation}
import java.io.IOException
import internal.MissingRequirementError
import internal.pickling.ByteCodecs
import internal.ClassfileConstants._
import internal.pickling.UnPickler
-import collection.mutable.{ HashMap, ListBuffer }
+import scala.collection.mutable.{ HashMap, ListBuffer }
import internal.Flags._
//import scala.tools.nsc.util.ScalaClassLoader
//import scala.tools.nsc.util.ScalaClassLoader._
import ReflectionUtils.{staticSingletonInstance, innerSingletonInstance}
-import language.existentials
+import scala.language.existentials
import scala.runtime.{ScalaRunTime, BoxesRunTime}
import scala.reflect.internal.util.Collections._
-trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: SymbolTable =>
+trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { thisUniverse: SymbolTable =>
private lazy val mirrors = new WeakHashMap[ClassLoader, WeakReference[JavaMirror]]()
@@ -43,6 +44,8 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
// overriden by ReflectGlobal
def rootClassLoader: ClassLoader = this.getClass.getClassLoader
+ trait JavaClassCompleter extends FlagAssigningCompleter
+
def init() = {
definitions.AnyValClass // force it.
@@ -62,15 +65,15 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
class JavaMirror(owner: Symbol,
/** Class loader that is a mastermind behind the reflexive mirror */
val classLoader: ClassLoader
- ) extends Roots(owner) with super.JavaMirror { wholemirror =>
+ ) extends Roots(owner) with super.JavaMirror { thisMirror =>
- val universe: self.type = self
+ val universe: thisUniverse.type = thisUniverse
import definitions._
/** The lazy type for root.
*/
- override lazy val rootLoader = new LazyType {
+ override lazy val rootLoader = new LazyType with FlagAgnosticCompleter {
override def complete(sym: Symbol) = sym setInfo new LazyPackageType
}
@@ -88,7 +91,6 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
// ----------- Caching ------------------------------------------------------------------
- // [Eugene++ to Martin] not weak? why?
private val classCache = new TwoWayCache[jClass[_], ClassSymbol]
private val packageCache = new TwoWayCache[Package, ModuleSymbol]
private val methodCache = new TwoWayCache[jMethod, MethodSymbol]
@@ -96,7 +98,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
private val fieldCache = new TwoWayCache[jField, TermSymbol]
private val tparamCache = new TwoWayCache[jTypeVariable[_ <: GenericDeclaration], TypeSymbol]
- def toScala[J: HasJavaClass, S](cache: TwoWayCache[J, S], key: J)(body: (JavaMirror, J) => S): S =
+ private[runtime] def toScala[J: HasJavaClass, S](cache: TwoWayCache[J, S], key: J)(body: (JavaMirror, J) => S): S =
cache.toScala(key){
val jclazz = implicitly[HasJavaClass[J]] getClazz key
body(mirrorDefining(jclazz), key)
@@ -121,19 +123,69 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
// ----------- Implementations of mirror operations and classes -------------------
- private def ErrorInnerClass(wannabe: Symbol) = throw new ScalaReflectionException(s"$wannabe is an inner class, use reflectClass on an InstanceMirror to obtain its ClassMirror")
- private def ErrorInnerModule(wannabe: Symbol) = throw new ScalaReflectionException(s"$wannabe is an inner module, use reflectModule on an InstanceMirror to obtain its ModuleMirror")
- private def ErrorStaticClass(wannabe: Symbol) = throw new ScalaReflectionException(s"$wannabe is a static class, use reflectClass on a RuntimeMirror to obtain its ClassMirror")
- private def ErrorStaticModule(wannabe: Symbol) = throw new ScalaReflectionException(s"$wannabe is a static module, use reflectModule on a RuntimeMirror to obtain its ModuleMirror")
- private def ErrorNotMember(wannabe: Symbol, owner: Symbol) = throw new ScalaReflectionException(s"expected a member of $owner, you provided ${wannabe.kind} ${wannabe.fullName}")
- private def ErrorNotField(wannabe: Symbol) = throw new ScalaReflectionException(s"expected a field or an accessor method symbol, you provided $wannabe")
- private def ErrorNonExistentField(wannabe: Symbol) = throw new ScalaReflectionException(s"""
- |Scala field ${wannabe.name} isn't represented as a Java field, neither it has a Java accessor method
+ private def ErrorInnerClass(sym: Symbol) = throw new ScalaReflectionException(s"$sym is an inner class, use reflectClass on an InstanceMirror to obtain its ClassMirror")
+ private def ErrorInnerModule(sym: Symbol) = throw new ScalaReflectionException(s"$sym is an inner module, use reflectModule on an InstanceMirror to obtain its ModuleMirror")
+ private def ErrorStaticClass(sym: Symbol) = throw new ScalaReflectionException(s"$sym is a static class, use reflectClass on a RuntimeMirror to obtain its ClassMirror")
+ private def ErrorStaticModule(sym: Symbol) = throw new ScalaReflectionException(s"$sym is a static module, use reflectModule on a RuntimeMirror to obtain its ModuleMirror")
+ private def ErrorNotMember(sym: Symbol, owner: Symbol) = throw new ScalaReflectionException(s"expected a member of $owner, you provided ${sym.kindString} ${sym.fullName}")
+ private def ErrorNotField(sym: Symbol) = throw new ScalaReflectionException(s"expected a field or an accessor method symbol, you provided $sym")
+ private def ErrorNonExistentField(sym: Symbol) = throw new ScalaReflectionException(s"""
+ |Scala field ${sym.name} isn't represented as a Java field, nor does it have a Java accessor method
|note that private parameters of class constructors don't get mapped onto fields and/or accessors,
|unless they are used outside of their declaring constructors.
""".trim.stripMargin)
- private def ErrorSetImmutableField(wannabe: Symbol) = throw new ScalaReflectionException(s"cannot set an immutable field ${wannabe.name}")
- private def ErrorNotConstructor(wannabe: Symbol, owner: Symbol) = throw new ScalaReflectionException(s"expected a constructor of $owner, you provided $wannabe")
+ private def ErrorSetImmutableField(sym: Symbol) = throw new ScalaReflectionException(s"cannot set an immutable field ${sym.name}")
+ private def ErrorNotConstructor(sym: Symbol, owner: Symbol) = throw new ScalaReflectionException(s"expected a constructor of $owner, you provided $sym")
+ private def ErrorFree(member: Symbol, freeType: Symbol) = throw new ScalaReflectionException(s"cannot reflect ${member.kindString} ${member.name}, because it's a member of a weak type ${freeType.name}")
+
+ /** Helper functions for extracting typed values from a (Class[_], Any)
+ * representing an annotation argument.
+ */
+ private object toAnnotArg {
+ val StringClass = classOf[String]
+ val ClassClass = classOf[jClass[_]]
+ object PrimitiveClass { def unapply(x: jClass[_]) = x.isPrimitive }
+ object EnumClass { def unapply(x: jClass[_]) = x.isEnum }
+ object ArrayClass { def unapply(x: jClass[_]) = x.isArray }
+ object AnnotationClass { def unapply(x: jClass[_]) = x.isAnnotation }
+
+ object ConstantArg {
+ def enumToSymbol(enum: Enum[_]): Symbol = {
+ val staticPartOfEnum = classToScala(enum.getClass).companionSymbol
+ staticPartOfEnum.typeSignature.declaration(enum.name: TermName)
+ }
+
+ def unapply(schemaAndValue: (jClass[_], Any)): Option[Any] = schemaAndValue match {
+ case (StringClass | PrimitiveClass(), value) => Some(value)
+ case (ClassClass, value: jClass[_]) => Some(classToScala(value).toType)
+ case (EnumClass(), value: Enum[_]) => Some(enumToSymbol(value))
+ case _ => None
+ }
+ }
+ def apply(schemaAndValue: (jClass[_], Any)): ClassfileAnnotArg = schemaAndValue match {
+ case ConstantArg(value) => LiteralAnnotArg(Constant(value))
+ case (clazz @ ArrayClass(), value: Array[_]) => ArrayAnnotArg(value map (x => apply(ScalaRunTime.arrayElementClass(clazz) -> x)))
+ case (AnnotationClass(), value: jAnnotation) => NestedAnnotArg(JavaAnnotationProxy(value))
+ case _ => UnmappableAnnotArg
+ }
+ }
+ private case class JavaAnnotationProxy(jann: jAnnotation) extends AnnotationInfo {
+ override val atp: Type = classToScala(jann.annotationType).toType
+ override val args: List[Tree] = Nil
+ override def original: Tree = EmptyTree
+ override def setOriginal(t: Tree): this.type = throw new Exception("setOriginal inapplicable for " + this)
+ override def pos: Position = NoPosition
+ override def setPos(pos: Position): this.type = throw new Exception("setPos inapplicable for " + this)
+ override def toString = completeAnnotationToString(this)
+
+ // todo. find out the exact order of assocs as they are written in the class file
+ // currently I'm simply sorting the methods to guarantee stability of the output
+ override lazy val assocs: List[(Name, ClassfileAnnotArg)] = (
+ jann.annotationType.getDeclaredMethods.sortBy(_.getName).toList map (m =>
+ (m.getName: TermName) -> toAnnotArg(m.getReturnType -> m.invoke(jann))
+ )
+ )
+ }
def reflect[T: ClassTag](obj: T): InstanceMirror = new JavaInstanceMirror(obj)
@@ -155,13 +207,30 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
def moduleSymbol(rtcls: RuntimeClass): ModuleSymbol = classToScala(rtcls).companionModule.asModule
- private def checkMemberOf(wannabe: Symbol, owner: ClassSymbol) {
- if (wannabe.owner == AnyClass || wannabe.owner == AnyRefClass || wannabe.owner == ObjectClass) {
+ private def ensuringNotFree(sym: Symbol)(body: => Any) {
+ val freeType = sym.ownerChain find (_.isFreeType)
+ freeType match {
+ case Some(freeType) => ErrorFree(sym, freeType)
+ case _ => body
+ }
+ }
+
+ private def checkMemberOf(sym: Symbol, owner: ClassSymbol) {
+ if (sym.owner == AnyClass || sym.owner == AnyRefClass || sym.owner == ObjectClass) {
// do nothing
- } else if (wannabe.owner == AnyValClass) {
- if (!owner.isPrimitiveValueClass && !owner.isDerivedValueClass) ErrorNotMember(wannabe, owner)
+ } else if (sym.owner == AnyValClass) {
+ if (!owner.isPrimitiveValueClass && !owner.isDerivedValueClass) ErrorNotMember(sym, owner)
} else {
- if (!(owner.info.baseClasses contains wannabe.owner)) ErrorNotMember(wannabe, owner)
+ ensuringNotFree(sym) {
+ if (!(owner.info.baseClasses contains sym.owner)) ErrorNotMember(sym, owner)
+ }
+ }
+ }
+
+ private def checkConstructorOf(sym: Symbol, owner: ClassSymbol) {
+ if (!sym.isClassConstructor) ErrorNotConstructor(sym, owner)
+ ensuringNotFree(sym) {
+ if (!owner.info.decls.toList.contains(sym)) ErrorNotConstructor(sym, owner)
}
}
@@ -173,7 +242,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
private class JavaInstanceMirror[T: ClassTag](val instance: T)
extends InstanceMirror {
- def symbol = wholemirror.classSymbol(preciseClass(instance))
+ def symbol = thisMirror.classSymbol(preciseClass(instance))
def reflectField(field: TermSymbol): FieldMirror = {
checkMemberOf(field, symbol)
if ((field.isMethod && !field.isAccessor) || field.isModule) ErrorNotField(field)
@@ -248,14 +317,13 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
// the "symbol == Any_getClass || symbol == Object_getClass" test doesn't cut it
// because both AnyVal and its primitive descendants define their own getClass methods
private def isGetClass(meth: MethodSymbol) = meth.name.toString == "getClass" && meth.params.flatten.isEmpty
- private def isMagicPrimitiveMethod(meth: MethodSymbol) = meth.owner.isPrimitiveValueClass
- private def isStringConcat(meth: MethodSymbol) = meth == String_+ || (isMagicPrimitiveMethod(meth) && meth.returnType =:= StringClass.toType)
- lazy val magicMethodOwners = Set[Symbol](AnyClass, AnyValClass, AnyRefClass, ObjectClass, ArrayClass) ++ ScalaPrimitiveValueClasses
- lazy val nonMagicObjectMethods = Set[Symbol](Object_clone, Object_equals, Object_finalize, Object_hashCode, Object_toString,
- Object_notify, Object_notifyAll) ++ ObjectClass.info.member(nme.wait_).asTerm.alternatives.map(_.asMethod)
- private def isMagicMethod(meth: MethodSymbol): Boolean = {
- if (isGetClass(meth) || isStringConcat(meth) || isMagicPrimitiveMethod(meth) || meth == Predef_classOf || meth.isTermMacro) return true
- magicMethodOwners(meth.owner) && !nonMagicObjectMethods(meth)
+ private def isStringConcat(meth: MethodSymbol) = meth == String_+ || (meth.owner.isPrimitiveValueClass && meth.returnType =:= StringClass.toType)
+ lazy val bytecodelessMethodOwners = Set[Symbol](AnyClass, AnyValClass, AnyRefClass, ObjectClass, ArrayClass) ++ ScalaPrimitiveValueClasses
+ lazy val bytecodefulObjectMethods = Set[Symbol](Object_clone, Object_equals, Object_finalize, Object_hashCode, Object_toString,
+ Object_notify, Object_notifyAll) ++ ObjectClass.info.member(nme.wait_).asTerm.alternatives.map(_.asMethod)
+ private def isBytecodelessMethod(meth: MethodSymbol): Boolean = {
+ if (isGetClass(meth) || isStringConcat(meth) || meth.owner.isPrimitiveValueClass || meth == Predef_classOf || meth.isTermMacro) return true
+ bytecodelessMethodOwners(meth.owner) && !bytecodefulObjectMethods(meth)
}
// unlike other mirrors, method mirrors are created by a factory
@@ -263,7 +331,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
// therefore we move special cases into separate subclasses
// rather than have them on a hot path them in a unified implementation of the `apply` method
private def mkJavaMethodMirror[T: ClassTag](receiver: T, symbol: MethodSymbol): JavaMethodMirror = {
- if (isMagicMethod(symbol)) new JavaMagicMethodMirror(receiver, symbol)
+ if (isBytecodelessMethod(symbol)) new JavaBytecodelessMethodMirror(receiver, symbol)
else if (symbol.params.flatten exists (p => isByNameParamType(p.info))) new JavaByNameMethodMirror(receiver, symbol)
else new JavaVanillaMethodMirror(receiver, symbol)
}
@@ -298,11 +366,11 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
}
}
- private class JavaMagicMethodMirror[T: ClassTag](val receiver: T, symbol: MethodSymbol)
+ private class JavaBytecodelessMethodMirror[T: ClassTag](val receiver: T, symbol: MethodSymbol)
extends JavaMethodMirror(symbol) {
def apply(args: Any*): Any = {
// checking type conformance is too much of a hassle, so we don't do it here
- // actually it's not even necessary, because we manually dispatch arguments to magic methods below
+ // actually it's not even necessary, because we manually dispatch arguments below
val params = symbol.paramss.flatten
val perfectMatch = args.length == params.length
// todo. this doesn't account for multiple vararg parameter lists
@@ -320,36 +388,36 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
def objArgs = args.asInstanceOf[Seq[AnyRef]]
def fail(msg: String) = throw new ScalaReflectionException(msg + ", it cannot be invoked with mirrors")
- def invokeMagicPrimitiveMethod = {
+ def invokePrimitiveMethod = {
val jmeths = classOf[BoxesRunTime].getDeclaredMethods.filter(_.getName == nme.primitiveMethodName(symbol.name).toString)
assert(jmeths.length == 1, jmeths.toList)
jinvoke(jmeths.head, null, objReceiver +: objArgs)
}
symbol match {
- case Any_== | Object_== => ScalaRunTime.inlinedEquals(objReceiver, objArg0)
- case Any_!= | Object_!= => !ScalaRunTime.inlinedEquals(objReceiver, objArg0)
- case Any_## | Object_## => ScalaRunTime.hash(objReceiver)
- case Any_equals => receiver.equals(objArg0)
- case Any_hashCode => receiver.hashCode
- case Any_toString => receiver.toString
- case Object_eq => objReceiver eq objArg0
- case Object_ne => objReceiver ne objArg0
- case Object_synchronized => objReceiver.synchronized(objArg0)
- case sym if isGetClass(sym) => preciseClass(receiver)
- case Any_asInstanceOf => fail("Any.asInstanceOf requires a type argument")
- case Any_isInstanceOf => fail("Any.isInstanceOf requires a type argument")
- case Object_asInstanceOf => fail("AnyRef.$asInstanceOf is an internal method")
- case Object_isInstanceOf => fail("AnyRef.$isInstanceOf is an internal method")
- case Array_length => ScalaRunTime.array_length(objReceiver)
- case Array_apply => ScalaRunTime.array_apply(objReceiver, args(0).asInstanceOf[Int])
- case Array_update => ScalaRunTime.array_update(objReceiver, args(0).asInstanceOf[Int], args(1))
- case Array_clone => ScalaRunTime.array_clone(objReceiver)
- case sym if isStringConcat(sym) => receiver.toString + objArg0
- case sym if isMagicPrimitiveMethod(sym) => invokeMagicPrimitiveMethod
- case sym if sym == Predef_classOf => fail("Predef.classOf is a compile-time function")
- case sym if sym.isTermMacro => fail(s"${symbol.fullName} is a macro, i.e. a compile-time function")
- case _ => assert(false, this)
+ case Any_== | Object_== => ScalaRunTime.inlinedEquals(objReceiver, objArg0)
+ case Any_!= | Object_!= => !ScalaRunTime.inlinedEquals(objReceiver, objArg0)
+ case Any_## | Object_## => ScalaRunTime.hash(objReceiver)
+ case Any_equals => receiver.equals(objArg0)
+ case Any_hashCode => receiver.hashCode
+ case Any_toString => receiver.toString
+ case Object_eq => objReceiver eq objArg0
+ case Object_ne => objReceiver ne objArg0
+ case Object_synchronized => objReceiver.synchronized(objArg0)
+ case sym if isGetClass(sym) => preciseClass(receiver)
+ case Any_asInstanceOf => fail("Any.asInstanceOf requires a type argument")
+ case Any_isInstanceOf => fail("Any.isInstanceOf requires a type argument")
+ case Object_asInstanceOf => fail("AnyRef.$asInstanceOf is an internal method")
+ case Object_isInstanceOf => fail("AnyRef.$isInstanceOf is an internal method")
+ case Array_length => ScalaRunTime.array_length(objReceiver)
+ case Array_apply => ScalaRunTime.array_apply(objReceiver, args(0).asInstanceOf[Int])
+ case Array_update => ScalaRunTime.array_update(objReceiver, args(0).asInstanceOf[Int], args(1))
+ case Array_clone => ScalaRunTime.array_clone(objReceiver)
+ case sym if isStringConcat(sym) => receiver.toString + objArg0
+ case sym if sym.owner.isPrimitiveValueClass => invokePrimitiveMethod
+ case sym if sym == Predef_classOf => fail("Predef.classOf is a compile-time function")
+ case sym if sym.isTermMacro => fail(s"${symbol.fullName} is a macro, i.e. a compile-time function")
+ case _ => assert(false, this)
}
}
}
@@ -387,8 +455,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
def erasure = symbol
def isStatic = false
def reflectConstructor(constructor: MethodSymbol) = {
- if (!constructor.isClassConstructor) ErrorNotConstructor(constructor, symbol)
- if (!symbol.info.decls.toList.contains(constructor)) ErrorNotConstructor(constructor, symbol)
+ checkConstructorOf(constructor, symbol)
new JavaConstructorMirror(outer, constructor)
}
def companion: Option[ModuleMirror] = symbol.companionModule match {
@@ -453,7 +520,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
}
private object unpickler extends UnPickler {
- val global: self.type = self
+ val global: thisUniverse.type = thisUniverse
}
/** how connected????
@@ -544,7 +611,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
* A completer that fills in the type of a Scala type parameter from the bounds of a Java type variable.
* @param jtvar The Java type variable
*/
- private class TypeParamCompleter(jtvar: jTypeVariable[_ <: GenericDeclaration]) extends LazyType {
+ private class TypeParamCompleter(jtvar: jTypeVariable[_ <: GenericDeclaration]) extends LazyType with FlagAgnosticCompleter {
override def load(sym: Symbol) = complete(sym)
override def complete(sym: Symbol) = {
sym setInfo TypeBounds.upper(glb(jtvar.getBounds.toList map typeToScala map objToAny))
@@ -557,7 +624,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
* Note: If `sym` is a method or constructor, its parameter annotations are copied as well.
*/
private def copyAnnotations(sym: Symbol, jann: AnnotatedElement) {
- // to do: implement
+ sym setAnnotations (jann.getAnnotations map JavaAnnotationProxy).toList
}
/**
@@ -569,7 +636,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
* @param module The Scala companion object for which info is copied
* @param jclazz The Java class
*/
- private class FromJavaClassCompleter(clazz: Symbol, module: Symbol, jclazz: jClass[_]) extends LazyType {
+ private class FromJavaClassCompleter(clazz: Symbol, module: Symbol, jclazz: jClass[_]) extends LazyType with JavaClassCompleter with FlagAssigningCompleter {
/** used to avoid cycles while initializing classes */
private var parentsLevel = 0
@@ -600,7 +667,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
completeRest()
}
- def completeRest(): Unit = self.synchronized {
+ def completeRest(): Unit = thisUniverse.synchronized {
val tparams = clazz.rawInfo.typeParams
val parents = try {
@@ -645,7 +712,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
}
}
- class LazyPolyType(override val typeParams: List[Symbol]) extends LazyType {
+ class LazyPolyType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter {
override def complete(sym: Symbol) {
completeRest()
}
@@ -659,43 +726,33 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
private def followStatic(clazz: Symbol, mods: Int) =
if (jModifier.isStatic(mods)) clazz.companionModule.moduleClass else clazz
- implicit class RichClass(jclazz: jClass[_]) {
- // [Eugene++] `jclazz.isLocalClass` doesn't work because of problems with `getSimpleName`
- // java.lang.Error: sOwner(class Test$A$1) has failed
- // Caused by: java.lang.InternalError: Malformed class name
- // at java.lang.Class.getSimpleName(Class.java:1133)
- // at java.lang.Class.isAnonymousClass(Class.java:1188)
- // at java.lang.Class.isLocalClass(Class.java:1199)
- // (see t5256c.scala for more details)
+ /** Methods which need to be treated with care
+ * because they either are getSimpleName or call getSimpleName:
+ *
+ * public String getSimpleName()
+ * public boolean isAnonymousClass()
+ * public boolean isLocalClass()
+ * public String getCanonicalName()
+ *
+ * A typical manifestation:
+ *
+ * // java.lang.Error: sOwner(class Test$A$1) has failed
+ * // Caused by: java.lang.InternalError: Malformed class name
+ * // at java.lang.Class.getSimpleName(Class.java:1133)
+ * // at java.lang.Class.isAnonymousClass(Class.java:1188)
+ * // at java.lang.Class.isLocalClass(Class.java:1199)
+ * // (see t5256c.scala for more details)
+ *
+ * TODO - find all such calls and wrap them.
+ * TODO - create mechanism to avoid the recurrence of unwrapped calls.
+ */
+ implicit class RichClass(jclazz: jClass[_]) {
+ // `jclazz.isLocalClass` doesn't work because of problems with `getSimpleName`
// hence we have to approximate by removing the `isAnonymousClass` check
// def isLocalClass0: Boolean = jclazz.isLocalClass
def isLocalClass0: Boolean = jclazz.getEnclosingMethod != null || jclazz.getEnclosingConstructor != null
}
- // [Eugene++] overflow from Paul's changes made concurrently with reflection refactoring
- // https://github.com/scala/scala/commit/90d2bee45b25844f809f8c5300aefcb1bfe9e336
- //
- // /** Methods which need to be wrapped because they either are getSimpleName
- // * or call getSimpleName:
- // *
- // * public String getSimpleName()
- // * public boolean isAnonymousClass()
- // * public boolean isLocalClass()
- // * public boolean isMemberClass()
- // * public String getCanonicalName()
- // *
- // * TODO - find all such calls and wrap them.
- // * TODO - create mechanism to avoid the recurrence of unwrapped calls.
- // */
- // private def wrapClassCheck[T](alt: T)(body: => T): T =
- // try body catch { case x: InternalError if x.getMessage == "Malformed class name" => alt }
-
- // private def wrapIsLocalClass(clazz: jClass[_]): Boolean =
- // wrapClassCheck(false)(clazz.isLocalClass)
-
- // private def wrapGetSimpleName(clazz: jClass[_]): String =
- // wrapClassCheck("")(clazz.getSimpleName)
-
/**
* The Scala owner of the Scala class corresponding to the Java class `jclazz`
*/
@@ -970,13 +1027,12 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
rawToExistential(typeRef(clazz.owner.thisType, clazz, List()))
}
case japplied: ParameterizedType =>
- val (pre, sym) = typeToScala(japplied.getRawType) match {
- case ExistentialType(tparams, TypeRef(pre, sym, _)) => (pre, sym)
- case TypeRef(pre, sym, _) => (pre, sym)
- }
+ // http://stackoverflow.com/questions/5767122/parameterizedtype-getrawtype-returns-j-l-r-type-not-class
+ val sym = classToScala(japplied.getRawType.asInstanceOf[jClass[_]])
+ val pre = sym.owner.thisType
val args0 = japplied.getActualTypeArguments
val (args, bounds) = targsToScala(pre.typeSymbol, args0.toList)
- ExistentialType(bounds, typeRef(pre, sym, args))
+ newExistentialType(bounds, typeRef(pre, sym, args))
case jarr: GenericArrayType =>
arrayType(typeToScala(jarr.getGenericComponentType))
case jtvar: jTypeVariable[_] =>
@@ -1182,21 +1238,14 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
override def scopeTransform(owner: Symbol)(op: => Scope): Scope =
if (owner.isPackageClass) owner.info.decls else op
- private lazy val rootToLoader = new WeakHashMap[Symbol, ClassLoader]
-
- override def mirrorThatLoaded(sym: Symbol): Mirror = {
- val root = sym.enclosingRootClass
- def findLoader = {
- val loaders = (mirrors collect { case (cl, ref) if ref.get.get.RootClass == root => cl })
- assert(loaders.nonEmpty, sym)
- loaders.head
- }
- mirrors(rootToLoader getOrElseUpdate(root, findLoader)).get.get
+ override def mirrorThatLoaded(sym: Symbol): Mirror = sym.enclosingRootClass match {
+ case root: RootSymbol => root.mirror
+ case _ => abort(s"${sym}.enclosingRootClass = ${sym.enclosingRootClass}, which is not a RootSymbol")
}
- private lazy val magicClasses: Map[(String, Name), Symbol] = {
+ private lazy val syntheticCoreClasses: Map[(String, Name), Symbol] = {
def mapEntry(sym: Symbol): ((String, Name), Symbol) = (sym.owner.fullName, sym.name) -> sym
- Map() ++ (definitions.magicSymbols filter (_.isType) map mapEntry)
+ Map() ++ (definitions.syntheticCoreClasses map mapEntry)
}
/** 1. If `owner` is a package class (but not the empty package) and `name` is a term name, make a new package
@@ -1208,16 +1257,19 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { self: Sym
override def missingHook(owner: Symbol, name: Name): Symbol = {
if (owner.hasPackageFlag) {
val mirror = mirrorThatLoaded(owner)
- // [Eugene++] this makes toolbox tests pass, but it's a mere workaround for SI-5865
+ // todo. this makes toolbox tests pass, but it's a mere workaround for SI-5865
// assert((owner.info decl name) == NoSymbol, s"already exists: $owner . $name")
if (owner.isRootSymbol && mirror.tryJavaClass(name.toString).isDefined)
return mirror.EmptyPackageClass.info decl name
if (name.isTermName && !owner.isEmptyPackageClass)
return mirror.makeScalaPackage(
if (owner.isRootSymbol) name.toString else owner.fullName+"."+name)
- magicClasses get (owner.fullName, name) match {
+ syntheticCoreClasses get (owner.fullName, name) match {
case Some(tsym) =>
- owner.info.decls enter tsym
+ // synthetic core classes are only present in root mirrors
+ // because Definitions.scala, which initializes and enters them, only affects rootMirror
+ // therefore we need to enter them manually for non-root mirrors
+ if (mirror ne thisUniverse.rootMirror) owner.info.decls enter tsym
return tsym
case None =>
}
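Editor's note: the "magic method" machinery above is renamed to "bytecodeless": methods such as `Any.toString` or `Array.apply` have no JVM bytecode of their own, so `JavaBytecodelessMethodMirror` dispatches them by hand in the match shown. A minimal sketch of invoking one of them through a method mirror, using only the public reflection entry points; `BytecodelessDemo` is illustrative:

    import scala.reflect.runtime.{universe => ru}

    object BytecodelessDemo extends App {
      val m  = ru.runtimeMirror(getClass.getClassLoader)
      val im = m.reflect("hello")                                    // InstanceMirror over a String

      // Any.toString is "bytecodeless": the mirror dispatches it manually (see the match above)
      val anyToString = ru.typeOf[Any].member(ru.newTermName("toString")).asMethod
      println(im.reflectMethod(anyToString)())                       // prints "hello"
    }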
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
index 629df76178..1d875b10f1 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -8,8 +8,6 @@ import internal.{SomePhase, NoPhase, Phase, TreeGen}
*/
class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.SymbolTable { self =>
- type AbstractFileType = AbstractFile
-
def picklerPhase = SomePhase
lazy val settings = new Settings
@@ -18,7 +16,7 @@ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.S
def log(msg: => AnyRef): Unit = println(" [] "+msg)
- type TreeCopier = TreeCopierOps
+ type TreeCopier = InternalTreeCopierOps
def newStrictTreeCopier: TreeCopier = new StrictTreeCopier
def newLazyTreeCopier: TreeCopier = new LazyTreeCopier
diff --git a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
index e87c6b339b..44d9d94a46 100644
--- a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
+++ b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
@@ -38,7 +38,7 @@ object ReflectionUtils {
)
def show(cl: ClassLoader): String = {
- import language.reflectiveCalls
+ import scala.language.reflectiveCalls
def isAbstractFileClassLoader(clazz: Class[_]): Boolean = {
if (clazz == null) return false
@@ -49,7 +49,7 @@ object ReflectionUtils {
case cl: java.net.URLClassLoader =>
(cl.getURLs mkString ",")
case cl if cl != null && isAbstractFileClassLoader(cl.getClass) =>
- cl.asInstanceOf[{val root: scala.reflect.internal.AbstractFileApi}].root.canonicalPath
+ cl.asInstanceOf[{val root: scala.reflect.io.AbstractFile}].root.canonicalPath
case null =>
inferBootClasspath
case _ =>
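
The structural cast in this hunk is plain Scala: with scala.language.reflectiveCalls enabled, any object exposing the named member can be accessed without a compile-time dependency on its class. A minimal sketch under that assumption; the member type here is illustrative:

    import scala.language.reflectiveCalls

    // Access a `root` member through a structural type, as the class-loader
    // pretty-printer above does with scala.reflect.io.AbstractFile.
    def rootPathOf(loader: AnyRef): String =
      loader.asInstanceOf[{ def root: java.io.File }].root.getPath
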
diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
index eb48e9dc79..d1be73bed3 100644
--- a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
@@ -3,7 +3,7 @@ package runtime
import internal.Flags
import java.lang.{Class => jClass, Package => jPackage}
-import collection.mutable
+import scala.collection.mutable
trait SymbolLoaders { self: SymbolTable =>
@@ -14,7 +14,7 @@ trait SymbolLoaders { self: SymbolTable =>
* by unpickling information from the corresponding Java class. If no Java class
* is found, a package is created instead.
*/
- class TopClassCompleter(clazz: Symbol, module: Symbol) extends SymLoader {
+ class TopClassCompleter(clazz: Symbol, module: Symbol) extends SymLoader with FlagAssigningCompleter {
// def makePackage() {
// println("wrong guess; making package "+clazz)
// val ptpe = newPackageType(module.moduleClass)
@@ -61,10 +61,8 @@ trait SymbolLoaders { self: SymbolTable =>
assert(!(name.toString endsWith "[]"), name)
val clazz = owner.newClass(name)
val module = owner.newModule(name.toTermName)
- // [Eugene++] am I doing this right?
- // todo: drop condition, see what goes wrong
- // [Eugene++ to Martin] test/files/run/t5256g and test/files/run/t5256h will crash
- // reflection meeting verdict: need to enter the symbols into the first symbol in the owner chain that has a non-empty scope
+ // without this check, test/files/run/t5256g and test/files/run/t5256h will crash
+ // todo. reflection meeting verdict: need to enter the symbols into the first symbol in the owner chain that has a non-empty scope
if (owner.info.decls != EmptyScope) {
owner.info.decls enter clazz
owner.info.decls enter module
@@ -82,7 +80,7 @@ trait SymbolLoaders { self: SymbolTable =>
/** The type completer for packages.
*/
- class LazyPackageType extends LazyType {
+ class LazyPackageType extends LazyType with FlagAgnosticCompleter {
override def complete(sym: Symbol) {
assert(sym.isPackageClass)
sym setInfo new ClassInfoType(List(), new PackageScope(sym), sym)
diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala
index 5b9090dae5..73632be965 100644
--- a/src/reflect/scala/reflect/runtime/SymbolTable.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala
@@ -1,6 +1,8 @@
package scala.reflect
package runtime
+import scala.reflect.internal.Flags._
+
/**
* This symbol table trait fills in the definitions so that class information is obtained by reflection.
* It can be used either from a reflective universe (class scala.reflect.runtime.JavaUniverse), or else from
@@ -14,4 +16,30 @@ trait SymbolTable extends internal.SymbolTable with JavaMirrors with SymbolLoade
def debugInfo(msg: => String) =
if (settings.debug.value) info(msg)
+ /** Declares that this is a runtime reflection universe.
+ *
+ * This means that we can make certain assumptions to optimize the universe.
+ * For example, we may auto-initialize symbols on flag and annotation requests
+ * (see `shouldTriggerCompleter` below for more details).
+ *
+ * On the other hand, this also means that usage scenarios of the universe
+ * will differ from the conventional ones. For example, we have to do additional cleanup
+ * in order to prevent memory leaks: http://groups.google.com/group/scala-internals/browse_thread/thread/eabcf3d406dab8b2.
+ */
+ override def isCompilerUniverse = false
+
+ /** Unlike compiler universes, reflective universes can auto-initialize symbols on flag requests.
+ *
+ * scalac wasn't designed with such auto-initialization in mind, and quite often it makes assumptions
+ * that flag requests won't cause initialization. Therefore enabling auto-init leads to cyclic errors.
+ * We could probably fix those, but at the moment it's too risky.
+ *
+ * Reflective universes share a codebase with scalac, but their surface is much smaller, which means fewer assumptions.
+ * These assumptions are taken care of in this overridden `shouldTriggerCompleter` method.
+ */
+ override protected def shouldTriggerCompleter(symbol: Symbol, completer: Type, isFlagRelated: Boolean, mask: Long) =
+ completer match {
+ case _: TopClassCompleter | _: JavaClassCompleter => !isFlagRelated || (mask & TopLevelPickledFlags) != 0
+ case _ => super.shouldTriggerCompleter(symbol, completer, isFlagRelated, mask)
+ }
}
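
The override above boils down to a bit-mask test: for top-level completers, a flag request triggers completion only when the requested flags overlap the flags that are known without unpickling. A standalone sketch of that check, with illustrative parameter names:

    // Returns true when the completer should be triggered for this request.
    def triggers(isFlagRelated: Boolean, requestedMask: Long, topLevelPickledFlags: Long): Boolean =
      !isFlagRelated || (requestedMask & topLevelPickledFlags) != 0
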
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
index 907c0dd369..1a17dd12d2 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
@@ -1,6 +1,7 @@
package scala.reflect
package runtime
+// SI-6240: test thread-safety, make trees synchronized as well
trait SynchronizedOps extends internal.SymbolTable
with SynchronizedSymbols
with SynchronizedTypes { self: SymbolTable =>
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
index c65357b652..3c2885a9f4 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
@@ -1,7 +1,7 @@
package scala.reflect
package runtime
-import internal.Flags.DEFERRED
+import scala.reflect.io.AbstractFile
trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
@@ -14,11 +14,11 @@ trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
override def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): ModuleSymbol =
synchronized { super.connectModuleToClass(m, moduleClass) }
- override def newFreeTermSymbol(name: TermName, info: Type, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
- new FreeTermSymbol(name, value, origin) with SynchronizedTermSymbol initFlags flags setInfo info
+ override def newFreeTermSymbol(name: TermName, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
+ new FreeTermSymbol(name, value, origin) with SynchronizedTermSymbol initFlags flags
- override def newFreeTypeSymbol(name: TypeName, info: Type, value: => Any, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
- new FreeTypeSymbol(name, value, origin) with SynchronizedTypeSymbol initFlags flags setInfo info
+ override def newFreeTypeSymbol(name: TypeName, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
+ new FreeTypeSymbol(name, origin) with SynchronizedTypeSymbol initFlags flags
override protected def makeNoSymbol: NoSymbol = new NoSymbol with SynchronizedSymbol
@@ -123,7 +123,7 @@ trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
trait SynchronizedClassSymbol extends ClassSymbol with SynchronizedTypeSymbol {
override def associatedFile = synchronized { super.associatedFile }
- override def associatedFile_=(f: AbstractFileType) = synchronized { super.associatedFile_=(f) }
+ override def associatedFile_=(f: AbstractFile) = synchronized { super.associatedFile_=(f) }
override def thisSym: Symbol = synchronized { super.thisSym }
override def thisType: Type = synchronized { super.thisType }
override def typeOfThis: Type = synchronized { super.typeOfThis }
@@ -134,8 +134,6 @@ trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
trait SynchronizedModuleClassSymbol extends ModuleClassSymbol with SynchronizedClassSymbol {
override def sourceModule = synchronized { super.sourceModule }
- // [Eugene++ to Martin] doesn't override anything. no longer necessary?
- // def sourceModule_=(module: ModuleSymbol) = synchronized { super.sourceModule_=(module) }
override def implicitMembers: Scope = synchronized { super.implicitMembers }
}
}
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
index e1eb7a57fe..b9b140a2fd 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
@@ -1,6 +1,9 @@
package scala.reflect
package runtime
+import scala.collection.mutable.WeakHashMap
+import java.lang.ref.WeakReference
+
/** This trait overrides methods in reflect.internal, bracketing
* them in synchronized { ... } to make them thread-safe
*/
@@ -11,18 +14,32 @@ trait SynchronizedTypes extends internal.Types { self: SymbolTable =>
private object uniqueLock
- override def unique[T <: Type](tp: T): T = uniqueLock.synchronized { super.unique(tp) }
+ private val uniques = WeakHashMap[Type, WeakReference[Type]]()
+ override def unique[T <: Type](tp: T): T = uniqueLock.synchronized {
+ // we need weak uniques for runtime reflection,
+ // because unlike the normal compiler universe, a reflective universe isn't organized in runs,
+ // so perRunCaches can grow indefinitely
+ //
+ // even though toolbox universes are decorated with this trait, toolboxes are compilers,
+ // i.e. their caches are cleaned up automatically on a per-run basis,
+ // so they should use vanilla uniques, which are faster
+ if (!isCompilerUniverse) {
+ val result = if (uniques contains tp) uniques(tp).get else null
+ if (result ne null) result.asInstanceOf[T]
+ else {
+ uniques(tp) = new WeakReference(tp)
+ tp
+ }
+ } else {
+ super.unique(tp)
+ }
+ }
class SynchronizedUndoLog extends UndoLog {
+ private val actualLock = new java.util.concurrent.locks.ReentrantLock
- override def clear() =
- synchronized { super.clear() }
-
- override def undo[T](block: => T): T =
- synchronized { super.undo(block) }
-
- override def undoUnless(block: => Boolean): Boolean =
- synchronized { super.undoUnless(block) }
+ final override def lock(): Unit = actualLock.lock()
+ final override def unlock(): Unit = actualLock.unlock()
}
override protected def newUndoLog = new SynchronizedUndoLog
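
The weak `uniques` cache above can be exercised on its own; the sketch below reproduces the same interning scheme with String keys instead of Types (illustration only, not part of this patch):

    import scala.collection.mutable.WeakHashMap
    import java.lang.ref.WeakReference

    object WeakInterner {
      private val cache = WeakHashMap[String, WeakReference[String]]()

      // Return the cached instance if it is still alive, otherwise cache and return the argument.
      def intern(s: String): String = cache.synchronized {
        cache.get(s).flatMap(ref => Option(ref.get)) match {
          case Some(cached) => cached
          case None         => cache(s) = new WeakReference(s); s
        }
      }
    }
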
diff --git a/src/reflect/scala/reflect/runtime/TwoWayCache.scala b/src/reflect/scala/reflect/runtime/TwoWayCache.scala
index e2bf5773d2..05debcba65 100644
--- a/src/reflect/scala/reflect/runtime/TwoWayCache.scala
+++ b/src/reflect/scala/reflect/runtime/TwoWayCache.scala
@@ -1,7 +1,7 @@
package scala.reflect
package runtime
-import collection.mutable.WeakHashMap
+import scala.collection.mutable.WeakHashMap
import java.lang.ref.WeakReference
/** A cache that maintains a bijection between Java reflection type `J`
diff --git a/src/reflect/scala/reflect/runtime/package.scala b/src/reflect/scala/reflect/runtime/package.scala
index d00094c0c1..7b9f69e657 100644
--- a/src/reflect/scala/reflect/runtime/package.scala
+++ b/src/reflect/scala/reflect/runtime/package.scala
@@ -5,9 +5,8 @@ package object runtime {
// type is api.JavaUniverse because we only want to expose the `scala.reflect.api.*` subset of reflection
lazy val universe: api.JavaUniverse = new runtime.JavaUniverse
- // [Eugene++ to Martin] removed `mirrorOfLoader`, because one can use `universe.runtimeMirror` instead
-
- // implementation magically hardwired to the `currentMirror` method below
+ // implementation hardwired to the `currentMirror` method below
+ // using the mechanism implemented in `scala.tools.reflect.FastTrack`
def currentMirror: universe.Mirror = ??? // macro
}
@@ -19,7 +18,7 @@ package runtime {
if (runtimeClass.isEmpty) c.abort(c.enclosingPosition, "call site does not have an enclosing class")
val runtimeUniverse = Select(Select(Select(Ident(newTermName("scala")), newTermName("reflect")), newTermName("runtime")), newTermName("universe"))
val currentMirror = Apply(Select(runtimeUniverse, newTermName("runtimeMirror")), List(Select(runtimeClass, newTermName("getClassLoader"))))
- c.Expr[Nothing](currentMirror)(c.AbsTypeTag.Nothing)
+ c.Expr[Nothing](currentMirror)(c.WeakTypeTag.Nothing)
}
}
}
diff --git a/src/scalacheck/org/scalacheck/Commands.scala b/src/scalacheck/org/scalacheck/Commands.scala
index 5ad82c513d..88ef8ae2a1 100644
--- a/src/scalacheck/org/scalacheck/Commands.scala
+++ b/src/scalacheck/org/scalacheck/Commands.scala
@@ -53,7 +53,7 @@ trait Commands extends Prop {
* takes the current abstract state as parameter and returns a boolean
* that says if the precondition is fulfilled or not. You can add several
* conditions to the precondition list */
- val preConditions = new collection.mutable.ListBuffer[State => Boolean]
+ val preConditions = new scala.collection.mutable.ListBuffer[State => Boolean]
/** Returns all postconditions merged into a single function */
def postCondition: (State,State,Any) => Prop = (s0,s1,r) => all(postConditions.map(_.apply(s0,s1,r)): _*)
@@ -65,7 +65,7 @@ trait Commands extends Prop {
* method. The postcondition function should return a Boolean (or
* a Prop instance) that says if the condition holds or not. You can add several
* conditions to the postConditions list. */
- val postConditions = new collection.mutable.ListBuffer[(State,State,Any) => Prop]
+ val postConditions = new scala.collection.mutable.ListBuffer[(State,State,Any) => Prop]
}
/** A command that binds its result for later use */
diff --git a/src/scalacheck/org/scalacheck/Pretty.scala b/src/scalacheck/org/scalacheck/Pretty.scala
index c40e4aa718..eeb5936086 100644
--- a/src/scalacheck/org/scalacheck/Pretty.scala
+++ b/src/scalacheck/org/scalacheck/Pretty.scala
@@ -96,7 +96,7 @@ object Pretty {
}
implicit def prettyTestRes(res: Test.Result) = Pretty { prms =>
- def labels(ls: collection.immutable.Set[String]) =
+ def labels(ls: scala.collection.immutable.Set[String]) =
if(ls.isEmpty) ""
else "> Labels of failing property: " / ls.mkString("\n")
val s = res.status match {
diff --git a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
index 16ac1940b2..4683c34a65 100644
--- a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
+++ b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
@@ -30,7 +30,7 @@ trait CmdLineParser extends Parsers {
trait StrOpt extends Opt[String]
class OptMap {
- private val opts = new collection.mutable.HashMap[Opt[_], Any]
+ private val opts = new scala.collection.mutable.HashMap[Opt[_], Any]
def apply(flag: Flag): Boolean = opts.contains(flag)
def apply[T](opt: Opt[T]): T = opts.get(opt) match {
case None => opt.default
diff --git a/src/scalap/scala/tools/scalap/CodeWriter.scala b/src/scalap/scala/tools/scalap/CodeWriter.scala
index f5db183abb..35de796727 100644
--- a/src/scalap/scala/tools/scalap/CodeWriter.scala
+++ b/src/scalap/scala/tools/scalap/CodeWriter.scala
@@ -13,7 +13,7 @@ import java.io._
class CodeWriter(writer: Writer) {
- private val nl = compat.Platform.EOL
+ private val nl = scala.compat.Platform.EOL
private var step = " "
private var level = 0
private var align = false
diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala
index a8a9c65f63..a514f0d5a1 100644
--- a/src/scalap/scala/tools/scalap/Main.scala
+++ b/src/scalap/scala/tools/scalap/Main.scala
@@ -10,10 +10,10 @@ package scala.tools.scalap
import java.io.{ PrintStream, OutputStreamWriter, ByteArrayOutputStream }
import scala.reflect.NameTransformer
import scalax.rules.scalasig._
-import tools.nsc.util.{ ClassPath, JavaClassPath }
-import tools.util.PathResolver
+import scala.tools.nsc.util.{ ClassPath, JavaClassPath }
+import scala.tools.util.PathResolver
import ClassPath.DefaultJavaContext
-import tools.nsc.io.{ PlainFile, AbstractFile }
+import scala.tools.nsc.io.{ PlainFile, AbstractFile }
/**The main object used to execute scalap on the command-line.
*
@@ -97,9 +97,14 @@ class Main {
*/
def process(args: Arguments, path: ClassPath[AbstractFile])(classname: String): Unit = {
// find the classfile
- val encName = NameTransformer.encode(
- if (classname == "scala.AnyRef") "java.lang.Object"
- else classname)
+ val encName = classname match {
+ case "scala.AnyRef" => "java.lang.Object"
+ case _ =>
+ // we have to encode every fragment of the name separately, otherwise the NameTransformer
+ // will also encode the dot separators using unicode escaping
+ // we can afford the allocations because this is not performance-critical code
+ classname.split('.').map(NameTransformer.encode).mkString(".")
+ }
val cls = path.findClass(encName)
if (cls.isDefined && cls.get.binary.isDefined) {
val cfile = cls.get.binary.get
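
The fragment-wise encoding can be checked on a concrete name: per the comment above, encoding the fully qualified name in one go would mangle the dot separators too, so every segment is encoded separately (the sample class name is only an illustration):

    import scala.reflect.NameTransformer

    val classname = "scala.collection.immutable.::"
    val encName   = classname.split('.').map(NameTransformer.encode).mkString(".")
    // encName == "scala.collection.immutable.$colon$colon"
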
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala b/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
index 34f52a1e19..51a789e041 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
@@ -82,7 +82,7 @@ class SeqRule[S, +A, +X](rule : Rule[S, S, A, X]) {
/** Repeats this rule num times */
def times(num : Int) : Rule[S, S, Seq[A], X] = from[S] {
- val result = new collection.mutable.ArraySeq[A](num)
+ val result = new scala.collection.mutable.ArraySeq[A](num)
// more compact using HoF but written this way so it's tail-recursive
def rep(i : Int, in : S) : Result[S, Seq[A], X] = {
if (i == num) Success(in, result)
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/package.scala b/src/scalap/scala/tools/scalap/scalax/rules/package.scala
index 324e87435e..b1cc18f90b 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/package.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/package.scala
@@ -2,8 +2,8 @@ package scala.tools.scalap
package scalax
package object rules {
- implicit lazy val higherKinds = language.higherKinds
- implicit lazy val postfixOps = language.postfixOps
- implicit lazy val implicitConversions = language.implicitConversions
- implicit lazy val reflectiveCalls = language.reflectiveCalls
+ implicit lazy val higherKinds = scala.language.higherKinds
+ implicit lazy val postfixOps = scala.language.postfixOps
+ implicit lazy val implicitConversions = scala.language.implicitConversions
+ implicit lazy val reflectiveCalls = scala.language.reflectiveCalls
}
diff --git a/src/swing/scala/swing/package.scala b/src/swing/scala/swing/package.scala
index 96530e2e94..45497665d7 100644
--- a/src/swing/scala/swing/package.scala
+++ b/src/swing/scala/swing/package.scala
@@ -14,8 +14,8 @@ package object swing {
type Image = java.awt.Image
type Font = java.awt.Font
- implicit lazy val reflectiveCalls = language.reflectiveCalls
- implicit lazy val implicitConversions = language.implicitConversions
+ implicit lazy val reflectiveCalls = scala.language.reflectiveCalls
+ implicit lazy val implicitConversions = scala.language.implicitConversions
private[swing] def ifNull[A](o: Object, a: A): A = if(o eq null) a else o.asInstanceOf[A]
private[swing] def toOption[A](o: Object): Option[A] = if(o eq null) None else Some(o.asInstanceOf[A])
diff --git a/test/ant/test-basic/build.xml b/test/ant/test-basic/build.xml
new file mode 100644
index 0000000000..acc210806f
--- /dev/null
+++ b/test/ant/test-basic/build.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project name="test-simple" default="compile">
+ <description>Super simple test for Scala</description>
+
+ <target name="init">
+ <!-- Define project CLASSPATH. -->
+ <property name="base.dir" value="../../.."/>
+ <property name="pack.dir" value="${base.dir}/build/pack/"/>
+ <property name="build.dir" value="classes"/>
+ <property name="src.dir" value="src"/>
+ <property name="jvmargs" value=""/>
+ <path id="scala.classpath">
+ <fileset dir="${pack.dir}/lib/"> <include name="*.jar" /> </fileset>
+ </path>
+
+ <!-- Define scala compiler, scaladoc, etc command -->
+ <taskdef resource="scala/tools/ant/antlib.xml">
+ <classpath refid="scala.classpath" />
+ </taskdef>
+ </target>
+
+ <target name="compile" depends="init">
+ <mkdir dir="${build.dir}"/>
+
+ <scalac srcdir="${src.dir}" destdir="${build.dir}"
+ classpathref="scala.classpath" fork="true" target="jvm-1.5"
+ deprecation="no" addparams="-no-specialization"
+ jvmargs="${jvmargs} -XX:+UseConcMarkSweepGC">
+ <include name="**/*.scala"/>
+ </scalac>
+ </target>
+</project>
diff --git a/test/ant/test-basic/src/test-1.scala b/test/ant/test-basic/src/test-1.scala
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/ant/test-basic/src/test-1.scala
diff --git a/test/disabled-windows/script/utf8.bat b/test/disabled-windows/script/utf8.bat
index 8bc5c886f7..73c72569b3 100755
--- a/test/disabled-windows/script/utf8.bat
+++ b/test/disabled-windows/script/utf8.bat
@@ -1,11 +1,11 @@
-::#!
-:: utf8 - <description>.
-
-@echo off
-call scala -nocompdaemon %0 %*
-goto :eof
-::!#
-
-/*Comment Комментарий*/
-Console.println("QWERTY");
-Console.println("ЙЦУКЕН");
+::#!
+:: utf8 - <description>.
+
+@echo off
+call scala -nocompdaemon %0 %*
+goto :eof
+::!#
+
+/*Comment Комментарий*/
+Console.println("QWERTY");
+Console.println("ЙЦУКЕН");
diff --git a/test/disabled/script/fact.bat b/test/disabled/script/fact.bat
index bee0ba25c6..6f02b62a36 100755
--- a/test/disabled/script/fact.bat
+++ b/test/disabled/script/fact.bat
@@ -1,17 +1,17 @@
-::#!
-:: fact - A simple Scala batch file that prints out the factorial
-:: of the argument specified on the command line.
-
-@echo off
-call scala -nocompdaemon %0 %*
-goto :eof
-::!#
-
-
-val x = argv(0).toInt
-
-def fact(x: Int):Int =
- if(x==0) 1
- else x*fact(x-1)
-
-Console.println("fact(" + x + ") = " + fact(x))
+::#!
+:: fact - A simple Scala batch file that prints out the factorial
+:: of the argument specified on the command line.
+
+@echo off
+call scala -nocompdaemon %0 %*
+goto :eof
+::!#
+
+
+val x = argv(0).toInt
+
+def fact(x: Int):Int =
+ if(x==0) 1
+ else x*fact(x-1)
+
+Console.println("fact(" + x + ") = " + fact(x))
diff --git a/test/disabled/script/second.bat b/test/disabled/script/second.bat
index 0d7085954d..222372d543 100755
--- a/test/disabled/script/second.bat
+++ b/test/disabled/script/second.bat
@@ -1,3 +1,3 @@
-@echo off
-
-scala -nocompdaemon -e "println(\"My second argument is \" + args(1))" arg1 arg2
+@echo off
+
+scala -nocompdaemon -e "println(\"My second argument is \" + args(1))" arg1 arg2
diff --git a/test/disabled/script/t1015.bat b/test/disabled/script/t1015.bat
index 7475313d7e..4eddc800a8 100755
--- a/test/disabled/script/t1015.bat
+++ b/test/disabled/script/t1015.bat
@@ -1,12 +1,12 @@
-::#!
-:: t1015 - <description>.
-
-@echo off
-call scala -nocompdaemon %0 %*
-goto :eof
-::!#
-
-case class Test(one : Int, two : Int)
-object Test{
- def apply(one : Int): Test = Test(one, 2);
-}
+::#!
+:: t1015 - <description>.
+
+@echo off
+call scala -nocompdaemon %0 %*
+goto :eof
+::!#
+
+case class Test(one : Int, two : Int)
+object Test{
+ def apply(one : Int): Test = Test(one, 2);
+}
diff --git a/test/disabled/script/t1017.bat b/test/disabled/script/t1017.bat
index 369dbd2aca..0df49c663c 100755
--- a/test/disabled/script/t1017.bat
+++ b/test/disabled/script/t1017.bat
@@ -1,15 +1,15 @@
-::#!
-::# t1017 - <description>.
-
-@echo off
-call scala -nocompdaemon %0 %*
-goto :eof
-::!#
-
-def foo = {
- bar
-}
-
-var x = 1
-
-def bar = 1
+::#!
+::# t1017 - <description>.
+
+@echo off
+call scala -nocompdaemon %0 %*
+goto :eof
+::!#
+
+def foo = {
+ bar
+}
+
+var x = 1
+
+def bar = 1
diff --git a/test/files/buildmanager/t2650_3/t2650_3.check b/test/files/buildmanager/t2650_3/t2650_3.check
index c109800d9c..5c6326d59f 100644
--- a/test/files/buildmanager/t2650_3/t2650_3.check
+++ b/test/files/buildmanager/t2650_3/t2650_3.check
@@ -10,6 +10,5 @@ B.scala:2: error: type mismatch;
found : a.T
(which expands to) Long
required: Int
- possible cause: missing arguments for method or constructor
def x(a: A): Int = a.x
^
diff --git a/test/files/buildmanager/t2650_4/t2650_4.check b/test/files/buildmanager/t2650_4/t2650_4.check
index 89536776bd..a4aeaddfbb 100644
--- a/test/files/buildmanager/t2650_4/t2650_4.check
+++ b/test/files/buildmanager/t2650_4/t2650_4.check
@@ -10,6 +10,5 @@ B.scala:2: error: type mismatch;
found : a.T2
(which expands to) Long
required: Int
- possible cause: missing arguments for method or constructor
def x(a: A): Int = a.x
^
diff --git a/test/files/buildmanager/overloaded_1/A.scala b/test/files/disabled/A.scala
index 33b63b8006..c070faf978 100644
--- a/test/files/buildmanager/overloaded_1/A.scala
+++ b/test/files/disabled/A.scala
@@ -3,7 +3,7 @@ trait As {
override def foo = this /// Shouldn't cause the change
override def foo(act: List[D]) = this
}
-
+
abstract class D{
def foo: D = this
def foo(act: List[D]) = this
diff --git a/test/files/buildmanager/overloaded_1/overloaded_1.check b/test/files/disabled/overloaded_1.check
index 4d643ce6b4..4d643ce6b4 100644
--- a/test/files/buildmanager/overloaded_1/overloaded_1.check
+++ b/test/files/disabled/overloaded_1.check
diff --git a/test/files/buildmanager/overloaded_1/overloaded_1.test b/test/files/disabled/overloaded_1.test
index 392e0d365f..392e0d365f 100644
--- a/test/files/buildmanager/overloaded_1/overloaded_1.test
+++ b/test/files/disabled/overloaded_1.test
diff --git a/test/files/buildmanager/t4245/A.scala b/test/files/disabled/t4245/A.scala
index 7c4efe1b4b..7c4efe1b4b 100644
--- a/test/files/buildmanager/t4245/A.scala
+++ b/test/files/disabled/t4245/A.scala
diff --git a/test/files/buildmanager/t4245/t4245.check b/test/files/disabled/t4245/t4245.check
index 3d3898c671..3d3898c671 100644
--- a/test/files/buildmanager/t4245/t4245.check
+++ b/test/files/disabled/t4245/t4245.check
diff --git a/test/files/buildmanager/t4245/t4245.test b/test/files/disabled/t4245/t4245.test
index 392e0d365f..392e0d365f 100644
--- a/test/files/buildmanager/t4245/t4245.test
+++ b/test/files/disabled/t4245/t4245.test
diff --git a/test/files/jvm/actmig-PinS.scala b/test/files/jvm/actmig-PinS.scala
index 39f8f04b3b..3f07fab12e 100644
--- a/test/files/jvm/actmig-PinS.scala
+++ b/test/files/jvm/actmig-PinS.scala
@@ -1,5 +1,9 @@
+/**
+ * NOTE: Code snippets from this test are included in the Actor Migration Guide. In case you change
+ * code in these tests prior to the 2.10.0 release, please notify @vjovanov.
+ */
import scala.actors._
-import scala.concurrent.util.duration._
+import scala.concurrent.duration._
import scala.concurrent.{ Promise, Await }
import scala.actors.Actor._
diff --git a/test/files/jvm/actmig-PinS_1.scala b/test/files/jvm/actmig-PinS_1.scala
index 1fb50567b9..876688ca75 100644
--- a/test/files/jvm/actmig-PinS_1.scala
+++ b/test/files/jvm/actmig-PinS_1.scala
@@ -1,5 +1,10 @@
+/**
+ * NOTE: Code snippets from this test are included in the Actor Migration Guide. In case you change
+ * code in these tests prior to the 2.10.0 release, please notify @vjovanov.
+ */
import scala.actors._
-import scala.concurrent.util.duration._
+import scala.actors.migration._
+import scala.concurrent.duration._
import scala.concurrent.{ Promise, Await }
object SillyActor {
diff --git a/test/files/jvm/actmig-PinS_2.scala b/test/files/jvm/actmig-PinS_2.scala
index 46277efd43..7d12578f71 100644
--- a/test/files/jvm/actmig-PinS_2.scala
+++ b/test/files/jvm/actmig-PinS_2.scala
@@ -1,5 +1,10 @@
-import scala.actors.{ MigrationSystem, StashingActor, ActorRef, Props, Exit }
-import scala.concurrent.util.duration._
+/**
+ * NOTE: Code snippets from this test are included in the Actor Migration Guide. In case you change
+ * code in these tests prior to the 2.10.0 release, please notify @vjovanov.
+ */
+import scala.actors._
+import scala.actors.migration._
+import scala.concurrent.duration._
import scala.concurrent.{ Promise, Await }
object SillyActor {
diff --git a/test/files/jvm/actmig-PinS_3.scala b/test/files/jvm/actmig-PinS_3.scala
index 321e99b1c2..c2943008b0 100644
--- a/test/files/jvm/actmig-PinS_3.scala
+++ b/test/files/jvm/actmig-PinS_3.scala
@@ -1,5 +1,10 @@
-import scala.actors.{ MigrationSystem, StashingActor, ActorRef, Terminated, Props }
-import scala.concurrent.util.duration._
+/**
+ * NOTE: Code snippets from this test are included in the Actor Migration Guide. In case you change
+ * code in these tests prior to the 2.10.0 release, please notify @vjovanov.
+ */
+import scala.actors._
+import scala.actors.migration._
+import scala.concurrent.duration._
import scala.concurrent.{ Promise, Await }
@@ -158,4 +163,4 @@ object Test extends App {
stash(m)
}
}, "default-stash-dispatcher"))
-} \ No newline at end of file
+}
diff --git a/test/files/jvm/actmig-hierarchy.scala b/test/files/jvm/actmig-hierarchy.scala
new file mode 100644
index 0000000000..17a44fda7a
--- /dev/null
+++ b/test/files/jvm/actmig-hierarchy.scala
@@ -0,0 +1,47 @@
+/**
+ * NOTE: Code snippets from this test are included in the Actor Migration Guide. In case you change
+ * code in these tests prior to the 2.10.0 release, please notify @vjovanov.
+ */
+import scala.actors._
+
+
+class ReactorActor extends Reactor[String] {
+ def act() {
+ var cond = true
+ loopWhile(cond) {
+ react {
+ case x if x == "hello1" => println("hello")
+ case "exit" => cond = false
+ }
+ }
+ }
+}
+
+class ReplyActor extends ReplyReactor {
+ def act() {
+ var cond = true
+ loopWhile(cond) {
+ react {
+ case "hello" => println("hello")
+ case "exit" => cond = false;
+ }
+ }
+ }
+}
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ val reactorActor = new ReactorActor
+ val replyActor = new ReplyActor
+ reactorActor.start()
+ replyActor.start()
+
+ reactorActor ! "hello1"
+ replyActor ! "hello"
+
+ reactorActor ! "exit"
+ replyActor ! "exit"
+ }
+} \ No newline at end of file
diff --git a/test/files/jvm/actmig-hierarchy_1.scala b/test/files/jvm/actmig-hierarchy_1.scala
new file mode 100644
index 0000000000..14f03c9d48
--- /dev/null
+++ b/test/files/jvm/actmig-hierarchy_1.scala
@@ -0,0 +1,45 @@
+/**
+ * NOTE: Code snippets from this test are included in the Actor Migration Guide. In case you change
+ * code in these tests prior to the 2.10.0 release, please notify @vjovanov.
+ */
+import scala.actors._
+
+class ReactorActor extends Actor {
+ def act() {
+ var cond = true
+ loopWhile(cond) {
+ react {
+ case x: String if x == "hello1" => println("hello")
+ case "exit" => cond = false
+ }
+ }
+ }
+}
+
+class ReplyActor extends Actor {
+ def act() {
+ var cond = true
+ loopWhile(cond) {
+ react {
+ case "hello" => println("hello")
+ case "exit" => cond = false;
+ }
+ }
+ }
+}
+
+object Test {
+
+ def main(args: Array[String]) {
+ val reactorActor = new ReactorActor
+ val replyActor = new ReplyActor
+ reactorActor.start()
+ replyActor.start()
+
+ reactorActor ! "hello1"
+ replyActor ! "hello"
+
+ reactorActor ! "exit"
+ replyActor ! "exit"
+ }
+} \ No newline at end of file
diff --git a/test/files/jvm/actmig-instantiation.scala b/test/files/jvm/actmig-instantiation.scala
new file mode 100644
index 0000000000..d54dff9558
--- /dev/null
+++ b/test/files/jvm/actmig-instantiation.scala
@@ -0,0 +1,96 @@
+/**
+ * NOTE: Code snippets from this test are included in the Actor Migration Guide. In case you change
+ * code in these tests prior to the 2.10.0 release, please notify @vjovanov.
+ */
+import scala.actors.migration.MigrationSystem._
+import scala.actors.migration._
+import scala.actors.Actor._
+import scala.actors._
+import java.util.concurrent.{ TimeUnit, CountDownLatch }
+import scala.collection.mutable.ArrayBuffer
+
+class TestStashingActor extends StashingActor {
+
+ def receive = { case v: Int => Test.append(v); Test.latch.countDown() }
+
+}
+
+object Test {
+ val NUMBER_OF_TESTS = 5
+
+ // used for sorting non-deterministic output
+ val buff = ArrayBuffer[Int](0)
+ val latch = new CountDownLatch(NUMBER_OF_TESTS)
+ val toStop = ArrayBuffer[ActorRef]()
+
+ def append(v: Int) = synchronized {
+ buff += v
+ }
+
+ def main(args: Array[String]) = {
+ // plain scala actor
+ val a1 = actor {
+ react { case v: Int => Test.append(v); Test.latch.countDown() }
+ }
+ a1 ! 100
+
+ // simple instantiation
+ val a2 = MigrationSystem.actorOf(Props(() => new TestStashingActor, "akka.actor.default-stash-dispatcher"))
+ a2 ! 200
+ toStop += a2
+
+ // actorOf with a Scala actor
+ val a3 = MigrationSystem.actorOf(Props(() => actor {
+ react { case v: Int => Test.append(v); Test.latch.countDown() }
+ }, "akka.actor.default-stash-dispatcher"))
+ a3 ! 300
+
+ // using the manifest
+ val a4 = MigrationSystem.actorOf(Props(() => new TestStashingActor, "akka.actor.default-stash-dispatcher"))
+ a4 ! 400
+ toStop += a4
+
+ // deterministic part of a test
+ // creation without actorOf
+ try {
+ val a3 = new TestStashingActor
+ a3 ! -1
+ } catch {
+ case e => println("OK error: " + e)
+ }
+
+ // actorOf double creation
+ try {
+ val a3 = MigrationSystem.actorOf(Props(() => {
+ new TestStashingActor
+ new TestStashingActor
+ }, "akka.actor.default-stash-dispatcher"))
+ a3 ! -1
+ } catch {
+ case e => println("OK error: " + e)
+ }
+
+ // actorOf nesting
+ try {
+ val a5 = MigrationSystem.actorOf(Props(() => {
+ val a6 = MigrationSystem.actorOf(Props(() => new TestStashingActor, "akka.actor.default-stash-dispatcher"))
+ toStop += a6
+ new TestStashingActor
+ }, "akka.actor.default-stash-dispatcher"))
+
+ a5 ! 500
+ toStop += a5
+ } catch {
+ case e => println("Should not throw an exception: " + e)
+ }
+
+ // output
+ latch.await(5, TimeUnit.SECONDS)
+ if (latch.getCount() > 0) {
+ println("Error: Tasks have not finished!!!")
+ }
+
+ buff.sorted.foreach(println)
+ toStop.foreach(_ ! PoisonPill)
+ }
+} \ No newline at end of file
diff --git a/test/files/jvm/actmig-loop-react.check b/test/files/jvm/actmig-loop-react.check
index 54cbe942c0..2474cbe71b 100644
--- a/test/files/jvm/actmig-loop-react.check
+++ b/test/files/jvm/actmig-loop-react.check
@@ -13,3 +13,4 @@ after react
do task 1
do string I am a String
do task 42
+after react
diff --git a/test/files/jvm/actmig-loop-react.scala b/test/files/jvm/actmig-loop-react.scala
new file mode 100644
index 0000000000..7f4c6f96dc
--- /dev/null
+++ b/test/files/jvm/actmig-loop-react.scala
@@ -0,0 +1,196 @@
+/**
+ * NOTE: Code snippets from this test are included in the Actor Migration Guide. In case you change
+ * code in these tests prior to the 2.10.0 release, please notify @vjovanov.
+ */
+import scala.actors.migration.MigrationSystem._
+import scala.actors.Actor._
+import scala.actors._
+import scala.actors.migration._
+import java.util.concurrent.{ TimeUnit, CountDownLatch }
+import scala.collection.mutable.ArrayBuffer
+import scala.concurrent.duration._
+import scala.concurrent.{ Promise, Await }
+
+object Test {
+ val finishedLWCR, finishedTNR, finishedEH = Promise[Boolean]
+ val finishedLWCR1, finishedTNR1, finishedEH1 = Promise[Boolean]
+
+ def testLoopWithConditionReact() = {
+ // Snippet showing composition of receives
+ // Loop with Condition Snippet - before
+ val myActor = actor {
+ var c = true
+ loopWhile(c) {
+ react {
+ case x: Int =>
+ // do task
+ println("do task")
+ if (x == 42) {
+ c = false
+ finishedLWCR1.success(true)
+ }
+ }
+ }
+ }
+
+ myActor.start()
+ myActor ! 1
+ myActor ! 42
+
+ Await.ready(finishedLWCR1.future, 5 seconds)
+
+ // Loop with Condition Snippet - migrated
+ val myAkkaActor = MigrationSystem.actorOf(Props(() => new StashingActor {
+
+ def receive = {
+ case x: Int =>
+ // do task
+ println("do task")
+ if (x == 42) {
+ finishedLWCR.success(true)
+ context.stop(self)
+ }
+ }
+ }, "default-stashing-dispatcher"))
+ myAkkaActor ! 1
+ myAkkaActor ! 42
+ }
+
+ def testNestedReact() = {
+ // Snippet showing composition of receives
+ // Nested React Snippet - before
+ val myActor = actor {
+ var c = true
+ loopWhile(c) {
+ react {
+ case x: Int =>
+ // do task
+ println("do task " + x)
+ if (x == 42) {
+ c = false
+ } else {
+ react {
+ case y: String =>
+ println("do string " + y)
+ }
+ }
+ println("after react")
+ finishedTNR1.success(true)
+ }
+ }
+ }
+ myActor.start()
+
+ myActor ! 1
+ myActor ! "I am a String"
+ myActor ! 42
+
+ Await.ready(finishedTNR1.future, 5 seconds)
+
+ // Nested React Snippet - migrated
+ val myAkkaActor = MigrationSystem.actorOf(Props(() => new StashingActor {
+
+ def receive = {
+ case x: Int =>
+ // do task
+ println("do task " + x)
+ if (x == 42) {
+ println("after react")
+ finishedTNR.success(true)
+ context.stop(self)
+ } else
+ context.become(({
+ case y: String =>
+ println("do string " + y)
+ }: Receive).andThen(x => {
+ unstashAll()
+ context.unbecome()
+ }).orElse { case x => stash() })
+ }
+ }, "default-stashing-dispatcher"))
+
+ myAkkaActor ! 1
+ myAkkaActor ! "I am a String"
+ myAkkaActor ! 42
+
+ }
+
+ def exceptionHandling() = {
+ // Stashing actor with act and exception handler
+ val myActor = MigrationSystem.actorOf(Props(() => new StashingActor {
+
+ def receive = { case _ => println("Dummy method.") }
+ override def act() = {
+ loop {
+ react {
+ case "fail" =>
+ throw new Exception("failed")
+ case "work" =>
+ println("working")
+ case "die" =>
+ finishedEH1.success(true)
+ exit()
+ }
+ }
+ }
+
+ override def exceptionHandler = {
+ case x: Exception => println("scala got exception")
+ }
+
+ }, "default-stashing-dispatcher"))
+
+ myActor ! "work"
+ myActor ! "fail"
+ myActor ! "die"
+
+ Await.ready(finishedEH1.future, 5 seconds)
+ // Stashing actor in Akka style
+ val myAkkaActor = MigrationSystem.actorOf(Props(() => new StashingActor {
+ def receive = PFCatch({
+ case "fail" =>
+ throw new Exception("failed")
+ case "work" =>
+ println("working")
+ case "die" =>
+ finishedEH.success(true)
+ context.stop(self)
+ }, { case x: Exception => println("akka got exception") })
+ }, "default-stashing-dispatcher"))
+
+ myAkkaActor ! "work"
+ myAkkaActor ! "fail"
+ myAkkaActor ! "die"
+ }
+
+ def main(args: Array[String]) = {
+ testLoopWithConditionReact()
+ Await.ready(finishedLWCR.future, 5 seconds)
+ exceptionHandling()
+ Await.ready(finishedEH.future, 5 seconds)
+ testNestedReact()
+ Await.ready(finishedTNR.future, 5 seconds)
+ }
+
+}
+
+// As per Jim Mcbeath's blog (http://jim-mcbeath.blogspot.com/2008/07/actor-exceptions.html)
+class PFCatch(f: PartialFunction[Any, Unit],
+ handler: PartialFunction[Exception, Unit])
+ extends PartialFunction[Any, Unit] {
+
+ def apply(x: Any) = {
+ try {
+ f(x)
+ } catch {
+ case e: Exception if handler.isDefinedAt(e) => handler(e)
+ }
+ }
+
+ def isDefinedAt(x: Any) = f.isDefinedAt(x)
+}
+
+object PFCatch {
+ def apply(f: PartialFunction[Any, Unit],
+ handler: PartialFunction[Exception, Unit]) = new PFCatch(f, handler)
+}
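
A hypothetical usage of the PFCatch helper defined above: wrap a receive-style partial function so that exceptions are routed to a handler instead of escaping (the messages are made up for illustration):

    val guarded = PFCatch(
      {
        case "boom" => throw new Exception("boom")
        case msg    => println("got " + msg)
      },
      { case e: Exception => println("caught: " + e.getMessage) }
    )

    guarded("hello") // prints "got hello"
    guarded("boom")  // prints "caught: boom"
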
diff --git a/test/files/jvm/actmig-public-methods.check b/test/files/jvm/actmig-public-methods.check
index bb6530c926..c861c90e63 100644
--- a/test/files/jvm/actmig-public-methods.check
+++ b/test/files/jvm/actmig-public-methods.check
@@ -1,6 +1,6 @@
None
Some(bang qmark after 1)
bang
+bang bang in the future after 0
bang qmark after 0
-bang qmark in future after 0
-typed bang qmark in future after 0
+typed bang bang in the future after 0
diff --git a/test/files/jvm/actmig-public-methods.scala b/test/files/jvm/actmig-public-methods.scala
new file mode 100644
index 0000000000..58d7a1a9d4
--- /dev/null
+++ b/test/files/jvm/actmig-public-methods.scala
@@ -0,0 +1,74 @@
+/**
+ * NOTE: Code snippets from this test are included in the Actor Migration Guide. In case you change
+ * code in these tests prior to the 2.10.0 release, please notify @vjovanov.
+ */
+import scala.collection.mutable.ArrayBuffer
+import scala.actors.Actor._
+import scala.actors._
+import scala.actors.migration.MigrationSystem
+import scala.util.continuations._
+import java.util.concurrent.{ TimeUnit, CountDownLatch }
+
+object Test {
+ val NUMBER_OF_TESTS = 6
+
+ // used for sorting non-deterministic output
+ val buff = ArrayBuffer[String]()
+ val latch = new CountDownLatch(NUMBER_OF_TESTS)
+ val toStop = ArrayBuffer[Actor]()
+
+ def append(v: String) = synchronized {
+ buff += v
+ }
+
+ def main(args: Array[String]) = {
+
+ val respActor = actor {
+ loop {
+ react {
+ case (x: String, time: Long) =>
+ Thread.sleep(time)
+ reply(x + " after " + time)
+ case str: String =>
+ append(str)
+ latch.countDown()
+ case _ => exit()
+ }
+ }
+ }
+
+ toStop += respActor
+
+ respActor ! ("bang")
+
+ val res1 = respActor !? (("bang qmark", 0L))
+ append(res1.toString)
+ latch.countDown()
+
+ val res2 = respActor !? (5000, ("bang qmark", 1L))
+ append(res2.toString)
+ latch.countDown()
+
+ // this one should time out
+ val res21 = respActor !? (1, ("bang qmark", 5000L))
+ append(res21.toString)
+ latch.countDown()
+
+ val fut1 = respActor !! (("bang bang in the future", 0L))
+ append(fut1().toString())
+ latch.countDown()
+
+ val fut2 = respActor !! (("typed bang bang in the future", 0L), { case x: String => x })
+ append(fut2())
+ latch.countDown()
+
+ // output
+ latch.await(10, TimeUnit.SECONDS)
+ if (latch.getCount() > 0) {
+ println("Error: Tasks have not finished!!!")
+ }
+
+ buff.sorted.foreach(println)
+ toStop.foreach(_ ! 'stop)
+ }
+} \ No newline at end of file
diff --git a/test/files/jvm/actmig-public-methods_1.check b/test/files/jvm/actmig-public-methods_1.check
index bb6530c926..c861c90e63 100644
--- a/test/files/jvm/actmig-public-methods_1.check
+++ b/test/files/jvm/actmig-public-methods_1.check
@@ -1,6 +1,6 @@
None
Some(bang qmark after 1)
bang
+bang bang in the future after 0
bang qmark after 0
-bang qmark in future after 0
-typed bang qmark in future after 0
+typed bang bang in the future after 0
diff --git a/test/files/jvm/actmig-public-methods_1.scala b/test/files/jvm/actmig-public-methods_1.scala
index 7e5bc24210..15516a5d51 100644
--- a/test/files/jvm/actmig-public-methods_1.scala
+++ b/test/files/jvm/actmig-public-methods_1.scala
@@ -1,10 +1,18 @@
+/**
+ * NOTE: Code snippets from this test are included in the Actor Migration Guide. In case you change
+ * code in these tests prior to the 2.10.0 release, please notify @vjovanov.
+ */
import scala.collection.mutable.ArrayBuffer
import scala.actors.Actor._
import scala.actors._
+import scala.actors.migration._
import scala.util._
+import scala.concurrent._
+import scala.concurrent.duration._
import java.util.concurrent.{ TimeUnit, CountDownLatch }
-import scala.concurrent.util.Duration
-import scala.actors.pattern._
+import scala.concurrent.duration._
+import scala.actors.migration.pattern._
+import scala.concurrent.ExecutionContext.Implicits.global
object Test {
val NUMBER_OF_TESTS = 6
@@ -39,45 +47,53 @@ object Test {
respActor ! "bang"
- implicit val timeout = Timeout(Duration(500, TimeUnit.MILLISECONDS))
- val msg = ("bang qmark", 0L)
- val res1 = respActor.?(msg)(Timeout(Duration.Inf))
- append(res1().toString)
- latch.countDown()
-
- val msg1 = ("bang qmark", 1L)
- val res2 = respActor.?(msg1)(Timeout(Duration(500, TimeUnit.MILLISECONDS)))
- append((res2() match {
- case x: AskTimeoutException => None
- case v => Some(v)
- }).toString)
- latch.countDown()
-
- // this one should time out
- val msg11 = ("bang qmark", 500L)
- val res21 = respActor.?(msg11)(Timeout(Duration(1, TimeUnit.MILLISECONDS)))
- append((res21() match {
- case x: AskTimeoutException => None
- case v => Some(v)
- }).toString)
- latch.countDown()
-
- val msg2 = ("bang qmark in future", 0L)
- val fut1 = respActor.?(msg2)(Duration.Inf)
- append(fut1().toString())
- latch.countDown()
-
- val handler: PartialFunction[Any, String] = {
- case x: String => x.toString
+ {
+ val msg = ("bang qmark", 0L)
+ val res = respActor.?(msg)(Timeout(Duration.Inf))
+ append(Await.result(res, Duration.Inf).toString)
+ latch.countDown()
}
- val msg3 = ("typed bang qmark in future", 0L)
- val fut2 = (respActor.?(msg3)(Duration.Inf))
- append(Futures.future { handler.apply(fut2()) }().toString)
- latch.countDown()
+ {
+ val msg = ("bang qmark", 1L)
+ val res = respActor.?(msg)(Timeout(5 seconds))
+
+ val promise = Promise[Option[Any]]()
+ res.onComplete(v => promise.success(v.toOption))
+ append(Await.result(promise.future, Duration.Inf).toString)
+
+ latch.countDown()
+ }
+
+ {
+ val msg = ("bang qmark", 5000L)
+ val res = respActor.?(msg)(Timeout(1 millisecond))
+ val promise = Promise[Option[Any]]()
+ res.onComplete(v => promise.success(v.toOption))
+ append(Await.result(promise.future, Duration.Inf).toString)
+ latch.countDown()
+ }
+
+ {
+ val msg = ("bang bang in the future", 0L)
+ val fut = respActor.?(msg)(Timeout(Duration.Inf))
+ append(Await.result(fut, Duration.Inf).toString)
+ latch.countDown()
+ }
+
+ {
+ val handler: PartialFunction[Any, String] = {
+ case x: String => x
+ }
+
+ val msg = ("typed bang bang in the future", 0L)
+ val fut = (respActor.?(msg)(Timeout(Duration.Inf)))
+ append((Await.result(fut.map(handler), Duration.Inf)).toString)
+ latch.countDown()
+ }
// output
- latch.await(200, TimeUnit.MILLISECONDS)
+ latch.await(10, TimeUnit.SECONDS)
if (latch.getCount() > 0) {
println("Error: Tasks have not finished!!!")
}
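
The migrated blocks above all follow one shape: an ask (`?`) with an explicit Timeout yields a Future, whose result is optionally adapted through a Promise and then awaited with a bound. A generic sketch of that shape using only the standard scala.concurrent API; the stand-in Future and values are illustrative:

    import scala.concurrent.{ Await, Future, Promise }
    import scala.concurrent.duration._
    import scala.concurrent.ExecutionContext.Implicits.global

    val reply: Future[Any] = Future { "bang qmark after 0" } // stands in for respActor.?(msg)(timeout)

    val promise = Promise[Option[Any]]()
    reply.onComplete(result => promise.success(result.toOption))

    println(Await.result(promise.future, 5.seconds)) // Some(bang qmark after 0)
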
diff --git a/test/files/jvm/actmig-react-receive.scala b/test/files/jvm/actmig-react-receive.scala
index 8464a2af79..6adeac8b52 100644
--- a/test/files/jvm/actmig-react-receive.scala
+++ b/test/files/jvm/actmig-react-receive.scala
@@ -1,9 +1,14 @@
-import scala.actors.MigrationSystem._
+/**
+ * NOTE: Code snippets from this test are included in the Actor Migration Guide. In case you change
+ * code in these tests prior to the 2.10.0 release, please notify @vjovanov.
+ */
+import scala.actors.migration.MigrationSystem._
import scala.actors.Actor._
-import scala.actors.{ Actor, StashingActor, ActorRef, Props, MigrationSystem, PoisonPill }
+import scala.actors._
+import scala.actors.migration._
import java.util.concurrent.{ TimeUnit, CountDownLatch }
import scala.collection.mutable.ArrayBuffer
-import scala.concurrent.util.duration._
+import scala.concurrent.duration._
import scala.concurrent.{ Promise, Await }
object Test {
diff --git a/test/files/jvm/actmig-react-within.check b/test/files/jvm/actmig-react-within.check
new file mode 100644
index 0000000000..57798dbefb
--- /dev/null
+++ b/test/files/jvm/actmig-react-within.check
@@ -0,0 +1,2 @@
+received
+received
diff --git a/test/files/jvm/actmig-react-within.scala b/test/files/jvm/actmig-react-within.scala
new file mode 100644
index 0000000000..43350ef120
--- /dev/null
+++ b/test/files/jvm/actmig-react-within.scala
@@ -0,0 +1,48 @@
+/**
+ * NOTE: Code snippets from this test are included in the Actor Migration Guide. In case you change
+ * code in these tests prior to the 2.10.0 release, please notify @vjovanov.
+ */
+import scala.actors.migration.MigrationSystem._
+import scala.actors.Actor._
+import scala.actors._
+import scala.actors.migration._
+import java.util.concurrent.{ TimeUnit, CountDownLatch }
+import scala.collection.mutable.ArrayBuffer
+import scala.concurrent.duration._
+import scala.concurrent.{ Promise, Await }
+
+object Test {
+ val finished = Promise[Boolean]
+
+ def testReactWithin() = {
+ val sActor = actor {
+ loop {
+ reactWithin(1) {
+ case scala.actors.TIMEOUT =>
+ println("received")
+ exit()
+ case _ =>
+ println("Should not occur.")
+ }
+ }
+ }
+
+ val myActor = MigrationSystem.actorOf(Props(() => new StashingActor {
+ context.setReceiveTimeout(1 millisecond)
+ def receive = {
+ case ReceiveTimeout =>
+ println("received")
+ finished.success(true)
+ context.stop(self)
+ case _ =>
+ println("Should not occur.")
+ }
+ }, "default-stashing-dispatcher"))
+ }
+
+ def main(args: Array[String]) = {
+ testReactWithin()
+ Await.ready(finished.future, 5 seconds)
+ }
+
+}
diff --git a/test/files/jvm/actmig-receive.check b/test/files/jvm/actmig-receive.check
new file mode 100644
index 0000000000..30886140e1
--- /dev/null
+++ b/test/files/jvm/actmig-receive.check
@@ -0,0 +1,27 @@
+Original
+do before
+receive 1
+do in between
+receive 1
+do after
+Transformed
+do before
+receive 1
+do in between
+receive 1
+do after
+Test Loop Receive
+Original
+do before body
+receive 1
+do after receive
+do before body
+do after receive
+after loop
+Transformed
+do before body
+receive 1
+do after receive
+do before body
+do after receive
+after loop
diff --git a/test/files/jvm/actmig-receive.scala b/test/files/jvm/actmig-receive.scala
new file mode 100644
index 0000000000..03dc1be63b
--- /dev/null
+++ b/test/files/jvm/actmig-receive.scala
@@ -0,0 +1,120 @@
+/**
+ * NOTE: Code snippets from this test are included in the Actor Migration Guide. In case you change
+ * code in these tests prior to the 2.10.0 release, please notify @vjovanov.
+ */
+import scala.actors.migration.MigrationSystem._
+import scala.actors.Actor._
+import scala.actors._
+import scala.actors.migration._
+import java.util.concurrent.{ TimeUnit, CountDownLatch }
+import scala.collection.mutable.ArrayBuffer
+import scala.concurrent.duration._
+import scala.concurrent.{ Promise, Await }
+
+object Test {
+ val finishedSingle, finishedSingle1, finishedLoop, finishedLoop1 = Promise[Boolean]
+
+ def testDoubleReceive() = {
+ println("Original")
+ // Snippet that shows how to get rid of receive calls in Scala Actors.
+ // This snippet is used in the Actors Migration Kit.
+ val myActor = actor {
+ println("do before")
+ receive {
+ case "hello" =>
+ println("receive 1")
+ }
+ println("do in between")
+ receive {
+ case "hello" =>
+ println("receive 1")
+ }
+ println("do after")
+ finishedSingle.success(true)
+ }
+
+ myActor ! "hello"
+ myActor ! "hello"
+
+ Await.ready(finishedSingle.future, 5 seconds)
+ println("Transformed")
+ val myActorReact = actor {
+ println("do before")
+ react (({
+ case "hello" =>
+ println("receive 1")
+ }: PartialFunction[Any, Unit]).andThen { x =>
+ println("do in between")
+ react (({
+ case "hello" =>
+ println("receive 1")
+ }: PartialFunction[Any, Unit]).andThen { x =>
+ println("do after")
+ finishedSingle1.success(true)
+ })
+ })
+ }
+
+ myActorReact ! "hello"
+ myActorReact ! "hello"
+
+ Await.ready(finishedSingle1.future, 5 seconds)
+ }
+
+ def testLoopReceive() = {
+ println("Test Loop Receive")
+ // Snippet that shows how to get rid of receive calls in loops.
+ // This snippet is used in the Actors Migration Kit.
+ println("Original")
+ val myActor = actor {
+ var c = true
+ while (c) {
+ println("do before body")
+ receive {
+ case "hello" =>
+ println("receive 1")
+ case "exit" =>
+ c = false
+ }
+ println("do after receive")
+ }
+ println("after loop")
+ finishedLoop.success(true)
+ }
+
+ myActor ! "hello"
+ myActor ! "exit"
+ Await.ready(finishedLoop.future, 5 seconds)
+ println("Transformed")
+
+ val myActorReact = actor {
+ var c = true
+ loopWhile(c) {
+ println("do before body")
+ react (({
+ case "hello" =>
+ println("receive 1")
+ case "exit" =>
+ c = false
+ }: PartialFunction[Any, Unit]).andThen { x =>
+ println("do after receive")
+ if (c == false) {
+ println("after loop")
+ finishedLoop1.success(true)
+ }
+ })
+ }
+ }
+
+ myActorReact ! "hello"
+ myActorReact ! "exit"
+
+ Await.ready(finishedLoop1.future, 5 seconds)
+ }
+
+ def main(args: Array[String]) = {
+ testDoubleReceive()
+ testLoopReceive()
+ }
+
+}
diff --git a/test/files/jvm/duration-java.check b/test/files/jvm/duration-java.check
new file mode 100644
index 0000000000..49d06fbe93
--- /dev/null
+++ b/test/files/jvm/duration-java.check
@@ -0,0 +1,364 @@
+ 0.0 nanoseconds => 0 days
+ 1.0 nanoseconds => 1 nanosecond
+ 7.0 nanoseconds => 7 nanoseconds
+ 10.0 nanoseconds => 10 nanoseconds
+ 12.0 nanoseconds => 12 nanoseconds
+ 24.0 nanoseconds => 24 nanoseconds
+ 30.0 nanoseconds => 30 nanoseconds
+ 49.0 nanoseconds => 49 nanoseconds
+ 60.0 nanoseconds => 60 nanoseconds
+ 70.0 nanoseconds => 70 nanoseconds
+ 84.0 nanoseconds => 84 nanoseconds
+ 100.0 nanoseconds => 100 nanoseconds
+ 120.0 nanoseconds => 120 nanoseconds
+ 144.0 nanoseconds => 144 nanoseconds
+ 168.0 nanoseconds => 168 nanoseconds
+ 210.0 nanoseconds => 210 nanoseconds
+ 240.0 nanoseconds => 240 nanoseconds
+ 288.0 nanoseconds => 288 nanoseconds
+ 300.0 nanoseconds => 300 nanoseconds
+ 360.0 nanoseconds => 360 nanoseconds
+ 420.0 nanoseconds => 420 nanoseconds
+ 576.0 nanoseconds => 576 nanoseconds
+ 600.0 nanoseconds => 600 nanoseconds
+ 700.0 nanoseconds => 700 nanoseconds
+ 720.0 nanoseconds => 720 nanoseconds
+ 900.0 nanoseconds => 900 nanoseconds
+ 1000.0 nanoseconds => 1 microsecond
+ 1200.0 nanoseconds => 1200 nanoseconds
+ 1440.0 nanoseconds => 1440 nanoseconds
+ 1800.0 nanoseconds => 1800 nanoseconds
+ 2400.0 nanoseconds => 2400 nanoseconds
+ 3000.0 nanoseconds => 3 microseconds
+ 3600.0 nanoseconds => 3600 nanoseconds
+ 6000.0 nanoseconds => 6 microseconds
+ 7000.0 nanoseconds => 7 microseconds
+ 10000.0 nanoseconds => 10 microseconds
+ 12000.0 nanoseconds => 12 microseconds
+ 24000.0 nanoseconds => 24 microseconds
+ 30000.0 nanoseconds => 30 microseconds
+ 60000.0 nanoseconds => 60 microseconds
+ 100000.0 nanoseconds => 100 microseconds
+ 1000000.0 nanoseconds => 1 millisecond
+ 7000000.0 nanoseconds => 7 milliseconds
+ 1.0E7 nanoseconds => 10 milliseconds
+ 1.2E7 nanoseconds => 12 milliseconds
+ 2.4E7 nanoseconds => 24 milliseconds
+ 3.0E7 nanoseconds => 30 milliseconds
+ 6.0E7 nanoseconds => 60 milliseconds
+ 1.0E8 nanoseconds => 100 milliseconds
+ 1.0E9 nanoseconds => 1 second
+ 1.0E12 nanoseconds => 1000 seconds
+ 0.0 microseconds => 0 days
+ 1.0 microseconds => 1 microsecond
+ 7.0 microseconds => 7 microseconds
+ 10.0 microseconds => 10 microseconds
+ 12.0 microseconds => 12 microseconds
+ 24.0 microseconds => 24 microseconds
+ 30.0 microseconds => 30 microseconds
+ 49.0 microseconds => 49 microseconds
+ 60.0 microseconds => 60 microseconds
+ 70.0 microseconds => 70 microseconds
+ 84.0 microseconds => 84 microseconds
+ 100.0 microseconds => 100 microseconds
+ 120.0 microseconds => 120 microseconds
+ 144.0 microseconds => 144 microseconds
+ 168.0 microseconds => 168 microseconds
+ 210.0 microseconds => 210 microseconds
+ 240.0 microseconds => 240 microseconds
+ 288.0 microseconds => 288 microseconds
+ 300.0 microseconds => 300 microseconds
+ 360.0 microseconds => 360 microseconds
+ 420.0 microseconds => 420 microseconds
+ 576.0 microseconds => 576 microseconds
+ 600.0 microseconds => 600 microseconds
+ 700.0 microseconds => 700 microseconds
+ 720.0 microseconds => 720 microseconds
+ 900.0 microseconds => 900 microseconds
+ 1000.0 microseconds => 1 millisecond
+ 1200.0 microseconds => 1200 microseconds
+ 1440.0 microseconds => 1440 microseconds
+ 1800.0 microseconds => 1800 microseconds
+ 2400.0 microseconds => 2400 microseconds
+ 3000.0 microseconds => 3 milliseconds
+ 3600.0 microseconds => 3600 microseconds
+ 6000.0 microseconds => 6 milliseconds
+ 7000.0 microseconds => 7 milliseconds
+ 10000.0 microseconds => 10 milliseconds
+ 12000.0 microseconds => 12 milliseconds
+ 24000.0 microseconds => 24 milliseconds
+ 30000.0 microseconds => 30 milliseconds
+ 60000.0 microseconds => 60 milliseconds
+ 100000.0 microseconds => 100 milliseconds
+ 1000000.0 microseconds => 1 second
+ 7000000.0 microseconds => 7 seconds
+ 1.0E7 microseconds => 10 seconds
+ 1.2E7 microseconds => 12 seconds
+ 2.4E7 microseconds => 24 seconds
+ 3.0E7 microseconds => 30 seconds
+ 6.0E7 microseconds => 1 minute
+ 1.0E8 microseconds => 100 seconds
+ 1.0E9 microseconds => 1000 seconds
+ 1.0E12 microseconds => 1000000 seconds
+ 0.0 milliseconds => 0 days
+ 1.0 milliseconds => 1 millisecond
+ 7.0 milliseconds => 7 milliseconds
+ 10.0 milliseconds => 10 milliseconds
+ 12.0 milliseconds => 12 milliseconds
+ 24.0 milliseconds => 24 milliseconds
+ 30.0 milliseconds => 30 milliseconds
+ 49.0 milliseconds => 49 milliseconds
+ 60.0 milliseconds => 60 milliseconds
+ 70.0 milliseconds => 70 milliseconds
+ 84.0 milliseconds => 84 milliseconds
+ 100.0 milliseconds => 100 milliseconds
+ 120.0 milliseconds => 120 milliseconds
+ 144.0 milliseconds => 144 milliseconds
+ 168.0 milliseconds => 168 milliseconds
+ 210.0 milliseconds => 210 milliseconds
+ 240.0 milliseconds => 240 milliseconds
+ 288.0 milliseconds => 288 milliseconds
+ 300.0 milliseconds => 300 milliseconds
+ 360.0 milliseconds => 360 milliseconds
+ 420.0 milliseconds => 420 milliseconds
+ 576.0 milliseconds => 576 milliseconds
+ 600.0 milliseconds => 600 milliseconds
+ 700.0 milliseconds => 700 milliseconds
+ 720.0 milliseconds => 720 milliseconds
+ 900.0 milliseconds => 900 milliseconds
+ 1000.0 milliseconds => 1 second
+ 1200.0 milliseconds => 1200 milliseconds
+ 1440.0 milliseconds => 1440 milliseconds
+ 1800.0 milliseconds => 1800 milliseconds
+ 2400.0 milliseconds => 2400 milliseconds
+ 3000.0 milliseconds => 3 seconds
+ 3600.0 milliseconds => 3600 milliseconds
+ 6000.0 milliseconds => 6 seconds
+ 7000.0 milliseconds => 7 seconds
+ 10000.0 milliseconds => 10 seconds
+ 12000.0 milliseconds => 12 seconds
+ 24000.0 milliseconds => 24 seconds
+ 30000.0 milliseconds => 30 seconds
+ 60000.0 milliseconds => 1 minute
+ 100000.0 milliseconds => 100 seconds
+ 1000000.0 milliseconds => 1000 seconds
+ 7000000.0 milliseconds => 7000 seconds
+ 1.0E7 milliseconds => 10000 seconds
+ 1.2E7 milliseconds => 200 minutes
+ 2.4E7 milliseconds => 400 minutes
+ 3.0E7 milliseconds => 500 minutes
+ 6.0E7 milliseconds => 1000 minutes
+ 1.0E8 milliseconds => 100000 seconds
+ 1.0E9 milliseconds => 1000000 seconds
+ 1.0E12 milliseconds => 1000000000 seconds
+ 0.0 seconds => 0 days
+ 1.0 seconds => 1 second
+ 7.0 seconds => 7 seconds
+ 10.0 seconds => 10 seconds
+ 12.0 seconds => 12 seconds
+ 24.0 seconds => 24 seconds
+ 30.0 seconds => 30 seconds
+ 49.0 seconds => 49 seconds
+ 60.0 seconds => 1 minute
+ 70.0 seconds => 70 seconds
+ 84.0 seconds => 84 seconds
+ 100.0 seconds => 100 seconds
+ 120.0 seconds => 2 minutes
+ 144.0 seconds => 144 seconds
+ 168.0 seconds => 168 seconds
+ 210.0 seconds => 210 seconds
+ 240.0 seconds => 4 minutes
+ 288.0 seconds => 288 seconds
+ 300.0 seconds => 5 minutes
+ 360.0 seconds => 6 minutes
+ 420.0 seconds => 7 minutes
+ 576.0 seconds => 576 seconds
+ 600.0 seconds => 10 minutes
+ 700.0 seconds => 700 seconds
+ 720.0 seconds => 12 minutes
+ 900.0 seconds => 15 minutes
+ 1000.0 seconds => 1000 seconds
+ 1200.0 seconds => 20 minutes
+ 1440.0 seconds => 24 minutes
+ 1800.0 seconds => 30 minutes
+ 2400.0 seconds => 40 minutes
+ 3000.0 seconds => 50 minutes
+ 3600.0 seconds => 1 hour
+ 6000.0 seconds => 100 minutes
+ 7000.0 seconds => 7000 seconds
+ 10000.0 seconds => 10000 seconds
+ 12000.0 seconds => 200 minutes
+ 24000.0 seconds => 400 minutes
+ 30000.0 seconds => 500 minutes
+ 60000.0 seconds => 1000 minutes
+ 100000.0 seconds => 100000 seconds
+ 1000000.0 seconds => 1000000 seconds
+ 7000000.0 seconds => 7000000 seconds
+ 1.0E7 seconds => 10000000 seconds
+ 1.2E7 seconds => 200000 minutes
+ 2.4E7 seconds => 400000 minutes
+ 3.0E7 seconds => 500000 minutes
+ 6.0E7 seconds => 1000000 minutes
+ 1.0E8 seconds => 100000000 seconds
+ 1.0E9 seconds => 1000000000 seconds
+ 1.0E12 seconds => class java.lang.IllegalArgumentException
+ 0.0 minutes => 0 days
+ 1.0 minutes => 1 minute
+ 7.0 minutes => 7 minutes
+ 10.0 minutes => 10 minutes
+ 12.0 minutes => 12 minutes
+ 24.0 minutes => 24 minutes
+ 30.0 minutes => 30 minutes
+ 49.0 minutes => 49 minutes
+ 60.0 minutes => 1 hour
+ 70.0 minutes => 70 minutes
+ 84.0 minutes => 84 minutes
+ 100.0 minutes => 100 minutes
+ 120.0 minutes => 2 hours
+ 144.0 minutes => 144 minutes
+ 168.0 minutes => 168 minutes
+ 210.0 minutes => 210 minutes
+ 240.0 minutes => 4 hours
+ 288.0 minutes => 288 minutes
+ 300.0 minutes => 5 hours
+ 360.0 minutes => 6 hours
+ 420.0 minutes => 7 hours
+ 576.0 minutes => 576 minutes
+ 600.0 minutes => 10 hours
+ 700.0 minutes => 700 minutes
+ 720.0 minutes => 12 hours
+ 900.0 minutes => 15 hours
+ 1000.0 minutes => 1000 minutes
+ 1200.0 minutes => 20 hours
+ 1440.0 minutes => 1 day
+ 1800.0 minutes => 30 hours
+ 2400.0 minutes => 40 hours
+ 3000.0 minutes => 50 hours
+ 3600.0 minutes => 60 hours
+ 6000.0 minutes => 100 hours
+ 7000.0 minutes => 7000 minutes
+ 10000.0 minutes => 10000 minutes
+ 12000.0 minutes => 200 hours
+ 24000.0 minutes => 400 hours
+ 30000.0 minutes => 500 hours
+ 60000.0 minutes => 1000 hours
+ 100000.0 minutes => 100000 minutes
+ 1000000.0 minutes => 1000000 minutes
+ 7000000.0 minutes => 7000000 minutes
+ 1.0E7 minutes => 10000000 minutes
+ 1.2E7 minutes => 200000 hours
+ 2.4E7 minutes => 400000 hours
+ 3.0E7 minutes => 500000 hours
+ 6.0E7 minutes => 1000000 hours
+ 1.0E8 minutes => 100000000 minutes
+ 1.0E9 minutes => class java.lang.IllegalArgumentException
+ 1.0E12 minutes => class java.lang.IllegalArgumentException
+ 0.0 hours => 0 days
+ 1.0 hours => 1 hour
+ 7.0 hours => 7 hours
+ 10.0 hours => 10 hours
+ 12.0 hours => 12 hours
+ 24.0 hours => 1 day
+ 30.0 hours => 30 hours
+ 49.0 hours => 49 hours
+ 60.0 hours => 60 hours
+ 70.0 hours => 70 hours
+ 84.0 hours => 84 hours
+ 100.0 hours => 100 hours
+ 120.0 hours => 5 days
+ 144.0 hours => 6 days
+ 168.0 hours => 7 days
+ 210.0 hours => 210 hours
+ 240.0 hours => 10 days
+ 288.0 hours => 12 days
+ 300.0 hours => 300 hours
+ 360.0 hours => 15 days
+ 420.0 hours => 420 hours
+ 576.0 hours => 24 days
+ 600.0 hours => 25 days
+ 700.0 hours => 700 hours
+ 720.0 hours => 30 days
+ 900.0 hours => 900 hours
+ 1000.0 hours => 1000 hours
+ 1200.0 hours => 50 days
+ 1440.0 hours => 60 days
+ 1800.0 hours => 75 days
+ 2400.0 hours => 100 days
+ 3000.0 hours => 125 days
+ 3600.0 hours => 150 days
+ 6000.0 hours => 250 days
+ 7000.0 hours => 7000 hours
+ 10000.0 hours => 10000 hours
+ 12000.0 hours => 500 days
+ 24000.0 hours => 1000 days
+ 30000.0 hours => 1250 days
+ 60000.0 hours => 2500 days
+ 100000.0 hours => 100000 hours
+ 1000000.0 hours => 1000000 hours
+ 7000000.0 hours => class java.lang.IllegalArgumentException
+ 1.0E7 hours => class java.lang.IllegalArgumentException
+ 1.2E7 hours => class java.lang.IllegalArgumentException
+ 2.4E7 hours => class java.lang.IllegalArgumentException
+ 3.0E7 hours => class java.lang.IllegalArgumentException
+ 6.0E7 hours => class java.lang.IllegalArgumentException
+ 1.0E8 hours => class java.lang.IllegalArgumentException
+ 1.0E9 hours => class java.lang.IllegalArgumentException
+ 1.0E12 hours => class java.lang.IllegalArgumentException
+ 0.0 days => 0 days
+ 1.0 days => 1 day
+ 7.0 days => 7 days
+ 10.0 days => 10 days
+ 12.0 days => 12 days
+ 24.0 days => 24 days
+ 30.0 days => 30 days
+ 49.0 days => 49 days
+ 60.0 days => 60 days
+ 70.0 days => 70 days
+ 84.0 days => 84 days
+ 100.0 days => 100 days
+ 120.0 days => 120 days
+ 144.0 days => 144 days
+ 168.0 days => 168 days
+ 210.0 days => 210 days
+ 240.0 days => 240 days
+ 288.0 days => 288 days
+ 300.0 days => 300 days
+ 360.0 days => 360 days
+ 420.0 days => 420 days
+ 576.0 days => 576 days
+ 600.0 days => 600 days
+ 700.0 days => 700 days
+ 720.0 days => 720 days
+ 900.0 days => 900 days
+ 1000.0 days => 1000 days
+ 1200.0 days => 1200 days
+ 1440.0 days => 1440 days
+ 1800.0 days => 1800 days
+ 2400.0 days => 2400 days
+ 3000.0 days => 3000 days
+ 3600.0 days => 3600 days
+ 6000.0 days => 6000 days
+ 7000.0 days => 7000 days
+ 10000.0 days => 10000 days
+ 12000.0 days => 12000 days
+ 24000.0 days => 24000 days
+ 30000.0 days => 30000 days
+ 60000.0 days => 60000 days
+ 100000.0 days => 100000 days
+ 1000000.0 days => class java.lang.IllegalArgumentException
+ 7000000.0 days => class java.lang.IllegalArgumentException
+ 1.0E7 days => class java.lang.IllegalArgumentException
+ 1.2E7 days => class java.lang.IllegalArgumentException
+ 2.4E7 days => class java.lang.IllegalArgumentException
+ 3.0E7 days => class java.lang.IllegalArgumentException
+ 6.0E7 days => class java.lang.IllegalArgumentException
+ 1.0E8 days => class java.lang.IllegalArgumentException
+ 1.0E9 days => class java.lang.IllegalArgumentException
+ 1.0E12 days => class java.lang.IllegalArgumentException
+10000000000000001 nanoseconds => 10000000000000001 nanoseconds
+10000000000000002 nanoseconds => 10000000000000002 nanoseconds
+ Inf => Duration.Inf
+ -Inf => Duration.MinusInf
+ +Inf => Duration.Inf
+ PlusInf => Duration.Inf
+ MinusInf => Duration.MinusInf
diff --git a/test/files/jvm/duration-java/Test.java b/test/files/jvm/duration-java/Test.java
new file mode 100644
index 0000000000..94f3f83db8
--- /dev/null
+++ b/test/files/jvm/duration-java/Test.java
@@ -0,0 +1,46 @@
+import scala.concurrent.duration.Duration;
+import java.util.*;
+import java.util.concurrent.TimeUnit;
+import static java.util.concurrent.TimeUnit.*;
+
+public class Test {
+ public static List<Double> inputs = Arrays.asList(0d, 1d, 7d, 10d, 12d, 24d, 30d, 60d, 100d, 1000d, 1e6);
+ public static List<Double> makeNumbers() {
+ ArrayList<Double> xs = new ArrayList<Double>();
+ for (Double n1: inputs) {
+ for (Double n2: inputs) {
+ Double n = n1 * n2;
+ if (!xs.contains(n))
+ xs.add(n);
+ }
+ }
+ Double[] arr = xs.toArray(new Double[0]);
+ Arrays.sort(arr);
+ return Arrays.asList(arr);
+ }
+
+ public static void p(Object x) {
+ System.out.println(x);
+ }
+ public static void main(String[] args) {
+ for (TimeUnit t : TimeUnit.values()) {
+ for (Double n: makeNumbers()) {
+ String s = "" + n + " " + t.toString().toLowerCase();
+ String result;
+ try {
+ Duration d = Duration.create(n, t);
+ result = d.toString();
+ } catch(Exception e) {
+ result = e.getClass().toString();
+ }
+ p(String.format("%25s => %s", s, result));
+ }
+ }
+ for (String s: new String[] {"10000000000000001 nanoseconds", "10000000000000002 nanoseconds"})
+ p(String.format("%25s => %s", s, Duration.create(s)));
+ for (String s: Arrays.asList("Inf", "-Inf", "+Inf", "PlusInf", "MinusInf")) {
+ Duration d = Duration.create(s);
+ p(String.format("%25s => %s", s, d));
+ }
+ }
+}
diff --git a/test/files/jvm/duration-tck.scala b/test/files/jvm/duration-tck.scala
new file mode 100644
index 0000000000..df1052fed3
--- /dev/null
+++ b/test/files/jvm/duration-tck.scala
@@ -0,0 +1,194 @@
+/**
+ * Copyright (C) 2012 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+import scala.concurrent.duration._
+import scala.reflect._
+import scala.tools.partest.TestUtil.intercept
+
+object Test extends App {
+
+ implicit class Assert(val left: Any) extends AnyVal {
+ import Duration.Undefined
+ def mustBe(right: Any) = right match {
+ case r: Double if r.isNaN => assert(left.asInstanceOf[Double].isNaN, s"$left was not NaN")
+ case r: Double if r == 0 && r.compareTo(0) == -1 => assert(left == 0 && left.asInstanceOf[Double].compareTo(0) == -1, s"$left was not -0.0")
+ case Undefined => assert(left.asInstanceOf[AnyRef] eq Undefined, s"$left was not Undefined")
+ case _ => assert(left == right, s"$left was not equal to $right")
+ }
+ }
+
+ val zero = 0 seconds
+ val one = 1 second
+ val two = one + one
+ val three = 3 * one
+ val inf = Duration.Inf
+ val minf = Duration.MinusInf
+ val undef = Duration.Undefined
+ val inputs = List(zero, one, inf, minf, undef)
+ val nan = Double.NaN
+
+ // test field ops
+ one.isFinite mustBe true
+ 0 * one mustBe zero
+ 2 * one mustBe two
+ three - two mustBe one
+ three / 3 mustBe one
+ two / one mustBe 2
+ one + zero mustBe one
+ one / 1000000 mustBe 1.micro
+
+
+ // test infinities
+
+ inf.isFinite mustBe false
+ minf.isFinite mustBe false
+
+ inf mustBe inf
+ minf mustBe minf
+ -inf mustBe minf
+ -minf mustBe inf
+
+ minf + inf mustBe undef
+ inf - inf mustBe undef
+ inf + minf mustBe undef
+ minf - minf mustBe undef
+
+ inf + inf mustBe inf
+ inf - minf mustBe inf
+ minf - inf mustBe minf
+ minf + minf mustBe minf
+
+ inf.compareTo(inf) mustBe 0
+ inf.compareTo(one) mustBe 1
+ inf.compareTo(minf) mustBe 1
+ minf.compareTo(minf) mustBe 0
+ minf.compareTo(one) mustBe -1
+ minf.compareTo(inf) mustBe -1
+
+ assert(inf != minf)
+ assert(minf != inf)
+ assert(one != inf)
+ assert(minf != one)
+
+ inf mustBe (minf * -1d)
+ inf mustBe (minf / -1d)
+
+ one / inf mustBe 0d
+ -one / inf mustBe -0d
+ one / minf mustBe -0d
+ -one / minf mustBe 0d
+
+ inputs filterNot (_.isFinite) foreach (x => x / zero mustBe x.toUnit(DAYS))
+ inputs filterNot (_.isFinite) foreach (_ * 0d mustBe undef)
+ inputs filterNot (_.isFinite) foreach (_ * -0d mustBe undef)
+ inputs filterNot (_.isFinite) foreach (x => x * Double.PositiveInfinity mustBe x)
+ inputs filterNot (_.isFinite) foreach (x => x * Double.NegativeInfinity mustBe -x)
+
+ inf.toUnit(SECONDS) mustBe Double.PositiveInfinity
+ minf.toUnit(MINUTES) mustBe Double.NegativeInfinity
+ Duration.fromNanos(Double.PositiveInfinity) mustBe inf
+ Duration.fromNanos(Double.NegativeInfinity) mustBe minf
+
+
+ // test undefined & NaN
+
+ undef.isFinite mustBe false
+ -undef mustBe undef
+ assert(undef != undef)
+ assert(undef eq undef)
+
+ inputs foreach (_ + undef mustBe undef)
+ inputs foreach (_ - undef mustBe undef)
+ inputs foreach (_ / undef mustBe nan)
+ inputs foreach (_ / nan mustBe undef)
+ inputs foreach (_ * nan mustBe undef)
+ inputs foreach (undef + _ mustBe undef)
+ inputs foreach (undef - _ mustBe undef)
+ inputs foreach (undef / _ mustBe nan)
+ undef / 1 mustBe undef
+ undef / nan mustBe undef
+ undef * 1 mustBe undef
+ undef * nan mustBe undef
+ inputs foreach (x => x / zero mustBe x.toUnit(SECONDS) / 0d)
+ inputs foreach (x => x / 0d mustBe Duration.fromNanos(x.toUnit(NANOSECONDS) / 0d))
+ inputs foreach (x => x / -0d mustBe Duration.fromNanos(x.toUnit(NANOSECONDS) / -0d))
+
+ inputs filterNot (_ eq undef) foreach (_ compareTo undef mustBe -1)
+ inputs filterNot (_ eq undef) foreach (undef compareTo _ mustBe 1)
+ undef compare undef mustBe 0
+
+ undef.toUnit(DAYS) mustBe nan
+ Duration.fromNanos(nan) mustBe undef
+
+
+ // test overflow protection
+ for (unit ← Seq(DAYS, HOURS, MINUTES, SECONDS, MILLISECONDS, MICROSECONDS, NANOSECONDS)) {
+ val x = unit.convert(Long.MaxValue, NANOSECONDS)
+ val dur = Duration(x, unit)
+ val mdur = Duration(-x, unit)
+ -mdur mustBe (dur)
+ intercept[IllegalArgumentException] { Duration(x + 10000000d, unit) }
+ intercept[IllegalArgumentException] { Duration(-x - 10000000d, unit) }
+ if (unit != NANOSECONDS) {
+ intercept[IllegalArgumentException] { Duration(x + 1, unit) }
+ intercept[IllegalArgumentException] { Duration(-x - 1, unit) }
+ }
+ intercept[IllegalArgumentException] { dur + 1.day }
+ intercept[IllegalArgumentException] { mdur - 1.day }
+ intercept[IllegalArgumentException] { dur * 1.1 }
+ intercept[IllegalArgumentException] { mdur * 1.1 }
+ intercept[IllegalArgumentException] { dur * 2.1 }
+ intercept[IllegalArgumentException] { mdur * 2.1 }
+ intercept[IllegalArgumentException] { dur / 0.9 }
+ intercept[IllegalArgumentException] { mdur / 0.9 }
+ intercept[IllegalArgumentException] { dur / 0.4 }
+ intercept[IllegalArgumentException] { mdur / 0.4 }
+ Duration(x + unit.toString.toLowerCase)
+ Duration("-" + x + unit.toString.toLowerCase)
+ intercept[IllegalArgumentException] { Duration("%.0f".format(x + 10000000d) + unit.toString.toLowerCase) }
+ intercept[IllegalArgumentException] { Duration("-%.0f".format(x + 10000000d) + unit.toString.toLowerCase) }
+ }
+ intercept[IllegalArgumentException] { Duration.fromNanos(1e20) }
+ intercept[IllegalArgumentException] { Duration.fromNanos(-1e20) }
+
+
+ // test precision
+ 1.second + 1.millisecond mustBe 1001.milliseconds
+ 100000.days + 1.nanosecond mustBe 8640000000000000001L.nanoseconds
+ 1.5.seconds.toSeconds mustBe 1
+ (-1.5).seconds.toSeconds mustBe -1
+
+
+ // test unit stability
+ 1000.millis.unit mustBe MILLISECONDS
+ (1000.millis + 0.days).unit mustBe MILLISECONDS
+ 1.second.unit mustBe SECONDS
+ (1.second + 1.millisecond).unit mustBe MILLISECONDS
+
+
+ // test Deadline
+ val dead = 2.seconds.fromNow
+ val dead2 = 2 seconds fromNow
+ assert(dead.timeLeft > 1.second)
+ assert(dead2.timeLeft > 1.second)
+ Thread.sleep(1.second.toMillis)
+ assert(dead.timeLeft < 1.second)
+ assert(dead2.timeLeft < 1.second)
+
+
+ // test integer mul/div
+ 500.millis * 2 mustBe 1.second
+ (500.millis * 2).unit mustBe MILLISECONDS
+ 1.second / 2 mustBe 500.millis
+ (1.second / 2).unit mustBe MILLISECONDS
+
+
+ // check statically retaining finite-ness
+ val finiteDuration: FiniteDuration = 1.second * 2 / 3 mul 5 div 4 plus 3.seconds minus 1.millisecond min 1.second max 1.second
+ val finite2: FiniteDuration = 2 * 1.second + 3L * 2.seconds
+ finite2 mustBe 8.seconds
+ ((2 seconds fromNow).timeLeft: FiniteDuration) < 4.seconds mustBe true
+ val finite3: FiniteDuration = 3.5 seconds span
+
+}
diff --git a/test/files/jvm/future-spec/FutureTests.scala b/test/files/jvm/future-spec/FutureTests.scala
index 31bb8c4e44..8674be168c 100644
--- a/test/files/jvm/future-spec/FutureTests.scala
+++ b/test/files/jvm/future-spec/FutureTests.scala
@@ -2,8 +2,8 @@
import scala.concurrent._
-import scala.concurrent.util.duration._
-import scala.concurrent.util.Duration.Inf
+import scala.concurrent.duration._
+import scala.concurrent.duration.Duration.Inf
import scala.collection._
import scala.runtime.NonLocalReturnControl
import scala.util.{Try,Success,Failure}
diff --git a/test/files/jvm/future-spec/PromiseTests.scala b/test/files/jvm/future-spec/PromiseTests.scala
index d9aaa1d5ed..8e07393900 100644
--- a/test/files/jvm/future-spec/PromiseTests.scala
+++ b/test/files/jvm/future-spec/PromiseTests.scala
@@ -2,8 +2,8 @@
import scala.concurrent._
-import scala.concurrent.util.duration._
-import scala.concurrent.util.Duration.Inf
+import scala.concurrent.duration._
+import scala.concurrent.duration.Duration.Inf
import scala.collection._
import scala.runtime.NonLocalReturnControl
import scala.util.{Try,Success,Failure}
diff --git a/test/files/jvm/future-spec/TryTests.scala b/test/files/jvm/future-spec/TryTests.scala
index 82ca12276f..5d1b9b84b4 100644
--- a/test/files/jvm/future-spec/TryTests.scala
+++ b/test/files/jvm/future-spec/TryTests.scala
@@ -46,6 +46,12 @@ object TryTests extends MinimalScalaTest {
val e2 = new Exception
Failure[Int](e) map(_ => throw e2) mustEqual Failure(e)
}
+ "when there is a fatal exception" in {
+ val e3 = new ThreadDeath
+ intercept[ThreadDeath] {
+ Success(1) map (_ => throw e3)
+ }
+ }
}
"flatMap" in {
@@ -60,6 +66,12 @@ object TryTests extends MinimalScalaTest {
val e2 = new Exception
Failure[Int](e).flatMap[Int](_ => throw e2) mustEqual Failure(e)
}
+ "when there is a fatal exception" in {
+ val e3 = new ThreadDeath
+ intercept[ThreadDeath] {
+ Success(1).flatMap[Int](_ => throw e3)
+ }
+ }
}
"flatten" in {
@@ -115,4 +127,4 @@ object TryTests extends MinimalScalaTest {
}
}
}
-}
\ No newline at end of file
+}
diff --git a/test/files/jvm/future-spec/main.scala b/test/files/jvm/future-spec/main.scala
index 57183d8cea..90048ccda0 100644
--- a/test/files/jvm/future-spec/main.scala
+++ b/test/files/jvm/future-spec/main.scala
@@ -3,7 +3,7 @@
import scala.collection._
import scala.concurrent._
-import scala.concurrent.util.Duration
+import scala.concurrent.duration.Duration
import java.util.concurrent.{ TimeoutException, CountDownLatch, TimeUnit }
diff --git a/test/files/jvm/manifests-new.check b/test/files/jvm/manifests-new.check
index 1c0bf6a568..a1ff9491cf 100644
--- a/test/files/jvm/manifests-new.check
+++ b/test/files/jvm/manifests-new.check
@@ -1,58 +1,58 @@
-x=(), t=TypeTag[Unit], k=TypeRef, s=class Unit
-x=true, t=TypeTag[Boolean], k=TypeRef, s=class Boolean
-x=a, t=TypeTag[Char], k=TypeRef, s=class Char
-x=1, t=TypeTag[Int], k=TypeRef, s=class Int
-x=abc, t=TypeTag[java.lang.String], k=TypeRef, s=class String
-x='abc, t=TypeTag[Symbol], k=TypeRef, s=class Symbol
-
-x=List(()), t=TypeTag[List[Unit]], k=TypeRef, s=class List
-x=List(true), t=TypeTag[List[Boolean]], k=TypeRef, s=class List
-x=List(1), t=TypeTag[List[Int]], k=TypeRef, s=class List
-x=List(abc), t=TypeTag[List[java.lang.String]], k=TypeRef, s=class List
-x=List('abc), t=TypeTag[List[Symbol]], k=TypeRef, s=class List
-
-x=[Z, t=TypeTag[Array[Boolean]], k=TypeRef, s=class Array
-x=[C, t=TypeTag[Array[Char]], k=TypeRef, s=class Array
-x=[I, t=TypeTag[Array[Int]], k=TypeRef, s=class Array
-x=[Ljava.lang.String;, t=TypeTag[Array[java.lang.String]], k=TypeRef, s=class Array
-x=[Lscala.Symbol;, t=TypeTag[Array[Symbol]], k=TypeRef, s=class Array
-
-x=((),()), t=TypeTag[(Unit, Unit)], k=TypeRef, s=class Tuple2
-x=(true,false), t=TypeTag[(Boolean, Boolean)], k=TypeRef, s=class Tuple2
-x=(1,2), t=TypeTag[(Int, Int)], k=TypeRef, s=class Tuple2
-x=(abc,xyz), t=TypeTag[(java.lang.String, java.lang.String)], k=TypeRef, s=class Tuple2
-x=('abc,'xyz), t=TypeTag[(Symbol, Symbol)], k=TypeRef, s=class Tuple2
-
-x=Test$, t=TypeTag[Test.type], k=SingleType, s=object Test
-x=scala.collection.immutable.List$, t=TypeTag[scala.collection.immutable.List.type], k=SingleType, s=object List
-
-x=Foo, t=TypeTag[Foo[Int]], k=TypeRef, s=class Foo
-x=Foo, t=TypeTag[Foo[List[Int]]], k=TypeRef, s=class Foo
-x=Foo, t=TypeTag[Foo[Foo[Int]]], k=TypeRef, s=class Foo
-x=Foo, t=TypeTag[Foo[List[Foo[Int]]]], k=TypeRef, s=class Foo
-
-x=Test1$$anon$1, t=TypeTag[Bar[String]], k=RefinedType, s=<local Test1>
-x=Test1$$anon$2, t=TypeTag[Bar[String]], k=RefinedType, s=<local Test1>
-
-()=()
-true=true
-a=a
-1=1
-'abc='abc
-
-List(())=List(())
-List(true)=List(true)
-List('abc)=List('abc)
-
-Array()=Array()
-Array(true)=Array(true)
-Array(a)=Array(a)
-Array(1)=Array(1)
-
-((),())=((),())
-(true,false)=(true,false)
-
-List(List(1), List(2))=List(List(1), List(2))
-
-Array(Array(1), Array(2))=Array(Array(1), Array(2))
-
+x=(), t=TypeTag[Unit], k=TypeRef, s=class Unit
+x=true, t=TypeTag[Boolean], k=TypeRef, s=class Boolean
+x=a, t=TypeTag[Char], k=TypeRef, s=class Char
+x=1, t=TypeTag[Int], k=TypeRef, s=class Int
+x=abc, t=TypeTag[java.lang.String], k=TypeRef, s=class String
+x='abc, t=TypeTag[Symbol], k=TypeRef, s=class Symbol
+
+x=List(()), t=TypeTag[List[Unit]], k=TypeRef, s=class List
+x=List(true), t=TypeTag[List[Boolean]], k=TypeRef, s=class List
+x=List(1), t=TypeTag[List[Int]], k=TypeRef, s=class List
+x=List(abc), t=TypeTag[List[java.lang.String]], k=TypeRef, s=class List
+x=List('abc), t=TypeTag[List[Symbol]], k=TypeRef, s=class List
+
+x=[Z, t=TypeTag[Array[Boolean]], k=TypeRef, s=class Array
+x=[C, t=TypeTag[Array[Char]], k=TypeRef, s=class Array
+x=[I, t=TypeTag[Array[Int]], k=TypeRef, s=class Array
+x=[Ljava.lang.String;, t=TypeTag[Array[java.lang.String]], k=TypeRef, s=class Array
+x=[Lscala.Symbol;, t=TypeTag[Array[Symbol]], k=TypeRef, s=class Array
+
+x=((),()), t=TypeTag[(Unit, Unit)], k=TypeRef, s=class Tuple2
+x=(true,false), t=TypeTag[(Boolean, Boolean)], k=TypeRef, s=class Tuple2
+x=(1,2), t=TypeTag[(Int, Int)], k=TypeRef, s=class Tuple2
+x=(abc,xyz), t=TypeTag[(java.lang.String, java.lang.String)], k=TypeRef, s=class Tuple2
+x=('abc,'xyz), t=TypeTag[(Symbol, Symbol)], k=TypeRef, s=class Tuple2
+
+x=Test$, t=TypeTag[Test.type], k=SingleType, s=object Test
+x=scala.collection.immutable.List$, t=TypeTag[scala.collection.immutable.List.type], k=SingleType, s=object List
+
+x=Foo, t=TypeTag[Foo[Int]], k=TypeRef, s=class Foo
+x=Foo, t=TypeTag[Foo[List[Int]]], k=TypeRef, s=class Foo
+x=Foo, t=TypeTag[Foo[Foo[Int]]], k=TypeRef, s=class Foo
+x=Foo, t=TypeTag[Foo[List[Foo[Int]]]], k=TypeRef, s=class Foo
+
+x=Test1$$anon$1, t=TypeTag[Bar[String]], k=RefinedType, s=<local Test1>
+x=Test1$$anon$2, t=TypeTag[Bar[String]], k=RefinedType, s=<local Test1>
+
+()=()
+true=true
+a=a
+1=1
+'abc='abc
+
+List(())=List(())
+List(true)=List(true)
+List('abc)=List('abc)
+
+Array()=Array()
+Array(true)=Array(true)
+Array(a)=Array(a)
+Array(1)=Array(1)
+
+((),())=((),())
+(true,false)=(true,false)
+
+List(List(1), List(2))=List(List(1), List(2))
+
+Array(Array(1), Array(2))=Array(Array(1), Array(2))
+
diff --git a/test/files/jvm/manifests-new.scala b/test/files/jvm/manifests-new.scala
index 8706881640..f730be67bb 100644
--- a/test/files/jvm/manifests-new.scala
+++ b/test/files/jvm/manifests-new.scala
@@ -106,6 +106,6 @@ trait TestUtil {
// val t1: TypeTag[T] = read(write(t))
val t1: TypeTag[T] = t
val x1 = x.toString.replaceAll("@[0-9a-z]+$", "")
- println("x="+x1+", t="+t1+", k="+t1.tpe.kind+", s="+t1.tpe.typeSymbol.toString)
+ println("x="+x1+", t="+t1+", k="+t1.tpe.asInstanceOf[Product].productPrefix+", s="+t1.tpe.typeSymbol.toString)
}
}
\ No newline at end of file
diff --git a/test/files/jvm/mkLibNatives.bat b/test/files/jvm/mkLibNatives.bat
index 2f99f7aab5..623039b867 100755
--- a/test/files/jvm/mkLibNatives.bat
+++ b/test/files/jvm/mkLibNatives.bat
@@ -1,70 +1,70 @@
-@echo off
-
-rem ##########################################################################
-rem # Author : Stephane Micheloud
-rem ##########################################################################
-
-rem # For building the -64.dll, you need: Visual C++ Express, Microsoft SDK (to
-rem # get the 64bit compiler / libraries), adapt this script.
-
-rem ##########################################################################
-rem # variables
-
-if "%OS%"=="Windows_NT" @setlocal
-
-rem debug switches are: off=0, on=1
-set DEBUG=0
-set STDOUT=NUL
-if %DEBUG%==1 set STDOUT=CON
-
-set CLASS_NAME=Test$
-set CLASS_DIR=.
-
-set OBJ_NAME=natives
-set LIB_NAME=natives-32
-
-if "%JAVA_HOME%"=="" goto error1
-if "%VSINSTALLDIR%"=="" goto error2
-
-set JAVAH=%JAVA_HOME%\bin\javah
-set JAVAH_OPTIONS=-jni -force -classpath %CLASS_DIR% -o %OBJ_NAME%.h
-
-set CC=%VSINSTALLDIR%\vc\bin\cl
-set CC_OPTIONS=/nologo /c
-set CC_INCLUDES=-I%VSINSTALLDIR%\vc\include -I%JAVA_HOME%\include -I%JAVA_HOME%\include\win32
-
-set LNK_OPTIONS=/nologo /MT /LD
-
-rem variable LIB is used by the C++ linker to find libcmt.lib, ..
-set LIB=%VSINSTALLDIR%\vc\lib
-
-rem ##########################################################################
-rem # commands
-
-del /s/q *.obj *.exp *.lib *.dll 1>%STDOUT%
-
-if %DEBUG%==1 echo %JAVAH% %JAVAH_OPTIONS% %CLASS_NAME%
-%JAVAH% %JAVAH_OPTIONS% %CLASS_NAME%
-
-if %DEBUG%==1 echo %CC% %CC_OPTIONS% %CC_INCLUDES% /Fo%OBJ_NAME%.obj natives.c
-%CC% %CC_OPTIONS% %CC_INCLUDES% /Fo%OBJ_NAME%.obj natives.c 1>%STDOUT%
-
-if %DEBUG%==1 echo %CC% %LNK_OPTIONS% /Fe%LIB_NAME%.dll %OBJ_NAME%.obj
-%CC% %LNK_OPTIONS% /Fe%LIB_NAME%.dll %OBJ_NAME%.obj 1>%STDOUT%
-
-goto end
-
-rem ##########################################################################
-rem # subroutines
-
-:error1
-echo ERROR: environment variable JAVA_HOME is undefined. It should point to your JDK installation.
-goto end
-
-:error2
-echo ERROR: environment variable VSINSTALLDIR is undefined. It should point to your MS Visual Studio installation.
-goto end
-
-:end
-if "%OS%"=="Windows_NT" @endlocal
-exit /b %errorlevel%
+@echo off
+
+rem ##########################################################################
+rem # Author : Stephane Micheloud
+rem ##########################################################################
+
+rem # For building the -64.dll, you need: Visual C++ Express, Microsoft SDK (to
+rem # get the 64bit compiler / libraries), adapt this script.
+
+rem ##########################################################################
+rem # variables
+
+if "%OS%"=="Windows_NT" @setlocal
+
+rem debug switches are: off=0, on=1
+set DEBUG=0
+set STDOUT=NUL
+if %DEBUG%==1 set STDOUT=CON
+
+set CLASS_NAME=Test$
+set CLASS_DIR=.
+
+set OBJ_NAME=natives
+set LIB_NAME=natives-32
+
+if "%JAVA_HOME%"=="" goto error1
+if "%VSINSTALLDIR%"=="" goto error2
+
+set JAVAH=%JAVA_HOME%\bin\javah
+set JAVAH_OPTIONS=-jni -force -classpath %CLASS_DIR% -o %OBJ_NAME%.h
+
+set CC=%VSINSTALLDIR%\vc\bin\cl
+set CC_OPTIONS=/nologo /c
+set CC_INCLUDES=-I%VSINSTALLDIR%\vc\include -I%JAVA_HOME%\include -I%JAVA_HOME%\include\win32
+
+set LNK_OPTIONS=/nologo /MT /LD
+
+rem variable LIB is used by the C++ linker to find libcmt.lib, ..
+set LIB=%VSINSTALLDIR%\vc\lib
+
+rem ##########################################################################
+rem # commands
+
+del /s/q *.obj *.exp *.lib *.dll 1>%STDOUT%
+
+if %DEBUG%==1 echo %JAVAH% %JAVAH_OPTIONS% %CLASS_NAME%
+%JAVAH% %JAVAH_OPTIONS% %CLASS_NAME%
+
+if %DEBUG%==1 echo %CC% %CC_OPTIONS% %CC_INCLUDES% /Fo%OBJ_NAME%.obj natives.c
+%CC% %CC_OPTIONS% %CC_INCLUDES% /Fo%OBJ_NAME%.obj natives.c 1>%STDOUT%
+
+if %DEBUG%==1 echo %CC% %LNK_OPTIONS% /Fe%LIB_NAME%.dll %OBJ_NAME%.obj
+%CC% %LNK_OPTIONS% /Fe%LIB_NAME%.dll %OBJ_NAME%.obj 1>%STDOUT%
+
+goto end
+
+rem ##########################################################################
+rem # subroutines
+
+:error1
+echo ERROR: environment variable JAVA_HOME is undefined. It should point to your JDK installation.
+goto end
+
+:error2
+echo ERROR: environment variable VSINSTALLDIR is undefined. It should point to your MS Visual Studio installation.
+goto end
+
+:end
+if "%OS%"=="Windows_NT" @endlocal
+exit /b %errorlevel%
diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala
index ffb5608fd2..b529bca38a 100644
--- a/test/files/jvm/scala-concurrent-tck.scala
+++ b/test/files/jvm/scala-concurrent-tck.scala
@@ -10,7 +10,9 @@ import scala.concurrent.{
}
import scala.concurrent.{ future, promise, blocking }
import scala.util.{ Try, Success, Failure }
-import scala.concurrent.util.Duration
+import scala.concurrent.duration.Duration
+import scala.reflect.{ classTag, ClassTag }
+import scala.tools.partest.TestUtil.intercept
trait TestBase {
@@ -19,7 +21,7 @@ trait TestBase {
body(() => sv put true)
sv.take(2000)
}
-
+
// def assert(cond: => Boolean) {
// try {
// Predef.assert(cond)
@@ -663,6 +665,29 @@ trait FutureProjections extends TestBase {
case nsee: NoSuchElementException => done()
}
}
+
+ def testAwaitPositiveDuration(): Unit = once { done =>
+ val p = Promise[Int]()
+ val f = p.future
+ future {
+ intercept[IllegalArgumentException] { Await.ready(f, Duration.Undefined) }
+ p.success(0)
+ Await.ready(f, Duration.Zero)
+ Await.ready(f, Duration(500, "ms"))
+ Await.ready(f, Duration.Inf)
+ done()
+ } onFailure { case x => throw x }
+ }
+
+ def testAwaitNegativeDuration(): Unit = once { done =>
+ val f = Promise().future
+ future {
+ intercept[TimeoutException] { Await.ready(f, Duration.Zero) }
+ intercept[TimeoutException] { Await.ready(f, Duration.MinusInf) }
+ intercept[TimeoutException] { Await.ready(f, Duration(-500, "ms")) }
+ done()
+ } onFailure { case x => throw x }
+ }
testFailedFailureOnComplete()
testFailedFailureOnSuccess()
@@ -670,6 +695,8 @@ trait FutureProjections extends TestBase {
testFailedSuccessOnFailure()
testFailedFailureAwait()
testFailedSuccessAwait()
+ testAwaitPositiveDuration()
+ testAwaitNegativeDuration()
}
diff --git a/test/files/jvm/serialization-new.check b/test/files/jvm/serialization-new.check
index fa51c6a879..f886cfe29c 100644
--- a/test/files/jvm/serialization-new.check
+++ b/test/files/jvm/serialization-new.check
@@ -168,6 +168,30 @@ x = History()
y = History()
x equals y: true, y equals x: true
+x = Map(Linked -> 1, Hash -> 2, Map -> 3)
+y = Map(Linked -> 1, Hash -> 2, Map -> 3)
+x equals y: true, y equals x: true
+
+x = ArrayBuffer((Linked,1), (Hash,2), (Map,3))
+y = ArrayBuffer((Linked,1), (Hash,2), (Map,3))
+x equals y: true, y equals x: true
+
+x = ArrayBuffer((Linked,1), (Hash,2), (Map,3))
+y = List((Linked,1), (Hash,2), (Map,3))
+x equals y: true, y equals x: true
+
+x = Set(layers, buffers, title)
+y = Set(layers, buffers, title)
+x equals y: true, y equals x: true
+
+x = ArrayBuffer(layers, buffers, title)
+y = ArrayBuffer(layers, buffers, title)
+x equals y: true, y equals x: true
+
+x = ArrayBuffer(layers, buffers, title)
+y = List(layers, buffers, title)
+x equals y: true, y equals x: true
+
x = ListBuffer(white, black)
y = ListBuffer(white, black)
x equals y: true, y equals x: true
diff --git a/test/files/jvm/serialization-new.scala b/test/files/jvm/serialization-new.scala
index 91eb52928f..1522fc8e27 100644
--- a/test/files/jvm/serialization-new.scala
+++ b/test/files/jvm/serialization-new.scala
@@ -285,8 +285,8 @@ object Test3_mutable {
import scala.reflect.ClassTag
import scala.collection.mutable.{
ArrayBuffer, ArrayBuilder, ArraySeq, ArrayStack, BitSet, DoubleLinkedList,
- HashMap, HashSet, History, LinkedList, ListBuffer, Publisher, Queue,
- Stack, StringBuilder, WrappedArray, TreeSet}
+ HashMap, HashSet, History, LinkedHashMap, LinkedHashSet, LinkedList, ListBuffer,
+ Publisher, Queue, Stack, StringBuilder, WrappedArray, TreeSet}
import scala.collection.concurrent.TrieMap
// in alphabetic order
@@ -346,6 +346,26 @@ object Test3_mutable {
val h1 = new History[String, Int]
val _h1: History[String, Int] = read(write(h1))
check(h1, _h1)
+
+ // LinkedHashMap
+ { val lhm1 = new LinkedHashMap[String, Int]
+ val list = List(("Linked", 1), ("Hash", 2), ("Map", 3))
+ lhm1 ++= list.iterator
+ val _lhm1: LinkedHashMap[String, Int] = read(write(lhm1))
+ check(lhm1, _lhm1)
+ check(lhm1.toSeq, _lhm1.toSeq) // check elements order
+ check(lhm1.toSeq, list) // check elements order
+ }
+
+ // LinkedHashSet
+ { val lhs1 = new LinkedHashSet[String]
+ val list = List("layers", "buffers", "title")
+ lhs1 ++= list.iterator
+ val _lhs1: LinkedHashSet[String] = read(write(lhs1))
+ check(lhs1, _lhs1)
+ check(lhs1.toSeq, _lhs1.toSeq) // check elements order
+ check(lhs1.toSeq, list) // check elements order
+ }
/*
// LinkedList
val ll1 = new LinkedList[Int](2, null)
diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check
index fa51c6a879..f886cfe29c 100644
--- a/test/files/jvm/serialization.check
+++ b/test/files/jvm/serialization.check
@@ -168,6 +168,30 @@ x = History()
y = History()
x equals y: true, y equals x: true
+x = Map(Linked -> 1, Hash -> 2, Map -> 3)
+y = Map(Linked -> 1, Hash -> 2, Map -> 3)
+x equals y: true, y equals x: true
+
+x = ArrayBuffer((Linked,1), (Hash,2), (Map,3))
+y = ArrayBuffer((Linked,1), (Hash,2), (Map,3))
+x equals y: true, y equals x: true
+
+x = ArrayBuffer((Linked,1), (Hash,2), (Map,3))
+y = List((Linked,1), (Hash,2), (Map,3))
+x equals y: true, y equals x: true
+
+x = Set(layers, buffers, title)
+y = Set(layers, buffers, title)
+x equals y: true, y equals x: true
+
+x = ArrayBuffer(layers, buffers, title)
+y = ArrayBuffer(layers, buffers, title)
+x equals y: true, y equals x: true
+
+x = ArrayBuffer(layers, buffers, title)
+y = List(layers, buffers, title)
+x equals y: true, y equals x: true
+
x = ListBuffer(white, black)
y = ListBuffer(white, black)
x equals y: true, y equals x: true
diff --git a/test/files/jvm/serialization.scala b/test/files/jvm/serialization.scala
index 9c2f2acdbf..34b64938b4 100644
--- a/test/files/jvm/serialization.scala
+++ b/test/files/jvm/serialization.scala
@@ -285,8 +285,8 @@ object Test3_mutable {
import scala.reflect.ClassManifest
import scala.collection.mutable.{
ArrayBuffer, ArrayBuilder, ArraySeq, ArrayStack, BitSet, DoubleLinkedList,
- HashMap, HashSet, History, LinkedList, ListBuffer, Publisher, Queue,
- Stack, StringBuilder, WrappedArray, TreeSet}
+ HashMap, HashSet, History, LinkedHashMap, LinkedHashSet, LinkedList, ListBuffer,
+ Publisher, Queue, Stack, StringBuilder, WrappedArray, TreeSet}
import scala.collection.concurrent.TrieMap
// in alphabetic order
@@ -346,6 +346,26 @@ object Test3_mutable {
val h1 = new History[String, Int]
val _h1: History[String, Int] = read(write(h1))
check(h1, _h1)
+
+ // LinkedHashMap
+ { val lhm1 = new LinkedHashMap[String, Int]
+ val list = List(("Linked", 1), ("Hash", 2), ("Map", 3))
+ lhm1 ++= list.iterator
+ val _lhm1: LinkedHashMap[String, Int] = read(write(lhm1))
+ check(lhm1, _lhm1)
+ check(lhm1.toSeq, _lhm1.toSeq) // check elements order
+ check(lhm1.toSeq, list) // check elements order
+ }
+
+ // LinkedHashSet
+ { val lhs1 = new LinkedHashSet[String]
+ val list = List("layers", "buffers", "title")
+ lhs1 ++= list.iterator
+ val _lhs1: LinkedHashSet[String] = read(write(lhs1))
+ check(lhs1, _lhs1)
+ check(lhs1.toSeq, _lhs1.toSeq) // check elements order
+ check(lhs1.toSeq, list) // check elements order
+ }
/*
// LinkedList
val ll1 = new LinkedList[Int](2, null)
diff --git a/test/files/lib/javac-artifacts.jar.desired.sha1 b/test/files/lib/javac-artifacts.jar.desired.sha1
new file mode 100644
index 0000000000..8dbbc1d451
--- /dev/null
+++ b/test/files/lib/javac-artifacts.jar.desired.sha1
@@ -0,0 +1 @@
+c5788c5e518eb267445c5a995fd98b2210f90a58 ?javac-artifacts.jar
diff --git a/test/files/neg/any-vs-anyref.check b/test/files/neg/any-vs-anyref.check
new file mode 100644
index 0000000000..63c4853130
--- /dev/null
+++ b/test/files/neg/any-vs-anyref.check
@@ -0,0 +1,64 @@
+any-vs-anyref.scala:6: error: type mismatch;
+ found : a.type (with underlying type A)
+ required: AnyRef
+Note that A is bounded only by Equals, which means AnyRef is not a known parent.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ def foo1[A <: Product](a: A) = { type X = a.type }
+ ^
+any-vs-anyref.scala:7: error: type mismatch;
+ found : a.type (with underlying type A)
+ required: AnyRef
+Note that A is bounded only by Product, Quux, which means AnyRef is not a known parent.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ def foo2[A <: Product with Quux](a: A) = { type X = a.type }
+ ^
+any-vs-anyref.scala:8: error: type mismatch;
+ found : a.type (with underlying type Product)
+ required: AnyRef
+Note that Product extends Any, not AnyRef.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ def foo3(a: Product) = { type X = a.type }
+ ^
+any-vs-anyref.scala:9: error: type mismatch;
+ found : Product with Quux
+ required: AnyRef
+Note that the parents of this type (Product, Quux) extend Any, not AnyRef.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ def foo4(a: Product with Quux) = { type X = a.type }
+ ^
+any-vs-anyref.scala:10: error: value eq is not a member of Quux with Product
+Note that the parents of this type (Quux, Product) extend Any, not AnyRef.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ def foo5(x: Quux with Product) = (x eq "abc") && ("abc" eq x)
+ ^
+any-vs-anyref.scala:11: error: value eq is not a member of Quux with Product{def f: Int}
+Note that the parents of this type (Quux, Product) extend Any, not AnyRef.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ def foo6(x: Quux with Product { def f: Int }) = (x eq "abc") && ("abc" eq x)
+ ^
+any-vs-anyref.scala:12: error: type mismatch;
+ found : Quux with Product{def eq(other: String): Boolean}
+ required: AnyRef
+Note that the parents of this type (Quux, Product) extend Any, not AnyRef.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ def foo7(x: Quux with Product { def eq(other: String): Boolean }) = (x eq "abc") && ("abc" eq x)
+ ^
+any-vs-anyref.scala:22: error: value eq is not a member of Bippy
+Note that Bippy extends Any, not AnyRef.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ def bad1(x: Bippy, y: Bippy) = x eq y
+ ^
+any-vs-anyref.scala:27: error: type mismatch;
+ found : Quux{def g(x: String): String}
+ required: Quux{def g(x: Int): Int}
+ f(new Quux { def g(x: String) = x })
+ ^
+9 errors found
diff --git a/test/files/neg/any-vs-anyref.scala b/test/files/neg/any-vs-anyref.scala
new file mode 100644
index 0000000000..8d237fbaec
--- /dev/null
+++ b/test/files/neg/any-vs-anyref.scala
@@ -0,0 +1,29 @@
+trait Quux extends Any
+trait QuuxRef extends AnyRef
+final class Bippy(val x: Any) extends AnyVal with Quux
+
+object Foo {
+ def foo1[A <: Product](a: A) = { type X = a.type }
+ def foo2[A <: Product with Quux](a: A) = { type X = a.type }
+ def foo3(a: Product) = { type X = a.type }
+ def foo4(a: Product with Quux) = { type X = a.type }
+ def foo5(x: Quux with Product) = (x eq "abc") && ("abc" eq x)
+ def foo6(x: Quux with Product { def f: Int }) = (x eq "abc") && ("abc" eq x)
+ def foo7(x: Quux with Product { def eq(other: String): Boolean }) = (x eq "abc") && ("abc" eq x)
+
+ def ok1[A <: QuuxRef](a: A) = { type X = a.type }
+ def ok2[A <: Product with QuuxRef](a: A) = { type X = a.type }
+ def ok3(a: QuuxRef) = { type X = a.type }
+ def ok4(a: Product with QuuxRef) = { type X = a.type }
+ def ok5(x: QuuxRef with Product) = (x eq "abc") && ("abc" eq x)
+ def ok6(x: QuuxRef with Product { def f: Int }) = (x eq "abc") && ("abc" eq x)
+ def ok7(x: QuuxRef { def eq(other: String): Boolean }) = (x eq "abc") && ("abc" eq x)
+
+ def bad1(x: Bippy, y: Bippy) = x eq y
+}
+
+object Bar {
+ def f(x: Quux { def g(x: Int): Int }): Int = x g 5
+ f(new Quux { def g(x: String) = x })
+ f(new Quux { def g(x: Int) = x })
+}
diff --git a/test/files/neg/applydynamic_sip.check b/test/files/neg/applydynamic_sip.check
index 8845f68a52..dcf97b29fc 100644
--- a/test/files/neg/applydynamic_sip.check
+++ b/test/files/neg/applydynamic_sip.check
@@ -7,4 +7,52 @@ applydynamic_sip.scala:8: error: applyDynamicNamed does not support passing a va
applydynamic_sip.scala:9: error: applyDynamicNamed does not support passing a vararg parameter
qual.sel(arg, arg2 = "a2", a2: _*)
^
-three errors found
+applydynamic_sip.scala:18: error: type mismatch;
+ found : String("sel")
+ required: Int
+error after rewriting to Test.this.bad1.selectDynamic("sel")
+possible cause: maybe a wrong Dynamic method signature?
+ bad1.sel
+ ^
+applydynamic_sip.scala:19: error: type mismatch;
+ found : String("sel")
+ required: Int
+error after rewriting to Test.this.bad1.applyDynamic("sel")
+possible cause: maybe a wrong Dynamic method signature?
+ bad1.sel(1)
+ ^
+applydynamic_sip.scala:20: error: type mismatch;
+ found : String("sel")
+ required: Int
+error after rewriting to Test.this.bad1.applyDynamicNamed("sel")
+possible cause: maybe a wrong Dynamic method signature?
+ bad1.sel(a = 1)
+ ^
+applydynamic_sip.scala:21: error: type mismatch;
+ found : String("sel")
+ required: Int
+error after rewriting to Test.this.bad1.updateDynamic("sel")
+possible cause: maybe a wrong Dynamic method signature?
+ bad1.sel = 1
+ ^
+applydynamic_sip.scala:29: error: Int does not take parameters
+error after rewriting to Test.this.bad2.selectDynamic("sel")
+possible cause: maybe a wrong Dynamic method signature?
+ bad2.sel
+ ^
+applydynamic_sip.scala:30: error: Int does not take parameters
+error after rewriting to Test.this.bad2.applyDynamic("sel")
+possible cause: maybe a wrong Dynamic method signature?
+ bad2.sel(1)
+ ^
+applydynamic_sip.scala:31: error: Int does not take parameters
+error after rewriting to Test.this.bad2.applyDynamicNamed("sel")
+possible cause: maybe a wrong Dynamic method signature?
+ bad2.sel(a = 1)
+ ^
+applydynamic_sip.scala:32: error: Int does not take parameters
+error after rewriting to Test.this.bad2.updateDynamic("sel")
+possible cause: maybe a wrong Dynamic method signature?
+ bad2.sel = 1
+ ^
+11 errors found
diff --git a/test/files/neg/applydynamic_sip.flags b/test/files/neg/applydynamic_sip.flags
new file mode 100644
index 0000000000..1141f97507
--- /dev/null
+++ b/test/files/neg/applydynamic_sip.flags
@@ -0,0 +1 @@
+-language:dynamics
diff --git a/test/files/neg/applydynamic_sip.scala b/test/files/neg/applydynamic_sip.scala
index 362461577b..ee4432ebe6 100644
--- a/test/files/neg/applydynamic_sip.scala
+++ b/test/files/neg/applydynamic_sip.scala
@@ -7,4 +7,27 @@ object Test extends App {
qual.sel(a, a2: _*)
qual.sel(arg = a, a2: _*)
qual.sel(arg, arg2 = "a2", a2: _*)
-}
\ No newline at end of file
+
+ val bad1 = new Dynamic {
+ def selectDynamic(n: Int) = n
+ def applyDynamic(n: Int) = n
+ def applyDynamicNamed(n: Int) = n
+ def updateDynamic(n: Int) = n
+
+ }
+ bad1.sel
+ bad1.sel(1)
+ bad1.sel(a = 1)
+ bad1.sel = 1
+
+ val bad2 = new Dynamic {
+ def selectDynamic = 1
+ def applyDynamic = 1
+ def applyDynamicNamed = 1
+ def updateDynamic = 1
+ }
+ bad2.sel
+ bad2.sel(1)
+ bad2.sel(a = 1)
+ bad2.sel = 1
+}
diff --git a/test/files/neg/checksensible.check b/test/files/neg/checksensible.check
index 23af94180a..d785179a56 100644
--- a/test/files/neg/checksensible.check
+++ b/test/files/neg/checksensible.check
@@ -1,100 +1,100 @@
-checksensible.scala:13: error: comparing a fresh object using `eq' will always yield false
- (new AnyRef) eq (new AnyRef)
- ^
-checksensible.scala:14: error: comparing a fresh object using `ne' will always yield true
- (new AnyRef) ne (new AnyRef)
- ^
-checksensible.scala:15: error: comparing a fresh object using `eq' will always yield false
- Shmoopie eq (new AnyRef)
- ^
-checksensible.scala:16: error: comparing a fresh object using `eq' will always yield false
- (Shmoopie: AnyRef) eq (new AnyRef)
- ^
-checksensible.scala:17: error: comparing a fresh object using `eq' will always yield false
- (new AnyRef) eq Shmoopie
- ^
-checksensible.scala:18: error: comparing a fresh object using `eq' will always yield false
- (new AnyRef) eq null
- ^
-checksensible.scala:19: error: comparing a fresh object using `eq' will always yield false
- null eq new AnyRef
- ^
-checksensible.scala:26: error: comparing values of types Unit and Int using `==' will always yield false
- (c = 1) == 0
- ^
-checksensible.scala:27: error: comparing values of types Int and Unit using `==' will always yield false
- 0 == (c = 1)
- ^
-checksensible.scala:29: error: comparing values of types Int and String using `==' will always yield false
- 1 == "abc"
- ^
-checksensible.scala:33: error: comparing values of types Some[Int] and Int using `==' will always yield false
- Some(1) == 1 // as above
- ^
-checksensible.scala:38: error: comparing a fresh object using `==' will always yield false
- new AnyRef == 1
- ^
-checksensible.scala:41: error: comparing values of types Int and Boolean using `==' will always yield false
- 1 == (new java.lang.Boolean(true))
- ^
-checksensible.scala:43: error: comparing values of types Int and Boolean using `!=' will always yield true
- 1 != true
- ^
-checksensible.scala:44: error: comparing values of types Unit and Boolean using `==' will always yield false
- () == true
- ^
-checksensible.scala:45: error: comparing values of types Unit and Unit using `==' will always yield true
- () == ()
- ^
-checksensible.scala:46: error: comparing values of types Unit and Unit using `==' will always yield true
- () == println
- ^
-checksensible.scala:47: error: comparing values of types Unit and scala.runtime.BoxedUnit using `==' will always yield true
- () == scala.runtime.BoxedUnit.UNIT // these should warn for always being true/false
- ^
-checksensible.scala:48: error: comparing values of types scala.runtime.BoxedUnit and Unit using `!=' will always yield false
- scala.runtime.BoxedUnit.UNIT != ()
- ^
-checksensible.scala:51: error: comparing values of types Int and Unit using `!=' will always yield true
- (1 != println)
- ^
-checksensible.scala:52: error: comparing values of types Int and Symbol using `!=' will always yield true
- (1 != 'sym)
- ^
-checksensible.scala:58: error: comparing a fresh object using `==' will always yield false
- ((x: Int) => x + 1) == null
- ^
-checksensible.scala:59: error: comparing a fresh object using `==' will always yield false
- Bep == ((_: Int) + 1)
- ^
-checksensible.scala:61: error: comparing a fresh object using `==' will always yield false
- new Object == new Object
- ^
-checksensible.scala:62: error: comparing a fresh object using `==' will always yield false
- new Object == "abc"
- ^
-checksensible.scala:63: error: comparing a fresh object using `!=' will always yield true
- new Exception() != new Exception()
- ^
-checksensible.scala:66: error: comparing values of types Int and Null using `==' will always yield false
- if (foo.length == null) "plante" else "plante pas"
- ^
-checksensible.scala:71: error: comparing values of types Bip and Bop using `==' will always yield false
- (x1 == x2)
- ^
-checksensible.scala:81: error: comparing values of types EqEqRefTest.this.C3 and EqEqRefTest.this.Z1 using `==' will always yield false
- c3 == z1
- ^
-checksensible.scala:82: error: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `==' will always yield false
- z1 == c3
- ^
-checksensible.scala:83: error: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `!=' will always yield true
- z1 != c3
- ^
-checksensible.scala:84: error: comparing values of types EqEqRefTest.this.C3 and String using `!=' will always yield true
- c3 != "abc"
- ^
-checksensible.scala:95: error: comparing values of types Unit and Int using `!=' will always yield true
- while ((c = in.read) != -1)
- ^
-33 errors found
+checksensible.scala:13: error: comparing a fresh object using `eq' will always yield false
+ (new AnyRef) eq (new AnyRef)
+ ^
+checksensible.scala:14: error: comparing a fresh object using `ne' will always yield true
+ (new AnyRef) ne (new AnyRef)
+ ^
+checksensible.scala:15: error: comparing a fresh object using `eq' will always yield false
+ Shmoopie eq (new AnyRef)
+ ^
+checksensible.scala:16: error: comparing a fresh object using `eq' will always yield false
+ (Shmoopie: AnyRef) eq (new AnyRef)
+ ^
+checksensible.scala:17: error: comparing a fresh object using `eq' will always yield false
+ (new AnyRef) eq Shmoopie
+ ^
+checksensible.scala:18: error: comparing a fresh object using `eq' will always yield false
+ (new AnyRef) eq null
+ ^
+checksensible.scala:19: error: comparing a fresh object using `eq' will always yield false
+ null eq new AnyRef
+ ^
+checksensible.scala:26: error: comparing values of types Unit and Int using `==' will always yield false
+ (c = 1) == 0
+ ^
+checksensible.scala:27: error: comparing values of types Int and Unit using `==' will always yield false
+ 0 == (c = 1)
+ ^
+checksensible.scala:29: error: comparing values of types Int and String using `==' will always yield false
+ 1 == "abc"
+ ^
+checksensible.scala:33: error: comparing values of types Some[Int] and Int using `==' will always yield false
+ Some(1) == 1 // as above
+ ^
+checksensible.scala:38: error: comparing a fresh object using `==' will always yield false
+ new AnyRef == 1
+ ^
+checksensible.scala:41: error: comparing values of types Int and Boolean using `==' will always yield false
+ 1 == (new java.lang.Boolean(true))
+ ^
+checksensible.scala:43: error: comparing values of types Int and Boolean using `!=' will always yield true
+ 1 != true
+ ^
+checksensible.scala:44: error: comparing values of types Unit and Boolean using `==' will always yield false
+ () == true
+ ^
+checksensible.scala:45: error: comparing values of types Unit and Unit using `==' will always yield true
+ () == ()
+ ^
+checksensible.scala:46: error: comparing values of types Unit and Unit using `==' will always yield true
+ () == println
+ ^
+checksensible.scala:47: error: comparing values of types Unit and scala.runtime.BoxedUnit using `==' will always yield true
+ () == scala.runtime.BoxedUnit.UNIT // these should warn for always being true/false
+ ^
+checksensible.scala:48: error: comparing values of types scala.runtime.BoxedUnit and Unit using `!=' will always yield false
+ scala.runtime.BoxedUnit.UNIT != ()
+ ^
+checksensible.scala:51: error: comparing values of types Int and Unit using `!=' will always yield true
+ (1 != println)
+ ^
+checksensible.scala:52: error: comparing values of types Int and Symbol using `!=' will always yield true
+ (1 != 'sym)
+ ^
+checksensible.scala:58: error: comparing a fresh object using `==' will always yield false
+ ((x: Int) => x + 1) == null
+ ^
+checksensible.scala:59: error: comparing a fresh object using `==' will always yield false
+ Bep == ((_: Int) + 1)
+ ^
+checksensible.scala:61: error: comparing a fresh object using `==' will always yield false
+ new Object == new Object
+ ^
+checksensible.scala:62: error: comparing a fresh object using `==' will always yield false
+ new Object == "abc"
+ ^
+checksensible.scala:63: error: comparing a fresh object using `!=' will always yield true
+ new Exception() != new Exception()
+ ^
+checksensible.scala:66: error: comparing values of types Int and Null using `==' will always yield false
+ if (foo.length == null) "plante" else "plante pas"
+ ^
+checksensible.scala:71: error: comparing values of types Bip and Bop using `==' will always yield false
+ (x1 == x2)
+ ^
+checksensible.scala:81: error: comparing values of types EqEqRefTest.this.C3 and EqEqRefTest.this.Z1 using `==' will always yield false
+ c3 == z1
+ ^
+checksensible.scala:82: error: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `==' will always yield false
+ z1 == c3
+ ^
+checksensible.scala:83: error: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `!=' will always yield true
+ z1 != c3
+ ^
+checksensible.scala:84: error: comparing values of types EqEqRefTest.this.C3 and String using `!=' will always yield true
+ c3 != "abc"
+ ^
+checksensible.scala:95: error: comparing values of types Unit and Int using `!=' will always yield true
+ while ((c = in.read) != -1)
+ ^
+33 errors found
diff --git a/test/files/neg/classmanifests_new_deprecations.check b/test/files/neg/classmanifests_new_deprecations.check
index 841e893249..e10308a66f 100644
--- a/test/files/neg/classmanifests_new_deprecations.check
+++ b/test/files/neg/classmanifests_new_deprecations.check
@@ -1,61 +1,61 @@
-classmanifests_new_deprecations.scala:2: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
- def cm1[T: ClassManifest] = ???
- ^
-classmanifests_new_deprecations.scala:3: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
- def cm2[T](implicit evidence$1: ClassManifest[T]) = ???
- ^
-classmanifests_new_deprecations.scala:4: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
- val cm3: ClassManifest[Int] = null
- ^
-classmanifests_new_deprecations.scala:4: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
- val cm3: ClassManifest[Int] = null
- ^
-classmanifests_new_deprecations.scala:6: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
- def rcm1[T: scala.reflect.ClassManifest] = ???
- ^
-classmanifests_new_deprecations.scala:7: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
- def rcm2[T](implicit evidence$1: scala.reflect.ClassManifest[T]) = ???
- ^
-classmanifests_new_deprecations.scala:8: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
- val rcm3: scala.reflect.ClassManifest[Int] = null
- ^
-classmanifests_new_deprecations.scala:8: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
- val rcm3: scala.reflect.ClassManifest[Int] = null
- ^
-classmanifests_new_deprecations.scala:10: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
- type CM[T] = ClassManifest[T]
- ^
-classmanifests_new_deprecations.scala:15: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
- type RCM[T] = scala.reflect.ClassManifest[T]
- ^
-classmanifests_new_deprecations.scala:20: error: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
- def m1[T: Manifest] = ???
- ^
-classmanifests_new_deprecations.scala:21: error: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
- def m2[T](implicit evidence$1: Manifest[T]) = ???
- ^
-classmanifests_new_deprecations.scala:22: error: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
- val m3: Manifest[Int] = null
- ^
-classmanifests_new_deprecations.scala:22: error: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
- val m3: Manifest[Int] = null
- ^
-classmanifests_new_deprecations.scala:24: error: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
- def rm1[T: scala.reflect.Manifest] = ???
- ^
-classmanifests_new_deprecations.scala:25: error: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
- def rm2[T](implicit evidence$1: scala.reflect.Manifest[T]) = ???
- ^
-classmanifests_new_deprecations.scala:26: error: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
- val rm3: scala.reflect.Manifest[Int] = null
- ^
-classmanifests_new_deprecations.scala:26: error: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
- val rm3: scala.reflect.Manifest[Int] = null
- ^
-classmanifests_new_deprecations.scala:28: error: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
- type M[T] = Manifest[T]
- ^
-classmanifests_new_deprecations.scala:33: error: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
- type RM[T] = scala.reflect.Manifest[T]
- ^
-20 errors found
+classmanifests_new_deprecations.scala:2: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+ def cm1[T: ClassManifest] = ???
+ ^
+classmanifests_new_deprecations.scala:3: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+ def cm2[T](implicit evidence$1: ClassManifest[T]) = ???
+ ^
+classmanifests_new_deprecations.scala:4: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+ val cm3: ClassManifest[Int] = null
+ ^
+classmanifests_new_deprecations.scala:4: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+ val cm3: ClassManifest[Int] = null
+ ^
+classmanifests_new_deprecations.scala:6: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+ def rcm1[T: scala.reflect.ClassManifest] = ???
+ ^
+classmanifests_new_deprecations.scala:7: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+ def rcm2[T](implicit evidence$1: scala.reflect.ClassManifest[T]) = ???
+ ^
+classmanifests_new_deprecations.scala:8: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+ val rcm3: scala.reflect.ClassManifest[Int] = null
+ ^
+classmanifests_new_deprecations.scala:8: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+ val rcm3: scala.reflect.ClassManifest[Int] = null
+ ^
+classmanifests_new_deprecations.scala:10: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+ type CM[T] = ClassManifest[T]
+ ^
+classmanifests_new_deprecations.scala:15: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+ type RCM[T] = scala.reflect.ClassManifest[T]
+ ^
+classmanifests_new_deprecations.scala:20: error: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ def m1[T: Manifest] = ???
+ ^
+classmanifests_new_deprecations.scala:21: error: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ def m2[T](implicit evidence$1: Manifest[T]) = ???
+ ^
+classmanifests_new_deprecations.scala:22: error: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ val m3: Manifest[Int] = null
+ ^
+classmanifests_new_deprecations.scala:22: error: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ val m3: Manifest[Int] = null
+ ^
+classmanifests_new_deprecations.scala:24: error: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ def rm1[T: scala.reflect.Manifest] = ???
+ ^
+classmanifests_new_deprecations.scala:25: error: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ def rm2[T](implicit evidence$1: scala.reflect.Manifest[T]) = ???
+ ^
+classmanifests_new_deprecations.scala:26: error: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ val rm3: scala.reflect.Manifest[Int] = null
+ ^
+classmanifests_new_deprecations.scala:26: error: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ val rm3: scala.reflect.Manifest[Int] = null
+ ^
+classmanifests_new_deprecations.scala:28: error: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ type M[T] = Manifest[T]
+ ^
+classmanifests_new_deprecations.scala:33: error: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead
+ type RM[T] = scala.reflect.Manifest[T]
+ ^
+20 errors found
diff --git a/test/files/neg/classtags_contextbound_a.check b/test/files/neg/classtags_contextbound_a.check
index f4b6ff5af1..5edb7f9a5a 100644
--- a/test/files/neg/classtags_contextbound_a.check
+++ b/test/files/neg/classtags_contextbound_a.check
@@ -1,4 +1,4 @@
-classtags_contextbound_a.scala:2: error: No ClassTag available for T
- def foo[T] = Array[T]()
- ^
-one error found
+classtags_contextbound_a.scala:2: error: No ClassTag available for T
+ def foo[T] = Array[T]()
+ ^
+one error found
diff --git a/test/files/neg/classtags_contextbound_b.check b/test/files/neg/classtags_contextbound_b.check
index e08ab504bc..e17ab8b0d1 100644
--- a/test/files/neg/classtags_contextbound_b.check
+++ b/test/files/neg/classtags_contextbound_b.check
@@ -1,4 +1,4 @@
-classtags_contextbound_b.scala:5: error: No ClassTag available for T
- def foo[T] = mkArray[T]
- ^
-one error found
+classtags_contextbound_b.scala:5: error: No ClassTag available for T
+ def foo[T] = mkArray[T]
+ ^
+one error found
diff --git a/test/files/neg/classtags_contextbound_c.check b/test/files/neg/classtags_contextbound_c.check
index f8768eea2b..e8666f7a10 100644
--- a/test/files/neg/classtags_contextbound_c.check
+++ b/test/files/neg/classtags_contextbound_c.check
@@ -1,4 +1,4 @@
-classtags_contextbound_c.scala:4: error: No ClassTag available for T
- def mkArray[T] = Array[T]()
- ^
-one error found
+classtags_contextbound_c.scala:4: error: No ClassTag available for T
+ def mkArray[T] = Array[T]()
+ ^
+one error found
diff --git a/test/files/neg/classtags_dont_use_typetags.check b/test/files/neg/classtags_dont_use_typetags.check
index 24585e7c3f..4f728d267d 100644
--- a/test/files/neg/classtags_dont_use_typetags.check
+++ b/test/files/neg/classtags_dont_use_typetags.check
@@ -1,4 +1,4 @@
-classtags_dont_use_typetags.scala:4: error: No ClassTag available for T
- def foo[T: TypeTag] = Array[T]()
- ^
-one error found
+classtags_dont_use_typetags.scala:4: error: No ClassTag available for T
+ def foo[T: TypeTag] = Array[T]()
+ ^
+one error found
diff --git a/test/files/neg/deadline-inf-illegal.check b/test/files/neg/deadline-inf-illegal.check
new file mode 100644
index 0000000000..530d2b2443
--- /dev/null
+++ b/test/files/neg/deadline-inf-illegal.check
@@ -0,0 +1,15 @@
+deadline-inf-illegal.scala:5: error: value fromNow is not a member of scala.concurrent.duration.Duration
+ d.fromNow
+ ^
+deadline-inf-illegal.scala:6: error: type mismatch;
+ found : scala.concurrent.duration.Duration
+ required: scala.concurrent.duration.FiniteDuration
+ Deadline.now + d
+ ^
+deadline-inf-illegal.scala:7: error: overloaded method value - with alternatives:
+ (other: scala.concurrent.duration.Deadline)scala.concurrent.duration.FiniteDuration <and>
+ (other: scala.concurrent.duration.FiniteDuration)scala.concurrent.duration.Deadline
+ cannot be applied to (scala.concurrent.duration.Duration)
+ Deadline.now - d
+ ^
+three errors found
diff --git a/test/files/neg/deadline-inf-illegal.scala b/test/files/neg/deadline-inf-illegal.scala
new file mode 100644
index 0000000000..942cea7014
--- /dev/null
+++ b/test/files/neg/deadline-inf-illegal.scala
@@ -0,0 +1,8 @@
+import concurrent.duration.{ Deadline, Duration }
+
+class T {
+ val d: Duration = Duration.Zero
+ d.fromNow
+ Deadline.now + d
+ Deadline.now - d
+}
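
The new deadline-inf-illegal test above rejects plain Duration wherever a Deadline is involved; the accepted counterpart uses FiniteDuration. A minimal sketch of that usage (the DeadlineSketch object and App wrapper are illustrative, not part of the patch):

import scala.concurrent.duration._

object DeadlineSketch extends App {
  // Illustrative only: FiniteDuration, unlike Duration, supports fromNow
  // and can be added to or subtracted from a Deadline.
  val d: FiniteDuration = 5.seconds
  val start: Deadline = d.fromNow          // FiniteDuration#fromNow yields a Deadline
  val later: Deadline = Deadline.now + d   // Deadline + FiniteDuration is well-typed
  println(start.hasTimeLeft())
  println(later.timeLeft)
}
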
diff --git a/test/files/neg/implicit-shadow.check b/test/files/neg/implicit-shadow.check
new file mode 100644
index 0000000000..042fca867a
--- /dev/null
+++ b/test/files/neg/implicit-shadow.check
@@ -0,0 +1,11 @@
+implicit-shadow.scala:4: <i2s: error> is not a valid implicit value for Int(1) => ?{def isEmpty: ?} because:
+reference to i2s is ambiguous;
+it is imported twice in the same scope by
+import C._
+and import B._
+ 1.isEmpty
+ ^
+implicit-shadow.scala:4: error: value isEmpty is not a member of Int
+ 1.isEmpty
+ ^
+one error found
diff --git a/test/files/neg/implicit-shadow.flags b/test/files/neg/implicit-shadow.flags
new file mode 100644
index 0000000000..44842a9d65
--- /dev/null
+++ b/test/files/neg/implicit-shadow.flags
@@ -0,0 +1 @@
+-Xlog-implicits
diff --git a/test/files/neg/implicit-shadow.scala b/test/files/neg/implicit-shadow.scala
new file mode 100644
index 0000000000..ffd34b6408
--- /dev/null
+++ b/test/files/neg/implicit-shadow.scala
@@ -0,0 +1,13 @@
+object Test {
+ import B._, C._
+
+ 1.isEmpty
+}
+
+trait A {
+ implicit def i2s(i: Int): String = ""
+}
+
+object B extends A
+
+object C extends A
\ No newline at end of file
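
The implicit-shadow test above shows the ambiguity that arises when i2s is imported through both B and C; importing the view from a single object keeps it applicable. A minimal sketch under that assumption (ImplicitShadowSketch is an illustrative name, not part of the patch):

import scala.language.implicitConversions

object ImplicitShadowSketch extends App {
  trait A { implicit def i2s(i: Int): String = i.toString }
  object B extends A
  object C extends A

  // Importing i2s from only one of the two objects leaves the reference
  // unambiguous, so the Int => String view applies and isEmpty resolves on String.
  import B._
  println(1.isEmpty)   // false, i.e. "1".isEmpty
}
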
diff --git a/test/files/neg/interop_abstypetags_arenot_classmanifests.check b/test/files/neg/interop_abstypetags_arenot_classmanifests.check
index 15bdcfc5b1..d15e33346c 100644
--- a/test/files/neg/interop_abstypetags_arenot_classmanifests.check
+++ b/test/files/neg/interop_abstypetags_arenot_classmanifests.check
@@ -1,4 +1,4 @@
-interop_abstypetags_arenot_classmanifests.scala:5: error: No ClassManifest available for T.
- println(classManifest[T])
- ^
-one error found
+interop_abstypetags_arenot_classmanifests.scala:5: error: No ClassManifest available for T.
+ println(classManifest[T])
+ ^
+one error found
diff --git a/test/files/neg/interop_abstypetags_arenot_classmanifests.scala b/test/files/neg/interop_abstypetags_arenot_classmanifests.scala
index c8567be5d4..5d88c90ffd 100644
--- a/test/files/neg/interop_abstypetags_arenot_classmanifests.scala
+++ b/test/files/neg/interop_abstypetags_arenot_classmanifests.scala
@@ -1,11 +1,11 @@
import scala.reflect.runtime.universe._
object Test extends App {
- def absTypeTagIsnotClassManifest[T: AbsTypeTag] = {
+ def weakTypeTagIsnotClassManifest[T: WeakTypeTag] = {
println(classManifest[T])
}
- absTypeTagIsnotClassManifest[Int]
- absTypeTagIsnotClassManifest[String]
- absTypeTagIsnotClassManifest[Array[Int]]
+ weakTypeTagIsnotClassManifest[Int]
+ weakTypeTagIsnotClassManifest[String]
+ weakTypeTagIsnotClassManifest[Array[Int]]
}
\ No newline at end of file
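
The AbsTypeTag -> WeakTypeTag rename running through these interop tests does not change the underlying rule: a weak type tag describes a type but cannot stand in for a ClassTag or ClassManifest. A minimal runtime-reflection sketch of that distinction (TagsSketch, describe and mkArray are illustrative names, not part of the patch):

import scala.reflect.ClassTag
import scala.reflect.runtime.universe._

object TagsSketch extends App {
  // A WeakTypeTag bound gives access to the type itself...
  def describe[T: WeakTypeTag]: String = weakTypeOf[T].toString
  // ...but array creation still needs a ClassTag for the element type.
  def mkArray[T: ClassTag](xs: T*): Array[T] = xs.toArray

  println(describe[List[Int]])       // prints the type, e.g. List[Int]
  println(mkArray(1, 2, 3).length)   // 3
}
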
diff --git a/test/files/neg/interop_abstypetags_arenot_classtags.check b/test/files/neg/interop_abstypetags_arenot_classtags.check
index 637db83e57..3aa7a50b50 100644
--- a/test/files/neg/interop_abstypetags_arenot_classtags.check
+++ b/test/files/neg/interop_abstypetags_arenot_classtags.check
@@ -1,4 +1,4 @@
-interop_abstypetags_arenot_classtags.scala:6: error: No ClassTag available for T
- println(classTag[T])
- ^
-one error found
+interop_abstypetags_arenot_classtags.scala:6: error: No ClassTag available for T
+ println(classTag[T])
+ ^
+one error found
diff --git a/test/files/neg/interop_abstypetags_arenot_classtags.scala b/test/files/neg/interop_abstypetags_arenot_classtags.scala
index e3911c1588..de1f8657b6 100644
--- a/test/files/neg/interop_abstypetags_arenot_classtags.scala
+++ b/test/files/neg/interop_abstypetags_arenot_classtags.scala
@@ -2,11 +2,11 @@ import scala.reflect.runtime.universe._
import scala.reflect.{ClassTag, classTag}
object Test extends App {
- def absTypeTagIsnotClassTag[T: AbsTypeTag] = {
+ def weakTypeTagIsnotClassTag[T: WeakTypeTag] = {
println(classTag[T])
}
- absTypeTagIsnotClassTag[Int]
- absTypeTagIsnotClassTag[String]
- absTypeTagIsnotClassTag[Array[Int]]
+ weakTypeTagIsnotClassTag[Int]
+ weakTypeTagIsnotClassTag[String]
+ weakTypeTagIsnotClassTag[Array[Int]]
}
\ No newline at end of file
diff --git a/test/files/neg/interop_abstypetags_arenot_manifests.check b/test/files/neg/interop_abstypetags_arenot_manifests.check
index c80635543f..5916b68742 100644
--- a/test/files/neg/interop_abstypetags_arenot_manifests.check
+++ b/test/files/neg/interop_abstypetags_arenot_manifests.check
@@ -1,4 +1,4 @@
-interop_abstypetags_arenot_manifests.scala:5: error: No Manifest available for T.
- println(manifest[T])
- ^
-one error found
+interop_abstypetags_arenot_manifests.scala:5: error: No Manifest available for T.
+ println(manifest[T])
+ ^
+one error found
diff --git a/test/files/neg/interop_abstypetags_arenot_manifests.scala b/test/files/neg/interop_abstypetags_arenot_manifests.scala
index 77a95048fd..1ca3673ce4 100644
--- a/test/files/neg/interop_abstypetags_arenot_manifests.scala
+++ b/test/files/neg/interop_abstypetags_arenot_manifests.scala
@@ -1,11 +1,11 @@
import scala.reflect.runtime.universe._
object Test extends App {
- def absTypeTagIsnotManifest[T: AbsTypeTag] = {
+ def weakTypeTagIsnotManifest[T: WeakTypeTag] = {
println(manifest[T])
}
- absTypeTagIsnotManifest[Int]
- absTypeTagIsnotManifest[String]
- absTypeTagIsnotManifest[Array[Int]]
+ weakTypeTagIsnotManifest[Int]
+ weakTypeTagIsnotManifest[String]
+ weakTypeTagIsnotManifest[Array[Int]]
}
\ No newline at end of file
diff --git a/test/files/neg/interop_classmanifests_arenot_typetags.check b/test/files/neg/interop_classmanifests_arenot_typetags.check
index 6675fd9eae..db8e57981a 100644
--- a/test/files/neg/interop_classmanifests_arenot_typetags.check
+++ b/test/files/neg/interop_classmanifests_arenot_typetags.check
@@ -1,4 +1,4 @@
-interop_classmanifests_arenot_typetags.scala:5: error: No TypeTag available for T
- println(implicitly[TypeTag[T]])
- ^
-one error found
+interop_classmanifests_arenot_typetags.scala:5: error: No TypeTag available for T
+ println(implicitly[TypeTag[T]])
+ ^
+one error found
diff --git a/test/files/neg/interop_classtags_arenot_manifests.check b/test/files/neg/interop_classtags_arenot_manifests.check
index 6982f0b805..fa805b5918 100644
--- a/test/files/neg/interop_classtags_arenot_manifests.check
+++ b/test/files/neg/interop_classtags_arenot_manifests.check
@@ -1,4 +1,4 @@
-interop_classtags_arenot_manifests.scala:5: error: No Manifest available for T.
- println(manifest[T])
- ^
-one error found
+interop_classtags_arenot_manifests.scala:5: error: No Manifest available for T.
+ println(manifest[T])
+ ^
+one error found
diff --git a/test/files/neg/interop_typetags_arenot_classmanifests.check b/test/files/neg/interop_typetags_arenot_classmanifests.check
index b404f94d69..88fb1647e5 100644
--- a/test/files/neg/interop_typetags_arenot_classmanifests.check
+++ b/test/files/neg/interop_typetags_arenot_classmanifests.check
@@ -1,4 +1,4 @@
-interop_typetags_arenot_classmanifests.scala:5: error: No ClassManifest available for T.
- println(classManifest[T])
- ^
-one error found
+interop_typetags_arenot_classmanifests.scala:5: error: No ClassManifest available for T.
+ println(classManifest[T])
+ ^
+one error found
diff --git a/test/files/neg/interop_typetags_arenot_classtags.check b/test/files/neg/interop_typetags_arenot_classtags.check
index 673ad2f941..1d1fb15f9e 100644
--- a/test/files/neg/interop_typetags_arenot_classtags.check
+++ b/test/files/neg/interop_typetags_arenot_classtags.check
@@ -1,4 +1,4 @@
-interop_typetags_arenot_classtags.scala:6: error: No ClassTag available for T
- println(classTag[T])
- ^
-one error found
+interop_typetags_arenot_classtags.scala:6: error: No ClassTag available for T
+ println(classTag[T])
+ ^
+one error found
diff --git a/test/files/neg/interop_typetags_without_classtags_arenot_manifests.check b/test/files/neg/interop_typetags_without_classtags_arenot_manifests.check
index a95f1ad308..ba744a8837 100644
--- a/test/files/neg/interop_typetags_without_classtags_arenot_manifests.check
+++ b/test/files/neg/interop_typetags_without_classtags_arenot_manifests.check
@@ -1,6 +1,6 @@
-interop_typetags_without_classtags_arenot_manifests.scala:6: error: to create a manifest here, it is necessary to interoperate with the type tag `evidence$1` in scope.
-however typetag -> manifest conversion requires a class tag for the corresponding type to be present.
-to proceed add a class tag to the type `T` (e.g. by introducing a context bound) and recompile.
- println(manifest[T])
- ^
-one error found
+interop_typetags_without_classtags_arenot_manifests.scala:6: error: to create a manifest here, it is necessary to interoperate with the type tag `evidence$1` in scope.
+however typetag -> manifest conversion requires a class tag for the corresponding type to be present.
+to proceed add a class tag to the type `T` (e.g. by introducing a context bound) and recompile.
+ println(manifest[T])
+ ^
+one error found
diff --git a/test/files/neg/javaConversions-2.10-ambiguity.check b/test/files/neg/javaConversions-2.10-ambiguity.check
new file mode 100644
index 0000000000..c064a22964
--- /dev/null
+++ b/test/files/neg/javaConversions-2.10-ambiguity.check
@@ -0,0 +1,6 @@
+javaConversions-2.10-ambiguity.scala:8: error: type mismatch;
+ found : scala.collection.concurrent.Map[String,String]
+ required: scala.collection.mutable.ConcurrentMap[String,String]
+ assertType[mutable.ConcurrentMap[String, String]](a)
+ ^
+one error found
diff --git a/test/files/neg/javaConversions-2.10-ambiguity.scala b/test/files/neg/javaConversions-2.10-ambiguity.scala
new file mode 100644
index 0000000000..e856846a29
--- /dev/null
+++ b/test/files/neg/javaConversions-2.10-ambiguity.scala
@@ -0,0 +1,10 @@
+import collection.{JavaConversions, mutable, concurrent}
+import JavaConversions._
+import java.util.concurrent.{ConcurrentHashMap => CHM}
+
+object Bar {
+ def assertType[T](t: T) = t
+ val a = new CHM[String, String]() += (("", ""))
+ assertType[mutable.ConcurrentMap[String, String]](a)
+}
+// vim: set et:
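
The javaConversions-2.10-ambiguity test above records that the JavaConversions wrapper for a java.util.concurrent.ConcurrentMap is now typed as collection.concurrent.Map rather than the deprecated mutable.ConcurrentMap. A minimal sketch of the expected typing (JavaConversionsSketch is an illustrative name, not part of the patch):

import scala.collection.concurrent
import scala.collection.JavaConversions._
import java.util.concurrent.{ConcurrentHashMap => CHM}

object JavaConversionsSketch extends App {
  // Illustrative only: the implicit wrapper yields collection.concurrent.Map.
  val m: concurrent.Map[String, String] = new CHM[String, String]()
  m.putIfAbsent("k", "v")
  println(m.get("k"))   // Some(v)
}
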
diff --git a/test/files/neg/macro-basic-mamdmi.check b/test/files/neg/macro-basic-mamdmi.check
index 67b00c0ec5..c7b58d70d2 100644
--- a/test/files/neg/macro-basic-mamdmi.check
+++ b/test/files/neg/macro-basic-mamdmi.check
@@ -1,4 +1,4 @@
-Impls_Macros_Test_1.scala:36: error: macro implementation not found: foo (the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)
- println(foo(2) + Macros.bar(2) * new Macros().quux(4))
- ^
-one error found
+Impls_Macros_Test_1.scala:36: error: macro implementation not found: foo (the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)
+ println(foo(2) + Macros.bar(2) * new Macros().quux(4))
+ ^
+one error found
diff --git a/test/files/neg/macro-deprecate-idents.check b/test/files/neg/macro-deprecate-idents.check
index f8a7e519df..22b667c390 100644
--- a/test/files/neg/macro-deprecate-idents.check
+++ b/test/files/neg/macro-deprecate-idents.check
@@ -1,52 +1,52 @@
-macro-deprecate-idents.scala:2: error: macro is now a reserved word; usage as an identifier is deprecated
- val macro = ???
- ^
-macro-deprecate-idents.scala:6: error: macro is now a reserved word; usage as an identifier is deprecated
- var macro = ???
- ^
-macro-deprecate-idents.scala:10: error: macro is now a reserved word; usage as an identifier is deprecated
- type macro = Int
- ^
-macro-deprecate-idents.scala:14: error: macro is now a reserved word; usage as an identifier is deprecated
- class macro
- ^
-macro-deprecate-idents.scala:18: error: macro is now a reserved word; usage as an identifier is deprecated
- class macro
- ^
-macro-deprecate-idents.scala:22: error: macro is now a reserved word; usage as an identifier is deprecated
- object macro
- ^
-macro-deprecate-idents.scala:26: error: macro is now a reserved word; usage as an identifier is deprecated
- object macro
- ^
-macro-deprecate-idents.scala:30: error: macro is now a reserved word; usage as an identifier is deprecated
- trait macro
- ^
-macro-deprecate-idents.scala:34: error: macro is now a reserved word; usage as an identifier is deprecated
- trait macro
- ^
-macro-deprecate-idents.scala:37: error: macro is now a reserved word; usage as an identifier is deprecated
-package macro {
- ^
-macro-deprecate-idents.scala:38: error: macro is now a reserved word; usage as an identifier is deprecated
- package macro.bar {
- ^
-macro-deprecate-idents.scala:43: error: macro is now a reserved word; usage as an identifier is deprecated
- package macro.foo {
- ^
-macro-deprecate-idents.scala:48: error: macro is now a reserved word; usage as an identifier is deprecated
- val Some(macro) = Some(42)
- ^
-macro-deprecate-idents.scala:49: error: macro is now a reserved word; usage as an identifier is deprecated
- macro match {
- ^
-macro-deprecate-idents.scala:50: error: macro is now a reserved word; usage as an identifier is deprecated
- case macro => println(macro)
- ^
-macro-deprecate-idents.scala:50: error: macro is now a reserved word; usage as an identifier is deprecated
- case macro => println(macro)
- ^
-macro-deprecate-idents.scala:55: error: macro is now a reserved word; usage as an identifier is deprecated
- def macro = 2
- ^
-17 errors found
+macro-deprecate-idents.scala:2: error: macro is now a reserved word; usage as an identifier is deprecated
+ val macro = ???
+ ^
+macro-deprecate-idents.scala:6: error: macro is now a reserved word; usage as an identifier is deprecated
+ var macro = ???
+ ^
+macro-deprecate-idents.scala:10: error: macro is now a reserved word; usage as an identifier is deprecated
+ type macro = Int
+ ^
+macro-deprecate-idents.scala:14: error: macro is now a reserved word; usage as an identifier is deprecated
+ class macro
+ ^
+macro-deprecate-idents.scala:18: error: macro is now a reserved word; usage as an identifier is deprecated
+ class macro
+ ^
+macro-deprecate-idents.scala:22: error: macro is now a reserved word; usage as an identifier is deprecated
+ object macro
+ ^
+macro-deprecate-idents.scala:26: error: macro is now a reserved word; usage as an identifier is deprecated
+ object macro
+ ^
+macro-deprecate-idents.scala:30: error: macro is now a reserved word; usage as an identifier is deprecated
+ trait macro
+ ^
+macro-deprecate-idents.scala:34: error: macro is now a reserved word; usage as an identifier is deprecated
+ trait macro
+ ^
+macro-deprecate-idents.scala:37: error: macro is now a reserved word; usage as an identifier is deprecated
+package macro {
+ ^
+macro-deprecate-idents.scala:38: error: macro is now a reserved word; usage as an identifier is deprecated
+ package macro.bar {
+ ^
+macro-deprecate-idents.scala:43: error: macro is now a reserved word; usage as an identifier is deprecated
+ package macro.foo {
+ ^
+macro-deprecate-idents.scala:48: error: macro is now a reserved word; usage as an identifier is deprecated
+ val Some(macro) = Some(42)
+ ^
+macro-deprecate-idents.scala:49: error: macro is now a reserved word; usage as an identifier is deprecated
+ macro match {
+ ^
+macro-deprecate-idents.scala:50: error: macro is now a reserved word; usage as an identifier is deprecated
+ case macro => println(macro)
+ ^
+macro-deprecate-idents.scala:50: error: macro is now a reserved word; usage as an identifier is deprecated
+ case macro => println(macro)
+ ^
+macro-deprecate-idents.scala:55: error: macro is now a reserved word; usage as an identifier is deprecated
+ def macro = 2
+ ^
+17 errors found
diff --git a/test/files/neg/macro-invalidimpl-a.check b/test/files/neg/macro-invalidimpl-a.check
index 855fe2d169..7f11f3b865 100644
--- a/test/files/neg/macro-invalidimpl-a.check
+++ b/test/files/neg/macro-invalidimpl-a.check
@@ -1,4 +1,4 @@
-Macros_Test_2.scala:3: error: macro implementation must be in statically accessible object
- def foo(x: Any) = macro impls.foo
- ^
-one error found
+Macros_Test_2.scala:3: error: macro implementation must be in statically accessible object
+ def foo(x: Any) = macro impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidimpl-b.check b/test/files/neg/macro-invalidimpl-b.check
index 855fe2d169..7f11f3b865 100644
--- a/test/files/neg/macro-invalidimpl-b.check
+++ b/test/files/neg/macro-invalidimpl-b.check
@@ -1,4 +1,4 @@
-Macros_Test_2.scala:3: error: macro implementation must be in statically accessible object
- def foo(x: Any) = macro impls.foo
- ^
-one error found
+Macros_Test_2.scala:3: error: macro implementation must be in statically accessible object
+ def foo(x: Any) = macro impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidimpl-c.check b/test/files/neg/macro-invalidimpl-c.check
index 722ec3c7bd..9e0181c0a3 100644
--- a/test/files/neg/macro-invalidimpl-c.check
+++ b/test/files/neg/macro-invalidimpl-c.check
@@ -1,4 +1,4 @@
-Impls_Macros_1.scala:8: error: macro implementation must be in statically accessible object
- def foo(x: Any) = macro Impls.foo
- ^
-one error found
+Impls_Macros_1.scala:8: error: macro implementation must be in statically accessible object
+ def foo(x: Any) = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidimpl-d.check b/test/files/neg/macro-invalidimpl-d.check
index 6fedfa74fc..76a5ba9c8c 100644
--- a/test/files/neg/macro-invalidimpl-d.check
+++ b/test/files/neg/macro-invalidimpl-d.check
@@ -1,4 +1,4 @@
-Macros_Test_2.scala:2: error: macro implementation must be in statically accessible object
- def foo(x: Any) = macro Impls.foo
- ^
-one error found
+Macros_Test_2.scala:2: error: macro implementation must be in statically accessible object
+ def foo(x: Any) = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidimpl-e.check b/test/files/neg/macro-invalidimpl-e.check
index 5cfcf85625..e0910b2899 100644
--- a/test/files/neg/macro-invalidimpl-e.check
+++ b/test/files/neg/macro-invalidimpl-e.check
@@ -1,13 +1,13 @@
-Macros_Test_2.scala:2: error: ambiguous reference to overloaded definition,
-both method foo in object Impls of type (c: scala.reflect.macros.Context)(x: c.Expr[Any], y: c.Expr[Any])Nothing
-and method foo in object Impls of type (c: scala.reflect.macros.Context)(x: c.Expr[Any])Nothing
-match expected type ?
- def foo(x: Any) = macro Impls.foo
- ^
-Macros_Test_2.scala:3: error: ambiguous reference to overloaded definition,
-both method foo in object Impls of type (c: scala.reflect.macros.Context)(x: c.Expr[Any], y: c.Expr[Any])Nothing
-and method foo in object Impls of type (c: scala.reflect.macros.Context)(x: c.Expr[Any])Nothing
-match expected type ?
- def foo(x: Any, y: Any) = macro Impls.foo
- ^
-two errors found
+Macros_Test_2.scala:2: error: ambiguous reference to overloaded definition,
+both method foo in object Impls of type (c: scala.reflect.macros.Context)(x: c.Expr[Any], y: c.Expr[Any])Nothing
+and method foo in object Impls of type (c: scala.reflect.macros.Context)(x: c.Expr[Any])Nothing
+match expected type ?
+ def foo(x: Any) = macro Impls.foo
+ ^
+Macros_Test_2.scala:3: error: ambiguous reference to overloaded definition,
+both method foo in object Impls of type (c: scala.reflect.macros.Context)(x: c.Expr[Any], y: c.Expr[Any])Nothing
+and method foo in object Impls of type (c: scala.reflect.macros.Context)(x: c.Expr[Any])Nothing
+match expected type ?
+ def foo(x: Any, y: Any) = macro Impls.foo
+ ^
+two errors found
diff --git a/test/files/neg/macro-invalidimpl-f.check b/test/files/neg/macro-invalidimpl-f.check
index 14f1e25287..4e5851f566 100644
--- a/test/files/neg/macro-invalidimpl-f.check
+++ b/test/files/neg/macro-invalidimpl-f.check
@@ -1,7 +1,7 @@
-Macros_Test_2.scala:2: error: macro implementation has wrong shape:
- required: (c: scala.reflect.macros.Context)(): c.Expr[Unit]
- found : (c: scala.reflect.macros.Context): c.Expr[Unit]
-number of parameter sections differ
- def bar1() = macro Impls.fooNullary
- ^
-one error found
+Macros_Test_2.scala:2: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context)(): c.Expr[Unit]
+ found : (c: scala.reflect.macros.Context): c.Expr[Unit]
+number of parameter sections differ
+ def bar1() = macro Impls.fooNullary
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidimpl-g.check b/test/files/neg/macro-invalidimpl-g.check
index a886436d35..7342f7336f 100644
--- a/test/files/neg/macro-invalidimpl-g.check
+++ b/test/files/neg/macro-invalidimpl-g.check
@@ -1,7 +1,7 @@
-Macros_Test_2.scala:2: error: macro implementation has wrong shape:
- required: (c: scala.reflect.macros.Context): c.Expr[Unit]
- found : (c: scala.reflect.macros.Context)(): c.Expr[Unit]
-number of parameter sections differ
- def foo1 = macro Impls.fooEmpty
- ^
-one error found
+Macros_Test_2.scala:2: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Unit]
+ found : (c: scala.reflect.macros.Context)(): c.Expr[Unit]
+number of parameter sections differ
+ def foo1 = macro Impls.fooEmpty
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidimpl-h.check b/test/files/neg/macro-invalidimpl-h.check
index cc7fc794d3..ea76e1aeac 100644
--- a/test/files/neg/macro-invalidimpl-h.check
+++ b/test/files/neg/macro-invalidimpl-h.check
@@ -1,4 +1,4 @@
-Macros_Test_2.scala:2: error: type arguments [String] do not conform to method foo's type parameter bounds [U <: Int]
- def foo = macro Impls.foo[String]
- ^
-one error found
+Macros_Test_2.scala:2: error: type arguments [String] do not conform to method foo's type parameter bounds [U <: Int]
+ def foo = macro Impls.foo[String]
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidimpl-i.check b/test/files/neg/macro-invalidimpl-i.check
new file mode 100644
index 0000000000..846ed8d134
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-i.check
@@ -0,0 +1,4 @@
+Macros_Test_2.scala:4: error: macro implementation must be public
+ def foo = macro Impls.impl
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidusage-badbounds.flags b/test/files/neg/macro-invalidimpl-i.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/neg/macro-invalidusage-badbounds.flags
+++ b/test/files/neg/macro-invalidimpl-i.flags
diff --git a/test/files/neg/macro-invalidimpl-i/Impls_1.scala b/test/files/neg/macro-invalidimpl-i/Impls_1.scala
new file mode 100644
index 0000000000..c35d8ab3c1
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-i/Impls_1.scala
@@ -0,0 +1,7 @@
+package foo
+
+import scala.reflect.macros.Context
+
+object Impls {
+ private[foo] def impl(c: Context) = ???
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-i/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-i/Macros_Test_2.scala
new file mode 100644
index 0000000000..fb129c70be
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-i/Macros_Test_2.scala
@@ -0,0 +1,5 @@
+package foo
+
+object Test extends App {
+ def foo = macro Impls.impl
+}
diff --git a/test/files/neg/macro-invalidret-nontree.check b/test/files/neg/macro-invalidret-nontree.check
index 78ab08df3e..6d8336d06d 100644
--- a/test/files/neg/macro-invalidret-nontree.check
+++ b/test/files/neg/macro-invalidret-nontree.check
@@ -1,7 +1,7 @@
-Macros_Test_2.scala:2: error: macro implementation has wrong shape:
- required: (c: scala.reflect.macros.Context): c.Expr[Any]
- found : (c: scala.reflect.macros.Context): Int
-type mismatch for return type: Int does not conform to c.Expr[Any]
- def foo = macro Impls.foo
- ^
-one error found
+Macros_Test_2.scala:2: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context): Int
+type mismatch for return type: Int does not conform to c.Expr[Any]
+ def foo = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidret-nonuniversetree.check b/test/files/neg/macro-invalidret-nonuniversetree.check
index 09df2c0a92..089bfd0dc9 100644
--- a/test/files/neg/macro-invalidret-nonuniversetree.check
+++ b/test/files/neg/macro-invalidret-nonuniversetree.check
@@ -1,7 +1,7 @@
-Macros_Test_2.scala:2: error: macro implementation has wrong shape:
- required: (c: scala.reflect.macros.Context): c.Expr[Any]
- found : (c: scala.reflect.macros.Context): reflect.basis.Literal
-type mismatch for return type: reflect.basis.Literal does not conform to c.Expr[Any]
- def foo = macro Impls.foo
- ^
-one error found
+Macros_Test_2.scala:2: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context): reflect.runtime.universe.Literal
+type mismatch for return type: reflect.runtime.universe.Literal does not conform to c.Expr[Any]
+ def foo = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala b/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala
index 8311d474c2..f98376a2ba 100644
--- a/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala
+++ b/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala
@@ -1,5 +1,6 @@
import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.runtime.{universe => ru}
object Impls {
- def foo(c: Ctx) = scala.reflect.basis.Literal(scala.reflect.basis.Constant(42))
+ def foo(c: Ctx) = ru.Literal(ru.Constant(42))
}
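
The updated Impls_1.scala above returns a tree built from scala.reflect.runtime.universe instead of scala.reflect.basis, and the new check output reflects that a runtime-universe tree still is not a c.Expr. A minimal sketch of building and printing that same Literal outside a macro (TreeSketch is an illustrative name, not part of the patch):

import scala.reflect.runtime.{universe => ru}

object TreeSketch extends App {
  // The same Literal tree the impl returns, built with the runtime universe;
  // a macro implementation must instead return a c.Expr from its Context's universe.
  val tree: ru.Literal = ru.Literal(ru.Constant(42))
  println(ru.showRaw(tree))   // Literal(Constant(42))
  println(ru.show(tree))      // 42
}
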
diff --git a/test/files/neg/macro-invalidshape-a.check b/test/files/neg/macro-invalidshape-a.check
index 246b5c3226..f38a90819e 100644
--- a/test/files/neg/macro-invalidshape-a.check
+++ b/test/files/neg/macro-invalidshape-a.check
@@ -1,6 +1,5 @@
-Macros_Test_2.scala:2: error: macro body has wrong shape:
- required: macro <reference to implementation object>.<implementation method name>
- or : macro <implementation method name>
- def foo(x: Any) = macro 2
- ^
-one error found
+Macros_Test_2.scala:2: error: macro body has wrong shape:
+ required: macro [<implementation object>].<method name>[[<type args>]]
+ def foo(x: Any) = macro 2
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidshape-b.check b/test/files/neg/macro-invalidshape-b.check
index 59701d023b..976685c6bd 100644
--- a/test/files/neg/macro-invalidshape-b.check
+++ b/test/files/neg/macro-invalidshape-b.check
@@ -1,6 +1,5 @@
-Macros_Test_2.scala:2: error: macro body has wrong shape:
- required: macro <reference to implementation object>.<implementation method name>
- or : macro <implementation method name>
- def foo(x: Any) = macro Impls.foo(null)(null)
- ^
-one error found
+Macros_Test_2.scala:2: error: macro body has wrong shape:
+ required: macro [<implementation object>].<method name>[[<type args>]]
+ def foo(x: Any) = macro Impls.foo(null)(null)
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidshape-c.check b/test/files/neg/macro-invalidshape-c.check
index 84d8c35222..0b2e9cfe4f 100644
--- a/test/files/neg/macro-invalidshape-c.check
+++ b/test/files/neg/macro-invalidshape-c.check
@@ -1,6 +1,9 @@
-Macros_Test_2.scala:2: error: macro body has wrong shape:
- required: macro <reference to implementation object>.<implementation method name>
- or : macro <implementation method name>
- def foo(x: Any) = macro {2; Impls.foo}
- ^
-one error found
+Macros_Test_2.scala:2: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ def foo(x: Any) = macro {2; Impls.foo}
+ ^
+Macros_Test_2.scala:2: error: missing arguments for method foo in object Impls;
+follow this method with `_' if you want to treat it as a partially applied function
+ def foo(x: Any) = macro {2; Impls.foo}
+ ^
+one warning found
+one error found
diff --git a/test/files/neg/macro-invalidshape-d.check b/test/files/neg/macro-invalidshape-d.check
index f0d77e2f2d..e43a2ca0ab 100644
--- a/test/files/neg/macro-invalidshape-d.check
+++ b/test/files/neg/macro-invalidshape-d.check
@@ -1,8 +1,8 @@
-Macros_Test_2.scala:2: warning: macro is now a reserved word; usage as an identifier is deprecated
- def foo(x: Any) = {2; macro Impls.foo}
- ^
-Macros_Test_2.scala:2: error: ';' expected but '.' found.
- def foo(x: Any) = {2; macro Impls.foo}
- ^
-one warning found
-one error found
+Macros_Test_2.scala:2: warning: macro is now a reserved word; usage as an identifier is deprecated
+ def foo(x: Any) = {2; macro Impls.foo}
+ ^
+Macros_Test_2.scala:2: error: ';' expected but '.' found.
+ def foo(x: Any) = {2; macro Impls.foo}
+ ^
+one warning found
+one error found
diff --git a/test/files/neg/macro-invalidsig-context-bounds.check b/test/files/neg/macro-invalidsig-context-bounds.check
index 894eabc442..43b8c23b35 100644
--- a/test/files/neg/macro-invalidsig-context-bounds.check
+++ b/test/files/neg/macro-invalidsig-context-bounds.check
@@ -1,4 +1,7 @@
-Impls_1.scala:5: error: macro implementations cannot have implicit parameters other than AbsTypeTag evidences
- def foo[U: c.AbsTypeTag: Numeric](c: Ctx) = {
- ^
-one error found
+Macros_Test_1.scala:2: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context)(implicit evidence$2: Numeric[U]): c.universe.Literal
+macro implementations cannot have implicit parameters other than WeakTypeTag evidences
+ def foo[U] = macro Impls.foo[U]
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-context-bounds/Impls_1.scala b/test/files/neg/macro-invalidsig-context-bounds/Impls_1.scala
index 5aa9a7eaf9..c066c485b1 100644
--- a/test/files/neg/macro-invalidsig-context-bounds/Impls_1.scala
+++ b/test/files/neg/macro-invalidsig-context-bounds/Impls_1.scala
@@ -2,7 +2,7 @@ import scala.reflect.runtime.universe._
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def foo[U: c.AbsTypeTag: Numeric](c: Ctx) = {
+ def foo[U: c.WeakTypeTag: Numeric](c: Ctx) = {
import c.universe._
Literal(Constant(42))
}
diff --git a/test/files/neg/macro-invalidsig-ctx-badargc.check b/test/files/neg/macro-invalidsig-ctx-badargc.check
index 8a1ca6a8b1..1c14072a94 100644
--- a/test/files/neg/macro-invalidsig-ctx-badargc.check
+++ b/test/files/neg/macro-invalidsig-ctx-badargc.check
@@ -1,7 +1,7 @@
-Macros_Test_2.scala:2: error: macro implementation has wrong shape:
- required: (c: scala.reflect.macros.Context): c.Expr[Any]
- found : : Nothing
-number of parameter sections differ
- def foo = macro Impls.foo
- ^
-one error found
+Macros_Test_2.scala:2: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ found : : Nothing
+number of parameter sections differ
+ def foo = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-ctx-badtype.check b/test/files/neg/macro-invalidsig-ctx-badtype.check
index 9e57ab8631..340ace6a38 100644
--- a/test/files/neg/macro-invalidsig-ctx-badtype.check
+++ b/test/files/neg/macro-invalidsig-ctx-badtype.check
@@ -1,7 +1,7 @@
-Macros_Test_2.scala:2: error: macro implementation has wrong shape:
- required: (c: scala.reflect.macros.Context): c.Expr[Any]
- found : (c: scala.reflect.api.Universe): Nothing
-type mismatch for parameter c: scala.reflect.macros.Context does not conform to scala.reflect.api.Universe
- def foo = macro Impls.foo
- ^
-one error found
+Macros_Test_2.scala:2: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ found : (c: scala.reflect.api.Universe): Nothing
+type mismatch for parameter c: scala.reflect.macros.Context does not conform to scala.reflect.api.Universe
+ def foo = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-ctx-badvarargs.check b/test/files/neg/macro-invalidsig-ctx-badvarargs.check
index 37941a7dc9..a6478f03e3 100644
--- a/test/files/neg/macro-invalidsig-ctx-badvarargs.check
+++ b/test/files/neg/macro-invalidsig-ctx-badvarargs.check
@@ -1,7 +1,7 @@
-Macros_Test_2.scala:2: error: macro implementation has wrong shape:
- required: (c: scala.reflect.macros.Context): c.Expr[Any]
- found : (cs: scala.reflect.macros.Context*): Nothing
-types incompatible for parameter cs: corresponding is not a vararg parameter
- def foo = macro Impls.foo
- ^
-one error found
+Macros_Test_2.scala:2: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ found : (cs: scala.reflect.macros.Context*): Nothing
+types incompatible for parameter cs: corresponding is not a vararg parameter
+ def foo = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-ctx-noctx.check b/test/files/neg/macro-invalidsig-ctx-noctx.check
index 722fe9dfb6..b7dc9a449b 100644
--- a/test/files/neg/macro-invalidsig-ctx-noctx.check
+++ b/test/files/neg/macro-invalidsig-ctx-noctx.check
@@ -1,7 +1,7 @@
-Macros_Test_2.scala:2: error: macro implementation has wrong shape:
- required: (c: scala.reflect.macros.Context)(x: c.Expr[Any]): c.Expr[Any]
- found : (c: scala.reflect.macros.Context): Nothing
-number of parameter sections differ
- def foo(x: Any) = macro Impls.foo
- ^
-one error found
+Macros_Test_2.scala:2: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context)(x: c.Expr[Any]): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context): Nothing
+number of parameter sections differ
+ def foo(x: Any) = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-implicit-params.check b/test/files/neg/macro-invalidsig-implicit-params.check
index 029b8a4634..f210eb8a32 100644
--- a/test/files/neg/macro-invalidsig-implicit-params.check
+++ b/test/files/neg/macro-invalidsig-implicit-params.check
@@ -1,4 +1,7 @@
-Impls_Macros_1.scala:5: error: macro implementations cannot have implicit parameters other than AbsTypeTag evidences
- def foo_targs[T, U: c.AbsTypeTag](c: Ctx)(implicit x: c.Expr[Int]) = {
- ^
-one error found
+Impls_Macros_1.scala:18: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context)(x: c.Expr[Int]): c.Expr[Unit]
+ found : (c: scala.reflect.macros.Context)(implicit x: c.Expr[Int]): c.Expr[Unit]
+macro implementations cannot have implicit parameters other than WeakTypeTag evidences
+ def foo_targs[U](x: Int) = macro Impls.foo_targs[T, U]
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-implicit-params/Impls_Macros_1.scala b/test/files/neg/macro-invalidsig-implicit-params/Impls_Macros_1.scala
index f724538993..845a168ff2 100644
--- a/test/files/neg/macro-invalidsig-implicit-params/Impls_Macros_1.scala
+++ b/test/files/neg/macro-invalidsig-implicit-params/Impls_Macros_1.scala
@@ -2,13 +2,13 @@ import scala.reflect.runtime.universe._
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def foo_targs[T, U: c.AbsTypeTag](c: Ctx)(implicit x: c.Expr[Int]) = {
+ def foo_targs[T, U: c.WeakTypeTag](c: Ctx)(implicit x: c.Expr[Int]) = {
import c.{prefix => prefix}
import c.universe._
val body = Block(
Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("invoking foo_targs...")))),
Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("type of prefix is: " + prefix.staticType)))),
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("U is: " + implicitly[c.AbsTypeTag[U]].tpe)))),
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("U is: " + implicitly[c.WeakTypeTag[U]].tpe)))),
Literal(Constant(())))
c.Expr[Unit](body)
}
diff --git a/test/files/neg/macro-invalidsig-params-badargc.check b/test/files/neg/macro-invalidsig-params-badargc.check
index ab4fb535c5..3f6d815b8e 100644
--- a/test/files/neg/macro-invalidsig-params-badargc.check
+++ b/test/files/neg/macro-invalidsig-params-badargc.check
@@ -1,7 +1,7 @@
-Impls_Macros_1.scala:8: error: macro implementation has wrong shape:
- required: (c: scala.reflect.macros.Context)(x: c.Expr[Int]): c.Expr[Any]
- found : (c: scala.reflect.macros.Context)(x: c.Expr[Int], y: c.Expr[Int]): Nothing
-parameter lists have different length, found extra parameter y: c.Expr[Int]
- def foo(x: Int) = macro Impls.foo
- ^
-one error found
+Impls_Macros_1.scala:8: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context)(x: c.Expr[Int]): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context)(x: c.Expr[Int], y: c.Expr[Int]): Nothing
+parameter lists have different length, found extra parameter y: c.Expr[Int]
+ def foo(x: Int) = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-params-badtype.check b/test/files/neg/macro-invalidsig-params-badtype.check
index 007620a366..3ec40d7e5b 100644
--- a/test/files/neg/macro-invalidsig-params-badtype.check
+++ b/test/files/neg/macro-invalidsig-params-badtype.check
@@ -1,7 +1,7 @@
-Impls_Macros_1.scala:8: error: macro implementation has wrong shape:
- required: (c: scala.reflect.macros.Context)(x: c.Expr[Int]): c.Expr[Any]
- found : (c: scala.reflect.macros.Context)(x: c.universe.Tree): Nothing
-type mismatch for parameter x: c.Expr[Int] does not conform to c.universe.Tree
- def foo(x: Int) = macro Impls.foo
- ^
-one error found
+Impls_Macros_1.scala:8: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context)(x: c.Expr[Int]): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context)(x: c.universe.Tree): Nothing
+type mismatch for parameter x: c.Expr[Int] does not conform to c.universe.Tree
+ def foo(x: Int) = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-params-badvarargs.check b/test/files/neg/macro-invalidsig-params-badvarargs.check
index f0dcc24d03..50607ff52d 100644
--- a/test/files/neg/macro-invalidsig-params-badvarargs.check
+++ b/test/files/neg/macro-invalidsig-params-badvarargs.check
@@ -1,7 +1,7 @@
-Impls_Macros_1.scala:8: error: macro implementation has wrong shape:
- required: (c: scala.reflect.macros.Context)(x: c.Expr[Int], y: c.Expr[Int]): c.Expr[Any]
- found : (c: scala.reflect.macros.Context)(xs: c.Expr[Int]*): Nothing
-parameter lists have different length, required extra parameter y: c.Expr[Int]
- def foo(x: Int, y: Int) = macro Impls.foo
- ^
-one error found
+Impls_Macros_1.scala:8: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context)(x: c.Expr[Int], y: c.Expr[Int]): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context)(xs: c.Expr[Int]*): Nothing
+parameter lists have different length, required extra parameter y: c.Expr[Int]
+ def foo(x: Int, y: Int) = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-params-namemismatch.check b/test/files/neg/macro-invalidsig-params-namemismatch.check
index 00d781a2ac..4029bc8129 100644
--- a/test/files/neg/macro-invalidsig-params-namemismatch.check
+++ b/test/files/neg/macro-invalidsig-params-namemismatch.check
@@ -1,7 +1,7 @@
-Impls_Macros_1.scala:8: error: macro implementation has wrong shape:
- required: (c: scala.reflect.macros.Context)(x: c.Expr[Int], y: c.Expr[Int]): c.Expr[Any]
- found : (c: scala.reflect.macros.Context)(y: c.Expr[Int], x: c.Expr[Int]): Nothing
-parameter names differ: x != y
- def foo(x: Int, y: Int) = macro Impls.foo
- ^
-one error found
+Impls_Macros_1.scala:8: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context)(x: c.Expr[Int], y: c.Expr[Int]): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context)(y: c.Expr[Int], x: c.Expr[Int]): Nothing
+parameter names differ: x != y
+ def foo(x: Int, y: Int) = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-tparams-badtype.check b/test/files/neg/macro-invalidsig-tparams-badtype.check
index e5e8366ba4..e9f3547133 100644
--- a/test/files/neg/macro-invalidsig-tparams-badtype.check
+++ b/test/files/neg/macro-invalidsig-tparams-badtype.check
@@ -1,7 +1,7 @@
-Macros_Test_2.scala:2: error: macro implementation has wrong shape:
- required: (c: scala.reflect.macros.Context): c.Expr[Any]
- found : (c: scala.reflect.macros.Context)(U: c.universe.Type): Nothing
-number of parameter sections differ
- def foo[U] = macro Impls.foo[U]
- ^
-one error found
+Macros_Test_2.scala:2: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context)(U: c.universe.Type): Nothing
+number of parameter sections differ
+ def foo[U] = macro Impls.foo[U]
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-a.check b/test/files/neg/macro-invalidsig-tparams-bounds-a.check
index 6ba80b45c0..b6248a1c47 100644
--- a/test/files/neg/macro-invalidsig-tparams-bounds-a.check
+++ b/test/files/neg/macro-invalidsig-tparams-bounds-a.check
@@ -1,4 +1,4 @@
-Macros_Test_2.scala:2: error: type arguments [U] do not conform to method foo's type parameter bounds [U <: String]
- def foo[U] = macro Impls.foo[U]
- ^
-one error found
+Macros_Test_2.scala:2: error: type arguments [U] do not conform to method foo's type parameter bounds [U <: String]
+ def foo[U] = macro Impls.foo[U]
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-b.check b/test/files/neg/macro-invalidsig-tparams-bounds-b.check
index 50f0944acc..74eb522cdd 100644
--- a/test/files/neg/macro-invalidsig-tparams-bounds-b.check
+++ b/test/files/neg/macro-invalidsig-tparams-bounds-b.check
@@ -1,4 +1,4 @@
-Macros_Test_2.scala:2: error: type arguments [U] do not conform to method foo's type parameter bounds [U <: String]
- def foo[U <: Int] = macro Impls.foo[U]
- ^
-one error found
+Macros_Test_2.scala:2: error: type arguments [U] do not conform to method foo's type parameter bounds [U <: String]
+ def foo[U <: Int] = macro Impls.foo[U]
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-a.check b/test/files/neg/macro-invalidsig-tparams-notparams-a.check
index 5b4ef42ea5..61a5628b7e 100644
--- a/test/files/neg/macro-invalidsig-tparams-notparams-a.check
+++ b/test/files/neg/macro-invalidsig-tparams-notparams-a.check
@@ -1,4 +1,4 @@
-Macros_Test_2.scala:2: error: macro implementation reference needs type arguments
- def foo = macro Impls.foo
- ^
-one error found
+Macros_Test_2.scala:2: error: wrong number of type parameters for method foo: [U](c: scala.reflect.macros.Context)(implicit evidence$1: c.WeakTypeTag[U])Nothing
+ def foo = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-a/Impls_1.scala b/test/files/neg/macro-invalidsig-tparams-notparams-a/Impls_1.scala
index afbe0f0915..f8b3c92869 100644
--- a/test/files/neg/macro-invalidsig-tparams-notparams-a/Impls_1.scala
+++ b/test/files/neg/macro-invalidsig-tparams-notparams-a/Impls_1.scala
@@ -2,5 +2,5 @@ import scala.reflect.runtime.universe._
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def foo[U: c.AbsTypeTag](c: Ctx) = ???
+ def foo[U: c.WeakTypeTag](c: Ctx) = ???
}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-b.check b/test/files/neg/macro-invalidsig-tparams-notparams-b.check
index 261e3b8293..a605af6beb 100644
--- a/test/files/neg/macro-invalidsig-tparams-notparams-b.check
+++ b/test/files/neg/macro-invalidsig-tparams-notparams-b.check
@@ -1,4 +1,4 @@
-Macros_Test_2.scala:3: error: macro implementation reference needs type arguments
- def foo[V] = macro Impls.foo
- ^
-one error found
+Macros_Test_2.scala:3: error: wrong number of type parameters for method foo: [T, U, V](c: scala.reflect.macros.Context)(implicit evidence$1: c.WeakTypeTag[T], implicit evidence$2: c.WeakTypeTag[U], implicit V: c.WeakTypeTag[V])c.Expr[Unit]
+ def foo[V] = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-b/Impls_1.scala b/test/files/neg/macro-invalidsig-tparams-notparams-b/Impls_1.scala
index b48f9d5f98..baf3aab9e3 100644
--- a/test/files/neg/macro-invalidsig-tparams-notparams-b/Impls_1.scala
+++ b/test/files/neg/macro-invalidsig-tparams-notparams-b/Impls_1.scala
@@ -2,9 +2,9 @@ import scala.reflect.runtime.universe._
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def foo[T: c.AbsTypeTag, U: c.AbsTypeTag, V](c: Ctx)(implicit V: c.AbsTypeTag[V]): c.Expr[Unit] = {
- println(implicitly[c.AbsTypeTag[T]])
- println(implicitly[c.AbsTypeTag[U]])
+ def foo[T: c.WeakTypeTag, U: c.WeakTypeTag, V](c: Ctx)(implicit V: c.WeakTypeTag[V]): c.Expr[Unit] = {
+ println(implicitly[c.WeakTypeTag[T]])
+ println(implicitly[c.WeakTypeTag[U]])
println(V)
c.literalUnit
}
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-c.check b/test/files/neg/macro-invalidsig-tparams-notparams-c.check
index b1078fb233..0be0b6fad1 100644
--- a/test/files/neg/macro-invalidsig-tparams-notparams-c.check
+++ b/test/files/neg/macro-invalidsig-tparams-notparams-c.check
@@ -1,4 +1,4 @@
-Macros_Test_2.scala:3: error: wrong number of type parameters for method foo: [T, U, V](c: scala.reflect.macros.Context)(implicit evidence$1: c.AbsTypeTag[T], implicit evidence$2: c.AbsTypeTag[U], implicit V: c.AbsTypeTag[V])c.Expr[Unit]
- def foo[V] = macro Impls.foo[V]
- ^
-one error found
+Macros_Test_2.scala:3: error: wrong number of type parameters for method foo: [T, U, V](c: scala.reflect.macros.Context)(implicit evidence$1: c.WeakTypeTag[T], implicit evidence$2: c.WeakTypeTag[U], implicit V: c.WeakTypeTag[V])c.Expr[Unit]
+ def foo[V] = macro Impls.foo[V]
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-c/Impls_1.scala b/test/files/neg/macro-invalidsig-tparams-notparams-c/Impls_1.scala
index 3506bdc789..44b4ed6ab3 100644
--- a/test/files/neg/macro-invalidsig-tparams-notparams-c/Impls_1.scala
+++ b/test/files/neg/macro-invalidsig-tparams-notparams-c/Impls_1.scala
@@ -2,10 +2,10 @@ import scala.reflect.runtime.universe._
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def foo[T: c.AbsTypeTag, U: c.AbsTypeTag, V](c: Ctx)(implicit V: c.AbsTypeTag[V]): c.Expr[Unit] = {
+ def foo[T: c.WeakTypeTag, U: c.WeakTypeTag, V](c: Ctx)(implicit V: c.WeakTypeTag[V]): c.Expr[Unit] = {
import c.universe._
- println(implicitly[c.AbsTypeTag[T]])
- println(implicitly[c.AbsTypeTag[U]])
+ println(implicitly[c.WeakTypeTag[T]])
+ println(implicitly[c.WeakTypeTag[U]])
println(V)
c.literalUnit
}
diff --git a/test/files/neg/macro-invalidusage-badargs.check b/test/files/neg/macro-invalidusage-badargs.check
index 52beda5b61..294cfd0cf5 100644
--- a/test/files/neg/macro-invalidusage-badargs.check
+++ b/test/files/neg/macro-invalidusage-badargs.check
@@ -1,6 +1,6 @@
-Macros_Test_2.scala:7: error: type mismatch;
- found : String("42")
- required: Int
- val s: String = foo("42")
- ^
-one error found
+Macros_Test_2.scala:7: error: type mismatch;
+ found : String("42")
+ required: Int
+ val s: String = foo("42")
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidusage-badbounds.check b/test/files/neg/macro-invalidusage-badbounds-a.check
index fd0b64533e..277f407d38 100644
--- a/test/files/neg/macro-invalidusage-badbounds.check
+++ b/test/files/neg/macro-invalidusage-badbounds-a.check
@@ -1,4 +1,4 @@
-Macros_Test_2.scala:7: error: type arguments [Int] do not conform to macro method foo's type parameter bounds [U <: String]
- foo[Int]
- ^
-one error found
+Macros_Test_2.scala:7: error: type arguments [Int] do not conform to macro method foo's type parameter bounds [U <: String]
+ foo[Int]
+ ^
+one error found
diff --git a/test/files/run/macro-def-path-dependent-d.flags b/test/files/neg/macro-invalidusage-badbounds-a.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/run/macro-def-path-dependent-d.flags
+++ b/test/files/neg/macro-invalidusage-badbounds-a.flags
diff --git a/test/files/neg/macro-invalidusage-badbounds-a/Impls_1.scala b/test/files/neg/macro-invalidusage-badbounds-a/Impls_1.scala
new file mode 100644
index 0000000000..6ee71a3628
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-badbounds-a/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[U <: String](c: Ctx) = c.literalUnit
+}
diff --git a/test/files/neg/macro-invalidusage-badbounds/Macros_Test_2.scala b/test/files/neg/macro-invalidusage-badbounds-a/Macros_Test_2.scala
index 3139599108..3139599108 100644
--- a/test/files/neg/macro-invalidusage-badbounds/Macros_Test_2.scala
+++ b/test/files/neg/macro-invalidusage-badbounds-a/Macros_Test_2.scala
diff --git a/test/files/neg/macro-invalidusage-badtargs.check b/test/files/neg/macro-invalidusage-badtargs.check
index 61ef6f5af7..73801ab43e 100644
--- a/test/files/neg/macro-invalidusage-badtargs.check
+++ b/test/files/neg/macro-invalidusage-badtargs.check
@@ -1,4 +1,4 @@
-Macros_Test_2.scala:7: error: macro method foo: (x: Int)Int does not take type parameters.
- val s: String = foo[String](42)
- ^
-one error found
+Macros_Test_2.scala:7: error: macro method foo: (x: Int)Int does not take type parameters.
+ val s: String = foo[String](42)
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidusage-methodvaluesyntax.check b/test/files/neg/macro-invalidusage-methodvaluesyntax.check
index 27b2023202..10046b2305 100644
--- a/test/files/neg/macro-invalidusage-methodvaluesyntax.check
+++ b/test/files/neg/macro-invalidusage-methodvaluesyntax.check
@@ -1,4 +1,4 @@
-Macros_Test_2.scala:6: error: macros cannot be eta-expanded
- val firstClassFoo = Macros.foo _
- ^
-one error found
+Macros_Test_2.scala:6: error: macros cannot be eta-expanded
+ val firstClassFoo = Macros.foo _
+ ^
+one error found
diff --git a/test/files/neg/macro-noexpand.check b/test/files/neg/macro-noexpand.check
index c829bbab71..2c176a99be 100644
--- a/test/files/neg/macro-noexpand.check
+++ b/test/files/neg/macro-noexpand.check
@@ -1,4 +1,4 @@
-Macros_Test_2.scala:7: error: not found: value x
- foo(x)
- ^
-one error found
+Macros_Test_2.scala:7: error: not found: value x
+ foo(x)
+ ^
+one error found
diff --git a/test/files/neg/macro-nontypeablebody.check b/test/files/neg/macro-nontypeablebody.check
index 0cfc864df8..9f5831ab30 100644
--- a/test/files/neg/macro-nontypeablebody.check
+++ b/test/files/neg/macro-nontypeablebody.check
@@ -1,4 +1,4 @@
-Macros_Test_2.scala:2: error: value foo2 is not a member of object Impls
- def foo(x: Any) = macro Impls.foo2
- ^
-one error found
+Macros_Test_2.scala:2: error: value foo2 is not a member of object Impls
+ def foo(x: Any) = macro Impls.foo2
+ ^
+one error found
diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-a.check b/test/files/neg/macro-override-macro-overrides-abstract-method-a.check
index 4d95dfc45c..895e0dca50 100644
--- a/test/files/neg/macro-override-macro-overrides-abstract-method-a.check
+++ b/test/files/neg/macro-override-macro-overrides-abstract-method-a.check
@@ -1,5 +1,5 @@
-Impls_Macros_1.scala:12: error: overriding method foo in trait Foo of type (x: Int)Int;
- macro method foo cannot override an abstract method
- def foo(x: Int) = macro Impls.impl
- ^
-one error found
+Impls_Macros_1.scala:12: error: overriding method foo in trait Foo of type (x: Int)Int;
+ macro method foo cannot override an abstract method
+ def foo(x: Int) = macro Impls.impl
+ ^
+one error found
diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-b.check b/test/files/neg/macro-override-macro-overrides-abstract-method-b.check
index 4d95dfc45c..895e0dca50 100644
--- a/test/files/neg/macro-override-macro-overrides-abstract-method-b.check
+++ b/test/files/neg/macro-override-macro-overrides-abstract-method-b.check
@@ -1,5 +1,5 @@
-Impls_Macros_1.scala:12: error: overriding method foo in trait Foo of type (x: Int)Int;
- macro method foo cannot override an abstract method
- def foo(x: Int) = macro Impls.impl
- ^
-one error found
+Impls_Macros_1.scala:12: error: overriding method foo in trait Foo of type (x: Int)Int;
+ macro method foo cannot override an abstract method
+ def foo(x: Int) = macro Impls.impl
+ ^
+one error found
diff --git a/test/files/neg/macro-override-method-overrides-macro.check b/test/files/neg/macro-override-method-overrides-macro.check
index 42edb0ff23..66dc11be96 100644
--- a/test/files/neg/macro-override-method-overrides-macro.check
+++ b/test/files/neg/macro-override-method-overrides-macro.check
@@ -1,5 +1,5 @@
-Macros_Test_2.scala:8: error: overriding macro method foo in class B of type (x: String)Unit;
- method foo cannot override a macro
- override def foo(x: String) = println("fooDString")
- ^
-one error found
+Macros_Test_2.scala:8: error: overriding macro method foo in class B of type (x: String)Unit;
+ method foo cannot override a macro
+ override def foo(x: String) = println("fooDString")
+ ^
+one error found
diff --git a/test/files/neg/macro-reify-typetag-hktypeparams-notags.check b/test/files/neg/macro-reify-typetag-hktypeparams-notags.check
index 742d87b0a4..44efaae775 100644
--- a/test/files/neg/macro-reify-typetag-hktypeparams-notags.check
+++ b/test/files/neg/macro-reify-typetag-hktypeparams-notags.check
@@ -1,7 +1,7 @@
-Test.scala:5: error: No TypeTag available for C[T]
- println(implicitly[TypeTag[C[T]]])
- ^
-Test.scala:6: error: No TypeTag available for List[C[T]]
- println(implicitly[TypeTag[List[C[T]]]])
- ^
-two errors found
+Test.scala:5: error: No TypeTag available for C[T]
+ println(implicitly[TypeTag[C[T]]])
+ ^
+Test.scala:6: error: No TypeTag available for List[C[T]]
+ println(implicitly[TypeTag[List[C[T]]]])
+ ^
+two errors found
diff --git a/test/files/neg/macro-reify-typetag-typeparams-notags.check b/test/files/neg/macro-reify-typetag-typeparams-notags.check
index db88260047..7c67b02aa6 100644
--- a/test/files/neg/macro-reify-typetag-typeparams-notags.check
+++ b/test/files/neg/macro-reify-typetag-typeparams-notags.check
@@ -1,7 +1,7 @@
-Test.scala:5: error: No TypeTag available for T
- println(implicitly[TypeTag[T]])
- ^
-Test.scala:6: error: No TypeTag available for List[T]
- println(implicitly[TypeTag[List[T]]])
- ^
-two errors found
+Test.scala:5: error: No TypeTag available for T
+ println(implicitly[TypeTag[T]])
+ ^
+Test.scala:6: error: No TypeTag available for List[T]
+ println(implicitly[TypeTag[List[T]]])
+ ^
+two errors found
diff --git a/test/files/neg/macro-reify-typetag-useabstypetag.check b/test/files/neg/macro-reify-typetag-useabstypetag.check
index db88260047..7c67b02aa6 100644
--- a/test/files/neg/macro-reify-typetag-useabstypetag.check
+++ b/test/files/neg/macro-reify-typetag-useabstypetag.check
@@ -1,7 +1,7 @@
-Test.scala:5: error: No TypeTag available for T
- println(implicitly[TypeTag[T]])
- ^
-Test.scala:6: error: No TypeTag available for List[T]
- println(implicitly[TypeTag[List[T]]])
- ^
-two errors found
+Test.scala:5: error: No TypeTag available for T
+ println(implicitly[TypeTag[T]])
+ ^
+Test.scala:6: error: No TypeTag available for List[T]
+ println(implicitly[TypeTag[List[T]]])
+ ^
+two errors found
diff --git a/test/files/neg/macro-reify-typetag-useabstypetag/Test.scala b/test/files/neg/macro-reify-typetag-useabstypetag/Test.scala
index b66ad6c523..1e7fcb3f45 100644
--- a/test/files/neg/macro-reify-typetag-useabstypetag/Test.scala
+++ b/test/files/neg/macro-reify-typetag-useabstypetag/Test.scala
@@ -1,7 +1,7 @@
import scala.reflect.runtime.universe._
object Test extends App {
- def fooTypeTag[T: AbsTypeTag] = {
+ def fooTypeTag[T: WeakTypeTag] = {
println(implicitly[TypeTag[T]])
println(implicitly[TypeTag[List[T]]])
}
diff --git a/test/files/neg/macro-without-xmacros-a.check b/test/files/neg/macro-without-xmacros-a.check
index fd2667dbb8..ae6c6c695a 100644
--- a/test/files/neg/macro-without-xmacros-a.check
+++ b/test/files/neg/macro-without-xmacros-a.check
@@ -1,17 +1,17 @@
-Macros_2.scala:5: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
-This can be achieved by adding the import clause 'import language.experimental.macros'
-or by setting the compiler option -language:experimental.macros.
-See the Scala docs for value scala.language.experimental.macros for a discussion
-why the feature needs to be explicitly enabled.
- def foo(x: Int): Int = macro foo_impl
- ^
-Macros_2.scala:7: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
- def bar(x: Int): Int = macro bar_impl
- ^
-Macros_2.scala:11: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
- def quux(x: Int): Int = macro quux_impl
- ^
-three errors found
+Macros_2.scala:5: error: macro definition needs to be enabled
+by making the implicit value language.experimental.macros visible.
+This can be achieved by adding the import clause 'import scala.language.experimental.macros'
+or by setting the compiler option -language:experimental.macros.
+See the Scala docs for value scala.language.experimental.macros for a discussion
+why the feature needs to be explicitly enabled.
+ def foo(x: Int): Int = macro foo_impl
+ ^
+Macros_2.scala:7: error: macro definition needs to be enabled
+by making the implicit value language.experimental.macros visible.
+ def bar(x: Int): Int = macro bar_impl
+ ^
+Macros_2.scala:11: error: macro definition needs to be enabled
+by making the implicit value language.experimental.macros visible.
+ def quux(x: Int): Int = macro quux_impl
+ ^
+three errors found
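As the reworded note above now spells out, the user-side fix is a one-line feature import (or the equivalent -language:experimental.macros compiler option). A hedged, minimal sketch with hypothetical names, not taken from this commit:

    import scala.language.experimental.macros // makes the macro definition below legal
    import scala.reflect.macros.Context

    object Macros {
      def fooImpl(c: Context)(x: c.Expr[Int]): c.Expr[Int] = x // identity macro, for illustration only
      def foo(x: Int): Int = macro fooImpl
    }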
diff --git a/test/files/neg/macro-without-xmacros-b.check b/test/files/neg/macro-without-xmacros-b.check
index 2d675b8319..c3cadcf36a 100644
--- a/test/files/neg/macro-without-xmacros-b.check
+++ b/test/files/neg/macro-without-xmacros-b.check
@@ -1,17 +1,17 @@
-Macros_2.scala:3: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
-This can be achieved by adding the import clause 'import language.experimental.macros'
-or by setting the compiler option -language:experimental.macros.
-See the Scala docs for value scala.language.experimental.macros for a discussion
-why the feature needs to be explicitly enabled.
- def foo(x: Int): Int = macro Impls.foo_impl
- ^
-Macros_2.scala:5: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
- def bar(x: Int): Int = macro Impls.bar_impl
- ^
-Macros_2.scala:9: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
- def quux(x: Int): Int = macro Impls.quux_impl
- ^
-three errors found
+Macros_2.scala:3: error: macro definition needs to be enabled
+by making the implicit value language.experimental.macros visible.
+This can be achieved by adding the import clause 'import scala.language.experimental.macros'
+or by setting the compiler option -language:experimental.macros.
+See the Scala docs for value scala.language.experimental.macros for a discussion
+why the feature needs to be explicitly enabled.
+ def foo(x: Int): Int = macro Impls.foo_impl
+ ^
+Macros_2.scala:5: error: macro definition needs to be enabled
+by making the implicit value language.experimental.macros visible.
+ def bar(x: Int): Int = macro Impls.bar_impl
+ ^
+Macros_2.scala:9: error: macro definition needs to be enabled
+by making the implicit value language.experimental.macros visible.
+ def quux(x: Int): Int = macro Impls.quux_impl
+ ^
+three errors found
diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check
index 2809350855..f3c45a6aa0 100644
--- a/test/files/neg/names-defaults-neg.check
+++ b/test/files/neg/names-defaults-neg.check
@@ -76,7 +76,7 @@ and method f in object t8 of type (a: Int, b: Object)String
match argument types (a: Int,b: String) and expected result type Any
println(t8.f(a = 0, b = "1")) // ambigous reference
^
-names-defaults-neg.scala:69: error: wrong number of arguments for <none>: (x: Int, y: String)A1
+names-defaults-neg.scala:69: error: wrong number of arguments for pattern A1(x: Int,y: String)
A1() match { case A1(_) => () }
^
names-defaults-neg.scala:76: error: no type parameters for method test4: (x: T[T[List[T[X forSome { type X }]]]])T[T[List[T[X forSome { type X }]]]] exist so that it can be applied to arguments (List[Int])
diff --git a/test/files/neg/newpat_unreachable.check b/test/files/neg/newpat_unreachable.check
new file mode 100644
index 0000000000..08453cac19
--- /dev/null
+++ b/test/files/neg/newpat_unreachable.check
@@ -0,0 +1,27 @@
+newpat_unreachable.scala:6: error: patterns after a variable pattern cannot match (SLS 8.1.1)
+If you intended to match against parameter b of method contrivedExample, you must use backticks, like: case `b` =>
+ case b => println("matched b")
+ ^
+newpat_unreachable.scala:7: error: unreachable code due to variable pattern 'b' on line 6
+If you intended to match against parameter c of method contrivedExample, you must use backticks, like: case `c` =>
+ case c => println("matched c")
+ ^
+newpat_unreachable.scala:8: error: unreachable code due to variable pattern 'b' on line 6
+If you intended to match against value d in class A, you must use backticks, like: case `d` =>
+ case d => println("matched d")
+ ^
+newpat_unreachable.scala:9: error: unreachable code due to variable pattern 'b' on line 6
+ case _ => println("matched neither")
+ ^
+newpat_unreachable.scala:22: error: patterns after a variable pattern cannot match (SLS 8.1.1)
+If you intended to match against parameter b of method g, you must use backticks, like: case `b` =>
+ case b => 1
+ ^
+newpat_unreachable.scala:23: error: unreachable code due to variable pattern 'b' on line 22
+If you intended to match against parameter c of method h, you must use backticks, like: case `c` =>
+ case c => 2
+ ^
+newpat_unreachable.scala:24: error: unreachable code due to variable pattern 'b' on line 22
+ case _ => 3
+ ^
+7 errors found
diff --git a/test/files/neg/newpat_unreachable.flags b/test/files/neg/newpat_unreachable.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/newpat_unreachable.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/newpat_unreachable.scala b/test/files/neg/newpat_unreachable.scala
new file mode 100644
index 0000000000..c9cc85cec6
--- /dev/null
+++ b/test/files/neg/newpat_unreachable.scala
@@ -0,0 +1,29 @@
+object Test {
+ class A {
+ val d = 55
+
+ def contrivedExample[A, B, C](a: A, b: B, c: C): Unit = a match {
+ case b => println("matched b")
+ case c => println("matched c")
+ case d => println("matched d")
+ case _ => println("matched neither")
+ }
+
+ def correctExample[A, B, C](a: A, b: B, c: C): Unit = a match {
+ case `b` => println("matched b")
+ case `c` => println("matched c")
+ case `d` => println("matched d")
+ case _ => println("matched neither")
+ }
+
+ def f[A](a: A) = {
+ def g[B](b: B) = {
+ def h[C](c: C) = a match {
+ case b => 1
+ case c => 2
+ case _ => 3
+ }
+ }
+ }
+ }
+}
diff --git a/test/files/neg/no-implicit-to-anyref.check b/test/files/neg/no-implicit-to-anyref.check
new file mode 100644
index 0000000000..d94b57a30a
--- /dev/null
+++ b/test/files/neg/no-implicit-to-anyref.check
@@ -0,0 +1,28 @@
+no-implicit-to-anyref.scala:11: error: type mismatch;
+ found : Int(1)
+ required: AnyRef
+Note: an implicit exists from scala.Int => java.lang.Integer, but
+methods inherited from Object are rendered ambiguous. This is to avoid
+a blanket implicit which would convert any scala.Int to any AnyRef.
+You may wish to use a type ascription: `x: java.lang.Integer`.
+ 1: AnyRef
+ ^
+no-implicit-to-anyref.scala:17: error: type mismatch;
+ found : Any
+ required: AnyRef
+ (null: Any): AnyRef
+ ^
+no-implicit-to-anyref.scala:21: error: type mismatch;
+ found : AnyVal
+ required: AnyRef
+ (0: AnyVal): AnyRef
+ ^
+no-implicit-to-anyref.scala:27: error: type mismatch;
+ found : Test.AV
+ required: AnyRef
+Note that AV extends Any, not AnyRef.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ new AV(0): AnyRef
+ ^
+four errors found
diff --git a/test/files/neg/no-implicit-to-anyref.scala b/test/files/neg/no-implicit-to-anyref.scala
new file mode 100644
index 0000000000..3e3d373e38
--- /dev/null
+++ b/test/files/neg/no-implicit-to-anyref.scala
@@ -0,0 +1,29 @@
+// Checks that the state of standard implicits in Predef._ and scala._
+// doesn't allow us to unambiguously and implicitly convert AnyVal
+// and subtypes to AnyRef.
+//
+// In the days before value classes, this was precariously held be
+// the competing implicits Any => StringAdd and Any => StringFormat.
+// Since then, these have both become value classes, but seeing as
+// this happened simultaneously, we're still okay.
+object Test {
+ locally {
+ 1: AnyRef
+ }
+
+ locally {
+ // before this test case was added and ContextErrors was tweaked, this
+ // emitted: "Note that Any extends Any, not AnyRef."
+ (null: Any): AnyRef
+ }
+
+ locally {
+ (0: AnyVal): AnyRef
+ }
+
+ class AV(val a: Int) extends AnyVal
+
+ locally {
+ new AV(0): AnyRef
+ }
+}
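The first note in the new .check file above also names the intended workaround: ascribe the boxed Java type explicitly instead of expecting a blanket Int => AnyRef implicit. A tiny illustrative sketch (hypothetical names, not part of the test):

    object AscriptionDemo {
      val ok: AnyRef = (1: java.lang.Integer) // Int => java.lang.Integer applies, and Integer is an AnyRef
      // val ko: AnyRef = 1                   // rejected, as exercised by no-implicit-to-anyref.scala above
    }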
diff --git a/test/files/neg/not-possible-cause.check b/test/files/neg/not-possible-cause.check
new file mode 100644
index 0000000000..5c09fa1545
--- /dev/null
+++ b/test/files/neg/not-possible-cause.check
@@ -0,0 +1,9 @@
+not-possible-cause.scala:2: error: type mismatch;
+ found : a.type (with underlying type A)
+ required: AnyRef
+Note that A is bounded only by Equals, which means AnyRef is not a known parent.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ def foo[A <: Product](a: A) { type X = a.type }
+ ^
+one error found
diff --git a/test/files/neg/not-possible-cause.scala b/test/files/neg/not-possible-cause.scala
new file mode 100644
index 0000000000..83ec24dec8
--- /dev/null
+++ b/test/files/neg/not-possible-cause.scala
@@ -0,0 +1,3 @@
+object Foo {
+ def foo[A <: Product](a: A) { type X = a.type }
+}
diff --git a/test/files/neg/override.check b/test/files/neg/override.check
index fc152cb3b1..8be98bf4d0 100644
--- a/test/files/neg/override.check
+++ b/test/files/neg/override.check
@@ -1,5 +1,5 @@
override.scala:9: error: overriding type T in trait A with bounds >: Int <: Int;
type T in trait B with bounds >: String <: String has incompatible type
- lazy val x : A with B = x
+ lazy val x : A with B = {println(""); x}
^
one error found
diff --git a/test/files/neg/override.scala b/test/files/neg/override.scala
index 3e589b52e3..7975516061 100755
--- a/test/files/neg/override.scala
+++ b/test/files/neg/override.scala
@@ -6,7 +6,7 @@ trait X {
trait Y extends X {
trait B { type T >: String <: String }
- lazy val x : A with B = x
+ lazy val x : A with B = {println(""); x}
n = "foo"
}
diff --git a/test/files/neg/pat_unreachable.check b/test/files/neg/pat_unreachable.check
index 4e1463d591..c5706b7fad 100644
--- a/test/files/neg/pat_unreachable.check
+++ b/test/files/neg/pat_unreachable.check
@@ -4,4 +4,10 @@ pat_unreachable.scala:5: error: unreachable code
pat_unreachable.scala:9: error: unreachable code
case Seq(x, y) => List(x, y)
^
-two errors found
+pat_unreachable.scala:23: error: unreachable code
+ case c => println("matched c")
+ ^
+pat_unreachable.scala:24: error: unreachable code
+ case _ => println("matched neither")
+ ^
+four errors found
diff --git a/test/files/neg/pat_unreachable.scala b/test/files/neg/pat_unreachable.scala
index fc0fd41920..1f402e5212 100644
--- a/test/files/neg/pat_unreachable.scala
+++ b/test/files/neg/pat_unreachable.scala
@@ -8,7 +8,7 @@ object Test extends App {
case Seq(x, y, _*) => x::y::Nil
case Seq(x, y) => List(x, y)
}
-
+
def not_unreachable(xs:Seq[Char]) = xs match {
case Seq(x, y, _*) => x::y::Nil
case Seq(x) => List(x)
@@ -17,4 +17,10 @@ object Test extends App {
case Seq(x, y) => x::y::Nil
case Seq(x, y, z, _*) => List(x,y)
}
+
+ def contrivedExample[A, B, C](a: A, b: B, c: C): Unit = a match {
+ case b => println("matched b")
+ case c => println("matched c")
+ case _ => println("matched neither")
+ }
}
diff --git a/test/files/neg/reify_ann2b.check b/test/files/neg/reify_ann2b.check
index 1cef405bef..d32bedaf8f 100644
--- a/test/files/neg/reify_ann2b.check
+++ b/test/files/neg/reify_ann2b.check
@@ -1,4 +1,4 @@
-reify_ann2b.scala:9: error: inner classes cannot be classfile annotations
- class ann(bar: String) extends annotation.ClassfileAnnotation
- ^
-one error found
+reify_ann2b.scala:9: error: inner classes cannot be classfile annotations
+ class ann(bar: String) extends annotation.ClassfileAnnotation
+ ^
+one error found
diff --git a/test/files/neg/reify_ann2b.scala b/test/files/neg/reify_ann2b.scala
index 2076af34c8..72d8c611cb 100644
--- a/test/files/neg/reify_ann2b.scala
+++ b/test/files/neg/reify_ann2b.scala
@@ -24,5 +24,5 @@ object Test extends App {
println(ttree.toString)
// test 3: import and compile
- toolbox.runExpr(tree)
+ toolbox.eval(tree)
} \ No newline at end of file
diff --git a/test/files/neg/reify_metalevel_breach_+0_refers_to_1.check b/test/files/neg/reify_metalevel_breach_+0_refers_to_1.check
index efdbd7e282..75b7555b01 100644
--- a/test/files/neg/reify_metalevel_breach_+0_refers_to_1.check
+++ b/test/files/neg/reify_metalevel_breach_+0_refers_to_1.check
@@ -1,7 +1,7 @@
-reify_metalevel_breach_+0_refers_to_1.scala:10: error: the splice cannot be resolved statically, which means there is a cross-stage evaluation involved.
-cross-stage evaluations need to be invoked explicitly, so we're showing you this error.
-if you're sure this is not an oversight, add scala-compiler.jar to the classpath,
-import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.
- inner.splice
- ^
-one error found
+reify_metalevel_breach_+0_refers_to_1.scala:10: error: the splice cannot be resolved statically, which means there is a cross-stage evaluation involved.
+cross-stage evaluations need to be invoked explicitly, so we're showing you this error.
+if you're sure this is not an oversight, add scala-compiler.jar to the classpath,
+import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.
+ inner.splice
+ ^
+one error found
diff --git a/test/files/neg/reify_metalevel_breach_+0_refers_to_1.scala b/test/files/neg/reify_metalevel_breach_+0_refers_to_1.scala
index 9499960480..e4d1edffc4 100644
--- a/test/files/neg/reify_metalevel_breach_+0_refers_to_1.scala
+++ b/test/files/neg/reify_metalevel_breach_+0_refers_to_1.scala
@@ -11,6 +11,6 @@ object Test extends App {
};
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.check b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.check
index 91755c789a..ca5556db02 100644
--- a/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.check
+++ b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.check
@@ -1,7 +1,7 @@
-reify_metalevel_breach_-1_refers_to_0_a.scala:9: error: the splice cannot be resolved statically, which means there is a cross-stage evaluation involved.
-cross-stage evaluations need to be invoked explicitly, so we're showing you this error.
-if you're sure this is not an oversight, add scala-compiler.jar to the classpath,
-import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.
- val code = reify{outer.splice.splice}
- ^
-one error found
+reify_metalevel_breach_-1_refers_to_0_a.scala:9: error: the splice cannot be resolved statically, which means there is a cross-stage evaluation involved.
+cross-stage evaluations need to be invoked explicitly, so we're showing you this error.
+if you're sure this is not an oversight, add scala-compiler.jar to the classpath,
+import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.
+ val code = reify{outer.splice.splice}
+ ^
+one error found
diff --git a/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.scala b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.scala
index e6aaeb9426..7397441586 100644
--- a/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.scala
+++ b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.scala
@@ -9,6 +9,6 @@ object Test extends App {
val code = reify{outer.splice.splice}
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.check b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.check
index f703bfb05f..e34cb1ac1e 100644
--- a/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.check
+++ b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.check
@@ -1,7 +1,7 @@
-reify_metalevel_breach_-1_refers_to_0_b.scala:12: error: the splice cannot be resolved statically, which means there is a cross-stage evaluation involved.
-cross-stage evaluations need to be invoked explicitly, so we're showing you this error.
-if you're sure this is not an oversight, add scala-compiler.jar to the classpath,
-import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.
- }.splice
- ^
-one error found
+reify_metalevel_breach_-1_refers_to_0_b.scala:12: error: the splice cannot be resolved statically, which means there is a cross-stage evaluation involved.
+cross-stage evaluations need to be invoked explicitly, so we're showing you this error.
+if you're sure this is not an oversight, add scala-compiler.jar to the classpath,
+import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.
+ }.splice
+ ^
+one error found
diff --git a/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.scala b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.scala
index 16dcae8683..4f27a44f0c 100644
--- a/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.scala
+++ b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.scala
@@ -13,6 +13,6 @@ object Test extends App {
}
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/neg/reify_metalevel_breach_-1_refers_to_1.check b/test/files/neg/reify_metalevel_breach_-1_refers_to_1.check
index 068e4accd2..90b0e8dac6 100644
--- a/test/files/neg/reify_metalevel_breach_-1_refers_to_1.check
+++ b/test/files/neg/reify_metalevel_breach_-1_refers_to_1.check
@@ -1,7 +1,7 @@
-reify_metalevel_breach_-1_refers_to_1.scala:10: error: the splice cannot be resolved statically, which means there is a cross-stage evaluation involved.
-cross-stage evaluations need to be invoked explicitly, so we're showing you this error.
-if you're sure this is not an oversight, add scala-compiler.jar to the classpath,
-import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.
- inner.splice.splice
- ^
-one error found
+reify_metalevel_breach_-1_refers_to_1.scala:10: error: the splice cannot be resolved statically, which means there is a cross-stage evaluation involved.
+cross-stage evaluations need to be invoked explicitly, so we're showing you this error.
+if you're sure this is not an oversight, add scala-compiler.jar to the classpath,
+import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.
+ inner.splice.splice
+ ^
+one error found
diff --git a/test/files/neg/reify_metalevel_breach_-1_refers_to_1.scala b/test/files/neg/reify_metalevel_breach_-1_refers_to_1.scala
index 9600489f35..2f637301aa 100644
--- a/test/files/neg/reify_metalevel_breach_-1_refers_to_1.scala
+++ b/test/files/neg/reify_metalevel_breach_-1_refers_to_1.scala
@@ -11,6 +11,6 @@ object Test extends App {
};
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/neg/reify_nested_inner_refers_to_local.check b/test/files/neg/reify_nested_inner_refers_to_local.check
index 3f897bded9..68689b18d0 100644
--- a/test/files/neg/reify_nested_inner_refers_to_local.check
+++ b/test/files/neg/reify_nested_inner_refers_to_local.check
@@ -1,7 +1,7 @@
-reify_nested_inner_refers_to_local.scala:9: error: the splice cannot be resolved statically, which means there is a cross-stage evaluation involved.
-cross-stage evaluations need to be invoked explicitly, so we're showing you this error.
-if you're sure this is not an oversight, add scala-compiler.jar to the classpath,
-import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.
- reify{x}.splice
- ^
-one error found
+reify_nested_inner_refers_to_local.scala:9: error: the splice cannot be resolved statically, which means there is a cross-stage evaluation involved.
+cross-stage evaluations need to be invoked explicitly, so we're showing you this error.
+if you're sure this is not an oversight, add scala-compiler.jar to the classpath,
+import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.
+ reify{x}.splice
+ ^
+one error found
diff --git a/test/files/neg/reify_nested_inner_refers_to_local.scala b/test/files/neg/reify_nested_inner_refers_to_local.scala
index fcbc1f7865..75ed1bf330 100644
--- a/test/files/neg/reify_nested_inner_refers_to_local.scala
+++ b/test/files/neg/reify_nested_inner_refers_to_local.scala
@@ -10,6 +10,6 @@ object Test extends App {
};
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/neg/static-annot.check b/test/files/neg/static-annot.check
deleted file mode 100644
index 66efebdcee..0000000000
--- a/test/files/neg/static-annot.check
+++ /dev/null
@@ -1,19 +0,0 @@
-static-annot.scala:8: error: Only members of top-level objects and their nested objects can be annotated with @static.
- @static val bar = 1
- ^
-static-annot.scala:27: error: @static annotated field bar has the same name as a member of class Conflicting
- @static val bar = 1
- ^
-static-annot.scala:37: error: The @static annotation is only allowed on public members.
- @static private val bar = 1
- ^
-static-annot.scala:38: error: The @static annotation is only allowed on public members.
- @static private val baz = 2
- ^
-static-annot.scala:39: error: The @static annotation is not allowed on lazy members.
- @static lazy val bam = 3
- ^
-static-annot.scala:14: error: Only members of top-level objects and their nested objects can be annotated with @static.
- @static val blah = 2
- ^
-6 errors found \ No newline at end of file
diff --git a/test/files/neg/static-annot.scala b/test/files/neg/static-annot.scala
deleted file mode 100644
index c6c626d42b..0000000000
--- a/test/files/neg/static-annot.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-
-
-import annotation.static
-
-
-
-class StaticInClass {
- @static val bar = 1
-}
-
-
-class NestedObjectInClass {
- object Nested {
- @static val blah = 2
- }
-}
-
-
-object NestedObjectInObject {
- object Nested {
- @static val succeed = 3
- }
-}
-
-
-object Conflicting {
- @static val bar = 1
-}
-
-
-class Conflicting {
- val bar = 45
-}
-
-
-object PrivateProtectedLazy {
- @static private val bar = 1
- @static private val baz = 2
- @static lazy val bam = 3
-}
-
-
-class PrivateProtectedLazy {
- println(PrivateProtectedLazy.bar)
- println(PrivateProtectedLazy.baz)
- println(PrivateProtectedLazy.bam)
-}
diff --git a/test/files/neg/structural.check b/test/files/neg/structural.check
index 6ef57db1b4..5b2f352a76 100644
--- a/test/files/neg/structural.check
+++ b/test/files/neg/structural.check
@@ -1,28 +1,28 @@
structural.scala:47: error: Parameter type in structural refinement may not refer to the type of that refinement (self type)
val s1 = new { def f(p: this.type): Unit = () }
- ^
+ ^
structural.scala:10: error: Parameter type in structural refinement may not refer to an abstract type defined outside that refinement
def f1[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: A): Object; val x: A }) = x.m[Tata](x.x) //fail
- ^
+ ^
structural.scala:11: error: Parameter type in structural refinement may not refer to an abstract type defined outside that refinement
def f2[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: B): Object; val x: B }) = x.m[Tata](x.x) //fail
- ^
+ ^
structural.scala:12: error: Parameter type in structural refinement may not refer to an abstract type defined outside that refinement
def f3[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: C): Object; val x: C }) = x.m[Tata](x.x) //fail
- ^
+ ^
structural.scala:13: error: Parameter type in structural refinement may not refer to a type member of that refinement
def f4[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: D): Object; val x: D }) = x.m[Tata](x.x) //fail
- ^
+ ^
structural.scala:42: error: Parameter type in structural refinement may not refer to an abstract type defined outside that refinement
type Summable[T] = { def +(v : T) : T }
- ^
+ ^
structural.scala:46: error: Parameter type in structural refinement may not refer to the type of that refinement (self type)
type S1 = { def f(p: this.type): Unit }
- ^
+ ^
structural.scala:49: error: Parameter type in structural refinement may not refer to a type member of that refinement
type S2 = { type T; def f(p: T): Unit }
- ^
+ ^
structural.scala:52: error: Parameter type in structural refinement may not refer to an abstract type defined outside that refinement
def s3[U >: Null <: Object](p: { def f(p: U): Unit; def u: U }) = ()
- ^
+ ^
9 errors found
diff --git a/test/files/neg/t0565.check b/test/files/neg/t0565.check
index c5a64d0e53..98e61a2503 100644
--- a/test/files/neg/t0565.check
+++ b/test/files/neg/t0565.check
@@ -1,4 +1,4 @@
t0565.scala:8: error: Parameter type in structural refinement may not refer to a type member of that refinement
def z (w : T) : T } =
- ^
+ ^
one error found
diff --git a/test/files/neg/t2144.check b/test/files/neg/t2144.check
index 7239f4406e..670e188c2a 100644
--- a/test/files/neg/t2144.check
+++ b/test/files/neg/t2144.check
@@ -1,4 +1,4 @@
t2144.scala:2: error: Parameter type in structural refinement may not refer to an abstract type defined outside that refinement
def foo[A](a: A) = new { def bar(x: A): A = x }
- ^
+ ^
one error found
diff --git a/test/files/neg/t2296a.check b/test/files/neg/t2296a.check
new file mode 100644
index 0000000000..863b861046
--- /dev/null
+++ b/test/files/neg/t2296a.check
@@ -0,0 +1,5 @@
+S.scala:6: error: Implementation restriction: trait S accesses protected method foo inside a concrete trait method.
+Add an accessor in a class extending class J as a workaround.
+ foo()
+ ^
+one error found
diff --git a/test/files/run/t2296a/J.java b/test/files/neg/t2296a/J.java
index 78ff3e9804..78ff3e9804 100644
--- a/test/files/run/t2296a/J.java
+++ b/test/files/neg/t2296a/J.java
diff --git a/test/files/run/t2296a/S.scala b/test/files/neg/t2296a/S.scala
index 532d038a42..532d038a42 100644
--- a/test/files/run/t2296a/S.scala
+++ b/test/files/neg/t2296a/S.scala
diff --git a/test/files/neg/t2296b.check b/test/files/neg/t2296b.check
new file mode 100644
index 0000000000..07cc54d573
--- /dev/null
+++ b/test/files/neg/t2296b.check
@@ -0,0 +1,5 @@
+S_2.scala:6: error: Implementation restriction: trait S accesses protected method foo inside a concrete trait method.
+Add an accessor in a class extending class J_1 as a workaround.
+ foo()
+ ^
+one error found
diff --git a/test/files/run/t2296b/J_1.java b/test/files/neg/t2296b/J_1.java
index 4c91d47073..4c91d47073 100644
--- a/test/files/run/t2296b/J_1.java
+++ b/test/files/neg/t2296b/J_1.java
diff --git a/test/files/run/t2296b/S_2.scala b/test/files/neg/t2296b/S_2.scala
index 6cdb0cfaba..6cdb0cfaba 100644
--- a/test/files/run/t2296b/S_2.scala
+++ b/test/files/neg/t2296b/S_2.scala
diff --git a/test/files/neg/t2775.check b/test/files/neg/t2775.check
index f357221cd9..934a970f2e 100644
--- a/test/files/neg/t2775.check
+++ b/test/files/neg/t2775.check
@@ -1,4 +1,4 @@
-t2775.scala:1: error: cannot find class tag for element type B.this.T
-trait B[S] { type T = S; val c = new Array[T](1) }
- ^
-one error found
+t2775.scala:1: error: cannot find class tag for element type B.this.T
+trait B[S] { type T = S; val c = new Array[T](1) }
+ ^
+one error found
diff --git a/test/files/neg/t3507-old.check b/test/files/neg/t3507-old.check
index 5c58444cb3..b3ac40473e 100644
--- a/test/files/neg/t3507-old.check
+++ b/test/files/neg/t3507-old.check
@@ -1,4 +1,4 @@
-t3507-old.scala:13: error: No Manifest available for _1.b.c.type.
- mani/*[object _1.b.c]*/(c) // kaboom in manifestOfType / TreeGen.mkAttributedQualifier
- ^
-one error found
+t3507-old.scala:13: error: No Manifest available for _1.b.c.type.
+ mani/*[object _1.b.c]*/(c) // kaboom in manifestOfType / TreeGen.mkAttributedQualifier
+ ^
+one error found
diff --git a/test/files/neg/t3614.check b/test/files/neg/t3614.check
index 5fdb5cbf1f..0f9c83aa0d 100644
--- a/test/files/neg/t3614.check
+++ b/test/files/neg/t3614.check
@@ -1,4 +1,4 @@
-t3614.scala:2: error: class type required but AnyRef{def a: <?>} found
+t3614.scala:2: error: class type required but AnyRef{def a: Int} found
def v = new ({ def a=0 })
^
-one error found \ No newline at end of file
+one error found
diff --git a/test/files/neg/t4425.check b/test/files/neg/t4425.check
index a6a1a1fad4..0f2fe6f2d1 100644
--- a/test/files/neg/t4425.check
+++ b/test/files/neg/t4425.check
@@ -1,5 +1,4 @@
-t4425.scala:3: error: error during expansion of this match (this is a scalac bug).
-The underlying error was: value _1 is not a member of object Foo.X
+t4425.scala:3: error: isInstanceOf cannot test if value types are references.
42 match { case _ X _ => () }
- ^
+ ^
one error found
diff --git a/test/files/neg/t4425.flags b/test/files/neg/t4425.flags
new file mode 100644
index 0000000000..1182725e86
--- /dev/null
+++ b/test/files/neg/t4425.flags
@@ -0,0 +1 @@
+-optimize \ No newline at end of file
diff --git a/test/files/neg/t5031.check b/test/files/neg/t5031.check
index 8983d8daf9..2f1090c321 100644
--- a/test/files/neg/t5031.check
+++ b/test/files/neg/t5031.check
@@ -1,5 +1,5 @@
-Id.scala:3: error: Companions 'class Test' and 'object Test' must be defined in same file:
+package.scala:2: error: Companions 'class Test' and 'object Test' must be defined in same file:
Found in t5031/package.scala and t5031/Id.scala
-object Test
- ^
+ class Test
+ ^
one error found
diff --git a/test/files/neg/t5031b.check b/test/files/neg/t5031b.check
new file mode 100644
index 0000000000..3bc2284a4d
--- /dev/null
+++ b/test/files/neg/t5031b.check
@@ -0,0 +1,5 @@
+b.scala:3: error: Companions 'class Bippy' and 'object Bippy' must be defined in same file:
+ Found in t5031b/a.scala and t5031b/b.scala
+object Bippy
+ ^
+one error found
diff --git a/test/files/neg/t5031b/a.scala b/test/files/neg/t5031b/a.scala
new file mode 100644
index 0000000000..0ab9aa9769
--- /dev/null
+++ b/test/files/neg/t5031b/a.scala
@@ -0,0 +1,3 @@
+package foo
+
+class Bippy
diff --git a/test/files/neg/t5031b/b.scala b/test/files/neg/t5031b/b.scala
new file mode 100644
index 0000000000..bdef237af5
--- /dev/null
+++ b/test/files/neg/t5031b/b.scala
@@ -0,0 +1,3 @@
+package foo
+
+object Bippy
diff --git a/test/files/neg/t5148.check b/test/files/neg/t5148.check
index 96eb1fd364..6edfdf2b1e 100644
--- a/test/files/neg/t5148.check
+++ b/test/files/neg/t5148.check
@@ -1,2 +1,3 @@
-error: bad reference while unpickling Imports.class: term memberHandlers not found in scala.tools.nsc.interpreter.IMain
-one error found
+error: bad symbolic reference to value global in class IMain - referenced from t5148.scala (a classfile may be missing)
+error: bad symbolic reference to value memberHandlers in class IMain - referenced from t5148.scala (a classfile may be missing)
+two errors found
diff --git a/test/files/neg/t5452-new.check b/test/files/neg/t5452-new.check
index 4bbeff3644..1850a7004a 100644
--- a/test/files/neg/t5452-new.check
+++ b/test/files/neg/t5452-new.check
@@ -1,8 +1,8 @@
-t5452-new.scala:30: error: overloaded method value apply with alternatives:
- ()Queryable[CoffeesTable] <and>
- (t: Tree)(implicit evidence$2: scala.reflect.ClassTag[CoffeesTable])Nothing <and>
- (implicit evidence$1: scala.reflect.ClassTag[CoffeesTable])Nothing
- cannot be applied to (Queryable[CoffeesTable])
- Queryable[CoffeesTable]( q.treeFilter(null) )
- ^
-one error found
+t5452-new.scala:30: error: overloaded method value apply with alternatives:
+ ()Queryable[CoffeesTable] <and>
+ (t: Tree)(implicit evidence$2: scala.reflect.ClassTag[CoffeesTable])Nothing <and>
+ (implicit evidence$1: scala.reflect.ClassTag[CoffeesTable])Nothing
+ cannot be applied to (Queryable[CoffeesTable])
+ Queryable[CoffeesTable]( q.treeFilter(null) )
+ ^
+one error found
diff --git a/test/files/neg/t5452-old.check b/test/files/neg/t5452-old.check
index e5872a5759..1860c98c53 100644
--- a/test/files/neg/t5452-old.check
+++ b/test/files/neg/t5452-old.check
@@ -1,8 +1,8 @@
-t5452-old.scala:28: error: overloaded method value apply with alternatives:
- ()Queryable[CoffeesTable] <and>
- (t: Tree)(implicit evidence$2: Manifest[CoffeesTable])Nothing <and>
- (implicit evidence$1: Manifest[CoffeesTable])Nothing
- cannot be applied to (Queryable[CoffeesTable])
- Queryable[CoffeesTable]( q.treeFilter(null) )
- ^
-one error found
+t5452-old.scala:28: error: overloaded method value apply with alternatives:
+ ()Queryable[CoffeesTable] <and>
+ (t: Tree)(implicit evidence$2: Manifest[CoffeesTable])Nothing <and>
+ (implicit evidence$1: Manifest[CoffeesTable])Nothing
+ cannot be applied to (Queryable[CoffeesTable])
+ Queryable[CoffeesTable]( q.treeFilter(null) )
+ ^
+one error found
diff --git a/test/files/neg/t5510.check b/test/files/neg/t5510.check
index 60da3bed40..04220e79bb 100644
--- a/test/files/neg/t5510.check
+++ b/test/files/neg/t5510.check
@@ -13,7 +13,7 @@ t5510.scala:5: error: unclosed string literal
t5510.scala:6: error: unclosed multi-line string literal
val s5 = ""s""" $s1 $s2 s"
^
-t5510.scala:7: error: '}' expected but eof found.
+t5510.scala:7: error: unclosed multi-line string literal
}
^
6 errors found
diff --git a/test/files/neg/t5687.check b/test/files/neg/t5687.check
new file mode 100644
index 0000000000..5096077ee5
--- /dev/null
+++ b/test/files/neg/t5687.check
@@ -0,0 +1,8 @@
+t5687.scala:4: error: type arguments [T] do not conform to class Template's type parameter bounds [T <: AnyRef]
+ type Repr[T]<:Template[T]
+ ^
+t5687.scala:20: error: overriding type Repr in class Template with bounds[T] <: Template[T];
+ type Repr has incompatible type
+ type Repr = CurveTemplate[T]
+ ^
+two errors found
diff --git a/test/files/neg/t5687.scala b/test/files/neg/t5687.scala
new file mode 100644
index 0000000000..90a9ae265c
--- /dev/null
+++ b/test/files/neg/t5687.scala
@@ -0,0 +1,55 @@
+abstract class Template[T <: AnyRef](private val t: T) {
+
+// type Repr[A<:AnyRef]<:Template[T]
+ type Repr[T]<:Template[T]
+
+ def access1(timeout: Int): Repr[T] = this.asInstanceOf[Repr[T]]
+ def access2: Repr[T] = this.asInstanceOf[Repr[T]]
+ val access3: Repr[T] = this.asInstanceOf[Repr[T]]
+ def access4(v: Repr[T]): Repr[T] = this.asInstanceOf[Repr[T]]
+ def access5(x: X): Repr[T] = this.asInstanceOf[Repr[T]]
+ def access5(x: Y): Repr[T] = this.asInstanceOf[Repr[T]]
+
+ def withReadModifiers(readModifiers:Int): Repr[T] = this.asInstanceOf[Repr[T]]
+}
+
+class Curve
+
+class CurveTemplate [T <: Curve](t: T) extends Template(t) {
+// type Repr[A<: AnyRef] = CurveTemplate[T]
+ type Repr = CurveTemplate[T]
+}
+
+class Base
+class X extends Base
+class Y extends Base
+
+
+object Example {
+ def test1() {
+ new CurveTemplate(new Curve).access1(10)
+
+ new CurveTemplate(new Curve).access2
+
+ new CurveTemplate(new Curve).access3
+
+ new CurveTemplate(new Curve).access4(null)
+
+ new CurveTemplate(new Curve).access5(new X)
+
+ ()
+
+ }
+
+ def test2() {
+ new CurveTemplate(new Curve).access1(10).withReadModifiers(1)
+
+ new CurveTemplate(new Curve).access2.withReadModifiers(1)
+
+ new CurveTemplate(new Curve).access3.withReadModifiers(1)
+
+ new CurveTemplate(new Curve).access4(null).withReadModifiers(1)
+
+ new CurveTemplate(new Curve).access5(new X).withReadModifiers(1)
+ }
+}
diff --git a/test/files/neg/t5689.check b/test/files/neg/t5689.check
index 3b25dd612e..50aaa7dbfe 100644
--- a/test/files/neg/t5689.check
+++ b/test/files/neg/t5689.check
@@ -1,7 +1,7 @@
-t5689.scala:4: error: macro implementation has wrong shape:
- required: (c: scala.reflect.macros.Context)(i: c.Expr[Double]): c.Expr[String]
- found : (c: scala.reflect.macros.Context)(i: c.Expr[Double]): c.Expr[Int]
-type mismatch for return type: c.Expr[Int] does not conform to c.Expr[String]
- def returnsString(i: Double): String = macro returnsIntImpl
- ^
-one error found
+t5689.scala:4: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context)(i: c.Expr[Double]): c.Expr[String]
+ found : (c: scala.reflect.macros.Context)(i: c.Expr[Double]): c.Expr[Int]
+type mismatch for return type: c.Expr[Int] does not conform to c.Expr[String]
+ def returnsString(i: Double): String = macro returnsIntImpl
+ ^
+one error found
diff --git a/test/files/neg/t5692a.check b/test/files/neg/t5692a.check
new file mode 100644
index 0000000000..ded95a8820
--- /dev/null
+++ b/test/files/neg/t5692a.check
@@ -0,0 +1,4 @@
+Test_2.scala:2: error: type parameter not specified
+ def x = Macros.foo
+ ^
+one error found
diff --git a/test/files/neg/t5692a.flags b/test/files/neg/t5692a.flags
new file mode 100644
index 0000000000..cd66464f2f
--- /dev/null
+++ b/test/files/neg/t5692a.flags
@@ -0,0 +1 @@
+-language:experimental.macros \ No newline at end of file
diff --git a/test/files/neg/t5692a/Macros_1.scala b/test/files/neg/t5692a/Macros_1.scala
new file mode 100644
index 0000000000..06b5a3de36
--- /dev/null
+++ b/test/files/neg/t5692a/Macros_1.scala
@@ -0,0 +1,6 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl[T](c: Context) = c.literalUnit
+ def foo[T] = macro impl[T]
+} \ No newline at end of file
diff --git a/test/files/neg/t5692a/Test_2.scala b/test/files/neg/t5692a/Test_2.scala
new file mode 100644
index 0000000000..08d510cc6f
--- /dev/null
+++ b/test/files/neg/t5692a/Test_2.scala
@@ -0,0 +1,3 @@
+class Test {
+ def x = Macros.foo
+} \ No newline at end of file
diff --git a/test/files/neg/t5692b.check b/test/files/neg/t5692b.check
new file mode 100644
index 0000000000..e453870ec8
--- /dev/null
+++ b/test/files/neg/t5692b.check
@@ -0,0 +1,4 @@
+Test_2.scala:2: error: type parameters not specified
+ def x = Macros.foo
+ ^
+one error found
diff --git a/test/files/neg/t5692b.flags b/test/files/neg/t5692b.flags
new file mode 100644
index 0000000000..cd66464f2f
--- /dev/null
+++ b/test/files/neg/t5692b.flags
@@ -0,0 +1 @@
+-language:experimental.macros \ No newline at end of file
diff --git a/test/files/neg/t5692b/Macros_1.scala b/test/files/neg/t5692b/Macros_1.scala
new file mode 100644
index 0000000000..b28d19f903
--- /dev/null
+++ b/test/files/neg/t5692b/Macros_1.scala
@@ -0,0 +1,6 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl[T, U](c: Context) = c.literalUnit
+ def foo[T, U] = macro impl[T, U]
+} \ No newline at end of file
diff --git a/test/files/neg/t5692b/Test_2.scala b/test/files/neg/t5692b/Test_2.scala
new file mode 100644
index 0000000000..08d510cc6f
--- /dev/null
+++ b/test/files/neg/t5692b/Test_2.scala
@@ -0,0 +1,3 @@
+class Test {
+ def x = Macros.foo
+} \ No newline at end of file
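For context on the two t5692 checks above: the macro's type parameters cannot be inferred from a bare Macros.foo, which is exactly what the new "type parameter(s) not specified" errors report. Supplying the arguments explicitly at the call site would be the usual workaround; a hypothetical sketch, not part of the test:

    class TestFixed {
      def x = Macros.foo[Int, String] // explicit type arguments, so expansion can proceed
    }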
diff --git a/test/files/neg/t5878.check b/test/files/neg/t5878.check
index 50dba0d272..c60c4653a2 100644
--- a/test/files/neg/t5878.check
+++ b/test/files/neg/t5878.check
@@ -1,13 +1,13 @@
-t5878.scala:1: error: value class may not unbox to itself
+t5878.scala:1: error: value class may not wrap another user-defined value class
case class Foo(x: Bar) extends AnyVal
- ^
-t5878.scala:2: error: value class may not unbox to itself
+ ^
+t5878.scala:2: error: value class may not wrap another user-defined value class
case class Bar(x: Foo) extends AnyVal
- ^
-t5878.scala:4: error: value class may not unbox to itself
+ ^
+t5878.scala:4: error: value class may not wrap another user-defined value class
class Foo1(val x: Bar1) extends AnyVal
- ^
-t5878.scala:5: error: value class may not unbox to itself
+ ^
+t5878.scala:5: error: value class may not wrap another user-defined value class
class Bar1(val x: Foo1) extends AnyVal
- ^
+ ^
four errors found
diff --git a/test/files/neg/t6040.check b/test/files/neg/t6040.check
index f44a81c471..f6757f97e3 100644
--- a/test/files/neg/t6040.check
+++ b/test/files/neg/t6040.check
@@ -1,6 +1,6 @@
error: extension of type scala.Dynamic needs to be enabled
by making the implicit value language.dynamics visible.
-This can be achieved by adding the import clause 'import language.dynamics'
+This can be achieved by adding the import clause 'import scala.language.dynamics'
or by setting the compiler option -language:dynamics.
See the Scala docs for value scala.language.dynamics for a discussion
why the feature needs to be explicitly enabled.
diff --git a/test/files/neg/t6048.check b/test/files/neg/t6048.check
index 051f41877e..5bdf2eca88 100644
--- a/test/files/neg/t6048.check
+++ b/test/files/neg/t6048.check
@@ -4,7 +4,10 @@ t6048.scala:3: error: unreachable code
t6048.scala:8: error: unreachable code
case _ if false => x // unreachable
^
-t6048.scala:14: error: unreachable code
+t6048.scala:13: error: patterns after a variable pattern cannot match (SLS 8.1.1)
+ case _ => x
+ ^
+t6048.scala:14: error: unreachable code due to variable pattern on line 13
case 5 if true => x // unreachable
^
-three errors found
+four errors found
diff --git a/test/files/neg/t6162-inheritance.check b/test/files/neg/t6162-inheritance.check
new file mode 100644
index 0000000000..a7d3cc3238
--- /dev/null
+++ b/test/files/neg/t6162-inheritance.check
@@ -0,0 +1,10 @@
+t6162-inheritance.scala:6: error: inheritance from class Foo in package t6126 is deprecated: `Foo` will be made final in a future version.
+class SubFoo extends Foo
+ ^
+t6162-inheritance.scala:11: error: inheritance from trait T in package t6126 is deprecated
+object SubT extends T
+ ^
+t6162-inheritance.scala:17: error: inheritance from trait S in package t6126 is deprecated
+ new S {
+ ^
+three errors found
diff --git a/test/files/neg/t6162-inheritance.flags b/test/files/neg/t6162-inheritance.flags
new file mode 100644
index 0000000000..65faf53579
--- /dev/null
+++ b/test/files/neg/t6162-inheritance.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -deprecation \ No newline at end of file
diff --git a/test/files/neg/t6162-inheritance.scala b/test/files/neg/t6162-inheritance.scala
new file mode 100644
index 0000000000..7b47b9285a
--- /dev/null
+++ b/test/files/neg/t6162-inheritance.scala
@@ -0,0 +1,19 @@
+package scala.t6126
+
+@deprecatedInheritance("`Foo` will be made final in a future version.", "2.10.0")
+class Foo
+
+class SubFoo extends Foo
+
+@deprecatedInheritance()
+trait T
+
+object SubT extends T
+
+@deprecatedInheritance()
+trait S
+
+object O {
+ new S {
+ }
+}
diff --git a/test/files/neg/t6162-overriding.check b/test/files/neg/t6162-overriding.check
new file mode 100644
index 0000000000..e774888d36
--- /dev/null
+++ b/test/files/neg/t6162-overriding.check
@@ -0,0 +1,7 @@
+t6162-overriding.scala:14: error: overriding method bar in class Bar is deprecated: `bar` will be made private in a future version.
+ override def bar = 43
+ ^
+t6162-overriding.scala:15: error: overriding method baz in class Bar is deprecated
+ override def baz = 43
+ ^
+two errors found
diff --git a/test/files/neg/t6162-overriding.flags b/test/files/neg/t6162-overriding.flags
new file mode 100644
index 0000000000..65faf53579
--- /dev/null
+++ b/test/files/neg/t6162-overriding.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -deprecation \ No newline at end of file
diff --git a/test/files/neg/t6162-overriding.scala b/test/files/neg/t6162-overriding.scala
new file mode 100644
index 0000000000..4cab0c2dee
--- /dev/null
+++ b/test/files/neg/t6162-overriding.scala
@@ -0,0 +1,17 @@
+package scala.t6162
+
+class Bar {
+ @deprecatedOverriding("`bar` will be made private in a future version.", "2.10.0")
+ def bar = 42
+
+ @deprecatedOverriding()
+ def baz = 42
+
+ def baz(a: Any) = 0
+}
+
+class SubBar extends Bar {
+ override def bar = 43
+ override def baz = 43
+ override def baz(a: Any) = 43 // okay
+}
diff --git a/test/files/neg/t6214.check b/test/files/neg/t6214.check
new file mode 100644
index 0000000000..6349a3e71c
--- /dev/null
+++ b/test/files/neg/t6214.check
@@ -0,0 +1,4 @@
+t6214.scala:5: error: missing parameter type
+ m { s => case class Foo() }
+ ^
+one error found
diff --git a/test/files/neg/t6214.scala b/test/files/neg/t6214.scala
new file mode 100644
index 0000000000..734acda35e
--- /dev/null
+++ b/test/files/neg/t6214.scala
@@ -0,0 +1,7 @@
+object Test {
+ def m(f: String => Unit) = 0
+ def m(f: Int => Unit) = 0
+ def foo {
+ m { s => case class Foo() }
+ }
+}
diff --git a/test/files/neg/t6227.check b/test/files/neg/t6227.check
new file mode 100644
index 0000000000..5e3c636712
--- /dev/null
+++ b/test/files/neg/t6227.check
@@ -0,0 +1,4 @@
+t6227.scala:2: error: illegal combination of modifiers: implicit and case for: class IntOps
+ implicit case class IntOps( i: Int ) {
+ ^
+one error found
diff --git a/test/files/neg/t6227.scala b/test/files/neg/t6227.scala
new file mode 100644
index 0000000000..46416839d1
--- /dev/null
+++ b/test/files/neg/t6227.scala
@@ -0,0 +1,6 @@
+object Test {
+ implicit case class IntOps( i: Int ) {
+ def twice = i * 2
+ }
+}
+
diff --git a/test/files/neg/t6258.check b/test/files/neg/t6258.check
new file mode 100644
index 0000000000..73363d8280
--- /dev/null
+++ b/test/files/neg/t6258.check
@@ -0,0 +1,16 @@
+t6258.scala:2: error: missing parameter type for expanded function
+The argument types of an anonymous function must be fully known. (SLS 8.5)
+Expected type was: PartialFunction[?, Int]
+ val f : PartialFunction[_, Int] = { case a : Int => a } // undefined param
+ ^
+t6258.scala:5: error: missing parameter type for expanded function
+The argument types of an anonymous function must be fully known. (SLS 8.5)
+Expected type was: PartialFunction[?,Int]
+ foo { case a : Int => a } // undefined param
+ ^
+t6258.scala:22: error: missing parameter type for expanded function
+The argument types of an anonymous function must be fully known. (SLS 8.5)
+Expected type was: PartialFunction[?,Any]
+ bar[M[Any]] (foo { // undefined param
+ ^
+three errors found
diff --git a/test/files/neg/t6258.scala b/test/files/neg/t6258.scala
new file mode 100644
index 0000000000..5046a4750a
--- /dev/null
+++ b/test/files/neg/t6258.scala
@@ -0,0 +1,25 @@
+object Test {
+ val f : PartialFunction[_, Int] = { case a : Int => a } // undefined param
+
+ def foo[A](pf: PartialFunction[A, Int]) {};
+ foo { case a : Int => a } // undefined param
+
+ val g : PartialFunction[Int, _] = { case a : Int => a } // okay
+}
+
+
+// Another variation, seen in the wild with Specs2.
+class X {
+ trait Matcher[-T]
+
+ def bar[T](m: Matcher[T]) = null
+ def bar[T](i: Int) = null
+
+ def foo[T](p: PartialFunction[T, Any]): Matcher[T] = null
+
+ case class M[X](a: X)
+
+ bar[M[Any]] (foo { // undefined param
+ case M(_) => null
+ })
+}
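The three errors above all stem from the partial function's input type being unknown (an underscore or an unconstrained type parameter). Pinning the input type down is the standard remedy; a hedged sketch with hypothetical names, not part of the test:

    object T6258Fixed {
      val f: PartialFunction[Int, Int] = { case a: Int => a } // input type fully known, so this compiles
    }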
diff --git a/test/files/neg/t6260.check b/test/files/neg/t6260.check
new file mode 100644
index 0000000000..2b7f1a8bfb
--- /dev/null
+++ b/test/files/neg/t6260.check
@@ -0,0 +1,13 @@
+t6260.scala:3: error: bridge generated for member method apply: (x$1: Box[X])Box[Y] in anonymous class $anonfun
+which overrides method apply: (v1: T1)R in trait Function1
+clashes with definition of the member itself;
+both have erased type (v1: Object)Object
+ ((bx: Box[X]) => new Box(f(bx.x)))(this)
+ ^
+t6260.scala:8: error: bridge generated for member method apply: (x$1: Box[X])Box[Y] in anonymous class $anonfun
+which overrides method apply: (v1: T1)R in trait Function1
+clashes with definition of the member itself;
+both have erased type (v1: Object)Object
+ ((bx: Box[X]) => new Box(f(bx.x)))(self)
+ ^
+two errors found
diff --git a/test/files/neg/t6260.scala b/test/files/neg/t6260.scala
new file mode 100644
index 0000000000..93b5448227
--- /dev/null
+++ b/test/files/neg/t6260.scala
@@ -0,0 +1,17 @@
+class Box[X](val x: X) extends AnyVal {
+ def map[Y](f: X => Y): Box[Y] =
+ ((bx: Box[X]) => new Box(f(bx.x)))(this)
+}
+
+object Test {
+ def map2[X, Y](self: Box[X], f: X => Y): Box[Y] =
+ ((bx: Box[X]) => new Box(f(bx.x)))(self)
+
+ def main(args: Array[String]) {
+ val f = (x: Int) => x + 1
+ val g = (x: String) => x + x
+
+ map2(new Box(42), f)
+ new Box("abc") map g
+ }
+}
diff --git a/test/files/neg/t6263.check b/test/files/neg/t6263.check
new file mode 100644
index 0000000000..9e9c7c615b
--- /dev/null
+++ b/test/files/neg/t6263.check
@@ -0,0 +1,9 @@
+t6263.scala:5: error: type mismatch;
+ found : A.this.c.type (with underlying type C)
+ required: AnyRef
+Note that C extends Any, not AnyRef.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ type t = c.type
+ ^
+one error found
diff --git a/test/files/neg/t6263.scala b/test/files/neg/t6263.scala
new file mode 100644
index 0000000000..6575185b5c
--- /dev/null
+++ b/test/files/neg/t6263.scala
@@ -0,0 +1,6 @@
+class C(val a: Any) extends AnyVal
+class A {
+ implicit def c2AnyRef(c: C): AnyRef = new {}
+ val c = new C(0)
+ type t = c.type
+}
diff --git a/test/files/neg/t6264.check b/test/files/neg/t6264.check
new file mode 100644
index 0000000000..438be4c39f
--- /dev/null
+++ b/test/files/neg/t6264.check
@@ -0,0 +1,4 @@
+t6264.scala:3: error: non-variable type argument Tuple1[_] in type Tuple2[_, Tuple1[_]] is unchecked since it is eliminated by erasure
+ x.isInstanceOf[Tuple2[_, Tuple1[_]]]
+ ^
+one error found
diff --git a/test/files/neg/t6264.flags b/test/files/neg/t6264.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/t6264.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/t6264.scala b/test/files/neg/t6264.scala
new file mode 100644
index 0000000000..dc3b727934
--- /dev/null
+++ b/test/files/neg/t6264.scala
@@ -0,0 +1,6 @@
+class Foo {
+ def foo(x: AnyRef): Unit = {
+ x.isInstanceOf[Tuple2[_, Tuple1[_]]]
+ ()
+ }
+}
diff --git a/test/files/neg/t6276.check b/test/files/neg/t6276.check
new file mode 100644
index 0000000000..0b3dfa5531
--- /dev/null
+++ b/test/files/neg/t6276.check
@@ -0,0 +1,19 @@
+t6276.scala:4: error: method a in class C does nothing other than call itself recursively
+ def a: Any = a // warn
+ ^
+t6276.scala:5: error: value b in class C does nothing other than call itself recursively
+ val b: Any = b // warn
+ ^
+t6276.scala:7: error: method c in class C does nothing other than call itself recursively
+ def c: Any = this.c // warn
+ ^
+t6276.scala:8: error: method d in class C does nothing other than call itself recursively
+ def d: Any = C.this.d // warn
+ ^
+t6276.scala:13: error: method a does nothing other than call itself recursively
+ def a: Any = a // warn
+ ^
+t6276.scala:22: error: method a does nothing other than call itself recursively
+ def a = a // warn
+ ^
+6 errors found
diff --git a/test/files/neg/t6276.flags b/test/files/neg/t6276.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/t6276.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/t6276.scala b/test/files/neg/t6276.scala
new file mode 100644
index 0000000000..bd0a473f71
--- /dev/null
+++ b/test/files/neg/t6276.scala
@@ -0,0 +1,44 @@
+object Test {
+ def foo(a: Int, b: Int, c: Int) {
+ class C {
+ def a: Any = a // warn
+ val b: Any = b // warn
+
+ def c: Any = this.c // warn
+ def d: Any = C.this.d // warn
+ }
+
+ def method {
+ // method local
+ def a: Any = a // warn
+ }
+
+ trait T {
+ def a: Any
+ }
+
+ new T {
+ // inherited return type
+ def a = a // warn
+ }
+
+ // no warnings below
+ new {
+ def a: Any = {println(""); a}
+ val b: Any = {println(""); b}
+ def c(i: Int): Any = c(i - 0)
+ }
+
+ class D {
+ def other: D = null
+ def foo: Any = other.foo
+ }
+
+ class E {
+ def foo: Any = 0
+ class D extends E {
+ override def foo: Any = E.this.foo
+ }
+ }
+ }
+}
diff --git a/test/files/neg/t6283.check b/test/files/neg/t6283.check
new file mode 100644
index 0000000000..69e417ee93
--- /dev/null
+++ b/test/files/neg/t6283.check
@@ -0,0 +1,4 @@
+t6283.scala:1: error: `abstract' modifier cannot be used with value classes
+abstract class Funky(val i: Int) extends AnyVal
+ ^
+one error found
diff --git a/test/files/neg/t6283.scala b/test/files/neg/t6283.scala
new file mode 100644
index 0000000000..d41eb18a74
--- /dev/null
+++ b/test/files/neg/t6283.scala
@@ -0,0 +1 @@
+abstract class Funky(val i: Int) extends AnyVal
diff --git a/test/files/neg/t6323a.check b/test/files/neg/t6323a.check
new file mode 100644
index 0000000000..670247887c
--- /dev/null
+++ b/test/files/neg/t6323a.check
@@ -0,0 +1,9 @@
+t6323a.scala:11: `package`.this.materializeTypeTag[Test](scala.reflect.runtime.`package`.universe) is not a valid implicit value for reflect.runtime.universe.TypeTag[Test] because:
+failed to typecheck the materialized tag:
+cannot create a TypeTag referring to local class Test.Test
+ val value = u.typeOf[Test]
+ ^
+t6323a.scala:11: error: No TypeTag available for Test
+ val value = u.typeOf[Test]
+ ^
+one error found
diff --git a/test/files/neg/t6323a.flags b/test/files/neg/t6323a.flags
new file mode 100644
index 0000000000..4c6cdb71e2
--- /dev/null
+++ b/test/files/neg/t6323a.flags
@@ -0,0 +1 @@
+-Xlog-implicits \ No newline at end of file
diff --git a/test/files/neg/t6323a.scala b/test/files/neg/t6323a.scala
new file mode 100644
index 0000000000..a203167f3c
--- /dev/null
+++ b/test/files/neg/t6323a.scala
@@ -0,0 +1,21 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => m}
+import scala.reflect.runtime.{universe => u}
+
+object Test extends App {
+ locally {
+ try {
+ case class Test(a:String,b:List[Int])
+
+ val lookAtMe = m.reflect(Test("a",List(5)))
+ val value = u.typeOf[Test]
+ val members = value.members
+ val member = value.members.filter(_.name.encoded == "a")
+ val aAccessor = lookAtMe.reflectMethod(member.head.asMethod)
+ val thisShouldBeA = aAccessor.apply()
+ println(thisShouldBeA)
+ } catch {
+ case ScalaReflectionException(msg) => println(msg)
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/neg/t6335.check b/test/files/neg/t6335.check
new file mode 100644
index 0000000000..1727a05eb2
--- /dev/null
+++ b/test/files/neg/t6335.check
@@ -0,0 +1,9 @@
+t6335.scala:6: error: method Z is defined twice
+ conflicting symbols both originated in file 't6335.scala'
+ implicit class Z[A](val i: A) { def zz = i }
+ ^
+t6335.scala:3: error: method X is defined twice
+ conflicting symbols both originated in file 't6335.scala'
+ implicit class X(val x: Int) { def xx = x }
+ ^
+two errors found
diff --git a/test/files/neg/t6335.scala b/test/files/neg/t6335.scala
new file mode 100644
index 0000000000..5c41e81ef5
--- /dev/null
+++ b/test/files/neg/t6335.scala
@@ -0,0 +1,7 @@
+object ImplicitClass {
+ def X(i: Int) {}
+ implicit class X(val x: Int) { def xx = x }
+
+ def Z[A](i: A) {}
+ implicit class Z[A](val i: A) { def zz = i }
+} \ No newline at end of file
diff --git a/test/files/neg/t6336.check b/test/files/neg/t6336.check
new file mode 100644
index 0000000000..f70a5f70ab
--- /dev/null
+++ b/test/files/neg/t6336.check
@@ -0,0 +1,7 @@
+t6336.scala:3: error: Parameter type in structural refinement may not refer to a user-defined value class
+ val a = new { def y[T](x: X[T]) = x.i }
+ ^
+t6336.scala:4: error: Result type in structural refinement may not refer to a user-defined value class
+ val b = new { def y[T](x: T): X[T] = new X(2) }
+ ^
+two errors found
diff --git a/test/files/neg/t6336.scala b/test/files/neg/t6336.scala
new file mode 100644
index 0000000000..b1d61f4dd2
--- /dev/null
+++ b/test/files/neg/t6336.scala
@@ -0,0 +1,12 @@
+object D {
+ def main(args: Array[String]) {
+ val a = new { def y[T](x: X[T]) = x.i }
+ val b = new { def y[T](x: T): X[T] = new X(2) }
+ val x = new X(3)
+ val t = a.y(x)
+ println(t)
+ }
+}
+
+class X[T](val i: Int) extends AnyVal
+
diff --git a/test/files/neg/t6337.check b/test/files/neg/t6337.check
new file mode 100644
index 0000000000..8448f71320
--- /dev/null
+++ b/test/files/neg/t6337.check
@@ -0,0 +1,7 @@
+t6337.scala:10: error: value class may not wrap another user-defined value class
+class X[T](val i: XX[T]) extends AnyVal
+ ^
+t6337.scala:20: error: value class may not wrap another user-defined value class
+class X1[T](val i: XX1[T]) extends AnyVal
+ ^
+two errors found
diff --git a/test/files/neg/t6337.scala b/test/files/neg/t6337.scala
new file mode 100644
index 0000000000..c3858f8c04
--- /dev/null
+++ b/test/files/neg/t6337.scala
@@ -0,0 +1,21 @@
+object C {
+
+ def main(args: Array[String]) = {
+ val x = new X(new XX(3))
+ println(x.i.x + 9)
+ }
+
+}
+
+class X[T](val i: XX[T]) extends AnyVal
+class XX[T](val x: T) extends AnyVal
+
+object C1 {
+ def main(args: Array[String]) {
+ val x = new X1(new XX1(Some(3)))
+ println(x.i.x.get + 9)
+ }
+}
+
+class X1[T](val i: XX1[T]) extends AnyVal
+class XX1[T](val x: Option[T]) extends AnyVal
diff --git a/test/files/neg/t6340.check b/test/files/neg/t6340.check
new file mode 100644
index 0000000000..f18b8c3f4b
--- /dev/null
+++ b/test/files/neg/t6340.check
@@ -0,0 +1,10 @@
+t6340.scala:11: error: value D is not a member of object Foo
+ import Foo.{ A, B, C, D, E, X, Y, Z }
+ ^
+t6340.scala:16: error: not found: type D
+ val d = new D
+ ^
+t6340.scala:17: error: not found: type W
+ val w = new W
+ ^
+three errors found
diff --git a/test/files/neg/t6340.scala b/test/files/neg/t6340.scala
new file mode 100644
index 0000000000..8934d5c15d
--- /dev/null
+++ b/test/files/neg/t6340.scala
@@ -0,0 +1,21 @@
+object Foo {
+ class A
+ class B
+ class C
+ class X
+ class Y
+ class Z
+}
+
+object Test {
+ import Foo.{ A, B, C, D, E, X, Y, Z }
+
+ val a = new A
+ val b = new B
+ val c = new C
+ val d = new D
+ val w = new W
+ val x = new X
+ val y = new Y
+ val z = new Z
+}
diff --git a/test/files/neg/t6359.check b/test/files/neg/t6359.check
new file mode 100644
index 0000000000..2aa1ac5035
--- /dev/null
+++ b/test/files/neg/t6359.check
@@ -0,0 +1,7 @@
+t6359.scala:3: error: value class may not have nested module definitions
+ object X
+ ^
+t6359.scala:4: error: value class may not have nested class definitions
+ class Y
+ ^
+two errors found
diff --git a/test/files/neg/t6359.scala b/test/files/neg/t6359.scala
new file mode 100644
index 0000000000..96550fd906
--- /dev/null
+++ b/test/files/neg/t6359.scala
@@ -0,0 +1,8 @@
+class M(val t: Int) extends AnyVal {
+ def lazyString = {
+ object X
+ class Y
+
+ () => {X; new Y}
+ }
+}
diff --git a/test/files/neg/t6385.check b/test/files/neg/t6385.check
new file mode 100644
index 0000000000..93e51e8927
--- /dev/null
+++ b/test/files/neg/t6385.check
@@ -0,0 +1,7 @@
+t6385.scala:12: error: bridge generated for member method x: ()C[T] in class C
+which overrides method x: ()C[T] in trait AA
+clashes with definition of the member itself;
+both have erased type ()Object
+ def x = this
+ ^
+one error found
diff --git a/test/files/neg/t6385.scala b/test/files/neg/t6385.scala
new file mode 100644
index 0000000000..cec58eec9e
--- /dev/null
+++ b/test/files/neg/t6385.scala
@@ -0,0 +1,13 @@
+object N {
+ def main(args: Array[String]) {
+ val y: AA[Int] = C(2)
+ val c: Int = y.x.y
+ println(c)
+ }
+}
+trait AA[T] extends Any {
+ def x: C[T]
+}
+case class C[T](val y: T) extends AnyVal with AA[T] {
+ def x = this
+}
diff --git a/test/files/neg/t900.check b/test/files/neg/t900.check
index 4611ceba8c..6fe26a31ac 100644
--- a/test/files/neg/t900.check
+++ b/test/files/neg/t900.check
@@ -1,10 +1,9 @@
t900.scala:4: error: type mismatch;
found : Foo.this.x.type (with underlying type Foo.this.bar)
required: AnyRef
-Note that implicit conversions are not applicable because they are ambiguous:
- both method any2stringfmt in object Predef of type (x: Any)scala.runtime.StringFormat
- and method any2stringadd in object Predef of type (x: Any)scala.runtime.StringAdd
- are possible conversion functions from Foo.this.x.type to AnyRef
+Note that bar is unbounded, which means AnyRef is not a known parent.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
def break(): x.type
^
one error found
diff --git a/test/files/neg/unit2anyref.check b/test/files/neg/unit2anyref.check
index 10fe1861f5..6d11461700 100644
--- a/test/files/neg/unit2anyref.check
+++ b/test/files/neg/unit2anyref.check
@@ -1,8 +1,6 @@
unit2anyref.scala:2: error: type mismatch;
found : Unit
required: AnyRef
-Note: Unit is not implicitly converted to AnyRef. You can safely
-pattern match `x: AnyRef` or cast `x.asInstanceOf[AnyRef]` to do so.
val x: AnyRef = () // this should not succeed.
^
one error found
diff --git a/test/files/neg/valueclasses-pavlov.check b/test/files/neg/valueclasses-pavlov.check
new file mode 100644
index 0000000000..031589edad
--- /dev/null
+++ b/test/files/neg/valueclasses-pavlov.check
@@ -0,0 +1,7 @@
+valueclasses-pavlov.scala:8: error: double definition:
+method foo:(x: Box2)String and
+method foo:(x: String)String at line 7
+have same type after erasure: (x: String)String
+ def foo(x: Box2) = "foo(Box2): ok"
+ ^
+one error found
diff --git a/test/files/neg/valueclasses-pavlov.scala b/test/files/neg/valueclasses-pavlov.scala
new file mode 100644
index 0000000000..a5858b2cf0
--- /dev/null
+++ b/test/files/neg/valueclasses-pavlov.scala
@@ -0,0 +1,23 @@
+trait Foo[T <: AnyVal] extends Any {
+ def foo(x: String): String
+ def foo(x: T): String
+}
+
+class Box1(val value: String) extends AnyVal with Foo[Box2] {
+ def foo(x: String) = "foo(String): ok"
+ def foo(x: Box2) = "foo(Box2): ok"
+}
+
+class Box2(val value: String) extends AnyVal
+
+
+object test2a {
+
+ def main(args: Array[String]) {
+ val b1 = new Box1(null)
+ val b2 = new Box2(null)
+ val f: Foo[Box2] = b1
+ println(f.foo(""))
+ println(f.foo(b2))
+ }
+}
diff --git a/test/files/neg/wrong-args-for-none.check b/test/files/neg/wrong-args-for-none.check
new file mode 100644
index 0000000000..d3b2d572ab
--- /dev/null
+++ b/test/files/neg/wrong-args-for-none.check
@@ -0,0 +1,4 @@
+wrong-args-for-none.scala:5: error: wrong number of arguments for pattern Test.Foo(x: Int,y: Int)
+ def f(x: Any) = x match { case Bar(Foo(5)) => }
+ ^
+one error found
diff --git a/test/files/neg/wrong-args-for-none.scala b/test/files/neg/wrong-args-for-none.scala
new file mode 100644
index 0000000000..1caa4782a3
--- /dev/null
+++ b/test/files/neg/wrong-args-for-none.scala
@@ -0,0 +1,6 @@
+object Test {
+ case class Foo(x: Int, y: Int)
+ case class Bar(x: AnyRef)
+
+ def f(x: Any) = x match { case Bar(Foo(5)) => }
+}
diff --git a/test/files/pos/SI-5788.scala b/test/files/pos/SI-5788.scala
index 93b84bde87..f292461804 100644
--- a/test/files/pos/SI-5788.scala
+++ b/test/files/pos/SI-5788.scala
@@ -1,4 +1,3 @@
-trait Test {
- trait B[T]
- private final def grow[T](): B[T] = grow[T]()
+trait Foo[@specialized(Int) A] {
+ final def bar(a:A):A = bar(a)
}
diff --git a/test/files/pos/hk-match/a.scala b/test/files/pos/hk-match/a.scala
new file mode 100644
index 0000000000..7144068f3c
--- /dev/null
+++ b/test/files/pos/hk-match/a.scala
@@ -0,0 +1,5 @@
+trait A {
+ type HKAlias[X] = List[X]
+
+ (null: Any) match { case f: Bippy[HKAlias] => f }
+}
diff --git a/test/files/pos/hk-match/b.scala b/test/files/pos/hk-match/b.scala
new file mode 100644
index 0000000000..f7d21f6383
--- /dev/null
+++ b/test/files/pos/hk-match/b.scala
@@ -0,0 +1 @@
+trait Bippy[E[X]]
diff --git a/test/files/pos/inline-access-levels.flags b/test/files/pos/inline-access-levels.flags
new file mode 100644
index 0000000000..882f40f050
--- /dev/null
+++ b/test/files/pos/inline-access-levels.flags
@@ -0,0 +1 @@
+-optimise -Xfatal-warnings -Yinline-warnings
diff --git a/test/files/pos/inline-access-levels/A_1.scala b/test/files/pos/inline-access-levels/A_1.scala
new file mode 100644
index 0000000000..479fe0fc71
--- /dev/null
+++ b/test/files/pos/inline-access-levels/A_1.scala
@@ -0,0 +1,10 @@
+package test
+
+object A {
+
+ private var x: Int = 0
+
+ @inline def actOnX(f: Int => Int) = {
+ x = f(x)
+ }
+}
diff --git a/test/files/pos/inline-access-levels/Test_2.scala b/test/files/pos/inline-access-levels/Test_2.scala
new file mode 100644
index 0000000000..12c9eb540f
--- /dev/null
+++ b/test/files/pos/inline-access-levels/Test_2.scala
@@ -0,0 +1,11 @@
+package test
+
+object Test {
+
+ def main(args: Array[String]) {
+
+ A.actOnX(_ + 1)
+
+ }
+
+}
diff --git a/test/files/pos/javaConversions-2.10-regression.scala b/test/files/pos/javaConversions-2.10-regression.scala
new file mode 100644
index 0000000000..e1b81015ba
--- /dev/null
+++ b/test/files/pos/javaConversions-2.10-regression.scala
@@ -0,0 +1,17 @@
+import collection.{JavaConversions, mutable, concurrent}
+import JavaConversions._
+import java.util.concurrent.{ConcurrentHashMap => CHM}
+
+object Foo {
+ def buildCache2_9_simple[K <: AnyRef, V <: AnyRef]: mutable.ConcurrentMap[K, V] =
+ asScalaConcurrentMap(new CHM())
+
+ def buildCache2_9_implicit[K <: AnyRef, V <: AnyRef]: mutable.ConcurrentMap[K, V] =
+ new CHM[K, V]()
+}
+
+object Bar {
+ def assertType[T](t: T) = t
+ val a = new CHM[String, String]() += (("", ""))
+ assertType[concurrent.Map[String, String]](a)
+}
diff --git a/test/files/pos/specializes-sym-crash.scala b/test/files/pos/specializes-sym-crash.scala
new file mode 100644
index 0000000000..c46f435ac4
--- /dev/null
+++ b/test/files/pos/specializes-sym-crash.scala
@@ -0,0 +1,26 @@
+import scala.collection._
+
+trait Foo[+A,
+ +Coll,
+ +This <: GenSeqView[A, Coll] with GenSeqViewLike[A, Coll, This]]
+extends GenSeq[A] with GenSeqLike[A, This] with GenIterableView[A, Coll] with GenIterableViewLike[A, Coll, This] {
+self =>
+
+ trait Transformed[+B] extends GenSeqView[B, Coll] with super.Transformed[B] {
+ def length: Int
+ def apply(idx: Int): B
+ override def toString = viewToString
+ }
+ trait Reversed extends Transformed[A] {
+ override def iterator: Iterator[A] = createReversedIterator
+ def length: Int = self.length
+ def apply(idx: Int): A = self.apply(length - 1 - idx)
+ final override protected[this] def viewIdentifier = "R"
+
+ private def createReversedIterator = {
+ var lst = List[A]()
+ for (elem <- self) lst ::= elem
+ lst.iterator
+ }
+ }
+}
diff --git a/test/files/pos/t4579.scala b/test/files/pos/t4579.scala
index 2404b19da1..8951ec011f 100644
--- a/test/files/pos/t4579.scala
+++ b/test/files/pos/t4579.scala
@@ -12,11 +12,11 @@ class LispTokenizer(s: String) extends Iterator[String] {
while (i < s.length() && s.charAt(i) <= ' ') i += 1
i < s.length()
}
- def next: String =
+ def next: String =
if (hasNext) {
val start = i
if (isDelimiter(s charAt i)) i += 1
- else
+ else
do i = i + 1
while (!isDelimiter(s charAt i))
s.substring(start, i)
@@ -235,7 +235,7 @@ object LispCaseClasses extends Lisp {
def string2lisp(s: String): Data = {
val it = new LispTokenizer(s);
- def parseExpr(token: String): Data = {
+ def parse(token: String): Data = {
if (token == "(") parseList
else if (token == ")") sys.error("unbalanced parentheses")
else if ('0' <= token.charAt(0) && token.charAt(0) <= '9')
@@ -246,9 +246,9 @@ object LispCaseClasses extends Lisp {
}
def parseList: Data = {
val token = it.next;
- if (token == ")") NIL() else CONS(parseExpr(token), parseList)
+ if (token == ")") NIL() else CONS(parse(token), parseList)
}
- parseExpr(it.next)
+ parse(it.next)
}
def lisp2string(d: Data): String = d.toString();
@@ -426,7 +426,7 @@ object LispAny extends Lisp {
def string2lisp(s: String): Data = {
val it = new LispTokenizer(s);
- def parseExpr(token: String): Data = {
+ def parse(token: String): Data = {
if (token == "(") parseList
else if (token == ")") sys.error("unbalanced parentheses")
//else if (Character.isDigit(token.charAt(0)))
@@ -438,9 +438,9 @@ object LispAny extends Lisp {
}
def parseList: List[Data] = {
val token = it.next;
- if (token == ")") Nil else parseExpr(token) :: parseList
+ if (token == ")") Nil else parse(token) :: parseList
}
- parseExpr(it.next)
+ parse(it.next)
}
}
diff --git a/test/files/pos/t5245.scala b/test/files/pos/t5245.scala
index 3a4b4d286e..763be9ec26 100644
--- a/test/files/pos/t5245.scala
+++ b/test/files/pos/t5245.scala
@@ -1,3 +1,3 @@
-object Foo {
+object Foo {
def bar = { var x = (); def foo() = x }
} \ No newline at end of file
diff --git a/test/files/pos/t5667.scala b/test/files/pos/t5667.scala
index 513de5b663..353eec93d6 100644
--- a/test/files/pos/t5667.scala
+++ b/test/files/pos/t5667.scala
@@ -1,6 +1,4 @@
object Main {
implicit class C(val s: String) extends AnyVal
implicit class C2(val s: String) extends AnyRef
-
- implicit case class Foo(i: Int)
}
diff --git a/test/files/pos/t5756.scala b/test/files/pos/t5756.scala
new file mode 100644
index 0000000000..45960fa8bd
--- /dev/null
+++ b/test/files/pos/t5756.scala
@@ -0,0 +1,6 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def tagme[T: TypeTag](x: T) = typeTag[T]
+ val foo = tagme{object Bar; Bar}
+} \ No newline at end of file
diff --git a/test/files/pos/t6022b.scala b/test/files/pos/t6022b.scala
new file mode 100644
index 0000000000..6ceb928162
--- /dev/null
+++ b/test/files/pos/t6022b.scala
@@ -0,0 +1,20 @@
+trait A
+trait B
+trait C
+trait AB extends B with A
+
+// two types are mutually exclusive if there is no equality symbol whose constant implies both
+object Test extends App {
+ def foo(x: Any) = x match {
+ case _ : C => println("C")
+ case _ : AB => println("AB")
+ case _ : (A with B) => println("AB'")
+ case _ : B => println("B")
+ case _ : A => println("A")
+ }
+
+ foo(new A {})
+ foo(new B {})
+ foo(new AB{})
+ foo(new C {})
+}
diff --git a/test/files/pos/t6034.scala b/test/files/pos/t6034.scala
new file mode 100644
index 0000000000..3558d7ff0b
--- /dev/null
+++ b/test/files/pos/t6034.scala
@@ -0,0 +1 @@
+final class OptPlus[+A](val x: A) extends AnyVal { }
diff --git a/test/files/pos/t6047.scala b/test/files/pos/t6047.scala
index 80d5e9668b..bc5f856bd2 100644
--- a/test/files/pos/t6047.scala
+++ b/test/files/pos/t6047.scala
@@ -4,17 +4,17 @@ import java.io.InputStream
object Macros {
def unpack[A](input: InputStream): A = macro unpack_impl[A]
- def unpack_impl[A: c.AbsTypeTag](c: Context)(input: c.Expr[InputStream]): c.Expr[A] = {
+ def unpack_impl[A: c.WeakTypeTag](c: Context)(input: c.Expr[InputStream]): c.Expr[A] = {
import c.universe._
def unpackcode(tpe: c.Type): c.Expr[_] = {
- if (tpe <:< implicitly[c.AbsTypeTag[Traversable[_]]].tpe) {
+ if (tpe <:< implicitly[c.WeakTypeTag[Traversable[_]]].tpe) {
}
???
}
- unpackcode(implicitly[c.AbsTypeTag[A]].tpe)
+ unpackcode(implicitly[c.WeakTypeTag[A]].tpe)
???
}
} \ No newline at end of file
diff --git a/test/files/pos/t6145.scala b/test/files/pos/t6145.scala
new file mode 100644
index 0000000000..28334d4420
--- /dev/null
+++ b/test/files/pos/t6145.scala
@@ -0,0 +1,11 @@
+object Test {
+ // the existential causes a cast and the cast makes searchClass not be in tail position
+ // can we get rid of the useless cast?
+ @annotation.tailrec
+ final def searchClass: Class[_] = {
+ "packageName" match {
+ case _ =>
+ searchClass
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t6184.scala b/test/files/pos/t6184.scala
new file mode 100644
index 0000000000..83a1306aca
--- /dev/null
+++ b/test/files/pos/t6184.scala
@@ -0,0 +1,7 @@
+trait Foo[TroubleSome] {
+ type T <: Foo[TroubleSome]
+
+ this match {
+ case e: Foo[_]#T => ???
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t6201.scala b/test/files/pos/t6201.scala
new file mode 100644
index 0000000000..366c1f26eb
--- /dev/null
+++ b/test/files/pos/t6201.scala
@@ -0,0 +1,13 @@
+class Test {
+ class Foo1 {
+ def must(x: scala.xml.Elem) = ()
+ }
+
+ class Foo2 {
+ def must(x: Int) = ()
+ }
+ implicit def toFoo1(s: scala.xml.Elem) = new Foo1()
+ implicit def toFoo2(s: scala.xml.Elem) = new Foo2()
+
+ def is: Unit = { (<a>{"a"}</a>).must(<a>{"b"}</a>) }
+} \ No newline at end of file
diff --git a/test/files/pos/t6204-a.scala b/test/files/pos/t6204-a.scala
new file mode 100644
index 0000000000..bd8d5c437e
--- /dev/null
+++ b/test/files/pos/t6204-a.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+
+object Bish {
+ def m {
+ object Bash {
+ typeOf[Option[_]]
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t6204-b.scala b/test/files/pos/t6204-b.scala
new file mode 100644
index 0000000000..86094d1a19
--- /dev/null
+++ b/test/files/pos/t6204-b.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+
+object Bosh {
+ def Besh {
+ new {
+ val t = typeOf[Option[_]]
+ val x = t
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t6205.scala b/test/files/pos/t6205.scala
new file mode 100644
index 0000000000..02d924fe85
--- /dev/null
+++ b/test/files/pos/t6205.scala
@@ -0,0 +1,18 @@
+// original code by reporter
+class A[T]
+class Test1 {
+ def x(backing: Map[A[_], Any]) =
+ for( (k: A[kt], v) <- backing)
+ yield (k: A[kt])
+}
+
+// this tests the same thing as above, but independent of library classes,
+// earlier expansions eliminated as well as variance (everything's invariant)
+case class Holder[A](a: A)
+class Mapped[A] { def map[T](f: Holder[A] => T): Iterable[T] = ??? }
+class Test2 {
+ def works(backing: Mapped[A[_]]): Iterable[A[_]]
+ = backing.map(x =>
+ x match {case Holder(k: A[kt]) => (k: A[kt])}
+ )
+} \ No newline at end of file
diff --git a/test/files/pos/t6208.scala b/test/files/pos/t6208.scala
new file mode 100644
index 0000000000..dac571346d
--- /dev/null
+++ b/test/files/pos/t6208.scala
@@ -0,0 +1,4 @@
+object Test {
+ val col = collection.mutable.Queue(1,2,3)
+ val WORK: collection.mutable.Queue[Int] = col filterNot (_ % 2 == 0)
+}
diff --git a/test/files/pos/t6245/Base.java b/test/files/pos/t6245/Base.java
new file mode 100644
index 0000000000..651ea08bf2
--- /dev/null
+++ b/test/files/pos/t6245/Base.java
@@ -0,0 +1,5 @@
+package t1;
+
+public class Base {
+ protected Vis inner;
+}
diff --git a/test/files/pos/t6245/Foo.scala b/test/files/pos/t6245/Foo.scala
new file mode 100644
index 0000000000..f5f997fbff
--- /dev/null
+++ b/test/files/pos/t6245/Foo.scala
@@ -0,0 +1,9 @@
+import t1.Vis
+
+abstract class Foo extends t1.Base {
+ trait Nested {
+ def crash() {
+ inner
+ }
+ }
+}
diff --git a/test/files/pos/t6245/Vis.java b/test/files/pos/t6245/Vis.java
new file mode 100644
index 0000000000..4267f4e40b
--- /dev/null
+++ b/test/files/pos/t6245/Vis.java
@@ -0,0 +1,3 @@
+package t1;
+
+public class Vis { }
diff --git a/test/files/pos/t6259.scala b/test/files/pos/t6259.scala
new file mode 100644
index 0000000000..43361c417e
--- /dev/null
+++ b/test/files/pos/t6259.scala
@@ -0,0 +1,47 @@
+package t6259
+
+import scala.reflect.runtime.universe._
+
+class A[X](implicit val tt: TypeTag[X]) {}
+object B extends A[String]
+
+object C {
+ object D extends A[String]
+}
+
+trait E {
+ object F extends A[String]
+}
+
+class G {
+ object H extends A[String]
+}
+
+object Test {
+ val x = {
+ object InVal extends A[String]
+ 5
+ }
+
+}
+
+// Note: Both of these fail right now.
+
+trait NeedsEarly {
+ val x: AnyRef
+}
+
+object Early extends {
+ // Drops to this.getClass and is not ok...
+ val x = { object EarlyOk extends A[String]; EarlyOk }
+} with NeedsEarly
+
+
+class DoubleTrouble[X](x: AnyRef)(implicit override val tt: TypeTag[X]) extends A[X]
+
+object DoubleOk extends DoubleTrouble[String]({
+ // Drops to this.getClass and is an issue
+ object InnerTrouble extends A[String];
+ InnerTrouble
+})
+
diff --git a/test/files/pos/t6274.scala b/test/files/pos/t6274.scala
new file mode 100644
index 0000000000..cf769fc72d
--- /dev/null
+++ b/test/files/pos/t6274.scala
@@ -0,0 +1,13 @@
+trait Crash {
+
+ def foo(i: => Int) (j: Int): Int
+
+ def t = {
+ // var count = 0
+ foo {
+ var count = 0
+ count
+ } _
+ }
+
+}
diff --git a/test/files/pos/t6278-synth-def.scala b/test/files/pos/t6278-synth-def.scala
new file mode 100644
index 0000000000..b8b660fbe3
--- /dev/null
+++ b/test/files/pos/t6278-synth-def.scala
@@ -0,0 +1,30 @@
+
+package t6278
+
+import language.implicitConversions
+
+object test {
+ def ok() {
+ class Foo(val i: Int) {
+ def foo[A](body: =>A): A = body
+ }
+ implicit def toFoo(i: Int): Foo = new Foo(i)
+
+ val k = 1
+ k foo println("k?")
+ val j = 2
+ }
+ def nope() {
+ implicit class Foo(val i: Int) {
+ def foo[A](body: =>A): A = body
+ }
+
+ val k = 1
+ k foo println("k?")
+ //lazy
+ val j = 2
+ }
+ def main(args: Array[String]) {
+ ok(); nope()
+ }
+}
diff --git a/test/files/pos/t6335.scala b/test/files/pos/t6335.scala
new file mode 100644
index 0000000000..50e34092d1
--- /dev/null
+++ b/test/files/pos/t6335.scala
@@ -0,0 +1,25 @@
+object E extends Z {
+ def X = 3
+ implicit class X(val i: Int) {
+ def xx = i
+ }
+
+ def Y(a: Any) = 0
+ object Y
+ implicit class Y(val i: String) { def yy = i }
+
+ implicit class Z(val i: Boolean) { def zz = i }
+}
+
+trait Z {
+ def Z = 0
+}
+
+object Test {
+ import E._
+ 0.xx
+
+ "".yy
+
+ true.zz
+}
diff --git a/test/files/pos/t6367.scala b/test/files/pos/t6367.scala
new file mode 100644
index 0000000000..1214be7418
--- /dev/null
+++ b/test/files/pos/t6367.scala
@@ -0,0 +1,34 @@
+package play.api.libs.json.util
+
+trait FunctionalCanBuild[M[_]]{
+ def apply[A,B](ma:M[A], mb:M[B]):M[A ~ B]
+}
+
+trait Variant[M[_]]
+
+trait Functor[M[_]] extends Variant[M]{
+ def fmap[A,B](m:M[A], f: A => B): M[B]
+}
+
+case class ~[A,B](_1:A,_2:B)
+
+class FunctionalBuilder[M[_]](canBuild:FunctionalCanBuild[M]){
+ class CanBuild20[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20](
+ m1:M[A1 ~ A2 ~ A3 ~ A4 ~ A5 ~ A6 ~ A7 ~ A8 ~ A9 ~ A10 ~ A11 ~ A12 ~ A13 ~ A14 ~ A15 ~ A16 ~ A17 ~ A18 ~ A19],
+ m2:M[A20]
+ ) {
+
+ def ~[A21](m3:M[A21]) = new CanBuild21(canBuild(m1,m2),m3)
+
+ def apply[B](f: (A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20) => B)(implicit fu:Functor[M]): M[B] =
+ fu.fmap[A1 ~ A2 ~ A3 ~ A4 ~ A5 ~ A6 ~ A7 ~ A8 ~ A9 ~ A10 ~ A11 ~ A12 ~ A13 ~ A14 ~ A15 ~ A16 ~ A17 ~ A18 ~ A19 ~ A20, B](
+ canBuild(m1, m2),
+ { case a1 ~ a2 ~ a3 ~ a4 ~ a5 ~ a6 ~ a7 ~ a8 ~ a9 ~ a10 ~ a11 ~ a12 ~ a13 ~ a14 ~ a15 ~ a16 ~ a17 ~ a18 ~ a19 ~ a20 =>
+ f(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18, a19, a20) }
+ )
+ }
+
+ class CanBuild21[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21](m1:M[A1 ~ A2 ~ A3 ~ A4 ~ A5 ~ A6 ~ A7 ~ A8 ~ A9 ~ A10 ~ A11 ~ A12 ~ A13 ~ A14 ~ A15 ~ A16 ~ A17 ~ A18 ~ A19 ~ A20], m2:M[A21]){
+ }
+
+}
diff --git a/test/files/pos/typetags.scala b/test/files/pos/typetags.scala
index 33390d7b89..239a9b32ec 100644
--- a/test/files/pos/typetags.scala
+++ b/test/files/pos/typetags.scala
@@ -1,12 +1,16 @@
-import scala.reflect.{basis => rb}
-import scala.reflect.runtime.{universe => ru}
+// TODO come up with a non-trivial universe different from ru
+// and rewrite this test, so that it makes sure that cross-universe implicit searches work
+//
+// import scala.reflect.{basis => rb}
+// import scala.reflect.runtime.{universe => ru}
+// object Test {
+// def main(args: Array[String]) {
+// def foo(implicit t: rb.TypeTag[List[Int]]) {
+// println(t)
+// val t2: ru.TypeTag[_] = t in ru.rootMirror
+// println(t2)
+// }
+// }
+// }
-object Test {
- def main(args: Array[String]) {
- def foo(implicit t: rb.TypeTag[List[Int]]) {
- println(t)
- val t2: ru.TypeTag[_] = t in ru.rootMirror
- println(t2)
- }
- }
-}
+object Test extends App \ No newline at end of file
diff --git a/test/files/pos/z1720.scala b/test/files/pos/z1720.scala
new file mode 100644
index 0000000000..7394d428c1
--- /dev/null
+++ b/test/files/pos/z1720.scala
@@ -0,0 +1,16 @@
+package test
+
+class Thing {
+ def info: Info[this.type] = InfoRepository.getInfo(this)
+ def info2: Info[this.type] = {
+ def self: this.type = this
+ InfoRepository.getInfo(self)
+ }
+}
+
+trait Info[T]
+case class InfoImpl[T](thing: T) extends Info[T]
+
+object InfoRepository {
+ def getInfo(t: Thing): Info[t.type] = InfoImpl(t)
+}
diff --git a/test/files/presentation/recursive-ask.check b/test/files/presentation/recursive-ask.check
new file mode 100644
index 0000000000..357d2cf879
--- /dev/null
+++ b/test/files/presentation/recursive-ask.check
@@ -0,0 +1,4 @@
+[ outer] askForResponse
+[nested] askForResponse
+passed
+done
diff --git a/test/files/presentation/recursive-ask/RecursiveAsk.scala b/test/files/presentation/recursive-ask/RecursiveAsk.scala
new file mode 100644
index 0000000000..b0e29b3fd3
--- /dev/null
+++ b/test/files/presentation/recursive-ask/RecursiveAsk.scala
@@ -0,0 +1,20 @@
+import scala.tools.nsc.interactive.tests._
+
+object Test extends InteractiveTest {
+ override def execute(): Unit = recursiveAskForResponse()
+
+ def recursiveAskForResponse() {
+ val res0 = compiler.askForResponse( () => {
+ println("[ outer] askForResponse")
+ val res = compiler.askForResponse( () => { println("[nested] askForResponse") })
+ println (res.get(5000) match {
+ case Some(_) => "passed"
+ case None => "timeout"
+ })
+ })
+
+ res0.get
+
+ println("done")
+ }
+}
diff --git a/test/files/run/Meter.scala b/test/files/run/Meter.scala
index d94f338ca9..a10ad31b4a 100644
--- a/test/files/run/Meter.scala
+++ b/test/files/run/Meter.scala
@@ -1,8 +1,13 @@
package a {
+ abstract class BoxingConversions[Boxed, Unboxed] {
+ def box(x: Unboxed): Boxed
+ def unbox(x: Boxed): Unboxed
+ }
+
class Meter(val underlying: Double) extends AnyVal with _root_.b.Printable {
def + (other: Meter): Meter =
new Meter(this.underlying + other.underlying)
- def / (other: Meter): Double = this.underlying / other.underlying
+ def / (other: Meter)(implicit dummy: Meter.MeterArg = null): Double = this.underlying / other.underlying
def / (factor: Double): Meter = new Meter(this.underlying / factor)
def < (other: Meter): Boolean = this.underlying < other.underlying
def toFoot: Foot = new Foot(this.underlying * 0.3048)
@@ -12,6 +17,8 @@ package a {
object Meter extends (Double => Meter) {
+ private[a] trait MeterArg
+
def apply(x: Double): Meter = new Meter(x)
implicit val boxings = new BoxingConversions[Meter, Double] {
@@ -80,7 +87,7 @@ object Test extends App {
println(m)
foo(arr)
}
- //
+ //
// { println("testing wrapped arrays")
// import collection.mutable.FlatArray
// val arr = FlatArray(x, y + x)
diff --git a/test/files/run/MeterCaseClass.scala b/test/files/run/MeterCaseClass.scala
index e5979cf761..39d95c2af5 100644
--- a/test/files/run/MeterCaseClass.scala
+++ b/test/files/run/MeterCaseClass.scala
@@ -1,8 +1,13 @@
package a {
+ abstract class BoxingConversions[Boxed, Unboxed] {
+ def box(x: Unboxed): Boxed
+ def unbox(x: Boxed): Unboxed
+ }
+
case class Meter(underlying: Double) extends AnyVal with _root_.b.Printable {
def + (other: Meter): Meter =
new Meter(this.underlying + other.underlying)
- def / (other: Meter): Double = this.underlying / other.underlying
+ def / (other: Meter)(implicit dummy: Meter.MeterArg = null): Double = this.underlying / other.underlying
def / (factor: Double): Meter = new Meter(this.underlying / factor)
def < (other: Meter): Boolean = this.underlying < other.underlying
def toFoot: Foot = new Foot(this.underlying * 0.3048)
@@ -11,6 +16,8 @@ package a {
object Meter extends (Double => Meter) {
+ private[a] trait MeterArg
+
implicit val boxings = new BoxingConversions[Meter, Double] {
def box(x: Double) = new Meter(x)
def unbox(m: Meter) = m.underlying
@@ -77,7 +84,7 @@ object Test extends App {
println(m)
foo(arr)
}
- //
+ //
// { println("testing wrapped arrays")
// import collection.mutable.FlatArray
// val arr = FlatArray(x, y + x)
diff --git a/test/files/run/abstypetags_core.check b/test/files/run/abstypetags_core.check
index 8d20e099c4..980b4719bf 100644
--- a/test/files/run/abstypetags_core.check
+++ b/test/files/run/abstypetags_core.check
@@ -1,30 +1,30 @@
-true
-TypeTag[Byte]
-true
-TypeTag[Short]
-true
-TypeTag[Char]
-true
-TypeTag[Int]
-true
-TypeTag[Long]
-true
-TypeTag[Float]
-true
-TypeTag[Double]
-true
-TypeTag[Boolean]
-true
-TypeTag[Unit]
-true
-TypeTag[Any]
-true
-TypeTag[AnyVal]
-true
-TypeTag[AnyRef]
-true
-TypeTag[java.lang.Object]
-true
-TypeTag[Null]
-true
-TypeTag[Nothing]
+true
+TypeTag[Byte]
+true
+TypeTag[Short]
+true
+TypeTag[Char]
+true
+TypeTag[Int]
+true
+TypeTag[Long]
+true
+TypeTag[Float]
+true
+TypeTag[Double]
+true
+TypeTag[Boolean]
+true
+TypeTag[Unit]
+true
+TypeTag[Any]
+true
+TypeTag[AnyVal]
+true
+TypeTag[AnyRef]
+true
+TypeTag[java.lang.Object]
+true
+TypeTag[Null]
+true
+TypeTag[Nothing]
diff --git a/test/files/run/abstypetags_core.scala b/test/files/run/abstypetags_core.scala
index 226de94055..2692fec035 100644
--- a/test/files/run/abstypetags_core.scala
+++ b/test/files/run/abstypetags_core.scala
@@ -1,34 +1,34 @@
import scala.reflect.runtime.universe._
object Test extends App {
- println(implicitly[AbsTypeTag[Byte]] eq AbsTypeTag.Byte)
- println(implicitly[AbsTypeTag[Byte]])
- println(implicitly[AbsTypeTag[Short]] eq AbsTypeTag.Short)
- println(implicitly[AbsTypeTag[Short]])
- println(implicitly[AbsTypeTag[Char]] eq AbsTypeTag.Char)
- println(implicitly[AbsTypeTag[Char]])
- println(implicitly[AbsTypeTag[Int]] eq AbsTypeTag.Int)
- println(implicitly[AbsTypeTag[Int]])
- println(implicitly[AbsTypeTag[Long]] eq AbsTypeTag.Long)
- println(implicitly[AbsTypeTag[Long]])
- println(implicitly[AbsTypeTag[Float]] eq AbsTypeTag.Float)
- println(implicitly[AbsTypeTag[Float]])
- println(implicitly[AbsTypeTag[Double]] eq AbsTypeTag.Double)
- println(implicitly[AbsTypeTag[Double]])
- println(implicitly[AbsTypeTag[Boolean]] eq AbsTypeTag.Boolean)
- println(implicitly[AbsTypeTag[Boolean]])
- println(implicitly[AbsTypeTag[Unit]] eq AbsTypeTag.Unit)
- println(implicitly[AbsTypeTag[Unit]])
- println(implicitly[AbsTypeTag[Any]] eq AbsTypeTag.Any)
- println(implicitly[AbsTypeTag[Any]])
- println(implicitly[AbsTypeTag[AnyVal]] eq AbsTypeTag.AnyVal)
- println(implicitly[AbsTypeTag[AnyVal]])
- println(implicitly[AbsTypeTag[AnyRef]] eq AbsTypeTag.AnyRef)
- println(implicitly[AbsTypeTag[AnyRef]])
- println(implicitly[AbsTypeTag[Object]] eq AbsTypeTag.Object)
- println(implicitly[AbsTypeTag[Object]])
- println(implicitly[AbsTypeTag[Null]] eq AbsTypeTag.Null)
- println(implicitly[AbsTypeTag[Null]])
- println(implicitly[AbsTypeTag[Nothing]] eq AbsTypeTag.Nothing)
- println(implicitly[AbsTypeTag[Nothing]])
+ println(implicitly[WeakTypeTag[Byte]] eq WeakTypeTag.Byte)
+ println(implicitly[WeakTypeTag[Byte]])
+ println(implicitly[WeakTypeTag[Short]] eq WeakTypeTag.Short)
+ println(implicitly[WeakTypeTag[Short]])
+ println(implicitly[WeakTypeTag[Char]] eq WeakTypeTag.Char)
+ println(implicitly[WeakTypeTag[Char]])
+ println(implicitly[WeakTypeTag[Int]] eq WeakTypeTag.Int)
+ println(implicitly[WeakTypeTag[Int]])
+ println(implicitly[WeakTypeTag[Long]] eq WeakTypeTag.Long)
+ println(implicitly[WeakTypeTag[Long]])
+ println(implicitly[WeakTypeTag[Float]] eq WeakTypeTag.Float)
+ println(implicitly[WeakTypeTag[Float]])
+ println(implicitly[WeakTypeTag[Double]] eq WeakTypeTag.Double)
+ println(implicitly[WeakTypeTag[Double]])
+ println(implicitly[WeakTypeTag[Boolean]] eq WeakTypeTag.Boolean)
+ println(implicitly[WeakTypeTag[Boolean]])
+ println(implicitly[WeakTypeTag[Unit]] eq WeakTypeTag.Unit)
+ println(implicitly[WeakTypeTag[Unit]])
+ println(implicitly[WeakTypeTag[Any]] eq WeakTypeTag.Any)
+ println(implicitly[WeakTypeTag[Any]])
+ println(implicitly[WeakTypeTag[AnyVal]] eq WeakTypeTag.AnyVal)
+ println(implicitly[WeakTypeTag[AnyVal]])
+ println(implicitly[WeakTypeTag[AnyRef]] eq WeakTypeTag.AnyRef)
+ println(implicitly[WeakTypeTag[AnyRef]])
+ println(implicitly[WeakTypeTag[Object]] eq WeakTypeTag.Object)
+ println(implicitly[WeakTypeTag[Object]])
+ println(implicitly[WeakTypeTag[Null]] eq WeakTypeTag.Null)
+ println(implicitly[WeakTypeTag[Null]])
+ println(implicitly[WeakTypeTag[Nothing]] eq WeakTypeTag.Nothing)
+ println(implicitly[WeakTypeTag[Nothing]])
} \ No newline at end of file
diff --git a/test/files/run/abstypetags_serialize.check b/test/files/run/abstypetags_serialize.check
index aafb4761ad..bddc4523e6 100644
--- a/test/files/run/abstypetags_serialize.check
+++ b/test/files/run/abstypetags_serialize.check
@@ -1,2 +1,2 @@
-java.io.NotSerializableException: Test$$typecreator1$1
-java.io.NotSerializableException: Test$$typecreator2$1
+java.io.NotSerializableException: Test$$typecreator1$1
+java.io.NotSerializableException: Test$$typecreator2$1
diff --git a/test/files/run/abstypetags_serialize.scala b/test/files/run/abstypetags_serialize.scala
index 5b9142f6d5..93fb5dcd06 100644
--- a/test/files/run/abstypetags_serialize.scala
+++ b/test/files/run/abstypetags_serialize.scala
@@ -1,9 +1,10 @@
import java.io._
import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
import scala.reflect.runtime.{currentMirror => cm}
object Test extends App {
- def test(tag: AbsTypeTag[_]) =
+ def test(tag: WeakTypeTag[_]) =
try {
val fout = new ByteArrayOutputStream()
val out = new ObjectOutputStream(fout)
@@ -13,7 +14,7 @@ object Test extends App {
val fin = new ByteArrayInputStream(fout.toByteArray)
val in = new ObjectInputStream(fin)
- val retag = in.readObject().asInstanceOf[scala.reflect.basis.AbsTypeTag[_]].in(cm)
+ val retag = in.readObject().asInstanceOf[ru.WeakTypeTag[_]].in(cm)
in.close()
fin.close()
@@ -24,8 +25,8 @@ object Test extends App {
}
def qwe[T, U[_]] = {
- test(implicitly[AbsTypeTag[T]])
- test(implicitly[AbsTypeTag[U[String]]])
+ test(implicitly[WeakTypeTag[T]])
+ test(implicitly[WeakTypeTag[U[String]]])
}
qwe
diff --git a/test/files/run/applydynamic_sip.check b/test/files/run/applydynamic_sip.check
index d94db4417e..6d04dc4524 100644
--- a/test/files/run/applydynamic_sip.check
+++ b/test/files/run/applydynamic_sip.check
@@ -20,3 +20,10 @@ qual.selectDynamic(sel)
qual.selectDynamic(sel)
.apply
.update(1, 1)
+qual.applyDynamic(apply)(a)
+qual.applyDynamic(apply)(a)
+qual.applyDynamic(apply)(a)
+qual.applyDynamic(apply)(a)
+qual.applyDynamicNamed(apply)((arg,a))
+qual.applyDynamicNamed(apply)((,a), (arg2,a2))
+qual.applyDynamic(update)(a, a2)
diff --git a/test/files/run/applydynamic_sip.scala b/test/files/run/applydynamic_sip.scala
index 57cb4349f7..cf918a82ed 100644
--- a/test/files/run/applydynamic_sip.scala
+++ b/test/files/run/applydynamic_sip.scala
@@ -55,4 +55,12 @@ object Test extends App {
qual.sel() = expr // parser turns this into qual.sel.update(expr)
qual.sel.apply(1)
qual.sel.apply(1) = 1
-} \ No newline at end of file
+
+ qual.apply(a)
+ qual.apply[String](a)
+ qual(a)
+ qual[String](a)
+ qual[T](arg = a)
+ qual(a, arg2 = "a2")
+ qual(a) = a2
+}
diff --git a/test/files/run/classmanifests_new_alias.check b/test/files/run/classmanifests_new_alias.check
index ffb8482f91..032521a929 100644
--- a/test/files/run/classmanifests_new_alias.check
+++ b/test/files/run/classmanifests_new_alias.check
@@ -1,2 +1,2 @@
-Int
-true
+Int
+true
diff --git a/test/files/run/classmanifests_new_core.check b/test/files/run/classmanifests_new_core.check
index ffb8482f91..032521a929 100644
--- a/test/files/run/classmanifests_new_core.check
+++ b/test/files/run/classmanifests_new_core.check
@@ -1,2 +1,2 @@
-Int
-true
+Int
+true
diff --git a/test/files/run/classtags_contextbound.check b/test/files/run/classtags_contextbound.check
index 4104d544ba..604122846e 100644
--- a/test/files/run/classtags_contextbound.check
+++ b/test/files/run/classtags_contextbound.check
@@ -1 +1 @@
-class [I
+class [I
diff --git a/test/files/run/classtags_core.check b/test/files/run/classtags_core.check
index 2241108ba0..d5c4386482 100644
--- a/test/files/run/classtags_core.check
+++ b/test/files/run/classtags_core.check
@@ -1,30 +1,30 @@
true
-ClassTag[byte]
+Byte
true
-ClassTag[short]
+Short
true
-ClassTag[char]
+Char
true
-ClassTag[int]
+Int
true
-ClassTag[long]
+Long
true
-ClassTag[float]
+Float
true
-ClassTag[double]
+Double
true
-ClassTag[boolean]
+Boolean
true
-ClassTag[void]
+Unit
true
-ClassTag[class java.lang.Object]
+Any
true
-ClassTag[class java.lang.Object]
+AnyVal
true
-ClassTag[class java.lang.Object]
+Object
true
-ClassTag[class java.lang.Object]
+Object
true
-ClassTag[class scala.runtime.Null$]
+Null
true
-ClassTag[Nothing]
+Nothing
diff --git a/test/files/run/classtags_multi.check b/test/files/run/classtags_multi.check
index 3a7f16c3a0..56da87eeb1 100644
--- a/test/files/run/classtags_multi.check
+++ b/test/files/run/classtags_multi.check
@@ -1,5 +1,5 @@
-ClassTag[int]
-ClassTag[class [I]
-ClassTag[class [[I]
-ClassTag[class [[[I]
-ClassTag[class [[[[I]
+Int
+Array[int]
+Array[Array[int]]
+Array[Array[Array[int]]]
+Array[Array[Array[Array[int]]]]
diff --git a/test/files/run/collections.check b/test/files/run/collections.check
index b87a5998c5..c24150b24d 100644
--- a/test/files/run/collections.check
+++ b/test/files/run/collections.check
@@ -2,6 +2,10 @@
test1: 14005
test2: 25005003, iters = 5000
test3: 25005003
+***** mutable.LinkedHashSet:
+test1: 14005
+test2: 25005003, iters = 5000
+test3: 25005003
***** immutable.Set:
test1: 14005
test2: 25005003, iters = 5000
@@ -18,6 +22,10 @@ test3: 25005003
test1: 14005
test2: 25005003, iters = 5000
test3: 25005003
+***** mutable.LinkedHashMap:
+test1: 14005
+test2: 25005003, iters = 5000
+test3: 25005003
***** immutable.Map:
test1: 14005
test2: 25005003, iters = 5000
diff --git a/test/files/run/collections.scala b/test/files/run/collections.scala
index 60f0765e6a..69c40fae80 100644
--- a/test/files/run/collections.scala
+++ b/test/files/run/collections.scala
@@ -106,10 +106,12 @@ object Test extends App {
}
test("mutable.HashSet", new mutable.HashSet[Int], 5000)
+ test("mutable.LinkedHashSet", new mutable.LinkedHashSet[Int], 5000)
test("immutable.Set", immutable.Set[Int](), 5000)
test("immutable.ListSet", new immutable.ListSet[Int], 5000)
test("immutable.TreeSet", new immutable.TreeSet[Int], 5000)
test("mutable.HashMap", new mutable.HashMap[Int, Int], 5000)
+ test("mutable.LinkedHashMap", new mutable.LinkedHashMap[Int, Int], 5000)
test("immutable.Map", immutable.Map[Int, Int](), 5000)
test("immutable.TreeMap", new immutable.TreeMap[Int, Int], 5000)
test("immutable.ListMap", new immutable.ListMap[Int, Int], 3000)
diff --git a/test/files/run/colltest.check b/test/files/run/colltest.check
index 1ad81a1350..e5bb013ed7 100644
--- a/test/files/run/colltest.check
+++ b/test/files/run/colltest.check
@@ -5,3 +5,4 @@ false
true
false
succeeded for 10 iterations.
+succeeded for 10 iterations.
diff --git a/test/files/run/colltest.scala b/test/files/run/colltest.scala
index ecd234bdd1..703e94a3c7 100644
--- a/test/files/run/colltest.scala
+++ b/test/files/run/colltest.scala
@@ -61,5 +61,6 @@ object Test extends App {
}
t3954
- new TestSet(HashSet.empty, new scala.collection.mutable.LinkedHashSet)
+ new TestSet(HashSet.empty, new LinkedHashSet)
+ new TestSet(new ImmutableSetAdaptor(collection.immutable.Set.empty[Int]), new LinkedHashSet)
}
diff --git a/test/files/run/colltest1.check b/test/files/run/colltest1.check
index 7377174281..5ec6286d9e 100644
--- a/test/files/run/colltest1.check
+++ b/test/files/run/colltest1.check
@@ -107,3 +107,5 @@ List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K)
List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
diff --git a/test/files/run/colltest1.scala b/test/files/run/colltest1.scala
index 1cbd932222..54adeb7cda 100644
--- a/test/files/run/colltest1.scala
+++ b/test/files/run/colltest1.scala
@@ -226,6 +226,7 @@ object Test extends App {
setTest(mutable.Set())
setTest(immutable.Set())
setTest(mutable.HashSet())
+ setTest(mutable.LinkedHashSet())
setTest(immutable.HashSet())
mapTest(Map())
@@ -233,5 +234,6 @@ object Test extends App {
mapTest(immutable.Map())
mapTest(immutable.TreeMap())
mutableMapTest(mutable.HashMap())
+ mutableMapTest(mutable.LinkedHashMap())
mapTest(immutable.HashMap())
}
diff --git a/test/files/run/compiler-asSeenFrom.check b/test/files/run/compiler-asSeenFrom.check
index 96e257d303..47d40b0331 100644
--- a/test/files/run/compiler-asSeenFrom.check
+++ b/test/files/run/compiler-asSeenFrom.check
@@ -269,8 +269,8 @@ value jZ { // after parser
value jZ { // after explicitouter
protected val $outer: D.this.type
- val ll$D$J$$$outer(): D.this.type
- val ll$C$I$$$outer(): C.this.type
+ val $outer(): D.this.type
+ val $outer(): C.this.type
def thisI(): I.this.type
def thisC(): C.this.type
def t2(): T2
@@ -279,9 +279,9 @@ value jZ { // after explicitouter
value jZ { // after erasure
protected val $outer: ll.D
- val ll$D$J$$$outer(): ll.D
+ val $outer(): ll.D
protected val $outer: ll.C
- val ll$C$I$$$outer(): ll.C
+ val $outer(): ll.C
def thisI(): ll.C#I
def thisC(): ll.C
def t2(): Object
@@ -290,9 +290,9 @@ value jZ { // after erasure
value jZ { // after flatten
protected val $outer: ll.D
- val ll$D$J$$$outer(): ll.D
+ val $outer(): ll.D
protected val $outer: ll.C
- val ll$C$I$$$outer(): ll.C
+ val $outer(): ll.C
def thisI(): ll.C#C$I
def thisC(): ll.C
def t2(): Object
diff --git a/test/files/run/constant-type.check b/test/files/run/constant-type.check
new file mode 100644
index 0000000000..dfd8be5297
--- /dev/null
+++ b/test/files/run/constant-type.check
@@ -0,0 +1,30 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> :power
+** Power User mode enabled - BEEP WHIR GYVE **
+** :phase has been set to 'typer'. **
+** scala.tools.nsc._ has been imported **
+** global._, definitions._ also imported **
+** Try :help, :vals, power.<tab> **
+
+scala> val s = transformedType(StringClass.toType).asInstanceOf[Type]
+s: $r.intp.global.Type = String
+
+scala> { println(afterPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))) }
+Class[String](classOf[java.lang.String])
+
+scala> { afterPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))) }
+Class(classOf[java.lang.String])
+
+scala> { ConstantType(Constant(s)); println(afterPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))); }
+Class[String](classOf[java.lang.String])
+
+scala> { ConstantType(Constant(s)); afterPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))); }
+Class(classOf[java.lang.String])
+
+scala>
+
+scala>
diff --git a/test/files/run/constant-type.scala b/test/files/run/constant-type.scala
new file mode 100644
index 0000000000..84539e2895
--- /dev/null
+++ b/test/files/run/constant-type.scala
@@ -0,0 +1,17 @@
+import scala.tools.partest.ReplTest
+
+// see the commit message to understand what this stuff is about
+// just a quick note:
+// transformedType returns an erased version of the type
+// as explained in the commit message, Type.erasure won't do for this test
+// because it does some postprocessing to the result of transformedType
+object Test extends ReplTest {
+ def code = """
+:power
+val s = transformedType(StringClass.toType).asInstanceOf[Type]
+{ println(afterPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))) }
+{ afterPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))) }
+{ ConstantType(Constant(s)); println(afterPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))); }
+{ ConstantType(Constant(s)); afterPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))); }
+ """
+}
diff --git a/test/files/run/dead-code-elimination.check b/test/files/run/dead-code-elimination.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/run/dead-code-elimination.check
diff --git a/test/files/run/dead-code-elimination.flags b/test/files/run/dead-code-elimination.flags
new file mode 100644
index 0000000000..49d036a887
--- /dev/null
+++ b/test/files/run/dead-code-elimination.flags
@@ -0,0 +1 @@
+-optimize
diff --git a/test/files/run/dead-code-elimination.scala b/test/files/run/dead-code-elimination.scala
new file mode 100644
index 0000000000..1af17c936b
--- /dev/null
+++ b/test/files/run/dead-code-elimination.scala
@@ -0,0 +1,33 @@
+
+// This testcase is a snippet that did not compile correctly under
+// pre-release 2.10.x. The relevant discussion around it can be
+// found at:
+// https://groups.google.com/forum/?fromgroups#!topic/scala-internals/qcyTjk8euUI[1-25]
+//
+// The reason it did not compile is related to the fact that ICode
+// ops did not correctly define the stack entries they consumed and
+// the dead code elimination phase was unable to correctly reconstruct
+// the stack after code elimination.
+//
+// Originally, this did not compile, but I included it in the run
+// tests because this was ASM-dependent and did not happen for GenJVM.
+//
+// Thus, we run the code and force the loading of class B -- if the
+// bytecode is incorrect, it will fail the test.
+
+final class A {
+ def f1 = true
+ def f2 = true
+ @inline def f3 = f1 || f2
+ class B {
+ def f() = 1 to 10 foreach (_ => f3)
+ }
+ def f = (new B).f()
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ // force the loading of B
+ (new A).f
+ }
+}
diff --git a/test/files/run/dynamic-anyval.check b/test/files/run/dynamic-anyval.check
new file mode 100644
index 0000000000..dee7bef8e8
--- /dev/null
+++ b/test/files/run/dynamic-anyval.check
@@ -0,0 +1,4 @@
+().dingo(bippy, 5)
+List(1, 2, 3).dingo(bippy, 5)
+().dingo(bippy, 5)
+List(1, 2, 3).dingo(bippy, 5)
diff --git a/test/files/run/dynamic-anyval.scala b/test/files/run/dynamic-anyval.scala
new file mode 100644
index 0000000000..605503d377
--- /dev/null
+++ b/test/files/run/dynamic-anyval.scala
@@ -0,0 +1,22 @@
+import scala.language.dynamics
+
+object Test {
+ implicit class DynamicValue[T](val value: T) extends AnyVal with Dynamic {
+ def applyDynamic(name: String)(args: Any*) = println(s"""$this.$name(${args mkString ", "})""")
+ override def toString = "" + value
+ }
+ implicit class DynamicValue2[T](val value: T) extends Dynamic {
+ def applyDynamic(name: String)(args: Any*) = println(s"""$this.$name(${args mkString ", "})""")
+ override def toString = "" + value
+ }
+
+ def f[T](x: DynamicValue[T]) = x.dingo("bippy", 5)
+ def g[T](x: DynamicValue2[T]) = x.dingo("bippy", 5)
+
+ def main(args: Array[String]): Unit = {
+ f(())
+ f(List(1, 2, 3))
+ g(())
+ g(List(1, 2, 3))
+ }
+}
diff --git a/test/files/run/empty-array.check b/test/files/run/empty-array.check
new file mode 100644
index 0000000000..bb0b1cf658
--- /dev/null
+++ b/test/files/run/empty-array.check
@@ -0,0 +1,3 @@
+0
+0
+0
diff --git a/test/files/run/empty-array.scala b/test/files/run/empty-array.scala
new file mode 100644
index 0000000000..6e37dca37d
--- /dev/null
+++ b/test/files/run/empty-array.scala
@@ -0,0 +1,8 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ println(Array.emptyByteArray.length)
+ println(Array.emptyDoubleArray.length)
+ println(Array.emptyBooleanArray.length)
+ // okay okay okay
+ }
+}
diff --git a/test/files/run/existentials3-new.check b/test/files/run/existentials3-new.check
index 0d6f694a68..00614b19db 100644
--- a/test/files/run/existentials3-new.check
+++ b/test/files/run/existentials3-new.check
@@ -1,24 +1,24 @@
-Bar.type, t=AbstractTypeRef, s= <: scala.runtime.AbstractFunction0[Bar] with Serializable{case def unapply(x$0: Bar): Boolean} with Singleton
-Bar, t=AbstractTypeRef, s= <: Test.ToS with Product with Serializable{def copy(): Bar}
-Test.ToS, t=RefinedType, s=f3
-Test.ToS, t=RefinedType, s=f4
-Test.ToS, t=RefinedType, s=f5
-() => Test.ToS, t=TypeRef, s=class Function0
-() => Test.ToS, t=TypeRef, s=class Function0
-$anon, t=AbstractTypeRef, s= <: B with Test.ToS
-$anon, t=AbstractTypeRef, s= <: B with A with Test.ToS
-List[java.lang.Object{type T1}#T1], t=TypeRef, s=class List
-List[Seq[Int]], t=TypeRef, s=class List
-List[Seq[U forSome { type U <: Int }]], t=TypeRef, s=class List
-Bar.type, t=AbstractTypeRef, s= <: scala.runtime.AbstractFunction0[Bar] with Serializable{case def unapply(x$0: Bar): Boolean} with Singleton
-Bar, t=AbstractTypeRef, s= <: Test.ToS with Product with Serializable{def copy(): Bar}
-Test.ToS, t=RefinedType, s=g3
-Test.ToS, t=RefinedType, s=g4
-Test.ToS, t=RefinedType, s=g5
-() => Test.ToS, t=TypeRef, s=class Function0
-() => Test.ToS, t=TypeRef, s=class Function0
-$anon, t=AbstractTypeRef, s= <: B with Test.ToS
-$anon, t=AbstractTypeRef, s= <: B with A with Test.ToS
-List[java.lang.Object{type T1}#T1], t=TypeRef, s=class List
-List[Seq[Int]], t=TypeRef, s=class List
-List[Seq[U forSome { type U <: Int }]], t=TypeRef, s=class List
+Bar.type, t=TypeRef, s=type Bar.type
+Bar, t=TypeRef, s=type Bar
+Test.ToS, t=RefinedType, s=f3
+Test.ToS, t=RefinedType, s=f4
+Test.ToS, t=RefinedType, s=f5
+() => Test.ToS, t=TypeRef, s=trait Function0
+() => Test.ToS, t=TypeRef, s=trait Function0
+$anon, t=TypeRef, s=type $anon
+$anon, t=TypeRef, s=type $anon
+List[java.lang.Object{type T1}#T1], t=TypeRef, s=class List
+List[Seq[Int]], t=TypeRef, s=class List
+List[Seq[U forSome { type U <: Int }]], t=TypeRef, s=class List
+Bar.type, t=TypeRef, s=type Bar.type
+Bar, t=TypeRef, s=type Bar
+Test.ToS, t=RefinedType, s=g3
+Test.ToS, t=RefinedType, s=g4
+Test.ToS, t=RefinedType, s=g5
+() => Test.ToS, t=TypeRef, s=trait Function0
+() => Test.ToS, t=TypeRef, s=trait Function0
+$anon, t=TypeRef, s=type $anon
+$anon, t=TypeRef, s=type $anon
+List[java.lang.Object{type T1}#T1], t=TypeRef, s=class List
+List[Seq[Int]], t=TypeRef, s=class List
+List[Seq[U forSome { type U <: Int }]], t=TypeRef, s=class List
diff --git a/test/files/run/existentials3-new.scala b/test/files/run/existentials3-new.scala
index 649fac8327..110c8eff7a 100644
--- a/test/files/run/existentials3-new.scala
+++ b/test/files/run/existentials3-new.scala
@@ -35,34 +35,34 @@ object Test {
def printTpe(t: Type) = {
val s = if (t.typeSymbol.isFreeType) t.typeSymbol.typeSignature.toString else t.typeSymbol.toString
- println("%s, t=%s, s=%s".format(t, t.kind, s))
+ println("%s, t=%s, s=%s".format(t, t.asInstanceOf[Product].productPrefix, s))
}
def m[T: TypeTag](x: T) = printTpe(typeOf[T])
- def m2[T: AbsTypeTag](x: T) = printTpe(implicitly[AbsTypeTag[T]].tpe)
+ def m2[T: WeakTypeTag](x: T) = printTpe(implicitly[WeakTypeTag[T]].tpe)
// tags do work for f10/g10
def main(args: Array[String]): Unit = {
- m(f1)
- m(f2)
+ m2(f1)
+ m2(f2)
m(f3)
m(f4)
m(f5)
m(f6)
m(f7)
- m(f8)
- m(f9)
+ m2(f8)
+ m2(f9)
m2(f10)
m(f11)
m(f12)
- m(g1)
- m(g2)
+ m2(g1)
+ m2(g2)
m(g3)
m(g4)
m(g5)
m(g6)
m(g7)
- m(g8)
- m(g9)
+ m2(g8)
+ m2(g9)
m2(g10)
m(g11)
m(g12)
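Note: the changes in this block track the rename of AbsTypeTag to WeakTypeTag: a weak tag may still contain references to unresolved (abstract or free) types, while a plain TypeTag must be fully concrete. A minimal sketch of the distinction, independent of the test above:

    import scala.reflect.runtime.universe._

    object TagDemo {
      // A concrete type always has a TypeTag.
      def concrete[T: TypeTag]: Type = typeOf[T]

      // An abstract type parameter with no tag in scope can still be
      // described by a WeakTypeTag; unresolved parts stay symbolic.
      def weak[T: WeakTypeTag]: Type = weakTypeOf[T]

      def demo[T]: Type = weak[T]   // weak[T] compiles here; concrete[T] would not

      def main(args: Array[String]): Unit = {
        println(concrete[List[Int]])   // List[Int]
        println(demo[String])          // T (left abstract at the definition site)
      }
    }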
diff --git a/test/files/run/exprs_serialize.check b/test/files/run/exprs_serialize.check
index a3bf9ccdc4..20ad6c110c 100644
--- a/test/files/run/exprs_serialize.check
+++ b/test/files/run/exprs_serialize.check
@@ -1,2 +1,2 @@
-java.io.NotSerializableException: Test$$treecreator1$1
-java.io.NotSerializableException: Test$$treecreator2$1
+java.io.NotSerializableException: Test$$treecreator1$1
+java.io.NotSerializableException: Test$$treecreator2$1
diff --git a/test/files/run/exprs_serialize.scala b/test/files/run/exprs_serialize.scala
index 075c902a34..c4310b0fe1 100644
--- a/test/files/run/exprs_serialize.scala
+++ b/test/files/run/exprs_serialize.scala
@@ -1,5 +1,6 @@
import java.io._
import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
import scala.reflect.runtime.{currentMirror => cm}
object Test extends App {
@@ -13,7 +14,7 @@ object Test extends App {
val fin = new ByteArrayInputStream(fout.toByteArray)
val in = new ObjectInputStream(fin)
- val reexpr = in.readObject().asInstanceOf[scala.reflect.basis.Expr[_]].in(cm)
+ val reexpr = in.readObject().asInstanceOf[ru.Expr[_]].in(cm)
in.close()
fin.close()
diff --git a/test/files/run/freetypes_false_alarm1.check b/test/files/run/freetypes_false_alarm1.check
index 92efacc2b7..a9df3544ac 100644
--- a/test/files/run/freetypes_false_alarm1.check
+++ b/test/files/run/freetypes_false_alarm1.check
@@ -1 +1 @@
-List[Int]
+List[Int]
diff --git a/test/files/run/getClassTest-valueClass.check b/test/files/run/getClassTest-valueClass.check
new file mode 100644
index 0000000000..7608d92b4e
--- /dev/null
+++ b/test/files/run/getClassTest-valueClass.check
@@ -0,0 +1,2 @@
+int
+class V
diff --git a/test/files/run/getClassTest-valueClass.scala b/test/files/run/getClassTest-valueClass.scala
new file mode 100644
index 0000000000..05a116dfff
--- /dev/null
+++ b/test/files/run/getClassTest-valueClass.scala
@@ -0,0 +1,10 @@
+class V(val x: Int) extends AnyVal
+
+object Test {
+ def main(args: Array[String]) = {
+ val v = new V(2)
+ val s: Any = 2
+ println(2.getClass)
+ println(v.getClass)
+ }
+}
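Note: getClassTest-valueClass pins down what getClass returns for value classes: on an unboxed value-class instance it reports the wrapper class, while a primitive literal reports its primitive class. A small sketch of the same observation (the Meters class below is illustrative):

    class Meters(val value: Double) extends AnyVal

    object GetClassDemo {
      def main(args: Array[String]): Unit = {
        val m = new Meters(1.5)
        println(1.5.getClass)        // double (primitive class)
        println(m.getClass)          // class Meters
        println((m: Any).getClass)   // class Meters (boxed when widened to Any)
      }
    }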
diff --git a/test/files/run/inline-ex-handlers.check b/test/files/run/inline-ex-handlers.check
index 25e1b2a4dd..2bc72893e7 100644
--- a/test/files/run/inline-ex-handlers.check
+++ b/test/files/run/inline-ex-handlers.check
@@ -13,6 +13,10 @@
< 92 JUMP 2
<
< 2:
+383c382
+< locals: value args, variable result, value ex6, value x4, value x5, value message, value x
+---
+> locals: value args, variable result, value ex6, value x4, value x5, value x
385c384
< blocks: [1,2,3,4,5,8,11,13,14,16]
---
@@ -34,11 +38,21 @@
< 101 JUMP 4
<
< 4:
-515c520
+438,441d442
+< 106 LOAD_LOCAL(value x5)
+< 106 CALL_METHOD MyException.message (dynamic)
+< 106 STORE_LOCAL(value message)
+< 106 SCOPE_ENTER value message
+443c444,445
+< 106 LOAD_LOCAL(value message)
+---
+> ? LOAD_LOCAL(value x5)
+> ? CALL_METHOD MyException.message (dynamic)
+515c517
< blocks: [1,2,3,4,6,7,8,9,10]
---
> blocks: [1,2,3,4,6,7,8,9,10,11,12,13]
-544c549,554
+544c546,551
< 306 THROW(MyException)
---
> ? JUMP 11
@@ -47,22 +61,22 @@
> ? LOAD_LOCAL(variable monitor4)
> 305 MONITOR_EXIT
> ? JUMP 12
-550c560,566
+550c557
< ? THROW(Throwable)
---
> ? JUMP 12
+556c563,570
+< ? THROW(Throwable)
+---
+> ? STORE_LOCAL(value t)
+> ? JUMP 13
>
> 12:
> ? LOAD_LOCAL(variable monitor3)
> 304 MONITOR_EXIT
> ? STORE_LOCAL(value t)
> ? JUMP 13
-556c572,585
-< ? THROW(Throwable)
----
-> ? STORE_LOCAL(value t)
-> ? JUMP 13
->
+571a586,597
> 13:
> 310 LOAD_MODULE object Predef
> 310 CALL_PRIMITIVE(StartConcat)
@@ -74,19 +88,20 @@
> 310 CALL_PRIMITIVE(EndConcat)
> 310 CALL_METHOD scala.Predef.println (dynamic)
> 310 JUMP 2
-580c609
+>
+580c606
< catch (Throwable) in ArrayBuffer(7, 8, 9, 10) starting at: 6
---
> catch (Throwable) in ArrayBuffer(7, 8, 9, 10, 11) starting at: 6
-583c612
+583c609
< catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10) starting at: 3
---
> catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10, 11, 12) starting at: 3
-615c644
+615c641
< blocks: [1,2,3,4,5,6,7,9,10]
---
> blocks: [1,2,3,4,5,6,7,9,10,11,12]
-639c668,674
+639c665,671
< 78 THROW(IllegalArgumentException)
---
> ? STORE_LOCAL(value e)
@@ -96,12 +111,12 @@
> 81 LOAD_LOCAL(value e)
> ? STORE_LOCAL(variable exc1)
> ? JUMP 12
-668c703,717
+668c700,701
< 81 THROW(Exception)
---
> ? STORE_LOCAL(variable exc1)
> ? JUMP 12
->
+684a718,730
> 12:
> 83 LOAD_MODULE object Predef
> 83 CONSTANT("finally")
@@ -114,15 +129,20 @@
> 84 STORE_LOCAL(variable result)
> 84 LOAD_LOCAL(variable exc1)
> 84 THROW(Throwable)
-690c739
+>
+690c736
< catch (<none>) in ArrayBuffer(4, 6, 7, 9) starting at: 3
---
> catch (<none>) in ArrayBuffer(4, 6, 7, 9, 11) starting at: 3
-716c765
+714c760
+< locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value message, value x, value ex6, value x4, value x5, value message, value x
+---
+> locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value x, value ex6, value x4, value x5, value x
+716c762
< blocks: [1,2,3,4,5,6,9,12,14,17,18,19,22,25,27,28,30,31]
---
> blocks: [1,2,3,4,5,6,9,12,14,17,18,19,22,25,27,28,30,31,32,33,34]
-740c789,796
+740c786,793
< 172 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
@@ -133,33 +153,64 @@
> 170 STORE_LOCAL(value x4)
> 170 SCOPE_ENTER value x4
> 170 JUMP 18
-798c854,855
+787,790d839
+< 175 LOAD_LOCAL(value x5)
+< 175 CALL_METHOD MyException.message (dynamic)
+< 175 STORE_LOCAL(value message)
+< 175 SCOPE_ENTER value message
+792c841,842
+< 176 LOAD_LOCAL(value message)
+---
+> ? LOAD_LOCAL(value x5)
+> ? CALL_METHOD MyException.message (dynamic)
+796c846,847
+< 177 LOAD_LOCAL(value message)
+---
+> ? LOAD_LOCAL(value x5)
+> ? CALL_METHOD MyException.message (dynamic)
+798c849,850
< 177 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
> ? JUMP 33
-802c859,866
+802c854,855
< 170 THROW(Throwable)
---
> ? STORE_LOCAL(value ex6)
> ? JUMP 33
->
+811a865,870
> 33:
> 169 LOAD_LOCAL(value ex6)
> 169 STORE_LOCAL(value x4)
> 169 SCOPE_ENTER value x4
> 169 JUMP 5
-837c901,902
+>
+826,829d884
+< 180 LOAD_LOCAL(value x5)
+< 180 CALL_METHOD MyException.message (dynamic)
+< 180 STORE_LOCAL(value message)
+< 180 SCOPE_ENTER value message
+831c886,887
+< 181 LOAD_LOCAL(value message)
+---
+> ? LOAD_LOCAL(value x5)
+> ? CALL_METHOD MyException.message (dynamic)
+835c891,892
+< 182 LOAD_LOCAL(value message)
+---
+> ? LOAD_LOCAL(value x5)
+> ? CALL_METHOD MyException.message (dynamic)
+837c894,895
< 182 THROW(MyException)
---
> ? STORE_LOCAL(variable exc2)
> ? JUMP 34
-841c906,907
+841c899,900
< 169 THROW(Throwable)
---
> ? STORE_LOCAL(variable exc2)
> ? JUMP 34
-842a909,921
+857a917,929
> 34:
> 184 LOAD_MODULE object Predef
> 184 CONSTANT("finally")
@@ -173,19 +224,23 @@
> 185 LOAD_LOCAL(variable exc2)
> 185 THROW(Throwable)
>
-863c942
+863c935
< catch (Throwable) in ArrayBuffer(17, 18, 19, 22, 25, 27, 28, 30) starting at: 4
---
> catch (Throwable) in ArrayBuffer(17, 18, 19, 22, 25, 27, 28, 30, 32) starting at: 4
-866c945
+866c938
< catch (<none>) in ArrayBuffer(4, 5, 6, 9, 12, 17, 18, 19, 22, 25, 27, 28, 30) starting at: 3
---
> catch (<none>) in ArrayBuffer(4, 5, 6, 9, 12, 17, 18, 19, 22, 25, 27, 28, 30, 32, 33) starting at: 3
-892c971
+890c962
+< locals: value args, variable result, value e, value ex6, value x4, value x5, value message, value x
+---
+> locals: value args, variable result, value e, value ex6, value x4, value x5, value x
+892c964
< blocks: [1,2,3,6,7,8,11,14,16,17,19]
---
> blocks: [1,2,3,6,7,8,11,14,16,17,19,20]
-916c995,1002
+916c988,995
< 124 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
@@ -196,15 +251,29 @@
> 122 STORE_LOCAL(value x4)
> 122 SCOPE_ENTER value x4
> 122 JUMP 7
-979c1065
+945,948d1023
+< 127 LOAD_LOCAL(value x5)
+< 127 CALL_METHOD MyException.message (dynamic)
+< 127 STORE_LOCAL(value message)
+< 127 SCOPE_ENTER value message
+950c1025,1026
+< 127 LOAD_LOCAL(value message)
+---
+> ? LOAD_LOCAL(value x5)
+> ? CALL_METHOD MyException.message (dynamic)
+979c1055
< catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 14, 16, 17, 19) starting at: 3
---
> catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 14, 16, 17, 19, 20) starting at: 3
-1005c1091
+1003c1079
+< locals: value args, variable result, value ex6, value x4, value x5, value message, value x, value e
+---
+> locals: value args, variable result, value ex6, value x4, value x5, value x, value e
+1005c1081
< blocks: [1,2,3,4,5,8,11,15,16,17,19]
---
> blocks: [1,2,3,5,8,11,15,16,17,19,20]
-1029c1115,1124
+1029c1105,1114
< 148 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
@@ -217,15 +286,25 @@
> 154 LOAD_LOCAL(value x4)
> 154 IS_INSTANCE REF(class MyException)
> 154 CZJUMP (BOOL)NE ? 5 : 11
-1050,1052d1144
+1050,1052d1134
< 145 JUMP 4
<
< 4:
-1288c1380
+1066,1069d1147
+< 154 LOAD_LOCAL(value x5)
+< 154 CALL_METHOD MyException.message (dynamic)
+< 154 STORE_LOCAL(value message)
+< 154 SCOPE_ENTER value message
+1071c1149,1150
+< 154 LOAD_LOCAL(value message)
+---
+> ? LOAD_LOCAL(value x5)
+> ? CALL_METHOD MyException.message (dynamic)
+1288c1367
< blocks: [1,2,3,4,5,7]
---
> blocks: [1,2,3,4,5,7,8]
-1312c1404,1411
+1312c1391,1398
< 38 THROW(IllegalArgumentException)
---
> ? STORE_LOCAL(value e)
@@ -236,16 +315,20 @@
> 42 CONSTANT("IllegalArgumentException")
> 42 CALL_METHOD scala.Predef.println (dynamic)
> 42 JUMP 2
-1361c1460
+1359c1445
+< locals: value args, variable result, value ex6, value x4, value x5, value message, value x
+---
+> locals: value args, variable result, value ex6, value x4, value x5, value x
+1361c1447
< blocks: [1,2,3,4,5,8,11,13,14,16,17,19]
---
> blocks: [1,2,3,5,8,11,13,14,16,17,19,20]
-1385c1484,1485
+1385c1471,1472
< 203 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
> ? JUMP 20
-1405c1505,1514
+1405c1492,1501
< 209 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
@@ -258,15 +341,25 @@
> 212 LOAD_LOCAL(value x4)
> 212 IS_INSTANCE REF(class MyException)
> 212 CZJUMP (BOOL)NE ? 5 : 11
-1418,1420d1526
+1418,1420d1513
< 200 JUMP 4
<
< 4:
-1483c1589
+1434,1437d1526
+< 212 LOAD_LOCAL(value x5)
+< 212 CALL_METHOD MyException.message (dynamic)
+< 212 STORE_LOCAL(value message)
+< 212 SCOPE_ENTER value message
+1439c1528,1529
+< 213 LOAD_LOCAL(value message)
+---
+> ? LOAD_LOCAL(value x5)
+> ? CALL_METHOD MyException.message (dynamic)
+1483c1573
< blocks: [1,2,3,4,5,7]
---
> blocks: [1,2,3,4,5,7,8]
-1507c1613,1620
+1507c1597,1604
< 58 THROW(IllegalArgumentException)
---
> ? STORE_LOCAL(value e)
@@ -277,11 +370,11 @@
> 62 CONSTANT("RuntimeException")
> 62 CALL_METHOD scala.Predef.println (dynamic)
> 62 JUMP 2
-1556c1669
+1556c1653
< blocks: [1,2,3,4]
---
> blocks: [1,2,3,4,5]
-1576c1689,1694
+1576c1673,1678
< 229 THROW(MyException)
---
> ? JUMP 5
@@ -290,19 +383,19 @@
> ? LOAD_LOCAL(variable monitor1)
> 228 MONITOR_EXIT
> 228 THROW(Throwable)
-1582c1700
+1582c1684
< ? THROW(Throwable)
---
> 228 THROW(Throwable)
-1610c1728
+1610c1712
< locals: value args, variable result, variable monitor2, variable monitorResult1
---
> locals: value exception$1, value args, variable result, variable monitor2, variable monitorResult1
-1612c1730
+1612c1714
< blocks: [1,2,3,4]
---
> blocks: [1,2,3,4,5]
-1635c1753,1761
+1635c1737,1745
< 245 THROW(MyException)
---
> ? STORE_LOCAL(value exception$1)
@@ -314,7 +407,7 @@
> ? LOAD_LOCAL(variable monitor2)
> 244 MONITOR_EXIT
> 244 THROW(Throwable)
-1641c1767
+1641c1751
< ? THROW(Throwable)
---
> 244 THROW(Throwable)
diff --git a/test/files/run/interop_classtags_are_classmanifests.check b/test/files/run/interop_classtags_are_classmanifests.check
index 7a0a829af2..5a8fc2b782 100644
--- a/test/files/run/interop_classtags_are_classmanifests.check
+++ b/test/files/run/interop_classtags_are_classmanifests.check
@@ -1,3 +1,3 @@
-ClassTag[int]
-ClassTag[class java.lang.String]
-ClassTag[class [I]
+Int
+java.lang.String
+Array[int]
diff --git a/test/files/run/interop_manifests_are_abstypetags.check b/test/files/run/interop_manifests_are_abstypetags.check
index c59e92d4eb..19a35ad3db 100644
--- a/test/files/run/interop_manifests_are_abstypetags.check
+++ b/test/files/run/interop_manifests_are_abstypetags.check
@@ -1,3 +1,3 @@
-Int
-java.lang.String
-Array[Int]
+Int
+java.lang.String
+Array[Int]
diff --git a/test/files/run/interop_manifests_are_abstypetags.scala b/test/files/run/interop_manifests_are_abstypetags.scala
index 1cba2fdb4b..f2c2723106 100644
--- a/test/files/run/interop_manifests_are_abstypetags.scala
+++ b/test/files/run/interop_manifests_are_abstypetags.scala
@@ -1,11 +1,11 @@
import scala.reflect.runtime.universe._
object Test extends App {
- def manifestIsAbsTypeTag[T: Manifest] = {
- println(implicitly[AbsTypeTag[T]].tpe)
+ def manifestIsWeakTypeTag[T: Manifest] = {
+ println(implicitly[WeakTypeTag[T]].tpe)
}
- manifestIsAbsTypeTag[Int]
- manifestIsAbsTypeTag[String]
- manifestIsAbsTypeTag[Array[Int]]
+ manifestIsWeakTypeTag[Int]
+ manifestIsWeakTypeTag[String]
+ manifestIsWeakTypeTag[Array[Int]]
} \ No newline at end of file
diff --git a/test/files/run/interop_manifests_are_classtags.check b/test/files/run/interop_manifests_are_classtags.check
index aaf4aba012..f3f704121b 100644
--- a/test/files/run/interop_manifests_are_classtags.check
+++ b/test/files/run/interop_manifests_are_classtags.check
@@ -1,18 +1,18 @@
-Int
-List()
-List(0, 0, 0, 0, 0)
-java.lang.String
-List()
-List(null, null, null, null, null)
-Array[Int]
-List()
-List(null, null, null, null, null)
-Int
-List()
-List(0, 0, 0, 0, 0)
-java.lang.String
-List()
-List(null, null, null, null, null)
-Array[Int]
-List()
-List(null, null, null, null, null)
+Int
+List()
+List(0, 0, 0, 0, 0)
+java.lang.String
+List()
+List(null, null, null, null, null)
+Array[Int]
+List()
+List(null, null, null, null, null)
+Int
+List()
+List(0, 0, 0, 0, 0)
+java.lang.String
+List()
+List(null, null, null, null, null)
+Array[Int]
+List()
+List(null, null, null, null, null)
diff --git a/test/files/run/interop_manifests_are_typetags.check b/test/files/run/interop_manifests_are_typetags.check
index c59e92d4eb..19a35ad3db 100644
--- a/test/files/run/interop_manifests_are_typetags.check
+++ b/test/files/run/interop_manifests_are_typetags.check
@@ -1,3 +1,3 @@
-Int
-java.lang.String
-Array[Int]
+Int
+java.lang.String
+Array[Int]
diff --git a/test/files/run/interop_typetags_are_manifests.check b/test/files/run/interop_typetags_are_manifests.check
index 871167e3ab..e02de1fdc2 100644
--- a/test/files/run/interop_typetags_are_manifests.check
+++ b/test/files/run/interop_typetags_are_manifests.check
@@ -1,3 +1,3 @@
-int
-java.lang.String
-Array[Int]
+int
+java.lang.String
+Array[Int]
diff --git a/test/files/run/macro-abort-fresh.check b/test/files/run/macro-abort-fresh.check
index 28057c2883..75ad5e79f8 100644
--- a/test/files/run/macro-abort-fresh.check
+++ b/test/files/run/macro-abort-fresh.check
@@ -1,6 +1,6 @@
-$1$
-qwe1
-qwe2
-reflective compilation has failed:
-
-blargh
+$1$
+qwe1
+qwe2
+reflective compilation has failed:
+
+blargh
diff --git a/test/files/run/macro-abort-fresh/Test_2.scala b/test/files/run/macro-abort-fresh/Test_2.scala
index c6caa2b585..15c498efb0 100644
--- a/test/files/run/macro-abort-fresh/Test_2.scala
+++ b/test/files/run/macro-abort-fresh/Test_2.scala
@@ -3,6 +3,6 @@ object Test extends App {
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
val tree = Select(Ident("Macros"), newTermName("foo"))
- try cm.mkToolBox().runExpr(tree)
+ try cm.mkToolBox().eval(tree)
catch { case ex: Throwable => println(ex.getMessage) }
} \ No newline at end of file
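Note: the Test_2 changes in this and the following macro tests swap ToolBox.runExpr for ToolBox.eval, which typechecks, compiles, and runs a tree at runtime. A minimal sketch of the call pattern, assuming the ToolBox API where both parse and eval are available:

    import scala.reflect.runtime.{currentMirror => cm}
    import scala.tools.reflect.ToolBox

    object EvalDemo extends App {
      val toolbox = cm.mkToolBox()
      // parse produces an untyped tree; eval typechecks and evaluates it.
      val tree = toolbox.parse("List(1, 2, 3).map(_ * 2)")
      println(toolbox.eval(tree))   // List(2, 4, 6)
    }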
diff --git a/test/files/run/macro-declared-in-annotation.check b/test/files/run/macro-declared-in-annotation.check
index 1ea14b4e20..7658ad2c24 100644
--- a/test/files/run/macro-declared-in-annotation.check
+++ b/test/files/run/macro-declared-in-annotation.check
@@ -1 +1 @@
-it works
+it works
diff --git a/test/files/run/macro-declared-in-block.check b/test/files/run/macro-declared-in-block.check
index a61fd13087..5e687db8bf 100644
--- a/test/files/run/macro-declared-in-block.check
+++ b/test/files/run/macro-declared-in-block.check
@@ -1,2 +1,2 @@
-prefix = Expr[Nothing](<empty>)
-it works
+prefix = Expr[Nothing](<empty>)
+it works
diff --git a/test/files/run/macro-declared-in-class-class.check b/test/files/run/macro-declared-in-class-class.check
index 480c2f906f..47248d7af7 100644
--- a/test/files/run/macro-declared-in-class-class.check
+++ b/test/files/run/macro-declared-in-class-class.check
@@ -1,2 +1,2 @@
-prefix = Expr[Nothing](new Test.this.outer.Macros())
-it works
+prefix = Expr[Nothing](new Test.this.outer.Macros())
+it works
diff --git a/test/files/run/macro-declared-in-class-object.check b/test/files/run/macro-declared-in-class-object.check
index f7ba5a53cb..35af59e40f 100644
--- a/test/files/run/macro-declared-in-class-object.check
+++ b/test/files/run/macro-declared-in-class-object.check
@@ -1,2 +1,2 @@
-prefix = Expr[Nothing](Test.this.outer.Macros)
-it works
+prefix = Expr[Nothing](Test.this.outer.Macros)
+it works
diff --git a/test/files/run/macro-declared-in-class.check b/test/files/run/macro-declared-in-class.check
index 946851e4bb..a1c1d7af8b 100644
--- a/test/files/run/macro-declared-in-class.check
+++ b/test/files/run/macro-declared-in-class.check
@@ -1,2 +1,2 @@
-prefix = Expr[Nothing](new Macros())
-it works
+prefix = Expr[Nothing](new Macros())
+it works
diff --git a/test/files/run/macro-declared-in-default-param.check b/test/files/run/macro-declared-in-default-param.check
index 00052ad018..6decd7aa4d 100644
--- a/test/files/run/macro-declared-in-default-param.check
+++ b/test/files/run/macro-declared-in-default-param.check
@@ -1,5 +1,5 @@
-prefix = Expr[Nothing](<empty>)
-it works
-it works
-prefix = Expr[Nothing](<empty>)
-it works
+prefix = Expr[Nothing](<empty>)
+it works
+it works
+prefix = Expr[Nothing](<empty>)
+it works
diff --git a/test/files/run/macro-declared-in-implicit-class.check b/test/files/run/macro-declared-in-implicit-class.check
index b3640ceaa7..5dc968c08c 100644
--- a/test/files/run/macro-declared-in-implicit-class.check
+++ b/test/files/run/macro-declared-in-implicit-class.check
@@ -1,2 +1,2 @@
-prefix = Expr[Nothing](Macros.foo("2"))
-Some(2)
+prefix = Expr[Nothing](Macros.foo("2"))
+Some(2)
diff --git a/test/files/run/macro-declared-in-method.check b/test/files/run/macro-declared-in-method.check
index a61fd13087..5e687db8bf 100644
--- a/test/files/run/macro-declared-in-method.check
+++ b/test/files/run/macro-declared-in-method.check
@@ -1,2 +1,2 @@
-prefix = Expr[Nothing](<empty>)
-it works
+prefix = Expr[Nothing](<empty>)
+it works
diff --git a/test/files/run/macro-declared-in-object-class.check b/test/files/run/macro-declared-in-object-class.check
index 480c2f906f..47248d7af7 100644
--- a/test/files/run/macro-declared-in-object-class.check
+++ b/test/files/run/macro-declared-in-object-class.check
@@ -1,2 +1,2 @@
-prefix = Expr[Nothing](new Test.this.outer.Macros())
-it works
+prefix = Expr[Nothing](new Test.this.outer.Macros())
+it works
diff --git a/test/files/run/macro-declared-in-object-object.check b/test/files/run/macro-declared-in-object-object.check
index f7ba5a53cb..35af59e40f 100644
--- a/test/files/run/macro-declared-in-object-object.check
+++ b/test/files/run/macro-declared-in-object-object.check
@@ -1,2 +1,2 @@
-prefix = Expr[Nothing](Test.this.outer.Macros)
-it works
+prefix = Expr[Nothing](Test.this.outer.Macros)
+it works
diff --git a/test/files/run/macro-declared-in-object.check b/test/files/run/macro-declared-in-object.check
index 05a8cc48ea..4d955a96b1 100644
--- a/test/files/run/macro-declared-in-object.check
+++ b/test/files/run/macro-declared-in-object.check
@@ -1,2 +1,2 @@
-prefix = Expr[Nothing](Macros)
-it works
+prefix = Expr[Nothing](Macros)
+it works
diff --git a/test/files/run/macro-declared-in-package-object.check b/test/files/run/macro-declared-in-package-object.check
index 6f797f3c68..bc0069178d 100644
--- a/test/files/run/macro-declared-in-package-object.check
+++ b/test/files/run/macro-declared-in-package-object.check
@@ -1,2 +1,2 @@
-prefix = Expr[Nothing](Macros.`package`)
-it works
+prefix = Expr[Nothing](Macros.`package`)
+it works
diff --git a/test/files/run/macro-declared-in-refinement.check b/test/files/run/macro-declared-in-refinement.check
index 861cd43b01..09b8d015a6 100644
--- a/test/files/run/macro-declared-in-refinement.check
+++ b/test/files/run/macro-declared-in-refinement.check
@@ -1,2 +1,2 @@
-prefix = Expr[Nothing](Test.this.macros)
-it works
+prefix = Expr[Nothing](Test.this.macros)
+it works
diff --git a/test/files/run/macro-declared-in-trait.check b/test/files/run/macro-declared-in-trait.check
index d5d9e4e457..104ff1e99b 100644
--- a/test/files/run/macro-declared-in-trait.check
+++ b/test/files/run/macro-declared-in-trait.check
@@ -1,15 +1,15 @@
-prefix = Expr[Nothing]({
- final class $anon extends Object with Base {
- def <init>(): anonymous class $anon = {
- $anon.super.<init>();
- ()
- };
- <empty>
- };
- new $anon()
-})
-it works
-prefix = Expr[Nothing](Macros)
-it works
-prefix = Expr[Nothing](new Macros())
-it works
+prefix = Expr[Nothing]({
+ final class $anon extends Object with Base {
+ def <init>(): anonymous class $anon = {
+ $anon.super.<init>();
+ ()
+ };
+ <empty>
+ };
+ new $anon()
+})
+it works
+prefix = Expr[Nothing](Macros)
+it works
+prefix = Expr[Nothing](new Macros())
+it works
diff --git a/test/files/run/macro-def-infer-return-type-b.check b/test/files/run/macro-def-infer-return-type-b.check
index f34d257c82..ae2dc7a06f 100644
--- a/test/files/run/macro-def-infer-return-type-b.check
+++ b/test/files/run/macro-def-infer-return-type-b.check
@@ -1,6 +1,6 @@
-reflective compilation has failed:
-
-exception during macro expansion:
-java.lang.Error: an implementation is missing
- at Impls$.foo(Impls_Macros_1.scala:5)
-
+reflective compilation has failed:
+
+exception during macro expansion:
+java.lang.Error: an implementation is missing
+ at Impls$.foo(Impls_Macros_1.scala:5)
+
diff --git a/test/files/run/macro-def-infer-return-type-b/Test_2.scala b/test/files/run/macro-def-infer-return-type-b/Test_2.scala
index 0f84859545..ef2920a432 100644
--- a/test/files/run/macro-def-infer-return-type-b/Test_2.scala
+++ b/test/files/run/macro-def-infer-return-type-b/Test_2.scala
@@ -3,6 +3,6 @@ object Test extends App {
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
val tree = Apply(Select(Ident("Macros"), newTermName("foo")), List(Literal(Constant(42))))
- try cm.mkToolBox().runExpr(tree)
+ try cm.mkToolBox().eval(tree)
catch { case ex: Throwable => println(ex.getMessage) }
}
diff --git a/test/files/run/macro-def-path-dependent-a.check b/test/files/run/macro-def-path-dependent-a.check
index 1ea14b4e20..7658ad2c24 100644
--- a/test/files/run/macro-def-path-dependent-a.check
+++ b/test/files/run/macro-def-path-dependent-a.check
@@ -1 +1 @@
-it works
+it works
diff --git a/test/files/run/macro-def-path-dependent-b.check b/test/files/run/macro-def-path-dependent-b.check
index 1ea14b4e20..7658ad2c24 100644
--- a/test/files/run/macro-def-path-dependent-b.check
+++ b/test/files/run/macro-def-path-dependent-b.check
@@ -1 +1 @@
-it works
+it works
diff --git a/test/files/run/macro-def-path-dependent-c.check b/test/files/run/macro-def-path-dependent-c.check
index 1ea14b4e20..7658ad2c24 100644
--- a/test/files/run/macro-def-path-dependent-c.check
+++ b/test/files/run/macro-def-path-dependent-c.check
@@ -1 +1 @@
-it works
+it works
diff --git a/test/files/run/macro-def-path-dependent-d.check b/test/files/run/macro-def-path-dependent-d1.check
index 1ea14b4e20..7658ad2c24 100644
--- a/test/files/run/macro-def-path-dependent-d.check
+++ b/test/files/run/macro-def-path-dependent-d1.check
@@ -1 +1 @@
-it works
+it works
diff --git a/test/files/run/macro-def-path-dependent-d1.flags b/test/files/run/macro-def-path-dependent-d1.flags
new file mode 100644
index 0000000000..cd66464f2f
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-d1.flags
@@ -0,0 +1 @@
+-language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d/Impls_Macros_1.scala b/test/files/run/macro-def-path-dependent-d1/Impls_Macros_1.scala
index 2daf6fc3fb..69d9708b2a 100644
--- a/test/files/run/macro-def-path-dependent-d/Impls_Macros_1.scala
+++ b/test/files/run/macro-def-path-dependent-d1/Impls_Macros_1.scala
@@ -5,5 +5,5 @@ import scala.reflect.api.Universe
object Test {
def materializeTypeTag[T](u: Universe)(e: T) = macro materializeTypeTag_impl[T]
- def materializeTypeTag_impl[T: c.AbsTypeTag](c: Context)(u: c.Expr[Universe])(e: c.Expr[T]): c.Expr[u.value.TypeTag[T]] = ???
+ def materializeTypeTag_impl[T: c.WeakTypeTag](c: Context)(u: c.Expr[Universe])(e: c.Expr[T]): c.Expr[u.value.TypeTag[T]] = ???
} \ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d/Test_2.scala b/test/files/run/macro-def-path-dependent-d1/Test_2.scala
index 7dffc5107d..7dffc5107d 100644
--- a/test/files/run/macro-def-path-dependent-d/Test_2.scala
+++ b/test/files/run/macro-def-path-dependent-d1/Test_2.scala
diff --git a/test/files/run/macro-def-path-dependent-d2.check b/test/files/run/macro-def-path-dependent-d2.check
new file mode 100644
index 0000000000..7658ad2c24
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-d2.check
@@ -0,0 +1 @@
+it works
diff --git a/test/files/run/macro-def-path-dependent-d2.flags b/test/files/run/macro-def-path-dependent-d2.flags
new file mode 100644
index 0000000000..cd66464f2f
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-d2.flags
@@ -0,0 +1 @@
+-language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d2/Impls_1.scala b/test/files/run/macro-def-path-dependent-d2/Impls_1.scala
new file mode 100644
index 0000000000..7fa9c3579e
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-d2/Impls_1.scala
@@ -0,0 +1,7 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.Context
+import scala.reflect.api.Universe
+
+object Impls {
+ def materializeTypeTag_impl[T: c.WeakTypeTag](c: Context)(u: c.Expr[Universe])(e: c.Expr[T]): c.Expr[u.value.TypeTag[T]] = ???
+} \ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d2/Macros_2.scala b/test/files/run/macro-def-path-dependent-d2/Macros_2.scala
new file mode 100644
index 0000000000..65ce4d8bd2
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-d2/Macros_2.scala
@@ -0,0 +1,7 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.Context
+import scala.reflect.api.Universe
+
+object Macros {
+ def materializeTypeTag[T](u: Universe)(e: T) = macro Impls.materializeTypeTag_impl[T]
+} \ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d2/Test_3.scala b/test/files/run/macro-def-path-dependent-d2/Test_3.scala
new file mode 100644
index 0000000000..7dffc5107d
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-d2/Test_3.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ println("it works")
+} \ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-argument.check b/test/files/run/macro-expand-implicit-argument.check
new file mode 100644
index 0000000000..15a62794a9
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-argument.check
@@ -0,0 +1 @@
+List(1, 2, 3)
diff --git a/test/files/run/macro-expand-implicit-argument.flags b/test/files/run/macro-expand-implicit-argument.flags
new file mode 100644
index 0000000000..cd66464f2f
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-argument.flags
@@ -0,0 +1 @@
+-language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-argument/Macros_1.scala b/test/files/run/macro-expand-implicit-argument/Macros_1.scala
new file mode 100644
index 0000000000..b1665256cd
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-argument/Macros_1.scala
@@ -0,0 +1,59 @@
+import annotation.tailrec
+import scala.math.{min, max}
+import scala.{specialized => spec}
+
+import language.experimental.macros
+
+import scala.reflect.ClassTag
+import scala.reflect.macros.Context
+
+object Macros {
+ def alloc[@spec A:ClassTag](src:Array[A], s1:Int, len:Int) = {
+ val as = Array.ofDim[A](len)
+ System.arraycopy(src, s1, as, 0, len)
+ as
+ }
+
+ /**
+ * Efficient alternative to Array.apply.
+ *
+ * "As seen on scala-internals!"
+ */
+ def array[A](as:A*)(implicit ct: ClassTag[A]) = macro arrayMacro[A]
+
+ /**
+ * Takes in something like:
+ * ArrayUtil.alloc[Int](11, 22, 33, 44)(ct)
+ *
+ * and builds a tree like:
+ * {
+ * val arr:Array[Int] = ct.newArray(4)
+ * arr.update(0, 11)
+ * arr.update(1, 22)
+ * arr.update(2, 33)
+ * arr.update(3, 44)
+ * arr
+ * }
+ */
+ def arrayMacro[A:c.WeakTypeTag](c:Context)(as:c.Expr[A]*)(ct: c.Expr[ClassTag[A]]): c.Expr[Array[A]] = {
+ import c.mirror._
+ import c.universe._
+ def const(x:Int) = Literal(Constant(x))
+
+ val n = as.length
+ val arr = newTermName("arr")
+
+ val create = Apply(Select(ct.tree, "newArray"), List(const(n)))
+ val arrtpe = TypeTree(implicitly[c.WeakTypeTag[Array[A]]].tpe)
+ val valdef = ValDef(Modifiers(), arr, arrtpe, create)
+
+ val updates = (0 until n).map {
+ i => Apply(Select(Ident(arr), "update"), List(const(i), as(i).tree))
+ }
+
+ val exprs = Seq(valdef) ++ updates ++ Seq(Ident(arr))
+ val block = Block(exprs:_*)
+
+ c.Expr[Array[A]](block)
+ }
+} \ No newline at end of file
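Note: Macros_1 above generates, at compile time, the array-filling code sketched in its doc comment. For reference, a hand-written runtime equivalent of that expansion (a sketch, not the actual generated tree) looks roughly like this:

    import scala.reflect.ClassTag

    object ArrayByHand {
      // Roughly what array(11, 22, 33, 44) expands into: allocate via the
      // ClassTag, then update each slot in place.
      def array4[A](a0: A, a1: A, a2: A, a3: A)(implicit ct: ClassTag[A]): Array[A] = {
        val arr = ct.newArray(4)
        arr.update(0, a0)
        arr.update(1, a1)
        arr.update(2, a2)
        arr.update(3, a3)
        arr
      }

      def main(args: Array[String]): Unit =
        println(array4(11, 22, 33, 44).toList)   // List(11, 22, 33, 44)
    }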
diff --git a/test/files/run/macro-expand-implicit-argument/Test_2.scala b/test/files/run/macro-expand-implicit-argument/Test_2.scala
new file mode 100644
index 0000000000..ce8a068fb4
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-argument/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ import Macros._
+ println(array(1, 2, 3).toList)
+} \ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-macro-has-implicit.check b/test/files/run/macro-expand-implicit-macro-has-implicit.check
index 2f562a182f..d81cc0710e 100644
--- a/test/files/run/macro-expand-implicit-macro-has-implicit.check
+++ b/test/files/run/macro-expand-implicit-macro-has-implicit.check
@@ -1 +1 @@
-42
+42
diff --git a/test/files/run/macro-expand-implicit-macro-is-implicit.check b/test/files/run/macro-expand-implicit-macro-is-implicit.check
index 42abf4579b..c205945d05 100644
--- a/test/files/run/macro-expand-implicit-macro-is-implicit.check
+++ b/test/files/run/macro-expand-implicit-macro-is-implicit.check
@@ -1,2 +1,2 @@
-Some(2)
-2
+Some(2)
+2
diff --git a/test/files/run/macro-expand-implicit-macro-is-val.check b/test/files/run/macro-expand-implicit-macro-is-val.check
index 78c6baefdd..0cfbf08886 100644
--- a/test/files/run/macro-expand-implicit-macro-is-val.check
+++ b/test/files/run/macro-expand-implicit-macro-is-val.check
@@ -1 +1 @@
-2
+2
diff --git a/test/files/run/macro-expand-implicit-macro-is-view.check b/test/files/run/macro-expand-implicit-macro-is-view.check
index 78c6baefdd..0cfbf08886 100644
--- a/test/files/run/macro-expand-implicit-macro-is-view.check
+++ b/test/files/run/macro-expand-implicit-macro-is-view.check
@@ -1 +1 @@
-2
+2
diff --git a/test/files/run/macro-expand-nullary-generic.check b/test/files/run/macro-expand-nullary-generic.check
index 0895c30c3f..133840c469 100644
--- a/test/files/run/macro-expand-nullary-generic.check
+++ b/test/files/run/macro-expand-nullary-generic.check
@@ -1,6 +1,6 @@
-it works TypeTag[Int]
-it works TypeTag[Int]
-it works TypeTag[Int]
-it works TypeTag[Int]
-it works TypeTag[Int]
-kkthxbai
+it works TypeTag[Int]
+it works TypeTag[Int]
+it works TypeTag[Int]
+it works TypeTag[Int]
+it works TypeTag[Int]
+kkthxbai
diff --git a/test/files/run/macro-expand-nullary-generic/Impls_1.scala b/test/files/run/macro-expand-nullary-generic/Impls_1.scala
index fbbc23a824..1180c83a40 100644
--- a/test/files/run/macro-expand-nullary-generic/Impls_1.scala
+++ b/test/files/run/macro-expand-nullary-generic/Impls_1.scala
@@ -2,14 +2,14 @@ import scala.reflect.runtime.universe._
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def impl[T: c.AbsTypeTag](c: Ctx) = {
+ def impl[T: c.WeakTypeTag](c: Ctx) = {
import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works " + implicitly[c.AbsTypeTag[T]]))))
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works " + implicitly[c.WeakTypeTag[T]]))))
c.Expr[Unit](body)
}
- def fooNullary[T: c.AbsTypeTag](c: Ctx) = impl[T](c)
- def fooEmpty[T: c.AbsTypeTag](c: Ctx)() = impl[T](c)
- def barNullary[T: c.AbsTypeTag](c: Ctx)(x: c.Expr[Int]) = impl[T](c)
- def barEmpty[T: c.AbsTypeTag](c: Ctx)(x: c.Expr[Int])() = impl[T](c)
+ def fooNullary[T: c.WeakTypeTag](c: Ctx) = impl[T](c)
+ def fooEmpty[T: c.WeakTypeTag](c: Ctx)() = impl[T](c)
+ def barNullary[T: c.WeakTypeTag](c: Ctx)(x: c.Expr[Int]) = impl[T](c)
+ def barEmpty[T: c.WeakTypeTag](c: Ctx)(x: c.Expr[Int])() = impl[T](c)
} \ No newline at end of file
diff --git a/test/files/run/macro-expand-overload.check b/test/files/run/macro-expand-overload.check
index 9d9989d85f..a2b7b0e781 100644
--- a/test/files/run/macro-expand-overload.check
+++ b/test/files/run/macro-expand-overload.check
@@ -1,6 +1,6 @@
-(fooObjectString,Expr[Nothing](Macros),42)
-(fooObjectInt,Expr[Nothing](Macros),42)
-fooObjectBoolean
-(fooClassString,Expr[Nothing](new Macros()),42)
-(fooClassInt,Expr[Nothing](new Macros()),42)
-fooClassBoolean
+(fooObjectString,Expr[Nothing](Macros),42)
+(fooObjectInt,Expr[Nothing](Macros),42)
+fooObjectBoolean
+(fooClassString,Expr[Nothing](new Macros()),42)
+(fooClassInt,Expr[Nothing](new Macros()),42)
+fooClassBoolean
diff --git a/test/files/run/macro-expand-override.check b/test/files/run/macro-expand-override.check
index 486bec7098..b41dc156c4 100644
--- a/test/files/run/macro-expand-override.check
+++ b/test/files/run/macro-expand-override.check
@@ -1,15 +1,15 @@
-(fooBString,Expr[Nothing](Test.this.dd),42)
-(fooDInt,Expr[Nothing](Test.this.dd),42)
-fooBBoolean
-(fooBString,Expr[Nothing](Test.this.db),42)
-(fooBInt,Expr[Nothing](Test.this.db),42)
-fooBBoolean
-(fooZString,Expr[Nothing](Test.this.zz),42)
-(fooDInt,Expr[Nothing](Test.this.zz),42)
-fooZBoolean
-(fooBString,Expr[Nothing](Test.this.zd),42)
-(fooDInt,Expr[Nothing](Test.this.zd),42)
-fooZBoolean
-(fooBString,Expr[Nothing](Test.this.zb),42)
-(fooBInt,Expr[Nothing](Test.this.zb),42)
-fooZBoolean
+(fooBString,Expr[Nothing](Test.this.dd),42)
+(fooDInt,Expr[Nothing](Test.this.dd),42)
+fooBBoolean
+(fooBString,Expr[Nothing](Test.this.db),42)
+(fooBInt,Expr[Nothing](Test.this.db),42)
+fooBBoolean
+(fooZString,Expr[Nothing](Test.this.zz),42)
+(fooDInt,Expr[Nothing](Test.this.zz),42)
+fooZBoolean
+(fooBString,Expr[Nothing](Test.this.zd),42)
+(fooDInt,Expr[Nothing](Test.this.zd),42)
+fooZBoolean
+(fooBString,Expr[Nothing](Test.this.zb),42)
+(fooBInt,Expr[Nothing](Test.this.zb),42)
+fooZBoolean
diff --git a/test/files/run/macro-expand-recursive.check b/test/files/run/macro-expand-recursive.check
index 1ea14b4e20..7658ad2c24 100644
--- a/test/files/run/macro-expand-recursive.check
+++ b/test/files/run/macro-expand-recursive.check
@@ -1 +1 @@
-it works
+it works
diff --git a/test/files/run/macro-expand-tparams-explicit.check b/test/files/run/macro-expand-tparams-explicit.check
index 19e6d39bc8..e7e6718406 100644
--- a/test/files/run/macro-expand-tparams-explicit.check
+++ b/test/files/run/macro-expand-tparams-explicit.check
@@ -1 +1 @@
-TypeTag[Int]
+TypeTag[Int]
diff --git a/test/files/run/macro-expand-tparams-explicit/Impls_1.scala b/test/files/run/macro-expand-tparams-explicit/Impls_1.scala
index 0a879687e8..72b420d92f 100644
--- a/test/files/run/macro-expand-tparams-explicit/Impls_1.scala
+++ b/test/files/run/macro-expand-tparams-explicit/Impls_1.scala
@@ -2,9 +2,9 @@ import scala.reflect.runtime.universe._
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def foo[U: c.AbsTypeTag](c: Ctx) = {
+ def foo[U: c.WeakTypeTag](c: Ctx) = {
import c.universe._
- val U = implicitly[c.AbsTypeTag[U]]
+ val U = implicitly[c.WeakTypeTag[U]]
val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(U.toString))))
c.Expr[Unit](body)
}
diff --git a/test/files/run/macro-expand-tparams-implicit.check b/test/files/run/macro-expand-tparams-implicit.check
index 80c6b826ba..fa6b335afb 100644
--- a/test/files/run/macro-expand-tparams-implicit.check
+++ b/test/files/run/macro-expand-tparams-implicit.check
@@ -1,2 +1,2 @@
-TypeTag[Int]
-TypeTag[String]
+TypeTag[Int]
+WeakTypeTag[String]
diff --git a/test/files/run/macro-expand-tparams-implicit/Impls_1.scala b/test/files/run/macro-expand-tparams-implicit/Impls_1.scala
index f6cb63b9c9..33770516df 100644
--- a/test/files/run/macro-expand-tparams-implicit/Impls_1.scala
+++ b/test/files/run/macro-expand-tparams-implicit/Impls_1.scala
@@ -2,9 +2,9 @@ import scala.reflect.runtime.universe._
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def foo[U: c.AbsTypeTag](c: Ctx)(x: c.Expr[U]) = {
+ def foo[U: c.WeakTypeTag](c: Ctx)(x: c.Expr[U]) = {
import c.universe._
- val U = implicitly[c.AbsTypeTag[U]]
+ val U = implicitly[c.WeakTypeTag[U]]
val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(U.toString))))
c.Expr[Unit](body)
}
diff --git a/test/files/run/macro-expand-tparams-optional.check b/test/files/run/macro-expand-tparams-optional.check
index 3bacd7a4e0..b4a0f394c1 100644
--- a/test/files/run/macro-expand-tparams-optional.check
+++ b/test/files/run/macro-expand-tparams-optional.check
@@ -1 +1 @@
-don't know U
+don't know U
diff --git a/test/files/run/macro-expand-tparams-prefix-a.check b/test/files/run/macro-expand-tparams-prefix-a.check
index 6c23b47d64..0bf3c55bbe 100644
--- a/test/files/run/macro-expand-tparams-prefix-a.check
+++ b/test/files/run/macro-expand-tparams-prefix-a.check
@@ -1,4 +1,4 @@
-TypeTag[Int]
-TypeTag[Int]
-TypeTag[String]
-TypeTag[Boolean]
+TypeTag[Int]
+TypeTag[Int]
+WeakTypeTag[String]
+TypeTag[Boolean]
diff --git a/test/files/run/macro-expand-tparams-prefix-a/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix-a/Impls_1.scala
index f6cb63b9c9..33770516df 100644
--- a/test/files/run/macro-expand-tparams-prefix-a/Impls_1.scala
+++ b/test/files/run/macro-expand-tparams-prefix-a/Impls_1.scala
@@ -2,9 +2,9 @@ import scala.reflect.runtime.universe._
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def foo[U: c.AbsTypeTag](c: Ctx)(x: c.Expr[U]) = {
+ def foo[U: c.WeakTypeTag](c: Ctx)(x: c.Expr[U]) = {
import c.universe._
- val U = implicitly[c.AbsTypeTag[U]]
+ val U = implicitly[c.WeakTypeTag[U]]
val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(U.toString))))
c.Expr[Unit](body)
}
diff --git a/test/files/run/macro-expand-tparams-prefix-b.check b/test/files/run/macro-expand-tparams-prefix-b.check
index 67dabff11e..77c2ee9051 100644
--- a/test/files/run/macro-expand-tparams-prefix-b.check
+++ b/test/files/run/macro-expand-tparams-prefix-b.check
@@ -1,2 +1,2 @@
-TypeTag[Boolean] TypeTag[Int]
-TypeTag[Boolean] TypeTag[String]
+TypeTag[Boolean] TypeTag[Int]
+TypeTag[Boolean] WeakTypeTag[String]
diff --git a/test/files/run/macro-expand-tparams-prefix-b/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix-b/Impls_1.scala
index 7e0fa26569..9378e67712 100644
--- a/test/files/run/macro-expand-tparams-prefix-b/Impls_1.scala
+++ b/test/files/run/macro-expand-tparams-prefix-b/Impls_1.scala
@@ -2,10 +2,10 @@ import scala.reflect.runtime.universe._
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def foo[T: c.AbsTypeTag, U: c.AbsTypeTag](c: Ctx)(x: c.Expr[U]) = {
+ def foo[T: c.WeakTypeTag, U: c.WeakTypeTag](c: Ctx)(x: c.Expr[U]) = {
import c.universe._
- val T = implicitly[c.AbsTypeTag[T]]
- val U = implicitly[c.AbsTypeTag[U]]
+ val T = implicitly[c.WeakTypeTag[T]]
+ val U = implicitly[c.WeakTypeTag[U]]
val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString + " " + U.toString))))
c.Expr[Unit](body)
}
diff --git a/test/files/run/macro-expand-tparams-prefix-c1.check b/test/files/run/macro-expand-tparams-prefix-c1.check
index 8d1c4e3416..f0dd5b9cd8 100644
--- a/test/files/run/macro-expand-tparams-prefix-c1.check
+++ b/test/files/run/macro-expand-tparams-prefix-c1.check
@@ -1,3 +1,3 @@
-TypeTag[Int]
-TypeTag[String]
-TypeTag[Boolean]
+TypeTag[Int]
+WeakTypeTag[String]
+TypeTag[Boolean]
diff --git a/test/files/run/macro-expand-tparams-prefix-c1/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix-c1/Impls_1.scala
index ca515be627..afdd7d4f7a 100644
--- a/test/files/run/macro-expand-tparams-prefix-c1/Impls_1.scala
+++ b/test/files/run/macro-expand-tparams-prefix-c1/Impls_1.scala
@@ -2,11 +2,11 @@ import scala.reflect.runtime.universe._
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def foo[T, U: c.AbsTypeTag, V](c: Ctx)(implicit T: c.AbsTypeTag[T], V: c.AbsTypeTag[V]): c.Expr[Unit] = {
+ def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
import c.universe._
c.Expr(Block(List(
Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString)))),
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.AbsTypeTag[U]].toString)))),
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(V.toString))))),
Literal(Constant(()))))
}
diff --git a/test/files/run/macro-expand-tparams-prefix-c2.check b/test/files/run/macro-expand-tparams-prefix-c2.check
index 8d1c4e3416..f0dd5b9cd8 100644
--- a/test/files/run/macro-expand-tparams-prefix-c2.check
+++ b/test/files/run/macro-expand-tparams-prefix-c2.check
@@ -1,3 +1,3 @@
-TypeTag[Int]
-TypeTag[String]
-TypeTag[Boolean]
+TypeTag[Int]
+WeakTypeTag[String]
+TypeTag[Boolean]
diff --git a/test/files/run/macro-expand-tparams-prefix-c2/Impls_Macros_1.scala b/test/files/run/macro-expand-tparams-prefix-c2/Impls_Macros_1.scala
index 5a554590d8..3c2838208a 100644
--- a/test/files/run/macro-expand-tparams-prefix-c2/Impls_Macros_1.scala
+++ b/test/files/run/macro-expand-tparams-prefix-c2/Impls_Macros_1.scala
@@ -2,11 +2,11 @@ import scala.reflect.runtime.universe._
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def foo[T, U: c.AbsTypeTag, V](c: Ctx)(implicit T: c.AbsTypeTag[T], V: c.AbsTypeTag[V]): c.Expr[Unit] = {
+ def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
import c.universe._
c.Expr(Block(List(
Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString)))),
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.AbsTypeTag[U]].toString)))),
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(V.toString))))),
Literal(Constant(()))))
}
diff --git a/test/files/run/macro-expand-tparams-prefix-d1.check b/test/files/run/macro-expand-tparams-prefix-d1.check
index 319edfbed0..c5aaaf5a09 100644
--- a/test/files/run/macro-expand-tparams-prefix-d1.check
+++ b/test/files/run/macro-expand-tparams-prefix-d1.check
@@ -1,3 +1,3 @@
-AbsTypeTag[T]
-AbsTypeTag[U]
-TypeTag[Boolean]
+WeakTypeTag[T]
+WeakTypeTag[U]
+TypeTag[Boolean]
diff --git a/test/files/run/macro-expand-tparams-prefix-d1/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix-d1/Impls_1.scala
index ca515be627..afdd7d4f7a 100644
--- a/test/files/run/macro-expand-tparams-prefix-d1/Impls_1.scala
+++ b/test/files/run/macro-expand-tparams-prefix-d1/Impls_1.scala
@@ -2,11 +2,11 @@ import scala.reflect.runtime.universe._
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def foo[T, U: c.AbsTypeTag, V](c: Ctx)(implicit T: c.AbsTypeTag[T], V: c.AbsTypeTag[V]): c.Expr[Unit] = {
+ def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
import c.universe._
c.Expr(Block(List(
Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString)))),
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.AbsTypeTag[U]].toString)))),
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(V.toString))))),
Literal(Constant(()))))
}
diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad.check b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad.check
index fd1d654cf8..1c3ecfdefb 100644
--- a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad.check
+++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad.check
@@ -1,4 +1,4 @@
-reflective compilation has failed:
-
+reflective compilation has failed:
+
no `: _*' annotation allowed here
-(such annotations are only allowed in arguments to *-parameters)
+(such annotations are only allowed in arguments to *-parameters)
diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Macros_Test_2.scala b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Macros_Test_2.scala
index c0319fcd6c..16d2c1e6ac 100644
--- a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Macros_Test_2.scala
+++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Macros_Test_2.scala
@@ -7,6 +7,6 @@ object Test extends App {
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
val tree = Apply(Select(Ident("Macros"), newTermName("foo")), List(Typed(Apply(Ident(definitions.ListModule), List(Literal(Constant(1)), Literal(Constant(2)))), Ident(tpnme.WILDCARD_STAR))))
- try cm.mkToolBox().runExpr(tree)
+ try cm.mkToolBox().eval(tree)
catch { case ex: Throwable => println(ex.getMessage) }
} \ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good.check b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good.check
index 835137b4a2..fe90caed3c 100644
--- a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good.check
+++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good.check
@@ -1 +1 @@
-List(1, 2, 3, 4, 5)
+List(1, 2, 3, 4, 5)
diff --git a/test/files/run/macro-expand-varargs-explicit-over-varargs.check b/test/files/run/macro-expand-varargs-explicit-over-varargs.check
index 835137b4a2..fe90caed3c 100644
--- a/test/files/run/macro-expand-varargs-explicit-over-varargs.check
+++ b/test/files/run/macro-expand-varargs-explicit-over-varargs.check
@@ -1 +1 @@
-List(1, 2, 3, 4, 5)
+List(1, 2, 3, 4, 5)
diff --git a/test/files/run/macro-expand-varargs-implicit-over-nonvarargs.check b/test/files/run/macro-expand-varargs-implicit-over-nonvarargs.check
index 0a6596858c..bcfab19847 100644
--- a/test/files/run/macro-expand-varargs-implicit-over-nonvarargs.check
+++ b/test/files/run/macro-expand-varargs-implicit-over-nonvarargs.check
@@ -1 +1 @@
-(1,2,3,4,5)
+(1,2,3,4,5)
diff --git a/test/files/run/macro-expand-varargs-implicit-over-varargs.check b/test/files/run/macro-expand-varargs-implicit-over-varargs.check
index f25fa141d3..2c174a8a99 100644
--- a/test/files/run/macro-expand-varargs-implicit-over-varargs.check
+++ b/test/files/run/macro-expand-varargs-implicit-over-varargs.check
@@ -1 +1 @@
-WrappedArray(1, 2, 3, 4, 5)
+WrappedArray(1, 2, 3, 4, 5)
diff --git a/test/files/run/macro-impl-default-params.check b/test/files/run/macro-impl-default-params.check
index eaf94458e6..b32e345706 100644
--- a/test/files/run/macro-impl-default-params.check
+++ b/test/files/run/macro-impl-default-params.check
@@ -1,5 +1,5 @@
-foo_targs:
-invoking foo_targs...
-type of prefix is: Nothing
-type of prefix tree is: Macros[Int]
-U is: String
+foo_targs:
+invoking foo_targs...
+type of prefix is: Nothing
+type of prefix tree is: Macros[Int]
+U is: String
diff --git a/test/files/run/macro-impl-default-params/Impls_Macros_1.scala b/test/files/run/macro-impl-default-params/Impls_Macros_1.scala
index 06c58d96ab..db77b1923a 100644
--- a/test/files/run/macro-impl-default-params/Impls_Macros_1.scala
+++ b/test/files/run/macro-impl-default-params/Impls_Macros_1.scala
@@ -2,10 +2,10 @@ import scala.reflect.runtime.universe._
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def foo_targs[T, U: c.AbsTypeTag](c: Ctx = null)(x: c.Expr[Int] = null) = {
+ def foo_targs[T, U: c.WeakTypeTag](c: Ctx = null)(x: c.Expr[Int] = null) = {
import c.{prefix => prefix}
import c.universe._
- val U = implicitly[c.AbsTypeTag[U]]
+ val U = implicitly[c.WeakTypeTag[U]]
val body = Block(
Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("invoking foo_targs...")))),
Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("type of prefix is: " + prefix.staticType)))),
diff --git a/test/files/run/macro-impl-rename-context.check b/test/files/run/macro-impl-rename-context.check
index 753edcd970..6a34e5fd87 100644
--- a/test/files/run/macro-impl-rename-context.check
+++ b/test/files/run/macro-impl-rename-context.check
@@ -1,2 +1,2 @@
-foo
-invoking foo...
+foo
+invoking foo...
diff --git a/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype.check b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype.check
index e21e05157a..916195145e 100644
--- a/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype.check
+++ b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype.check
@@ -1,5 +1,5 @@
-reflective compilation has failed:
-
+reflective compilation has failed:
+
type mismatch;
found : String("42")
- required: Int
+ required: Int
diff --git a/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Test_2.scala b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Test_2.scala
index c6caa2b585..15c498efb0 100644
--- a/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Test_2.scala
+++ b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Test_2.scala
@@ -3,6 +3,6 @@ object Test extends App {
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
val tree = Select(Ident("Macros"), newTermName("foo"))
- try cm.mkToolBox().runExpr(tree)
+ try cm.mkToolBox().eval(tree)
catch { case ex: Throwable => println(ex.getMessage) }
} \ No newline at end of file
diff --git a/test/files/run/macro-invalidret-nontypeable.check b/test/files/run/macro-invalidret-nontypeable.check
index eee08528e2..cf7acb0d60 100644
--- a/test/files/run/macro-invalidret-nontypeable.check
+++ b/test/files/run/macro-invalidret-nontypeable.check
@@ -1,3 +1,3 @@
-reflective compilation has failed:
-
-not found: value IDoNotExist
+reflective compilation has failed:
+
+not found: value IDoNotExist
diff --git a/test/files/run/macro-invalidret-nontypeable/Test_2.scala b/test/files/run/macro-invalidret-nontypeable/Test_2.scala
index c6caa2b585..15c498efb0 100644
--- a/test/files/run/macro-invalidret-nontypeable/Test_2.scala
+++ b/test/files/run/macro-invalidret-nontypeable/Test_2.scala
@@ -3,6 +3,6 @@ object Test extends App {
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
val tree = Select(Ident("Macros"), newTermName("foo"))
- try cm.mkToolBox().runExpr(tree)
+ try cm.mkToolBox().eval(tree)
catch { case ex: Throwable => println(ex.getMessage) }
} \ No newline at end of file
diff --git a/test/files/run/macro-invalidusage-badret.check b/test/files/run/macro-invalidusage-badret.check
index 5bdc484644..221732eefc 100644
--- a/test/files/run/macro-invalidusage-badret.check
+++ b/test/files/run/macro-invalidusage-badret.check
@@ -1,5 +1,5 @@
-reflective compilation has failed:
-
+reflective compilation has failed:
+
type mismatch;
found : Int(42)
- required: String
+ required: String
diff --git a/test/files/run/macro-invalidusage-badret/Test_2.scala b/test/files/run/macro-invalidusage-badret/Test_2.scala
index 8322e8a4e0..f3a76f3fff 100644
--- a/test/files/run/macro-invalidusage-badret/Test_2.scala
+++ b/test/files/run/macro-invalidusage-badret/Test_2.scala
@@ -3,6 +3,6 @@ object Test extends App {
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
val tree = Typed(Apply(Select(Ident("Macros"), newTermName("foo")), List(Literal(Constant(42)))), Ident(newTypeName("String")))
- try cm.mkToolBox().runExpr(tree)
+ try cm.mkToolBox().eval(tree)
catch { case ex: Throwable => println(ex.getMessage) }
}
diff --git a/test/files/run/macro-invalidusage-partialapplication-with-tparams.check b/test/files/run/macro-invalidusage-partialapplication-with-tparams.check
index 73f57b0b81..f1d5e925fa 100644
--- a/test/files/run/macro-invalidusage-partialapplication-with-tparams.check
+++ b/test/files/run/macro-invalidusage-partialapplication-with-tparams.check
@@ -1,3 +1,3 @@
-reflective compilation has failed:
-
-macros cannot be partially applied
+reflective compilation has failed:
+
+macros cannot be partially applied
diff --git a/test/files/run/macro-invalidusage-partialapplication-with-tparams/Impls_Macros_1.scala b/test/files/run/macro-invalidusage-partialapplication-with-tparams/Impls_Macros_1.scala
index a54b7f4b08..4583a726cf 100644
--- a/test/files/run/macro-invalidusage-partialapplication-with-tparams/Impls_Macros_1.scala
+++ b/test/files/run/macro-invalidusage-partialapplication-with-tparams/Impls_Macros_1.scala
@@ -1,7 +1,7 @@
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def foo[T: c.AbsTypeTag](c: Ctx)(x: c.Expr[T]) = {
+ def foo[T: c.WeakTypeTag](c: Ctx)(x: c.Expr[T]) = {
import c.universe._
val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(x.tree.toString))))
c.Expr[Unit](body)
diff --git a/test/files/run/macro-invalidusage-partialapplication-with-tparams/Test_2.scala b/test/files/run/macro-invalidusage-partialapplication-with-tparams/Test_2.scala
index f51cc7e699..c91fd7d380 100644
--- a/test/files/run/macro-invalidusage-partialapplication-with-tparams/Test_2.scala
+++ b/test/files/run/macro-invalidusage-partialapplication-with-tparams/Test_2.scala
@@ -3,6 +3,6 @@ object Test extends App {
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
val tree = Select(Ident("Macros"), newTermName("foo"))
- try cm.mkToolBox().runExpr(tree)
+ try cm.mkToolBox().eval(tree)
catch { case ex: Throwable => println(ex.getMessage) }
}
diff --git a/test/files/run/macro-invalidusage-partialapplication.check b/test/files/run/macro-invalidusage-partialapplication.check
index 73f57b0b81..f1d5e925fa 100644
--- a/test/files/run/macro-invalidusage-partialapplication.check
+++ b/test/files/run/macro-invalidusage-partialapplication.check
@@ -1,3 +1,3 @@
-reflective compilation has failed:
-
-macros cannot be partially applied
+reflective compilation has failed:
+
+macros cannot be partially applied
diff --git a/test/files/run/macro-invalidusage-partialapplication/Test_2.scala b/test/files/run/macro-invalidusage-partialapplication/Test_2.scala
index 64020b2aa9..cbfee725e2 100644
--- a/test/files/run/macro-invalidusage-partialapplication/Test_2.scala
+++ b/test/files/run/macro-invalidusage-partialapplication/Test_2.scala
@@ -3,6 +3,6 @@ object Test extends App {
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
val tree = Apply(Select(Ident("Macros"), newTermName("foo")), List(Literal(Constant(40))))
- try cm.mkToolBox().runExpr(tree)
+ try cm.mkToolBox().eval(tree)
catch { case ex: Throwable => println(ex.getMessage) }
}
diff --git a/test/files/run/macro-openmacros.check b/test/files/run/macro-openmacros.check
index a4b06a1e1a..ba0ae3ff42 100644
--- a/test/files/run/macro-openmacros.check
+++ b/test/files/run/macro-openmacros.check
@@ -1,3 +1,3 @@
-List(MacroContext(foo@source-Test_2.scala,line-2,offset=35 +0))
-List(MacroContext(foo@source-Test_2.scala,line-2,offset=35 +1), MacroContext(foo@source-Test_2.scala,line-2,offset=35 +0))
-List(MacroContext(foo@source-Test_2.scala,line-2,offset=35 +2), MacroContext(foo@source-Test_2.scala,line-2,offset=35 +1), MacroContext(foo@source-Test_2.scala,line-2,offset=35 +0))
+List(MacroContext(foo@source-Test_2.scala,line-2,offset=35 +0))
+List(MacroContext(foo@source-Test_2.scala,line-2,offset=35 +1), MacroContext(foo@source-Test_2.scala,line-2,offset=35 +0))
+List(MacroContext(foo@source-Test_2.scala,line-2,offset=35 +2), MacroContext(foo@source-Test_2.scala,line-2,offset=35 +1), MacroContext(foo@source-Test_2.scala,line-2,offset=35 +0))
diff --git a/test/files/run/macro-openmacros/Impls_Macros_1.scala b/test/files/run/macro-openmacros/Impls_Macros_1.scala
index 38d46c5185..b863ac048b 100644
--- a/test/files/run/macro-openmacros/Impls_Macros_1.scala
+++ b/test/files/run/macro-openmacros/Impls_Macros_1.scala
@@ -4,8 +4,7 @@ object Macros {
def impl(c: Context): c.Expr[Unit] = {
// we're macros, so we can reflect against our source path
// so we don't need any partests to clean up after us!
- val c.CompilationUnit(file, _, _) = c.enclosingUnit
- val dir = file.getCanonicalFile.getParentFile
+ val dir = c.enclosingUnit.source.file.file.getCanonicalFile.getParentFile
def normalizePaths(s: String) = {
val base = (dir.getCanonicalPath + java.io.File.separator).replace('\\', '/')
var regex = """\Q%s\E""" format base
diff --git a/test/files/run/macro-reflective-ma-normal-mdmi/Test_2.scala b/test/files/run/macro-reflective-ma-normal-mdmi/Test_2.scala
index 67666a632b..373c6a6fca 100644
--- a/test/files/run/macro-reflective-ma-normal-mdmi/Test_2.scala
+++ b/test/files/run/macro-reflective-ma-normal-mdmi/Test_2.scala
@@ -3,5 +3,5 @@ object Test extends App {
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
val tree = Apply(Select(Ident("Macros"), newTermName("foo")), List(Literal(Constant(42))))
- println(cm.mkToolBox().runExpr(tree))
+ println(cm.mkToolBox().eval(tree))
}
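For context, the hunks above and below track the ToolBox rename runExpr -> eval. A minimal, self-contained sketch of the renamed entry point (Scala 2.10 reflection; the object name and the hand-built tree are illustrative only):

import scala.reflect.runtime.{currentMirror => cm}
import scala.reflect.runtime.universe._
import scala.tools.reflect.ToolBox

object EvalSketch extends App {
  val tb = cm.mkToolBox()
  // 20 + 22, assembled the same way the tests in this patch build their trees
  val tree = Apply(Select(Literal(Constant(20)), newTermName("$plus")), List(Literal(Constant(22))))
  println(tb.eval(tree)) // prints 42; runExpr was the old name for the same operation
}
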
diff --git a/test/files/run/macro-reflective-mamd-normal-mi/Macros_Test_2.scala b/test/files/run/macro-reflective-mamd-normal-mi/Macros_Test_2.scala
index 7a2c0a6fa9..089f30f389 100644
--- a/test/files/run/macro-reflective-mamd-normal-mi/Macros_Test_2.scala
+++ b/test/files/run/macro-reflective-mamd-normal-mi/Macros_Test_2.scala
@@ -16,5 +16,5 @@ object Test extends App {
val macroapp = Apply(Select(Ident("Macros"), newTermName("foo")), List(Literal(Constant(42))))
val tree = Block(macrodef, module, macroapp)
val toolbox = cm.mkToolBox(options = "-language:experimental.macros")
- println(toolbox.runExpr(tree))
+ println(toolbox.eval(tree))
}
diff --git a/test/files/run/macro-reify-abstypetag-notypeparams.check b/test/files/run/macro-reify-abstypetag-notypeparams.check
index aadd713bef..7732c10d75 100644
--- a/test/files/run/macro-reify-abstypetag-notypeparams.check
+++ b/test/files/run/macro-reify-abstypetag-notypeparams.check
@@ -1,2 +1,2 @@
-TypeTag[Int]
-TypeTag[List[Int]]
+TypeTag[Int]
+TypeTag[List[Int]]
diff --git a/test/files/run/macro-reify-abstypetag-notypeparams/Test.scala b/test/files/run/macro-reify-abstypetag-notypeparams/Test.scala
index 34f742b9fb..73c2d05921 100644
--- a/test/files/run/macro-reify-abstypetag-notypeparams/Test.scala
+++ b/test/files/run/macro-reify-abstypetag-notypeparams/Test.scala
@@ -1,6 +1,6 @@
import scala.reflect.runtime.universe._
object Test extends App {
- println(implicitly[AbsTypeTag[Int]])
- println(implicitly[AbsTypeTag[List[Int]]])
+ println(implicitly[WeakTypeTag[Int]])
+ println(implicitly[WeakTypeTag[List[Int]]])
} \ No newline at end of file
diff --git a/test/files/run/macro-reify-abstypetag-typeparams-notags.check b/test/files/run/macro-reify-abstypetag-typeparams-notags.check
index be62b59a69..a741d11ebd 100644
--- a/test/files/run/macro-reify-abstypetag-typeparams-notags.check
+++ b/test/files/run/macro-reify-abstypetag-typeparams-notags.check
@@ -1,2 +1,2 @@
-AbsTypeTag[T]
-AbsTypeTag[List[T]]
+WeakTypeTag[T]
+WeakTypeTag[List[T]]
diff --git a/test/files/run/macro-reify-abstypetag-typeparams-notags/Test.scala b/test/files/run/macro-reify-abstypetag-typeparams-notags/Test.scala
index 0b42cc07b5..4ba2231d9a 100644
--- a/test/files/run/macro-reify-abstypetag-typeparams-notags/Test.scala
+++ b/test/files/run/macro-reify-abstypetag-typeparams-notags/Test.scala
@@ -2,8 +2,8 @@ import scala.reflect.runtime.universe._
object Test extends App {
def fooNoTypeTag[T] = {
- println(implicitly[AbsTypeTag[T]])
- println(implicitly[AbsTypeTag[List[T]]])
+ println(implicitly[WeakTypeTag[T]])
+ println(implicitly[WeakTypeTag[List[T]]])
}
fooNoTypeTag[Int]
} \ No newline at end of file
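For reference, the AbsTypeTag -> WeakTypeTag rename exercised by the tag tests above and below; a minimal runtime sketch of the behaviour those .check files record (Scala 2.10; object and method names are illustrative):

import scala.reflect.runtime.universe._

object WeakTagSketch extends App {
  // A concrete type materializes a full TypeTag, which is also a WeakTypeTag.
  println(implicitly[WeakTypeTag[List[Int]]])            // TypeTag[List[Int]]
  // An untagged type parameter can only be captured weakly.
  def untagged[T] = println(implicitly[WeakTypeTag[T]])  // WeakTypeTag[T]
  // A weak context bound propagates whatever tag the call site can supply.
  def tagged[T: WeakTypeTag] = println(implicitly[WeakTypeTag[T]])
  untagged[Int]
  tagged[Int]                                            // TypeTag[Int]
}
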
diff --git a/test/files/run/macro-reify-abstypetag-typeparams-tags.check b/test/files/run/macro-reify-abstypetag-typeparams-tags.check
index d6ca5a85ef..e225e57757 100644
--- a/test/files/run/macro-reify-abstypetag-typeparams-tags.check
+++ b/test/files/run/macro-reify-abstypetag-typeparams-tags.check
@@ -1,2 +1,2 @@
-TypeTag[Int]
-AbsTypeTag[List[Int]]
+TypeTag[Int]
+WeakTypeTag[List[Int]]
diff --git a/test/files/run/macro-reify-abstypetag-typeparams-tags/Test.scala b/test/files/run/macro-reify-abstypetag-typeparams-tags/Test.scala
index 82a7b7971d..70ca615e1f 100644
--- a/test/files/run/macro-reify-abstypetag-typeparams-tags/Test.scala
+++ b/test/files/run/macro-reify-abstypetag-typeparams-tags/Test.scala
@@ -1,9 +1,9 @@
import scala.reflect.runtime.universe._
object Test extends App {
- def fooTypeTag[T: AbsTypeTag] = {
- println(implicitly[AbsTypeTag[T]])
- println(implicitly[AbsTypeTag[List[T]]])
+ def fooTypeTag[T: WeakTypeTag] = {
+ println(implicitly[WeakTypeTag[T]])
+ println(implicitly[WeakTypeTag[List[T]]])
}
fooTypeTag[Int]
} \ No newline at end of file
diff --git a/test/files/run/macro-reify-abstypetag-usetypetag.check b/test/files/run/macro-reify-abstypetag-usetypetag.check
index d6ca5a85ef..e225e57757 100644
--- a/test/files/run/macro-reify-abstypetag-usetypetag.check
+++ b/test/files/run/macro-reify-abstypetag-usetypetag.check
@@ -1,2 +1,2 @@
-TypeTag[Int]
-AbsTypeTag[List[Int]]
+TypeTag[Int]
+WeakTypeTag[List[Int]]
diff --git a/test/files/run/macro-reify-abstypetag-usetypetag/Test.scala b/test/files/run/macro-reify-abstypetag-usetypetag/Test.scala
index 3c62725c42..ecae4110a8 100644
--- a/test/files/run/macro-reify-abstypetag-usetypetag/Test.scala
+++ b/test/files/run/macro-reify-abstypetag-usetypetag/Test.scala
@@ -2,8 +2,8 @@ import scala.reflect.runtime.universe._
object Test extends App {
def fooTypeTag[T: TypeTag] = {
- println(implicitly[AbsTypeTag[T]])
- println(implicitly[AbsTypeTag[List[T]]])
+ println(implicitly[WeakTypeTag[T]])
+ println(implicitly[WeakTypeTag[List[T]]])
}
fooTypeTag[Int]
} \ No newline at end of file
diff --git a/test/files/run/macro-reify-basic.check b/test/files/run/macro-reify-basic.check
index f35d3e67b4..3b18e512db 100644
--- a/test/files/run/macro-reify-basic.check
+++ b/test/files/run/macro-reify-basic.check
@@ -1 +1 @@
-hello world
+hello world
diff --git a/test/files/run/macro-reify-freevars.check b/test/files/run/macro-reify-freevars.check
index 2e925f9fa8..f13806889e 100644
--- a/test/files/run/macro-reify-freevars.check
+++ b/test/files/run/macro-reify-freevars.check
@@ -1,3 +1,3 @@
-reflective compilation has failed:
-
-Macro expansion contains free term variable code defined by map in Macros_1.scala:9:9. Have you forgotten to use splice when splicing this variable into a reifee? If you have troubles tracking free term variables, consider using -Xlog-free-terms
+reflective compilation has failed:
+
+Macro expansion contains free term variable code defined by map in Macros_1.scala:9:9. Have you forgotten to use splice when splicing this variable into a reifee? If you have troubles tracking free term variables, consider using -Xlog-free-terms
diff --git a/test/files/run/macro-reify-freevars/Macros_1.scala b/test/files/run/macro-reify-freevars/Macros_1.scala
index 57fdc32437..20f80c06d1 100644
--- a/test/files/run/macro-reify-freevars/Macros_1.scala
+++ b/test/files/run/macro-reify-freevars/Macros_1.scala
@@ -1,7 +1,7 @@
package scala.collection.slick
object QueryableMacros{
- def map[T:c.AbsTypeTag, S:c.AbsTypeTag]
+ def map[T:c.WeakTypeTag, S:c.WeakTypeTag]
(c: scala.reflect.macros.Context)
(projection: c.Expr[T => S])
: c.Expr[scala.collection.slick.Queryable[S]] = {
diff --git a/test/files/run/macro-reify-freevars/Test_2.scala b/test/files/run/macro-reify-freevars/Test_2.scala
index 603cf10d41..e24758cfb4 100644
--- a/test/files/run/macro-reify-freevars/Test_2.scala
+++ b/test/files/run/macro-reify-freevars/Test_2.scala
@@ -6,6 +6,6 @@ object Test extends App {
val x = ValDef(NoMods, newTermName("x"), Ident("Int"), EmptyTree)
val fn = Function(List(x), Apply(Select(Ident(newTermName("x")), newTermName("$plus")), List(Literal(Constant("5")))))
val tree = Apply(Select(q, newTermName("map")), List(fn))
- try cm.mkToolBox().runExpr(tree)
+ try cm.mkToolBox().eval(tree)
catch { case ex: Throwable => println(ex.getMessage) }
} \ No newline at end of file
diff --git a/test/files/run/macro-reify-nested-a/Impls_Macros_1.scala b/test/files/run/macro-reify-nested-a/Impls_Macros_1.scala
index 04714970dd..b4351c2c53 100644
--- a/test/files/run/macro-reify-nested-a/Impls_Macros_1.scala
+++ b/test/files/run/macro-reify-nested-a/Impls_Macros_1.scala
@@ -21,18 +21,19 @@ case class Utils[C <: Context]( c:C ) {
}
}
object QueryableMacros{
- def _helper[C <: Context,S:c.AbsTypeTag]( c:C )( name:String, projection:c.Expr[_] ) = {
+ def _helper[C <: Context,S:c.WeakTypeTag]( c:C )( name:String, projection:c.Expr[_] ) = {
import c.universe._
- val element_type = implicitly[c.AbsTypeTag[S]].tpe
+ import treeBuild._
+ val element_type = implicitly[c.WeakTypeTag[S]].tpe
val foo = c.Expr[ru.Expr[Queryable[S]]](
- c.reifyTree( c.runtimeUniverse, EmptyTree, c.typeCheck(
+ c.reifyTree( mkRuntimeUniverseRef, EmptyTree, c.typeCheck(
Utils[c.type](c).removeDoubleReify(
Apply(Select(c.prefix.tree, newTermName( name )), List( projection.tree ))
).asInstanceOf[Tree]
)))
c.universe.reify{ Queryable.factory[S]( foo.splice )}
}
- def map[T:c.AbsTypeTag, S:c.AbsTypeTag]
+ def map[T:c.WeakTypeTag, S:c.WeakTypeTag]
(c: scala.reflect.macros.Context)
(projection: c.Expr[T => S]): c.Expr[Queryable[S]] = _helper[c.type,S]( c )( "_map", projection )
}
diff --git a/test/files/run/macro-reify-nested-b/Impls_Macros_1.scala b/test/files/run/macro-reify-nested-b/Impls_Macros_1.scala
index 04714970dd..b4351c2c53 100644
--- a/test/files/run/macro-reify-nested-b/Impls_Macros_1.scala
+++ b/test/files/run/macro-reify-nested-b/Impls_Macros_1.scala
@@ -21,18 +21,19 @@ case class Utils[C <: Context]( c:C ) {
}
}
object QueryableMacros{
- def _helper[C <: Context,S:c.AbsTypeTag]( c:C )( name:String, projection:c.Expr[_] ) = {
+ def _helper[C <: Context,S:c.WeakTypeTag]( c:C )( name:String, projection:c.Expr[_] ) = {
import c.universe._
- val element_type = implicitly[c.AbsTypeTag[S]].tpe
+ import treeBuild._
+ val element_type = implicitly[c.WeakTypeTag[S]].tpe
val foo = c.Expr[ru.Expr[Queryable[S]]](
- c.reifyTree( c.runtimeUniverse, EmptyTree, c.typeCheck(
+ c.reifyTree( mkRuntimeUniverseRef, EmptyTree, c.typeCheck(
Utils[c.type](c).removeDoubleReify(
Apply(Select(c.prefix.tree, newTermName( name )), List( projection.tree ))
).asInstanceOf[Tree]
)))
c.universe.reify{ Queryable.factory[S]( foo.splice )}
}
- def map[T:c.AbsTypeTag, S:c.AbsTypeTag]
+ def map[T:c.WeakTypeTag, S:c.WeakTypeTag]
(c: scala.reflect.macros.Context)
(projection: c.Expr[T => S]): c.Expr[Queryable[S]] = _helper[c.type,S]( c )( "_map", projection )
}
diff --git a/test/files/run/macro-reify-ref-to-packageless.check b/test/files/run/macro-reify-ref-to-packageless.check
index 2f562a182f..d81cc0710e 100644
--- a/test/files/run/macro-reify-ref-to-packageless.check
+++ b/test/files/run/macro-reify-ref-to-packageless.check
@@ -1 +1 @@
-42
+42
diff --git a/test/files/run/macro-reify-splice-outside-reify.check b/test/files/run/macro-reify-splice-outside-reify.check
index 2f562a182f..d81cc0710e 100644
--- a/test/files/run/macro-reify-splice-outside-reify.check
+++ b/test/files/run/macro-reify-splice-outside-reify.check
@@ -1 +1 @@
-42
+42
diff --git a/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala b/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala
index f3e1c9ae95..5330d0e32b 100644
--- a/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala
+++ b/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala
@@ -4,18 +4,7 @@ object Impls {
def foo(c: Ctx)(x: c.Expr[Int]) = {
val x1 = c.Expr[Int](c.resetAllAttrs(x.tree))
// was: c.literal(x1.splice)
- c.literal(eval(c)(x1))
- }
-
- private def eval[T](c: Ctx)(x: c.Expr[T]): T = {
- import scala.reflect.runtime.{universe => ru}
- val mirror = ru.runtimeMirror(c.libraryClassLoader)
- import scala.tools.reflect.ToolBox
- val toolBox = mirror.mkToolBox()
- val importer = ru.mkImporter(c.universe).asInstanceOf[ru.Importer { val from: c.universe.type }]
- val tree = c.resetAllAttrs(x.tree.duplicate)
- val imported = importer.importTree(tree)
- toolBox.runExpr(imported).asInstanceOf[T]
+ c.literal(c.eval(x1))
}
}
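For context, the helper deleted above is what c.eval now provides to the macro author directly; a minimal sketch of a macro implementation using it (hypothetical names; a macro impl must be compiled separately before it can be invoked, Scala 2.10):

import scala.language.experimental.macros
import scala.reflect.macros.Context

object ConstFold {
  // Evaluates an untyped copy of the argument tree at expansion time and
  // inlines the result as a literal, mirroring the pattern in the hunk above.
  def impl(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
    val untyped = c.Expr[Int](c.resetAllAttrs(x.tree.duplicate))
    c.literal(c.eval(untyped))
  }
  def fold(x: Int): Int = macro impl
}
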
diff --git a/test/files/run/macro-reify-splice-outside-reify/Test_2.scala b/test/files/run/macro-reify-splice-outside-reify/Test_2.scala
index 5bca7db668..8f96ea199d 100644
--- a/test/files/run/macro-reify-splice-outside-reify/Test_2.scala
+++ b/test/files/run/macro-reify-splice-outside-reify/Test_2.scala
@@ -3,6 +3,6 @@ object Test extends App {
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
val tree = Apply(Select(Ident("Macros"), newTermName("foo")), List(Literal(Constant(42))))
- try println(cm.mkToolBox().runExpr(tree))
+ try println(cm.mkToolBox().eval(tree))
catch { case ex: Throwable => println(ex.getMessage) }
}
diff --git a/test/files/run/macro-reify-splice-splice.check b/test/files/run/macro-reify-splice-splice.check
index f35d3e67b4..3b18e512db 100644
--- a/test/files/run/macro-reify-splice-splice.check
+++ b/test/files/run/macro-reify-splice-splice.check
@@ -1 +1 @@
-hello world
+hello world
diff --git a/test/files/run/macro-reify-tagful-a.check b/test/files/run/macro-reify-tagful-a.check
index 8a701df6a5..3f4c719990 100644
--- a/test/files/run/macro-reify-tagful-a.check
+++ b/test/files/run/macro-reify-tagful-a.check
@@ -1 +1 @@
-List(hello world)
+List(hello world)
diff --git a/test/files/run/macro-reify-tagful-a/Macros_1.scala b/test/files/run/macro-reify-tagful-a/Macros_1.scala
index 0eac74236f..f2512dcfaf 100644
--- a/test/files/run/macro-reify-tagful-a/Macros_1.scala
+++ b/test/files/run/macro-reify-tagful-a/Macros_1.scala
@@ -5,7 +5,7 @@ object Macros {
def foo[T](s: T) = macro Impls.foo[T]
object Impls {
- def foo[T: c.AbsTypeTag](c: Ctx)(s: c.Expr[T]) = c.universe.reify {
+ def foo[T: c.WeakTypeTag](c: Ctx)(s: c.Expr[T]) = c.universe.reify {
List(s.splice)
}
}
diff --git a/test/files/run/macro-reify-tagless-a.check b/test/files/run/macro-reify-tagless-a.check
index d69f641280..231741edc5 100644
--- a/test/files/run/macro-reify-tagless-a.check
+++ b/test/files/run/macro-reify-tagless-a.check
@@ -1,3 +1,3 @@
-reflective compilation has failed:
-
-Macro expansion contains free type variable T defined by foo in Impls_Macros_1.scala:7:13. Have you forgotten to use c.AbsTypeTag annotation for this type parameter? If you have troubles tracking free type variables, consider using -Xlog-free-types
+reflective compilation has failed:
+
+Macro expansion contains free type variable T defined by foo in Impls_Macros_1.scala:7:13. Have you forgotten to use c.WeakTypeTag annotation for this type parameter? If you have troubles tracking free type variables, consider using -Xlog-free-types
diff --git a/test/files/run/macro-reify-tagless-a/Test_2.scala b/test/files/run/macro-reify-tagless-a/Test_2.scala
index 7029e59ea1..1bb3945ece 100644
--- a/test/files/run/macro-reify-tagless-a/Test_2.scala
+++ b/test/files/run/macro-reify-tagless-a/Test_2.scala
@@ -9,6 +9,6 @@ object Test extends App {
val rhs = Apply(Select(Ident("Macros"), newTermName("foo")), List(Literal(Constant("hello world"))))
val list = ValDef(NoMods, newTermName("list"), tpt, rhs)
val tree = Block(list, Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Ident(list.name))))
- try cm.mkToolBox().runExpr(tree)
+ try cm.mkToolBox().eval(tree)
catch { case ex: Throwable => println(ex.getMessage) }
}
diff --git a/test/files/run/macro-reify-typetag-notypeparams.check b/test/files/run/macro-reify-typetag-notypeparams.check
index aadd713bef..7732c10d75 100644
--- a/test/files/run/macro-reify-typetag-notypeparams.check
+++ b/test/files/run/macro-reify-typetag-notypeparams.check
@@ -1,2 +1,2 @@
-TypeTag[Int]
-TypeTag[List[Int]]
+TypeTag[Int]
+TypeTag[List[Int]]
diff --git a/test/files/run/macro-reify-typetag-typeparams-tags.check b/test/files/run/macro-reify-typetag-typeparams-tags.check
index aadd713bef..7732c10d75 100644
--- a/test/files/run/macro-reify-typetag-typeparams-tags.check
+++ b/test/files/run/macro-reify-typetag-typeparams-tags.check
@@ -1,2 +1,2 @@
-TypeTag[Int]
-TypeTag[List[Int]]
+TypeTag[Int]
+TypeTag[List[Int]]
diff --git a/test/files/run/macro-reify-unreify.check b/test/files/run/macro-reify-unreify.check
index 8f8d9d944d..7a6d53c47e 100644
--- a/test/files/run/macro-reify-unreify.check
+++ b/test/files/run/macro-reify-unreify.check
@@ -1 +1 @@
-hello world = Expr[java.lang.String("hello world")]("hello world")
+hello world = Expr[java.lang.String("hello world")]("hello world")
diff --git a/test/files/run/macro-reify-unreify/Macros_1.scala b/test/files/run/macro-reify-unreify/Macros_1.scala
index 620a929210..9f04c13014 100644
--- a/test/files/run/macro-reify-unreify/Macros_1.scala
+++ b/test/files/run/macro-reify-unreify/Macros_1.scala
@@ -6,9 +6,10 @@ object Macros {
object Impls {
def foo(c: Ctx)(s: c.Expr[String]) = {
import c.universe._
+ import treeBuild._
- val world = c.reifyTree(c.runtimeUniverse, EmptyTree, s.tree)
- val greeting = c.reifyTree(c.runtimeUniverse, EmptyTree, c.typeCheck(Apply(Select(Literal(Constant("hello ")), newTermName("$plus")), List(c.unreifyTree(world)))))
+ val world = c.reifyTree(mkRuntimeUniverseRef, EmptyTree, s.tree)
+ val greeting = c.reifyTree(mkRuntimeUniverseRef, EmptyTree, c.typeCheck(Apply(Select(Literal(Constant("hello ")), newTermName("$plus")), List(c.unreifyTree(world)))))
val typedGreeting = c.Expr[String](greeting)
c.universe.reify {
diff --git a/test/files/run/macro-repl-basic.check b/test/files/run/macro-repl-basic.check
index 4a50c60469..7deed4a878 100644
--- a/test/files/run/macro-repl-basic.check
+++ b/test/files/run/macro-repl-basic.check
@@ -1,54 +1,54 @@
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala>
-
-scala> import language.experimental.macros
-import language.experimental.macros
-
-scala> import scala.reflect.macros.{Context => Ctx}
-import scala.reflect.macros.{Context=>Ctx}
-
-scala>
-
-scala> object Impls {
- def foo(c: Ctx)(x: c.Expr[Int]) = {
- import c.universe._
- val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
- c.Expr[Int](body)
- }
-
- def bar(c: Ctx)(x: c.Expr[Int]) = {
- import c.universe._
- val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2))))
- c.Expr[Int](body)
- }
-
- def quux(c: Ctx)(x: c.Expr[Int]) = {
- import c.universe._
- val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3))))
- c.Expr[Int](body)
- }
-}
-defined module Impls
-
-scala> object Macros {
- object Shmacros {
- def foo(x: Int): Int = macro Impls.foo
- }
- def bar(x: Int): Int = macro Impls.bar
-}; class Macros {
- def quux(x: Int): Int = macro Impls.quux
-}
-defined module Macros
-defined class Macros
-
-scala>
-
-scala> import Macros.Shmacros._
-import Macros.Shmacros._
-
-scala> println(foo(2) + Macros.bar(2) * new Macros().quux(4))
-31
-
-scala>
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import language.experimental.macros
+import language.experimental.macros
+
+scala> import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.{Context=>Ctx}
+
+scala>
+
+scala> object Impls {
+ def foo(c: Ctx)(x: c.Expr[Int]) = {
+ import c.universe._
+ val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
+ c.Expr[Int](body)
+ }
+
+ def bar(c: Ctx)(x: c.Expr[Int]) = {
+ import c.universe._
+ val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2))))
+ c.Expr[Int](body)
+ }
+
+ def quux(c: Ctx)(x: c.Expr[Int]) = {
+ import c.universe._
+ val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3))))
+ c.Expr[Int](body)
+ }
+}
+defined module Impls
+
+scala> object Macros {
+ object Shmacros {
+ def foo(x: Int): Int = macro Impls.foo
+ }
+ def bar(x: Int): Int = macro Impls.bar
+}; class Macros {
+ def quux(x: Int): Int = macro Impls.quux
+}
+defined module Macros
+defined class Macros
+
+scala>
+
+scala> import Macros.Shmacros._
+import Macros.Shmacros._
+
+scala> println(foo(2) + Macros.bar(2) * new Macros().quux(4))
+31
+
+scala>
diff --git a/test/files/run/macro-repl-dontexpand.check b/test/files/run/macro-repl-dontexpand.check
index 3c378cdf09..628a9146c4 100644
--- a/test/files/run/macro-repl-dontexpand.check
+++ b/test/files/run/macro-repl-dontexpand.check
@@ -1,12 +1,12 @@
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala>
-
-scala> def bar(c: scala.reflect.macros.Context) = ???
-bar: (c: scala.reflect.macros.Context)Nothing
-
-scala> def foo = macro bar
-foo: Any
-
-scala>
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> def bar(c: scala.reflect.macros.Context) = ???
+bar: (c: scala.reflect.macros.Context)Nothing
+
+scala> def foo = macro bar
+foo: Any
+
+scala>
diff --git a/test/files/run/macro-settings.check b/test/files/run/macro-settings.check
index 33784d1c15..050d53cdbb 100644
--- a/test/files/run/macro-settings.check
+++ b/test/files/run/macro-settings.check
@@ -1 +1 @@
-List(hello=1)
+List(hello=1)
diff --git a/test/files/run/macro-sip19-revised.check b/test/files/run/macro-sip19-revised.check
index aa2fbd11d3..86c3d819b0 100644
--- a/test/files/run/macro-sip19-revised.check
+++ b/test/files/run/macro-sip19-revised.check
@@ -1,5 +1,5 @@
-hey, i've been called from SourceLocation1(null,Test_2.scala,11,251)
-hey, i've been called from SourceLocation1(SourceLocation1(null,Test_2.scala,11,251),Test_2.scala,8,222)
-hey, i've been called from SourceLocation1(SourceLocation1(SourceLocation1(null,Test_2.scala,11,251),Test_2.scala,8,222),Test_2.scala,8,222)
-hey, i've been called from SourceLocation1(SourceLocation1(SourceLocation1(SourceLocation1(null,Test_2.scala,11,251),Test_2.scala,8,222),Test_2.scala,8,222),Test_2.scala,6,180)
-2
+hey, i've been called from SourceLocation1(null,Test_2.scala,11,251)
+hey, i've been called from SourceLocation1(SourceLocation1(null,Test_2.scala,11,251),Test_2.scala,8,222)
+hey, i've been called from SourceLocation1(SourceLocation1(SourceLocation1(null,Test_2.scala,11,251),Test_2.scala,8,222),Test_2.scala,8,222)
+hey, i've been called from SourceLocation1(SourceLocation1(SourceLocation1(SourceLocation1(null,Test_2.scala,11,251),Test_2.scala,8,222),Test_2.scala,8,222),Test_2.scala,6,180)
+2
diff --git a/test/files/run/macro-sip19-revised/Impls_Macros_1.scala b/test/files/run/macro-sip19-revised/Impls_Macros_1.scala
index 0793696fd4..5f3f61ca3f 100644
--- a/test/files/run/macro-sip19-revised/Impls_Macros_1.scala
+++ b/test/files/run/macro-sip19-revised/Impls_Macros_1.scala
@@ -8,7 +8,7 @@ object Macros {
val outer = c.Expr[SourceLocation](if (!inscope.isEmpty) inscope else Literal(Constant(null)))
val Apply(fun, args) = c.enclosingImplicits(0)._2
- val fileName = fun.pos.fileInfo.getName
+ val fileName = fun.pos.source.file.file.getName
val line = fun.pos.line
val charOffset = fun.pos.point
c.universe.reify { SourceLocation1(outer.splice, c.literal(fileName).splice, c.literal(line).splice, c.literal(charOffset).splice) }
diff --git a/test/files/run/macro-sip19.check b/test/files/run/macro-sip19.check
index 6b317ccb47..07cfd8c1e1 100644
--- a/test/files/run/macro-sip19.check
+++ b/test/files/run/macro-sip19.check
@@ -1,5 +1,5 @@
-hey, i've been called from SourceLocation(Test_2.scala,15,366)
-hey, i've been called from SourceLocation(Test_2.scala,11,331)
-hey, i've been called from SourceLocation(Test_2.scala,11,331)
-hey, i've been called from SourceLocation(Test_2.scala,9,285)
-2
+hey, i've been called from SourceLocation(Test_2.scala,15,366)
+hey, i've been called from SourceLocation(Test_2.scala,11,331)
+hey, i've been called from SourceLocation(Test_2.scala,11,331)
+hey, i've been called from SourceLocation(Test_2.scala,9,285)
+2
diff --git a/test/files/run/macro-sip19/Impls_Macros_1.scala b/test/files/run/macro-sip19/Impls_Macros_1.scala
index f89e51f560..535ec2ccf0 100644
--- a/test/files/run/macro-sip19/Impls_Macros_1.scala
+++ b/test/files/run/macro-sip19/Impls_Macros_1.scala
@@ -4,7 +4,7 @@ object Macros {
def impl(c: Context) = {
import c.universe._
val Apply(fun, args) = c.enclosingImplicits(0)._2
- val fileName = fun.pos.fileInfo.getName
+ val fileName = fun.pos.source.file.file.getName
val line = fun.pos.line
val charOffset = fun.pos.point
c.universe.reify { SourceLocation(c.literal(fileName).splice, c.literal(line).splice, c.literal(charOffset).splice) }
diff --git a/test/files/run/macro-typecheck-implicitsdisabled.check b/test/files/run/macro-typecheck-implicitsdisabled.check
index aa6c8e1f07..6cf25076a7 100644
--- a/test/files/run/macro-typecheck-implicitsdisabled.check
+++ b/test/files/run/macro-typecheck-implicitsdisabled.check
@@ -1,2 +1,2 @@
-scala.this.Predef.any2ArrowAssoc[Int](1).->[Int](2)
-scala.reflect.internal.Types$TypeError: value -> is not a member of Int
+scala.this.Predef.any2ArrowAssoc[Int](1).->[Int](2)
+scala.reflect.internal.Types$TypeError: value -> is not a member of Int
diff --git a/test/files/run/macro-typecheck-macrosdisabled.check b/test/files/run/macro-typecheck-macrosdisabled.check
index c560b0e4b5..29a881f8b1 100644
--- a/test/files/run/macro-typecheck-macrosdisabled.check
+++ b/test/files/run/macro-typecheck-macrosdisabled.check
@@ -1,32 +1,32 @@
-{
- val $u: reflect.runtime.universe.type = scala.reflect.runtime.`package`.universe;
- val $m: $u.Mirror = scala.reflect.runtime.`package`.universe.runtimeMirror(this.getClass().getClassLoader());
- $u.Expr.apply[Int(2)]($m, {
- final class $treecreator1 extends TreeCreator {
- def <init>(): $treecreator1 = {
- $treecreator1.super.<init>();
- ()
- };
- def apply[U >: Nothing <: scala.reflect.base.Universe with Singleton]($m$untyped: scala.reflect.base.MirrorOf[U]): U#Tree = {
- val $u: U = $m$untyped.universe;
- val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
- $u.Literal.apply($u.Constant.apply(2))
- }
- };
- new $treecreator1()
- })($u.TypeTag.apply[Int(2)]($m, {
- final class $typecreator2 extends TypeCreator {
- def <init>(): $typecreator2 = {
- $typecreator2.super.<init>();
- ()
- };
- def apply[U >: Nothing <: scala.reflect.base.Universe with Singleton]($m$untyped: scala.reflect.base.MirrorOf[U]): U#Type = {
- val $u: U = $m$untyped.universe;
- val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
- $u.ConstantType.apply($u.Constant.apply(2))
- }
- };
- new $typecreator2()
- }))
-}
-ru.reify[Int](2)
+{
+ val $u: reflect.runtime.universe.type = scala.reflect.runtime.`package`.universe;
+ val $m: $u.Mirror = scala.reflect.runtime.`package`.universe.runtimeMirror(this.getClass().getClassLoader());
+ $u.Expr.apply[Int(2)]($m, {
+ final class $treecreator1 extends TreeCreator {
+ def <init>(): $treecreator1 = {
+ $treecreator1.super.<init>();
+ ()
+ };
+ def apply[U >: Nothing <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Tree = {
+ val $u: U = $m$untyped.universe;
+ val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
+ $u.Literal.apply($u.Constant.apply(2))
+ }
+ };
+ new $treecreator1()
+ })($u.TypeTag.apply[Int(2)]($m, {
+ final class $typecreator2 extends TypeCreator {
+ def <init>(): $typecreator2 = {
+ $typecreator2.super.<init>();
+ ()
+ };
+ def apply[U >: Nothing <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = {
+ val $u: U = $m$untyped.universe;
+ val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
+ $u.ConstantType.apply($u.Constant.apply(2))
+ }
+ };
+ new $typecreator2()
+ }))
+}
+ru.reify[Int](2)
diff --git a/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala b/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala
index b2f6f7d50e..f693ad78cc 100644
--- a/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala
+++ b/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala
@@ -18,7 +18,8 @@ object Macros {
val rupkg = c.mirror.staticModule("scala.reflect.runtime.package")
val rusym = build.selectTerm(rupkg, "universe")
val NullaryMethodType(rutpe) = rusym.typeSignature
- val ru = build.newFreeTerm("ru", rutpe, scala.reflect.runtime.universe)
+ val ru = build.newFreeTerm("ru", scala.reflect.runtime.universe)
+ build.setTypeSignature(ru, rutpe)
val tree2 = Apply(Select(Ident(ru), newTermName("reify")), List(Literal(Constant(2))))
val ttree2 = c.typeCheck(tree2, withMacrosDisabled = true)
diff --git a/test/files/run/macro-typecheck-macrosdisabled2.check b/test/files/run/macro-typecheck-macrosdisabled2.check
index 55e7913250..7bdd1d6a3a 100644
--- a/test/files/run/macro-typecheck-macrosdisabled2.check
+++ b/test/files/run/macro-typecheck-macrosdisabled2.check
@@ -1,32 +1,32 @@
-{
- val $u: reflect.runtime.universe.type = scala.reflect.runtime.`package`.universe;
- val $m: $u.Mirror = scala.reflect.runtime.`package`.universe.runtimeMirror(this.getClass().getClassLoader());
- $u.Expr.apply[Array[Int]]($m, {
- final class $treecreator1 extends TreeCreator {
- def <init>(): $treecreator1 = {
- $treecreator1.super.<init>();
- ()
- };
- def apply[U >: Nothing <: scala.reflect.base.Universe with Singleton]($m$untyped: scala.reflect.base.MirrorOf[U]): U#Tree = {
- val $u: U = $m$untyped.universe;
- val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
- $u.Apply.apply($u.Select.apply($u.Select.apply($u.build.Ident($m.staticPackage("scala")), $u.newTermName("Array")), $u.newTermName("apply")), scala.collection.immutable.List.apply[$u.Literal]($u.Literal.apply($u.Constant.apply(2))))
- }
- };
- new $treecreator1()
- })($u.TypeTag.apply[Array[Int]]($m, {
- final class $typecreator2 extends TypeCreator {
- def <init>(): $typecreator2 = {
- $typecreator2.super.<init>();
- ()
- };
- def apply[U >: Nothing <: scala.reflect.base.Universe with Singleton]($m$untyped: scala.reflect.base.MirrorOf[U]): U#Type = {
- val $u: U = $m$untyped.universe;
- val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
- $u.TypeRef.apply($u.ThisType.apply($m.staticPackage("scala").asModule.moduleClass), $m.staticClass("scala.Array"), scala.collection.immutable.List.apply[$u.Type]($m.staticClass("scala.Int").asType.toTypeConstructor))
- }
- };
- new $typecreator2()
- }))
-}
-ru.reify[Array[Int]](scala.Array.apply(2))
+{
+ val $u: reflect.runtime.universe.type = scala.reflect.runtime.`package`.universe;
+ val $m: $u.Mirror = scala.reflect.runtime.`package`.universe.runtimeMirror(this.getClass().getClassLoader());
+ $u.Expr.apply[Array[Int]]($m, {
+ final class $treecreator1 extends TreeCreator {
+ def <init>(): $treecreator1 = {
+ $treecreator1.super.<init>();
+ ()
+ };
+ def apply[U >: Nothing <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Tree = {
+ val $u: U = $m$untyped.universe;
+ val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
+ $u.Apply.apply($u.Select.apply($u.Select.apply($u.build.Ident($m.staticPackage("scala")), $u.newTermName("Array")), $u.newTermName("apply")), scala.collection.immutable.List.apply[$u.Literal]($u.Literal.apply($u.Constant.apply(2))))
+ }
+ };
+ new $treecreator1()
+ })($u.TypeTag.apply[Array[Int]]($m, {
+ final class $typecreator2 extends TypeCreator {
+ def <init>(): $typecreator2 = {
+ $typecreator2.super.<init>();
+ ()
+ };
+ def apply[U >: Nothing <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = {
+ val $u: U = $m$untyped.universe;
+ val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
+ $u.TypeRef.apply($u.ThisType.apply($m.staticPackage("scala").asModule.moduleClass), $m.staticClass("scala.Array"), scala.collection.immutable.List.apply[$u.Type]($m.staticClass("scala.Int").asType.toTypeConstructor))
+ }
+ };
+ new $typecreator2()
+ }))
+}
+ru.reify[Array[Int]](scala.Array.apply(2))
diff --git a/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala b/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala
index 948c047351..1dbf5a1a87 100644
--- a/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala
+++ b/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala
@@ -18,7 +18,8 @@ object Macros {
val rupkg = c.mirror.staticModule("scala.reflect.runtime.package")
val rusym = build.selectTerm(rupkg, "universe")
val NullaryMethodType(rutpe) = rusym.typeSignature
- val ru = build.newFreeTerm("ru", rutpe, scala.reflect.runtime.universe)
+ val ru = build.newFreeTerm("ru", scala.reflect.runtime.universe)
+ build.setTypeSignature(ru, rutpe)
val tree2 = Apply(Select(Ident(ru), newTermName("reify")), List(Apply(Select(Ident(newTermName("scala")), newTermName("Array")), List(Literal(Constant(2))))))
val ttree2 = c.typeCheck(tree2, withMacrosDisabled = true)
diff --git a/test/files/run/macro-undetparams-consfromsls.check b/test/files/run/macro-undetparams-consfromsls.check
index 4b83c55437..b10a90043e 100644
--- a/test/files/run/macro-undetparams-consfromsls.check
+++ b/test/files/run/macro-undetparams-consfromsls.check
@@ -1,5 +1,5 @@
-A = TypeTag[Int]
-B = TypeTag[Nothing]
-List(1)
-A = TypeTag[Any]
-List(abc, 1)
+A = TypeTag[Int]
+B = TypeTag[Nothing]
+List(1)
+A = TypeTag[Any]
+List(abc, 1)
diff --git a/test/files/run/macro-undetparams-consfromsls/Impls_Macros_1.scala b/test/files/run/macro-undetparams-consfromsls/Impls_Macros_1.scala
index 3d350a50fa..bcbd12817b 100644
--- a/test/files/run/macro-undetparams-consfromsls/Impls_Macros_1.scala
+++ b/test/files/run/macro-undetparams-consfromsls/Impls_Macros_1.scala
@@ -2,13 +2,13 @@ import scala.reflect.runtime.universe._
import scala.reflect.macros.Context
object Macros {
- def cons_impl[A: c.AbsTypeTag](c: Context)(x: c.Expr[A], xs: c.Expr[List[A]]): c.Expr[List[A]] = c.universe.reify {
- println("A = " + c.literal(implicitly[c.AbsTypeTag[A]].toString).splice)
+ def cons_impl[A: c.WeakTypeTag](c: Context)(x: c.Expr[A], xs: c.Expr[List[A]]): c.Expr[List[A]] = c.universe.reify {
+ println("A = " + c.literal(implicitly[c.WeakTypeTag[A]].toString).splice)
x.splice :: xs.splice
}
- def nil_impl[B: c.AbsTypeTag](c: Context): c.Expr[List[B]] = c.universe.reify {
- println("B = " + c.literal(implicitly[c.AbsTypeTag[B]].toString).splice)
+ def nil_impl[B: c.WeakTypeTag](c: Context): c.Expr[List[B]] = c.universe.reify {
+ println("B = " + c.literal(implicitly[c.WeakTypeTag[B]].toString).splice)
Nil
}
diff --git a/test/files/run/macro-undetparams-implicitval.check b/test/files/run/macro-undetparams-implicitval.check
index 30681fa40d..541b922b21 100644
--- a/test/files/run/macro-undetparams-implicitval.check
+++ b/test/files/run/macro-undetparams-implicitval.check
@@ -1 +1 @@
-TypeTag[Nothing]
+TypeTag[Nothing]
diff --git a/test/files/run/macro-undetparams-macroitself.check b/test/files/run/macro-undetparams-macroitself.check
index 80c6b826ba..fa6b335afb 100644
--- a/test/files/run/macro-undetparams-macroitself.check
+++ b/test/files/run/macro-undetparams-macroitself.check
@@ -1,2 +1,2 @@
-TypeTag[Int]
-TypeTag[String]
+TypeTag[Int]
+WeakTypeTag[String]
diff --git a/test/files/run/macro-undetparams-macroitself/Impls_Macros_1.scala b/test/files/run/macro-undetparams-macroitself/Impls_Macros_1.scala
index 081894cf52..0244273b6f 100644
--- a/test/files/run/macro-undetparams-macroitself/Impls_Macros_1.scala
+++ b/test/files/run/macro-undetparams-macroitself/Impls_Macros_1.scala
@@ -2,7 +2,7 @@ import scala.reflect.runtime.universe._
import scala.reflect.macros.Context
object Macros {
- def impl[T: c.AbsTypeTag](c: Context)(foo: c.Expr[T]): c.Expr[Unit] = c.universe.reify { println(c.literal(implicitly[c.AbsTypeTag[T]].toString).splice) }
+ def impl[T: c.WeakTypeTag](c: Context)(foo: c.Expr[T]): c.Expr[Unit] = c.universe.reify { println(c.literal(implicitly[c.WeakTypeTag[T]].toString).splice) }
def foo[T](foo: T) = macro impl[T]
} \ No newline at end of file
diff --git a/test/files/run/newTags.check b/test/files/run/newTags.check
index 2cbc265d7a..16be9b124e 100644
--- a/test/files/run/newTags.check
+++ b/test/files/run/newTags.check
@@ -1,5 +1,3 @@
-TypeRef(SingleType(SingleType(SingleType(NoPrefix,class <root>),module scala),module package),class List,List(TypeRef(ThisType(class scala),class Int,List())))
-List[Int]
-TypeRef(SingleType(ThisType(class scala),module Predef),class Map,List(TypeRef(SingleType(ThisType(class scala),module Predef),class String,List()), TypeRef(SingleType(ThisType(class scala),module Predef),class String,List())))
-Map[String,String]
-TypeTag[TypeRef(SingleType(ThisType(class scala),module Predef),class Map,List(TypeRef(SingleType(ThisType(class scala),module Predef),class String,List()), TypeRef(SingleType(ThisType(class scala),module Predef),class String,List())))]
+List[Int]
+Map[String,String]
+TypeTag[Map[String,String]]
diff --git a/test/files/run/newTags.scala b/test/files/run/newTags.scala
index a758599515..c5199d4e55 100644
--- a/test/files/run/newTags.scala
+++ b/test/files/run/newTags.scala
@@ -1,14 +1,11 @@
-import scala.reflect.base.{Universe => BaseUniverse}
-import scala.reflect.{basis => rb}
+import scala.reflect.api.{Universe => ApiUniverse}
import scala.reflect.runtime.{universe => ru}
object Test extends App {
- println(rb.typeOf[List[Int]])
println(ru.typeOf[List[Int]])
- def foo[T: rb.TypeTag] = {
- println(rb.typeOf[T])
+ def foo[T: ru.TypeTag] = {
println(ru.typeOf[T])
- println(implicitly[BaseUniverse#TypeTag[T]])
+ println(implicitly[ApiUniverse#TypeTag[T]])
}
foo[Map[String, String]]
} \ No newline at end of file
diff --git a/test/files/run/partialfun.check b/test/files/run/partialfun.check
new file mode 100644
index 0000000000..d4e9f494cd
--- /dev/null
+++ b/test/files/run/partialfun.check
@@ -0,0 +1,6 @@
+47
+147
+100
+0:isDefinedAt
+1:isDefinedAt
+2:apply
diff --git a/test/files/run/partialfun.scala b/test/files/run/partialfun.scala
new file mode 100644
index 0000000000..f3c53b94ae
--- /dev/null
+++ b/test/files/run/partialfun.scala
@@ -0,0 +1,86 @@
+import collection._
+import collection.generic._
+
+object Test {
+ def collectIDA[A, B, Repr, That](_this: TraversableLike[A, Repr])(pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+ val repr: Repr = _this.asInstanceOf[Repr]
+ val b = bf(repr)
+ _this foreach { x => if (pf isDefinedAt x) b += pf(x) }
+ b.result
+ }
+
+ def collectRW[A, B, Repr, That](_this: TraversableLike[A, Repr])(pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+ val repr: Repr = _this.asInstanceOf[Repr]
+ val b = bf(repr)
+ val f = pf runWith { b += _ }
+ _this foreach f
+ b.result
+ }
+
+ var cnt = 0
+
+ object Ex1 {
+ def unapply(x: Int) : Option[Int] = {
+ cnt += 1
+ if ((x % 3) == 0) Some(-x) else None
+ }
+ }
+
+ object Ex2 {
+ def unapply(x: Int) : Option[Int] = {
+ //cnt += 1
+ if ((x % 5) == 0) Some(x) else None
+ }
+ }
+
+ def resetCnt() = { val r = cnt; cnt = 0; r }
+
+ val pf: PartialFunction[Int,Int] = {
+ case Ex1(result) => result
+ case Ex2(result) => result
+ }
+
+ def collectTest() {
+ val xs = 1 to 100
+ resetCnt()
+
+ val ysIDA = collectIDA(xs)(pf)
+ val cntIDA = resetCnt()
+
+ val ysRW = collectRW(xs)(pf)
+ val cntRW = resetCnt()
+
+ val ys = xs collect pf
+
+ assert(ys == ysIDA)
+ assert(ys == ysRW)
+ assert(cntIDA == xs.length + ys.length)
+ assert(cntRW == xs.length)
+ println(ys.length)
+ println(cntIDA)
+ println(cntRW)
+ }
+
+ def orElseTest() {
+ val pf0 = new PartialFunction[Unit, Unit] {
+ def apply(u: Unit) { println("0:apply") }
+ def isDefinedAt(u: Unit) = { println("0:isDefinedAt"); false }
+ }
+ val pf1 = new PartialFunction[Unit, Unit] {
+ def apply(u: Unit) { println("1:apply") }
+ def isDefinedAt(u: Unit) = { println("1:isDefinedAt"); false }
+ }
+ val pf2 = new PartialFunction[Unit, Unit] {
+ def apply(u: Unit) { println("2:apply") }
+ def isDefinedAt(u: Unit) = { println("2:isDefinedAt"); true }
+ }
+
+ val chained = pf0 orElse pf1 orElse pf2
+ chained()
+ }
+
+ def main(args: Array[String]): Unit = {
+ collectTest()
+ orElseTest()
+ }
+}
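For context, the new test above exercises PartialFunction.runWith (added in 2.10), which avoids the separate isDefinedAt/apply evaluation that collectIDA pays for; a minimal sketch under illustrative names:

object RunWithSketch extends App {
  val squareEvens: PartialFunction[Int, Int] = { case n if n % 2 == 0 => n * n }
  val collected = scala.collection.mutable.ListBuffer.empty[Int]
  // runWith returns a total Int => Boolean that runs the action only where the
  // partial function is defined, evaluating each element exactly once.
  val step: Int => Boolean = squareEvens runWith { collected += _ }
  (1 to 5) foreach step
  println(collected) // ListBuffer(4, 16)
}
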
diff --git a/test/files/run/primitive-sigs-2-new.check b/test/files/run/primitive-sigs-2-new.check
index 6f79d73d38..59d864947c 100644
--- a/test/files/run/primitive-sigs-2-new.check
+++ b/test/files/run/primitive-sigs-2-new.check
@@ -1,7 +1,7 @@
-T<java.lang.Object>
-List(A, char, class java.lang.Object)
-a
-public <T> java.lang.Object Arr.arr4(java.lang.Object[],scala.reflect.ClassTag<T>)
-public float[] Arr.arr3(float[][])
-public scala.collection.immutable.List<java.lang.Character> Arr.arr2(java.lang.Character[])
-public scala.collection.immutable.List<java.lang.Object> Arr.arr1(int[])
+T<java.lang.Object>
+List(A, char, class java.lang.Object)
+a
+public <T> java.lang.Object Arr.arr4(java.lang.Object[],scala.reflect.ClassTag<T>)
+public float[] Arr.arr3(float[][])
+public scala.collection.immutable.List<java.lang.Character> Arr.arr2(java.lang.Character[])
+public scala.collection.immutable.List<java.lang.Object> Arr.arr1(int[])
diff --git a/test/files/run/primitive-sigs-2-old.check b/test/files/run/primitive-sigs-2-old.check
index 9132b4d8ae..feb0619525 100644
--- a/test/files/run/primitive-sigs-2-old.check
+++ b/test/files/run/primitive-sigs-2-old.check
@@ -1,7 +1,7 @@
-T<java.lang.Object>
-List(A, char, class java.lang.Object)
-a
-public <T> java.lang.Object Arr.arr4(java.lang.Object[],scala.reflect.Manifest<T>)
-public float[] Arr.arr3(float[][])
-public scala.collection.immutable.List<java.lang.Character> Arr.arr2(java.lang.Character[])
-public scala.collection.immutable.List<java.lang.Object> Arr.arr1(int[])
+T<java.lang.Object>
+List(A, char, class java.lang.Object)
+a
+public <T> java.lang.Object Arr.arr4(java.lang.Object[],scala.reflect.Manifest<T>)
+public float[] Arr.arr3(float[][])
+public scala.collection.immutable.List<java.lang.Character> Arr.arr2(java.lang.Character[])
+public scala.collection.immutable.List<java.lang.Object> Arr.arr1(int[])
diff --git a/test/files/run/pure-args-byname-noinline.check b/test/files/run/pure-args-byname-noinline.check
new file mode 100644
index 0000000000..a39c61eb64
--- /dev/null
+++ b/test/files/run/pure-args-byname-noinline.check
@@ -0,0 +1,12 @@
+2
+2
+2
+2
+List(1)
+List()
+
+1
+1
+1
+1
+1
diff --git a/test/files/run/pure-args-byname-noinline.scala b/test/files/run/pure-args-byname-noinline.scala
new file mode 100644
index 0000000000..5c5c8a7eb6
--- /dev/null
+++ b/test/files/run/pure-args-byname-noinline.scala
@@ -0,0 +1,33 @@
+object Test {
+ //Were affected by SI-6306
+ def f[A](a: =>A) = println(a.toString)
+ def f1[A <: AnyVal](a: =>A) = println(a.toString)
+ def f1a[A <: AnyVal](a: =>A) = println(a.##)
+ def f2[A <: AnyRef](a: =>A) = println(a.toString)
+ def f2a[A <: String](a: =>A) = println(a.toString)
+ //Works
+ def f3[A](a: =>Seq[A]) = println(a.toString)
+
+ def foo() = println(2)
+ def client(f: () => Unit) = {f(); f()}
+ def attempt2() {
+ val bar: () => Unit = foo _
+ //The code causing SI-6306 was supposed to optimize code like this:
+ client(() => bar ())
+ //to:
+ client(bar)
+ }
+ def main(args: Array[String]) {
+ attempt2()
+ f3(Seq(1))
+ f3(Seq())
+ f("")
+ f((1).toString)
+ f((1).##)
+ f1((1).##)
+ f2((1).toString)
+ f2a((1).toString)
+ }
+}
+
+// vim: set ts=8 sw=2 et:
diff --git a/test/files/run/reflection-allmirrors-tostring.check b/test/files/run/reflection-allmirrors-tostring.check
index b5fe6c33bb..2a3be29402 100644
--- a/test/files/run/reflection-allmirrors-tostring.check
+++ b/test/files/run/reflection-allmirrors-tostring.check
@@ -1,14 +1,14 @@
-class mirror for C (bound to null)
-module mirror for M (bound to null)
-instance mirror for an instance of C
-field mirror for C.f1 (bound to an instance of C)
-field mirror for C.f2 (bound to an instance of C)
-method mirror for C.m1: Int (bound to an instance of C)
-method mirror for C.m2(): Int (bound to an instance of C)
-method mirror for C.m3[T >: String <: Int]: T (bound to an instance of C)
-method mirror for C.m4[A, B <: A[Int]](x: A[B])(implicit y: Int): Nothing (bound to an instance of C)
-method mirror for C.m5(x: => Int, y: Int*): String (bound to an instance of C)
-class mirror for C.C (bound to an instance of C)
-module mirror for C.M (bound to an instance of C)
-constructor mirror for C.<init>(): C (bound to null)
-constructor mirror for C.C.<init>(): C.this.C (bound to an instance of C)
+class mirror for C (bound to null)
+module mirror for M (bound to null)
+instance mirror for an instance of C
+field mirror for C.f1 (bound to an instance of C)
+field mirror for C.f2 (bound to an instance of C)
+method mirror for C.m1: Int (bound to an instance of C)
+method mirror for C.m2(): Int (bound to an instance of C)
+method mirror for C.m3[T >: String <: Int]: T (bound to an instance of C)
+method mirror for C.m4[A, B <: A[Int]](x: A[B])(implicit y: Int): Nothing (bound to an instance of C)
+method mirror for C.m5(x: => Int, y: Int*): String (bound to an instance of C)
+class mirror for C.C (bound to an instance of C)
+module mirror for C.M (bound to an instance of C)
+constructor mirror for C.<init>(): C (bound to null)
+constructor mirror for C.C.<init>(): C.this.C (bound to an instance of C)
diff --git a/test/files/run/reflection-constructormirror-inner-badpath.check b/test/files/run/reflection-constructormirror-inner-badpath.check
index 2fb0610ad6..83852aa80b 100644
--- a/test/files/run/reflection-constructormirror-inner-badpath.check
+++ b/test/files/run/reflection-constructormirror-inner-badpath.check
@@ -1,2 +1,2 @@
-class R is an inner class, use reflectClass on an InstanceMirror to obtain its ClassMirror
-()
+class R is an inner class, use reflectClass on an InstanceMirror to obtain its ClassMirror
+()
diff --git a/test/files/run/reflection-constructormirror-inner-good.check b/test/files/run/reflection-constructormirror-inner-good.check
index 811ecb2e3f..d38fb33f97 100644
--- a/test/files/run/reflection-constructormirror-inner-good.check
+++ b/test/files/run/reflection-constructormirror-inner-good.check
@@ -1 +1 @@
-R(5,test)
+R(5,test)
diff --git a/test/files/run/reflection-constructormirror-nested-badpath.check b/test/files/run/reflection-constructormirror-nested-badpath.check
index acd21df9c0..4c65b8a58b 100644
--- a/test/files/run/reflection-constructormirror-nested-badpath.check
+++ b/test/files/run/reflection-constructormirror-nested-badpath.check
@@ -1,2 +1,2 @@
-class R is a static class, use reflectClass on a RuntimeMirror to obtain its ClassMirror
-()
+class R is a static class, use reflectClass on a RuntimeMirror to obtain its ClassMirror
+()
diff --git a/test/files/run/reflection-constructormirror-nested-good.check b/test/files/run/reflection-constructormirror-nested-good.check
index 811ecb2e3f..d38fb33f97 100644
--- a/test/files/run/reflection-constructormirror-nested-good.check
+++ b/test/files/run/reflection-constructormirror-nested-good.check
@@ -1 +1 @@
-R(5,test)
+R(5,test)
diff --git a/test/files/run/reflection-constructormirror-toplevel-badpath.check b/test/files/run/reflection-constructormirror-toplevel-badpath.check
index acd21df9c0..4c65b8a58b 100644
--- a/test/files/run/reflection-constructormirror-toplevel-badpath.check
+++ b/test/files/run/reflection-constructormirror-toplevel-badpath.check
@@ -1,2 +1,2 @@
-class R is a static class, use reflectClass on a RuntimeMirror to obtain its ClassMirror
-()
+class R is a static class, use reflectClass on a RuntimeMirror to obtain its ClassMirror
+()
diff --git a/test/files/run/reflection-constructormirror-toplevel-good.check b/test/files/run/reflection-constructormirror-toplevel-good.check
index 811ecb2e3f..d38fb33f97 100644
--- a/test/files/run/reflection-constructormirror-toplevel-good.check
+++ b/test/files/run/reflection-constructormirror-toplevel-good.check
@@ -1 +1 @@
-R(5,test)
+R(5,test)
diff --git a/test/files/run/reflection-enclosed-basic.check b/test/files/run/reflection-enclosed-basic.check
index 41f6a72f1c..6210b42ea5 100644
--- a/test/files/run/reflection-enclosed-basic.check
+++ b/test/files/run/reflection-enclosed-basic.check
@@ -1,18 +1,18 @@
-class B1
-B1
-1
-class B2
-B2
-2
-object B3
-B3
-3
-object B4
-B4
-4
-object B5
-B5
-5
-object B6
-B6
-6
+class B1
+B1
+1
+class B2
+B2
+2
+object B3
+B3
+3
+object B4
+B4
+4
+object B5
+B5
+5
+object B6
+B6
+6
diff --git a/test/files/run/reflection-enclosed-inner-basic.check b/test/files/run/reflection-enclosed-inner-basic.check
index 984fb1ff12..2496ee407b 100644
--- a/test/files/run/reflection-enclosed-inner-basic.check
+++ b/test/files/run/reflection-enclosed-inner-basic.check
@@ -1,20 +1,20 @@
-class B
-List(constructor B, class B1, class B2, object B3, object B4, object B5, object B6)
-class B1
-B1
-1
-class B2
-B2
-2
-object B3
-B3
-3
-object B4
-B4
-4
-object B5
-B5
-5
-object B6
-B6
-6
+class B
+List(constructor B, class B1, class B2, object B3, object B4, object B5, object B6)
+class B1
+B1
+1
+class B2
+B2
+2
+object B3
+B3
+3
+object B4
+B4
+4
+object B5
+B5
+5
+object B6
+B6
+6
diff --git a/test/files/run/reflection-enclosed-inner-inner-basic.check b/test/files/run/reflection-enclosed-inner-inner-basic.check
index 8987f31b18..add7a81c0a 100644
--- a/test/files/run/reflection-enclosed-inner-inner-basic.check
+++ b/test/files/run/reflection-enclosed-inner-inner-basic.check
@@ -1,20 +1,20 @@
-class BB
-List(constructor BB, class B1, class B2, object B3, object B4, object B5, object B6)
-class B1
-B1
-1
-class B2
-B2
-2
-object B3
-B3
-3
-object B4
-B4
-4
-object B5
-B5
-5
-object B6
-B6
-6
+class BB
+List(constructor BB, class B1, class B2, object B3, object B4, object B5, object B6)
+class B1
+B1
+1
+class B2
+B2
+2
+object B3
+B3
+3
+object B4
+B4
+4
+object B5
+B5
+5
+object B6
+B6
+6
diff --git a/test/files/run/reflection-equality.check b/test/files/run/reflection-equality.check
index be531fbbd3..17c1f6dd70 100644
--- a/test/files/run/reflection-equality.check
+++ b/test/files/run/reflection-equality.check
@@ -1,53 +1,53 @@
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala>
-
-scala> class X {
- def methodIntIntInt(x: Int, y: Int) = x+y
-}
-defined class X
-
-scala>
-
-scala> import scala.reflect.runtime.universe._
-import scala.reflect.runtime.universe._
-
-scala> import scala.reflect.runtime.{ currentMirror => cm }
-import scala.reflect.runtime.{currentMirror=>cm}
-
-scala> def im: InstanceMirror = cm.reflect(new X)
-im: reflect.runtime.universe.InstanceMirror
-
-scala> val cs: ClassSymbol = im.symbol
-cs: reflect.runtime.universe.ClassSymbol = class X
-
-scala> val ts: Type = cs.typeSignature
-ts: reflect.runtime.universe.Type =
-java.lang.Object {
- def <init>: <?>
- def methodIntIntInt: <?>
-}
-
-scala> val ms: MethodSymbol = ts.declaration(newTermName("methodIntIntInt")).asMethod
-ms: reflect.runtime.universe.MethodSymbol = method methodIntIntInt
-
-scala> val MethodType( _, t1 ) = ms.typeSignature
-t1: reflect.runtime.universe.Type = scala.Int
-
-scala> val t2 = typeOf[scala.Int]
-t2: reflect.runtime.universe.Type = Int
-
-scala> t1 == t2
-res0: Boolean = false
-
-scala> t1 =:= t2
-res1: Boolean = true
-
-scala> t1 <:< t2
-res2: Boolean = true
-
-scala> t2 <:< t1
-res3: Boolean = true
-
-scala>
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> class X {
+ def methodIntIntInt(x: Int, y: Int) = x+y
+}
+defined class X
+
+scala>
+
+scala> import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe._
+
+scala> import scala.reflect.runtime.{ currentMirror => cm }
+import scala.reflect.runtime.{currentMirror=>cm}
+
+scala> def im: InstanceMirror = cm.reflect(new X)
+im: reflect.runtime.universe.InstanceMirror
+
+scala> val cs: ClassSymbol = im.symbol
+cs: reflect.runtime.universe.ClassSymbol = class X
+
+scala> val ts: Type = cs.typeSignature
+ts: reflect.runtime.universe.Type =
+java.lang.Object {
+ def <init>(): X
+ def methodIntIntInt(x: scala.Int,y: scala.Int): scala.Int
+}
+
+scala> val ms: MethodSymbol = ts.declaration(newTermName("methodIntIntInt")).asMethod
+ms: reflect.runtime.universe.MethodSymbol = method methodIntIntInt
+
+scala> val MethodType( _, t1 ) = ms.typeSignature
+t1: reflect.runtime.universe.Type = scala.Int
+
+scala> val t2 = typeOf[scala.Int]
+t2: reflect.runtime.universe.Type = Int
+
+scala> t1 == t2
+res0: Boolean = false
+
+scala> t1 =:= t2
+res1: Boolean = true
+
+scala> t1 <:< t2
+res2: Boolean = true
+
+scala> t2 <:< t1
+res3: Boolean = true
+
+scala>
diff --git a/test/files/run/reflection-fieldmirror-accessorsareokay.check b/test/files/run/reflection-fieldmirror-accessorsareokay.check
index 635dcd04ce..e6936c8acc 100644
--- a/test/files/run/reflection-fieldmirror-accessorsareokay.check
+++ b/test/files/run/reflection-fieldmirror-accessorsareokay.check
@@ -1,6 +1,6 @@
-true
-42
-2
-true
-2
-2
+true
+42
+2
+true
+2
+2
diff --git a/test/files/run/reflection-fieldmirror-accessorsareokay.scala b/test/files/run/reflection-fieldmirror-accessorsareokay.scala
index 9590cbe811..16354025f3 100644
--- a/test/files/run/reflection-fieldmirror-accessorsareokay.scala
+++ b/test/files/run/reflection-fieldmirror-accessorsareokay.scala
@@ -14,7 +14,7 @@ object Test extends App {
def test(f: Symbol) = {
try {
val fm: FieldMirror = im.reflectField(f.asTerm)
- println(fm.symbol.isVariable)
+ println(fm.symbol.isVar)
println(fm.get)
fm.set(2)
println(fm.get)
diff --git a/test/files/run/reflection-fieldmirror-ctorparam.check b/test/files/run/reflection-fieldmirror-ctorparam.check
index 31f6491b14..e391e7ccfe 100644
--- a/test/files/run/reflection-fieldmirror-ctorparam.check
+++ b/test/files/run/reflection-fieldmirror-ctorparam.check
@@ -1,3 +1,3 @@
class scala.ScalaReflectionException: Scala field x isn't represented as a Java field, neither it has a Java accessor method
note that private parameters of class constructors don't get mapped onto fields and/or accessors,
-unless they are used outside of their declaring constructors.
+unless they are used outside of their declaring constructors.
diff --git a/test/files/run/reflection-fieldmirror-getsetval.check b/test/files/run/reflection-fieldmirror-getsetval.check
index e1927f68d0..82fef37c25 100644
--- a/test/files/run/reflection-fieldmirror-getsetval.check
+++ b/test/files/run/reflection-fieldmirror-getsetval.check
@@ -1,2 +1,2 @@
-42
-cannot set an immutable field x
+42
+cannot set an immutable field x
diff --git a/test/files/run/reflection-fieldmirror-getsetvar.check b/test/files/run/reflection-fieldmirror-getsetvar.check
index a9b0ccc6a0..1e959a9900 100644
--- a/test/files/run/reflection-fieldmirror-getsetvar.check
+++ b/test/files/run/reflection-fieldmirror-getsetvar.check
@@ -1,2 +1,2 @@
-42
-2
+42
+2
diff --git a/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.check b/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.check
index fe75c80bca..27ba77ddaf 100644
--- a/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.check
+++ b/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.check
@@ -1 +1 @@
-true
+true
diff --git a/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.scala b/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.scala
index 5cfe583ed5..2b4a9bb55e 100644
--- a/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.scala
+++ b/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.scala
@@ -12,5 +12,5 @@ object Test extends App {
val cs = im.symbol
val f = cs.typeSignature.declaration(newTermName("x" + nme.LOCAL_SUFFIX_STRING)).asTerm
val fm: FieldMirror = im.reflectField(f)
- println(fm.symbol.isVariable)
+ println(fm.symbol.isVar)
}
diff --git a/test/files/run/reflection-fieldmirror-privatethis.check b/test/files/run/reflection-fieldmirror-privatethis.check
index 889b7c2c34..16010115e1 100644
--- a/test/files/run/reflection-fieldmirror-privatethis.check
+++ b/test/files/run/reflection-fieldmirror-privatethis.check
@@ -1,3 +1,3 @@
-true
-42
-2
+true
+42
+2
diff --git a/test/files/run/reflection-fieldmirror-privatethis.scala b/test/files/run/reflection-fieldmirror-privatethis.scala
index 7aa179958d..ab838dbb1b 100644
--- a/test/files/run/reflection-fieldmirror-privatethis.scala
+++ b/test/files/run/reflection-fieldmirror-privatethis.scala
@@ -12,7 +12,7 @@ object Test extends App {
val cs = im.symbol
val f = cs.typeSignature.declaration(newTermName("x")).asTerm
val fm: FieldMirror = im.reflectField(f)
- println(fm.symbol.isVariable)
+ println(fm.symbol.isVar)
println(fm.get)
fm.set(2)
println(fm.get)
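
A minimal sketch of the FieldMirror round trip this family of tests exercises, modelled on the private[this] variant above; the initial value 42 and the expected output (true, 42, 2) come from the check files, while the class and object names are illustrative:

    import scala.reflect.runtime.universe._
    import scala.reflect.runtime.{currentMirror => cm}

    class A { private[this] var x: Int = 42 }

    object FieldMirrorSketch extends App {
      val im = cm.reflect(new A)
      val f  = im.symbol.typeSignature.declaration(newTermName("x")).asTerm
      val fm: FieldMirror = im.reflectField(f)
      println(fm.symbol.isVar) // true (isVar is the new spelling of isVariable)
      println(fm.get)          // 42
      fm.set(2)
      println(fm.get)          // 2
    }
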
diff --git a/test/files/run/reflection-fieldsymbol-navigation.check b/test/files/run/reflection-fieldsymbol-navigation.check
index 79f0928ea5..ae0597a045 100644
--- a/test/files/run/reflection-fieldsymbol-navigation.check
+++ b/test/files/run/reflection-fieldsymbol-navigation.check
@@ -1,6 +1,6 @@
-method x
-false
-variable x
-true
-method x
-method x_=
+method x
+false
+variable x
+true
+method x
+method x_=
diff --git a/test/files/run/reflection-fieldsymbol-navigation.scala b/test/files/run/reflection-fieldsymbol-navigation.scala
index da4612a564..4448724988 100644
--- a/test/files/run/reflection-fieldsymbol-navigation.scala
+++ b/test/files/run/reflection-fieldsymbol-navigation.scala
@@ -7,9 +7,9 @@ class C {
object Test extends App {
val x = typeOf[C].member(newTermName("x")).asTerm
println(x)
- println(x.isVariable)
+ println(x.isVar)
println(x.accessed)
- println(x.accessed.asTerm.isVariable)
+ println(x.accessed.asTerm.isVar)
println(x.getter)
println(x.setter)
}
\ No newline at end of file
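
A minimal sketch of the symbol navigation checked above, assuming C declares a plain var x (only part of the test file appears in this hunk); the comments mirror the expected output in the check file:

    import scala.reflect.runtime.universe._

    class C { var x = 2 } // assumed shape of the test's C

    object NavigationSketch extends App {
      val getter = typeOf[C].member(newTermName("x")).asTerm
      println(getter)                       // method x
      println(getter.isVar)                 // false: the accessor itself is not a var
      println(getter.accessed)              // variable x
      println(getter.accessed.asTerm.isVar) // true: the underlying field is
      println(getter.getter)                // method x
      println(getter.setter)                // method x_=
    }
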
diff --git a/test/files/run/reflection-implicit.check b/test/files/run/reflection-implicit.check
new file mode 100644
index 0000000000..5a88a46f0b
--- /dev/null
+++ b/test/files/run/reflection-implicit.check
@@ -0,0 +1,2 @@
+List(true, true, true, true)
+true
diff --git a/test/files/run/reflection-implicit.scala b/test/files/run/reflection-implicit.scala
new file mode 100644
index 0000000000..637ef24e14
--- /dev/null
+++ b/test/files/run/reflection-implicit.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+
+class C {
+ implicit val v = new C
+ implicit def d(x: C)(implicit c: C): Int = ???
+ implicit class X(val x: Int)
+}
+
+object Test extends App {
+ val decls = typeOf[C].typeSymbol.typeSignature.declarations.sorted.toList.filter(sym => !sym.isTerm || (sym.isMethod && !sym.asMethod.isConstructor))
+ println(decls map (_.isImplicit))
+ val param = decls.find(_.name.toString == "d").get.asMethod.params.last.head
+ param.typeSignature
+ println(param.isImplicit)
+}
\ No newline at end of file
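
For the parameter case, a reduced sketch of what the new test checks: a parameter in an implicit section reports isImplicit, and the test touches typeSignature before reading the flag (presumably to force lazy completion). Class and object names here are illustrative:

    import scala.reflect.runtime.universe._

    class C {
      implicit def d(x: C)(implicit c: C): Int = ???
    }

    object ImplicitSketch extends App {
      val d = typeOf[C].declaration(newTermName("d")).asMethod
      println(d.isImplicit)      // true
      val c = d.params.last.head // the parameter in the implicit list
      c.typeSignature            // queried first, as in the test
      println(c.isImplicit)      // true
    }
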
diff --git a/test/files/run/reflection-java-annotations.check b/test/files/run/reflection-java-annotations.check
new file mode 100644
index 0000000000..53c53cfbcc
--- /dev/null
+++ b/test/files/run/reflection-java-annotations.check
@@ -0,0 +1 @@
+List(JavaComplexAnnotation(v1 = 1, v10 = "hello", v101 = [101, 101], v102 = [102, 102], v103 = ['g', 'g'], v104 = [104, 104], v105 = [105L, 105L], v106 = [106.0, 106.0], v107 = [107.0, 107.0], v108 = [false, true], v11 = classOf[JavaAnnottee], v110 = ["hello", "world"], v111 = [classOf[JavaSimpleAnnotation], classOf[JavaComplexAnnotation]], v112 = [FOO, BAR], v113 = [JavaSimpleAnnotation(v1 = 21, v10 = "world2", v11 = classOf[JavaComplexAnnotation], v12 = BAR, v2 = 22, v3 = '\027', v4 = 24, v5 = 25L, v6 = 26.0, v7 = 27.0, v8 = false)], v12 = FOO, v13 = JavaSimpleAnnotation(v1 = 11, v10 = "world1", v11 = classOf[JavaSimpleAnnotation], v12 = FOO, v2 = 12, v3 = '\r', v4 = 14, v5 = 15L, v6 = 16.0, v7 = 17.0, v8 = false), v2 = 2, v3 = '\03', v4 = 4, v5 = 5L, v6 = 6.0, v7 = 7.0, v8 = false))
diff --git a/test/files/run/reflection-java-annotations.scala b/test/files/run/reflection-java-annotations.scala
new file mode 100644
index 0000000000..2e3fed48ce
--- /dev/null
+++ b/test/files/run/reflection-java-annotations.scala
@@ -0,0 +1,7 @@
+object Test extends App {
+ import scala.reflect.runtime.universe._
+ val sym = typeOf[JavaAnnottee].typeSymbol
+ sym.typeSignature
+ sym.annotations foreach (_.javaArgs)
+ println(sym.annotations)
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-java-crtp.check b/test/files/run/reflection-java-crtp.check
new file mode 100644
index 0000000000..3e5a77e93a
--- /dev/null
+++ b/test/files/run/reflection-java-crtp.check
@@ -0,0 +1 @@
+(type E,type E,true)
diff --git a/test/files/run/reflection-java-crtp.scala b/test/files/run/reflection-java-crtp.scala
new file mode 100644
index 0000000000..260d3540dc
--- /dev/null
+++ b/test/files/run/reflection-java-crtp.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+ import scala.reflect.runtime.universe._
+ val enum = typeOf[JavaSimpleEnumeration].baseClasses(1).asClass
+ // make sure that the E's in Enum<E extends Enum<E>> are represented by the same symbol
+ val e1 = enum.typeParams(0).asType
+ val TypeBounds(_, TypeRef(_, _, List(TypeRef(_, e2: TypeSymbol, _)))) = e1.typeSignature
+ println(e1, e2, e1 eq e2)
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-magicsymbols-invoke.check b/test/files/run/reflection-magicsymbols-invoke.check
index 674716adfe..f5258efeb7 100644
--- a/test/files/run/reflection-magicsymbols-invoke.check
+++ b/test/files/run/reflection-magicsymbols-invoke.check
@@ -1,124 +1,124 @@
============
-Any
-it's important to print the list of Any's members
-if some of them change (possibly, adding and/or removing magic symbols), we must update this test
-method !=: (x$1: Any)Boolean
-method ##: ()Int
-method ==: (x$1: Any)Boolean
-method asInstanceOf: [T0]=> T0
-method equals: (x$1: Any)Boolean
-method getClass: ()java.lang.Class[_]
-method hashCode: ()Int
-method isInstanceOf: [T0]=> Boolean
-method toString: ()java.lang.String
-testing Any.!=: false
-testing Any.##: 50
-testing Any.==: true
-testing Any.asInstanceOf: class scala.ScalaReflectionException: Any.asInstanceOf requires a type argument, it cannot be invoked with mirrors
-testing Any.asInstanceOf: class scala.ScalaReflectionException: scala.Any.asInstanceOf[T0]: T0 takes 0 arguments
-testing Any.equals: true
-testing Any.getClass: class java.lang.String
-testing Any.hashCode: 50
-testing Any.isInstanceOf: class scala.ScalaReflectionException: Any.isInstanceOf requires a type argument, it cannot be invoked with mirrors
-testing Any.isInstanceOf: class scala.ScalaReflectionException: scala.Any.isInstanceOf[T0]: Boolean takes 0 arguments
-testing Any.toString: 2
+Any
+it's important to print the list of Any's members
+if some of them change (possibly, adding and/or removing magic symbols), we must update this test
+method !=: (x$1: Any)Boolean
+method ##: ()Int
+method ==: (x$1: Any)Boolean
+method asInstanceOf: [T0]=> T0
+method equals: (x$1: Any)Boolean
+method getClass: ()java.lang.Class[_]
+method hashCode: ()Int
+method isInstanceOf: [T0]=> Boolean
+method toString: ()java.lang.String
+testing Any.!=: false
+testing Any.##: 50
+testing Any.==: true
+testing Any.asInstanceOf: class scala.ScalaReflectionException: Any.asInstanceOf requires a type argument, it cannot be invoked with mirrors
+testing Any.asInstanceOf: class scala.ScalaReflectionException: scala.Any.asInstanceOf[T0]: T0 takes 0 arguments
+testing Any.equals: true
+testing Any.getClass: class java.lang.String
+testing Any.hashCode: 50
+testing Any.isInstanceOf: class scala.ScalaReflectionException: Any.isInstanceOf requires a type argument, it cannot be invoked with mirrors
+testing Any.isInstanceOf: class scala.ScalaReflectionException: scala.Any.isInstanceOf[T0]: Boolean takes 0 arguments
+testing Any.toString: 2
============
-AnyVal
-it's important to print the list of AnyVal's members
-if some of them change (possibly, adding and/or removing magic symbols), we must update this test
-constructor AnyVal: ()AnyVal
-method getClass: ()Class[_ <: AnyVal]
-testing AnyVal.<init>: class java.lang.InstantiationException: null
-testing AnyVal.getClass: class scala.ScalaReflectionException: expected a member of class Integer, you provided method scala.AnyVal.getClass
+AnyVal
+it's important to print the list of AnyVal's members
+if some of them change (possibly, adding and/or removing magic symbols), we must update this test
+constructor AnyVal: ()AnyVal
+method getClass: ()Class[_ <: AnyVal]
+testing AnyVal.<init>: class java.lang.InstantiationException: null
+testing AnyVal.getClass: class scala.ScalaReflectionException: expected a member of class Integer, you provided method scala.AnyVal.getClass
============
-AnyRef
-it's important to print the list of AnyRef's members
-if some of them change (possibly, adding and/or removing magic symbols), we must update this test
-constructor Object: ()java.lang.Object
-method !=: (x$1: Any)Boolean
-method !=: (x$1: AnyRef)Boolean
-method ##: ()Int
-method $asInstanceOf: [T0]()T0
-method $isInstanceOf: [T0]()Boolean
-method ==: (x$1: Any)Boolean
-method ==: (x$1: AnyRef)Boolean
-method asInstanceOf: [T0]=> T0
-method clone: ()java.lang.Object
-method eq: (x$1: AnyRef)Boolean
-method equals: (x$1: Any)Boolean
-method finalize: ()Unit
-method getClass: ()java.lang.Class[_]
-method hashCode: ()Int
-method isInstanceOf: [T0]=> Boolean
-method ne: (x$1: AnyRef)Boolean
-method notify: ()Unit
-method notifyAll: ()Unit
-method synchronized: [T0](x$1: T0)T0
-method toString: ()java.lang.String
-method wait: ()Unit
-method wait: (x$1: Long)Unit
-method wait: (x$1: Long, x$2: Int)Unit
-testing Object.!=: false
-testing Object.##: 50
-testing Object.$asInstanceOf: class scala.ScalaReflectionException: AnyRef.$asInstanceOf is an internal method, it cannot be invoked with mirrors
-testing Object.$asInstanceOf: class scala.ScalaReflectionException: java.lang.Object.$asInstanceOf[T0](): T0 takes 0 arguments
-testing Object.$isInstanceOf: class scala.ScalaReflectionException: AnyRef.$isInstanceOf is an internal method, it cannot be invoked with mirrors
-testing Object.$isInstanceOf: class scala.ScalaReflectionException: java.lang.Object.$isInstanceOf[T0](): Boolean takes 0 arguments
-testing Object.==: true
-testing Object.clone: class java.lang.CloneNotSupportedException: java.lang.String
-testing Object.eq: true
-testing Object.equals: true
-testing Object.finalize: ()
-testing Object.getClass: class java.lang.String
-testing Object.hashCode: 50
-testing Object.ne: false
-testing Object.notify: class java.lang.IllegalMonitorStateException: null
-testing Object.notifyAll: class java.lang.IllegalMonitorStateException: null
-testing Object.synchronized: 2
-testing Object.toString: 2
-TODO: also test AnyRef.wait overloads
+AnyRef
+it's important to print the list of AnyRef's members
+if some of them change (possibly, adding and/or removing magic symbols), we must update this test
+constructor Object: ()java.lang.Object
+method !=: (x$1: Any)Boolean
+method !=: (x$1: AnyRef)Boolean
+method ##: ()Int
+method $asInstanceOf: [T0]()T0
+method $isInstanceOf: [T0]()Boolean
+method ==: (x$1: Any)Boolean
+method ==: (x$1: AnyRef)Boolean
+method asInstanceOf: [T0]=> T0
+method clone: ()java.lang.Object
+method eq: (x$1: AnyRef)Boolean
+method equals: (x$1: Any)Boolean
+method finalize: ()Unit
+method getClass: ()java.lang.Class[_]
+method hashCode: ()Int
+method isInstanceOf: [T0]=> Boolean
+method ne: (x$1: AnyRef)Boolean
+method notify: ()Unit
+method notifyAll: ()Unit
+method synchronized: [T0](x$1: T0)T0
+method toString: ()java.lang.String
+method wait: ()Unit
+method wait: (x$1: Long)Unit
+method wait: (x$1: Long, x$2: Int)Unit
+testing Object.!=: false
+testing Object.##: 50
+testing Object.$asInstanceOf: class scala.ScalaReflectionException: AnyRef.$asInstanceOf is an internal method, it cannot be invoked with mirrors
+testing Object.$asInstanceOf: class scala.ScalaReflectionException: java.lang.Object.$asInstanceOf[T0](): T0 takes 0 arguments
+testing Object.$isInstanceOf: class scala.ScalaReflectionException: AnyRef.$isInstanceOf is an internal method, it cannot be invoked with mirrors
+testing Object.$isInstanceOf: class scala.ScalaReflectionException: java.lang.Object.$isInstanceOf[T0](): Boolean takes 0 arguments
+testing Object.==: true
+testing Object.clone: class java.lang.CloneNotSupportedException: java.lang.String
+testing Object.eq: true
+testing Object.equals: true
+testing Object.finalize: ()
+testing Object.getClass: class java.lang.String
+testing Object.hashCode: 50
+testing Object.ne: false
+testing Object.notify: class java.lang.IllegalMonitorStateException: null
+testing Object.notifyAll: class java.lang.IllegalMonitorStateException: null
+testing Object.synchronized: 2
+testing Object.toString: 2
+TODO: also test AnyRef.wait overloads
============
-Array
-it's important to print the list of Array's members
-if some of them change (possibly, adding and/or removing magic symbols), we must update this test
-constructor Array: (_length: Int)Array[T]
-constructor Object: ()java.lang.Object
-method !=: (x$1: Any)Boolean
-method !=: (x$1: AnyRef)Boolean
-method ##: ()Int
-method $asInstanceOf: [T0]()T0
-method $isInstanceOf: [T0]()Boolean
-method ==: (x$1: Any)Boolean
-method ==: (x$1: AnyRef)Boolean
-method apply: (i: <?>)T
-method asInstanceOf: [T0]=> T0
-method clone: ()Array[T]
-method eq: (x$1: AnyRef)Boolean
-method equals: (x$1: Any)Boolean
-method finalize: ()Unit
-method getClass: ()java.lang.Class[_]
-method hashCode: ()Int
-method isInstanceOf: [T0]=> Boolean
-method length: => Int
-method ne: (x$1: AnyRef)Boolean
-method notify: ()Unit
-method notifyAll: ()Unit
-method synchronized: [T0](x$1: T0)T0
-method toString: ()java.lang.String
-method update: (i: <?>, x: <?>)Unit
-method wait: ()Unit
-method wait: (x$1: Long)Unit
-method wait: (x$1: Long, x$2: Int)Unit
-value _length: Int
-testing Array.length: 2
-testing Array.apply: 1
-testing Array.update: ()
-testing Array.clone: List(1, 2)
+Array
+it's important to print the list of Array's members
+if some of them change (possibly, adding and/or removing magic symbols), we must update this test
+constructor Array: (_length: Int)Array[T]
+constructor Object: ()java.lang.Object
+method !=: (x$1: Any)Boolean
+method !=: (x$1: AnyRef)Boolean
+method ##: ()Int
+method $asInstanceOf: [T0]()T0
+method $isInstanceOf: [T0]()Boolean
+method ==: (x$1: Any)Boolean
+method ==: (x$1: AnyRef)Boolean
+method apply: (i: Int)T
+method asInstanceOf: [T0]=> T0
+method clone: ()Array[T]
+method eq: (x$1: AnyRef)Boolean
+method equals: (x$1: Any)Boolean
+method finalize: ()Unit
+method getClass: ()java.lang.Class[_]
+method hashCode: ()Int
+method isInstanceOf: [T0]=> Boolean
+method length: => Int
+method ne: (x$1: AnyRef)Boolean
+method notify: ()Unit
+method notifyAll: ()Unit
+method synchronized: [T0](x$1: T0)T0
+method toString: ()java.lang.String
+method update: (i: Int, x: T)Unit
+method wait: ()Unit
+method wait: (x$1: Long)Unit
+method wait: (x$1: Long, x$2: Int)Unit
+value _length: Int
+testing Array.length: 2
+testing Array.apply: 1
+testing Array.update: ()
+testing Array.clone: List(1, 2)
============
-Other
-testing String.+: 23
+Other
+testing String.+: 23
============
-CTM
-testing Predef.classOf: class scala.ScalaReflectionException: Predef.classOf is a compile-time function, it cannot be invoked with mirrors
-testing Predef.classOf: class scala.ScalaReflectionException: scala.Predef.classOf[T]: Class[T] takes 0 arguments
-testing Universe.reify: class scala.ScalaReflectionException: scala.reflect.base.Universe.reify is a macro, i.e. a compile-time function, it cannot be invoked with mirrors
+CTM
+testing Predef.classOf: class scala.ScalaReflectionException: Predef.classOf is a compile-time function, it cannot be invoked with mirrors
+testing Predef.classOf: class scala.ScalaReflectionException: scala.Predef.classOf[T]: Class[T] takes 0 arguments
+testing Universe.reify: class scala.ScalaReflectionException: scala.reflect.api.Universe.reify is a macro, i.e. a compile-time function, it cannot be invoked with mirrors
diff --git a/test/files/run/reflection-magicsymbols-invoke.scala b/test/files/run/reflection-magicsymbols-invoke.scala
index 61ecc6458d..b38d1be7b2 100644
--- a/test/files/run/reflection-magicsymbols-invoke.scala
+++ b/test/files/run/reflection-magicsymbols-invoke.scala
@@ -90,5 +90,5 @@ object Test extends App {
println("============\nCTM")
test(PredefModule.moduleClass.typeSignature, Predef, "classOf")
test(PredefModule.moduleClass.typeSignature, Predef, "classOf", typeOf[String])
- test(typeOf[scala.reflect.base.Universe], scala.reflect.runtime.universe, "reify", "2")
+ test(typeOf[scala.reflect.api.Universe], scala.reflect.runtime.universe, "reify", "2")
}
\ No newline at end of file
diff --git a/test/files/run/reflection-magicsymbols-repl.check b/test/files/run/reflection-magicsymbols-repl.check
index d2ef4ad3cd..d3cd26f25f 100644
--- a/test/files/run/reflection-magicsymbols-repl.check
+++ b/test/files/run/reflection-magicsymbols-repl.check
@@ -1,39 +1,39 @@
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala>
-
-scala> import scala.reflect.runtime.universe._
-import scala.reflect.runtime.universe._
-
-scala> class A {
- def foo1(x: Int*) = ???
- def foo2(x: => Int) = ???
- def foo3(x: Any) = ???
- def foo4(x: AnyRef) = ???
- def foo5(x: AnyVal) = ???
- def foo6(x: Null) = ???
- def foo7(x: Nothing) = ???
- def foo8(x: Singleton) = ???
-}
-defined class A
-
-scala> def test(n: Int): Unit = {
- val sig = typeOf[A] member newTermName("foo" + n) typeSignature
- val x = sig.asInstanceOf[MethodType].params.head
- println(x.typeSignature)
-}
-warning: there were 1 feature warnings; re-run with -feature for details
-test: (n: Int)Unit
-
-scala> for (i <- 1 to 8) test(i)
-scala.Int*
-=> scala.Int
-scala.Any
-scala.AnyRef
-scala.AnyVal
-scala.Null
-scala.Nothing
-scala.Singleton
-
-scala>
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe._
+
+scala> class A {
+ def foo1(x: Int*) = ???
+ def foo2(x: => Int) = ???
+ def foo3(x: Any) = ???
+ def foo4(x: AnyRef) = ???
+ def foo5(x: AnyVal) = ???
+ def foo6(x: Null) = ???
+ def foo7(x: Nothing) = ???
+ def foo8(x: Singleton) = ???
+}
+defined class A
+
+scala> def test(n: Int): Unit = {
+ val sig = typeOf[A] member newTermName("foo" + n) typeSignature
+ val x = sig.asInstanceOf[MethodType].params.head
+ println(x.typeSignature)
+}
+warning: there were 1 feature warnings; re-run with -feature for details
+test: (n: Int)Unit
+
+scala> for (i <- 1 to 8) test(i)
+scala.Int*
+=> scala.Int
+scala.Any
+scala.AnyRef
+scala.AnyVal
+scala.Null
+scala.Nothing
+scala.Singleton
+
+scala>
diff --git a/test/files/run/reflection-magicsymbols-vanilla.check b/test/files/run/reflection-magicsymbols-vanilla.check
index 4f4e8d94a9..d3ff152896 100644
--- a/test/files/run/reflection-magicsymbols-vanilla.check
+++ b/test/files/run/reflection-magicsymbols-vanilla.check
@@ -1,8 +1,8 @@
-Int*
-=> Int
-Any
-AnyRef
-AnyVal
-Null
-Nothing
-Singleton
+Int*
+=> Int
+Any
+AnyRef
+AnyVal
+Null
+Nothing
+Singleton
diff --git a/test/files/run/reflection-mem-glbs.scala b/test/files/run/reflection-mem-glbs.scala
new file mode 100644
index 0000000000..3f29a914bc
--- /dev/null
+++ b/test/files/run/reflection-mem-glbs.scala
@@ -0,0 +1,13 @@
+import scala.tools.partest.MemoryTest
+
+trait A { type T <: A }
+trait B { type T <: B }
+
+object Test extends MemoryTest {
+ override def maxDelta = 10
+ override def calcsPerIter = 50000
+ override def calc() {
+ import scala.reflect.runtime.universe._
+ glb(List(typeOf[A], typeOf[B]))
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-mem-tags.scala b/test/files/run/reflection-mem-tags.scala
new file mode 100644
index 0000000000..8815e7dcd8
--- /dev/null
+++ b/test/files/run/reflection-mem-tags.scala
@@ -0,0 +1,17 @@
+import scala.tools.partest.MemoryTest
+
+trait A { type T <: A }
+trait B { type T <: B }
+
+object Test extends MemoryTest {
+ override def maxDelta = 10
+ override def calcsPerIter = 100000
+ override def calc() {
+ import scala.reflect.runtime.universe._
+ def foo = {
+ class A { def x = 2; def y: A = new A }
+ weakTypeOf[A { def z: Int }]
+ }
+ foo
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-mem-typecheck.scala b/test/files/run/reflection-mem-typecheck.scala
new file mode 100644
index 0000000000..a312c2c893
--- /dev/null
+++ b/test/files/run/reflection-mem-typecheck.scala
@@ -0,0 +1,26 @@
+import scala.tools.partest.MemoryTest
+
+trait A { type T <: A }
+trait B { type T <: B }
+
+object Test extends MemoryTest {
+ lazy val tb = {
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.tools.reflect.ToolBox
+ cm.mkToolBox()
+ }
+
+ override def maxDelta = 10
+ override def calcsPerIter = 8
+ override def calc() {
+ var snippet = """
+ trait A { type T <: A }
+ trait B { type T <: B }
+ def foo[T](x: List[T]) = x
+ foo(List(new A {}, new B {}))
+ """.trim
+ snippet = snippet + "\n" + (List.fill(50)(snippet.split("\n").last) mkString "\n")
+ tb.typeCheck(tb.parse(snippet))
+ }
+}
\ No newline at end of file
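
A minimal sketch of the ToolBox workflow these memory tests drive in a loop (mkToolBox, parse, typeCheck); the snippet string below is illustrative:

    import scala.reflect.runtime.universe._
    import scala.reflect.runtime.{currentMirror => cm}
    import scala.tools.reflect.ToolBox

    object ToolBoxSketch extends App {
      val tb = cm.mkToolBox()
      val tree = tb.parse("""
        def foo[T](x: List[T]) = x
        foo(List(1, 2, 3))
      """)
      val typed = tb.typeCheck(tree) // the call whose memory footprint the test bounds
      println(typed.tpe)
    }
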
diff --git a/test/files/run/reflection-methodsymbol-params.check b/test/files/run/reflection-methodsymbol-params.check
index 11f349d52b..554e72d553 100644
--- a/test/files/run/reflection-methodsymbol-params.check
+++ b/test/files/run/reflection-methodsymbol-params.check
@@ -1,8 +1,8 @@
-List()
-List(List())
-List(List(value x))
-List(List(value x), List(value y))
-List()
-List(List())
-List(List(value x))
-List(List(value x), List(value y))
+List()
+List(List())
+List(List(value x))
+List(List(value x), List(value y))
+List()
+List(List())
+List(List(value x))
+List(List(value x), List(value y))
diff --git a/test/files/run/reflection-methodsymbol-returntype.check b/test/files/run/reflection-methodsymbol-returntype.check
index 0f30d1beaf..97ea02956d 100644
--- a/test/files/run/reflection-methodsymbol-returntype.check
+++ b/test/files/run/reflection-methodsymbol-returntype.check
@@ -1,8 +1,8 @@
-Int
-Int
-Int
-Int
-Int
-Int
-Int
-Int
+Int
+Int
+Int
+Int
+Int
+Int
+Int
+Int
diff --git a/test/files/run/reflection-methodsymbol-typeparams.check b/test/files/run/reflection-methodsymbol-typeparams.check
index c888e09a17..f04188775c 100644
--- a/test/files/run/reflection-methodsymbol-typeparams.check
+++ b/test/files/run/reflection-methodsymbol-typeparams.check
@@ -1,8 +1,8 @@
-List()
-List()
-List()
-List()
-List(type T)
-List(type T)
-List(type T)
-List(type T)
+List()
+List()
+List()
+List()
+List(type T)
+List(type T)
+List(type T)
+List(type T)
diff --git a/test/files/run/reflection-modulemirror-inner-badpath.check b/test/files/run/reflection-modulemirror-inner-badpath.check
index 1e990ec900..a8e7397c8f 100644
--- a/test/files/run/reflection-modulemirror-inner-badpath.check
+++ b/test/files/run/reflection-modulemirror-inner-badpath.check
@@ -1,2 +1,2 @@
-object R is an inner module, use reflectModule on an InstanceMirror to obtain its ModuleMirror
-()
+object R is an inner module, use reflectModule on an InstanceMirror to obtain its ModuleMirror
+()
diff --git a/test/files/run/reflection-modulemirror-inner-good.check b/test/files/run/reflection-modulemirror-inner-good.check
index fe658e7087..331bae08fb 100644
--- a/test/files/run/reflection-modulemirror-inner-good.check
+++ b/test/files/run/reflection-modulemirror-inner-good.check
@@ -1 +1 @@
-R
+R
diff --git a/test/files/run/reflection-modulemirror-nested-badpath.check b/test/files/run/reflection-modulemirror-nested-badpath.check
index f7980b9986..3ef94e1ec6 100644
--- a/test/files/run/reflection-modulemirror-nested-badpath.check
+++ b/test/files/run/reflection-modulemirror-nested-badpath.check
@@ -1,2 +1,2 @@
-object R is a static module, use reflectModule on a RuntimeMirror to obtain its ModuleMirror
-()
+object R is a static module, use reflectModule on a RuntimeMirror to obtain its ModuleMirror
+()
diff --git a/test/files/run/reflection-modulemirror-nested-good.check b/test/files/run/reflection-modulemirror-nested-good.check
index fe658e7087..331bae08fb 100644
--- a/test/files/run/reflection-modulemirror-nested-good.check
+++ b/test/files/run/reflection-modulemirror-nested-good.check
@@ -1 +1 @@
-R
+R
diff --git a/test/files/run/reflection-modulemirror-toplevel-badpath.check b/test/files/run/reflection-modulemirror-toplevel-badpath.check
index f7980b9986..3ef94e1ec6 100644
--- a/test/files/run/reflection-modulemirror-toplevel-badpath.check
+++ b/test/files/run/reflection-modulemirror-toplevel-badpath.check
@@ -1,2 +1,2 @@
-object R is a static module, use reflectModule on a RuntimeMirror to obtain its ModuleMirror
-()
+object R is a static module, use reflectModule on a RuntimeMirror to obtain its ModuleMirror
+()
diff --git a/test/files/run/reflection-repl-classes.check b/test/files/run/reflection-repl-classes.check
index 1c7f86c90c..2dd96a93bf 100644
--- a/test/files/run/reflection-repl-classes.check
+++ b/test/files/run/reflection-repl-classes.check
@@ -1,35 +1,35 @@
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala>
-
-scala> class A
-defined class A
-
-scala>
-
-scala> class B {
- def foo(x: A) = 1
-}
-defined class B
-
-scala>
-
-scala> object defs {
- val cm = reflect.runtime.currentMirror
- val u = cm.universe
- val im = cm.reflect(new B)
- val method = im.symbol.typeSignature.member(u.newTermName("foo")).asMethod
- val mm = im.reflectMethod(method)
-}
-defined module defs
-
-scala> import defs._
-import defs._
-
-scala>
-
-scala> mm(new A)
-res0: Any = 1
-
-scala>
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> class A
+defined class A
+
+scala>
+
+scala> class B {
+ def foo(x: A) = 1
+}
+defined class B
+
+scala>
+
+scala> object defs {
+ val cm = reflect.runtime.currentMirror
+ val u = cm.universe
+ val im = cm.reflect(new B)
+ val method = im.symbol.typeSignature.member(u.newTermName("foo")).asMethod
+ val mm = im.reflectMethod(method)
+}
+defined module defs
+
+scala> import defs._
+import defs._
+
+scala>
+
+scala> mm(new A)
+res0: Any = 1
+
+scala>
diff --git a/test/files/run/reflection-repl-elementary.check b/test/files/run/reflection-repl-elementary.check
index 341dd10ab0..d9133f7e3e 100644
--- a/test/files/run/reflection-repl-elementary.check
+++ b/test/files/run/reflection-repl-elementary.check
@@ -1,9 +1,9 @@
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala>
-
-scala> scala.reflect.runtime.universe.typeOf[List[Nothing]]
-res0: reflect.runtime.universe.Type = scala.List[Nothing]
-
-scala>
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> scala.reflect.runtime.universe.typeOf[List[Nothing]]
+res0: reflect.runtime.universe.Type = scala.List[Nothing]
+
+scala>
diff --git a/test/files/run/reflection-sorted-decls.check b/test/files/run/reflection-sorted-decls.check
index 9a9832a683..415e073149 100644
--- a/test/files/run/reflection-sorted-decls.check
+++ b/test/files/run/reflection-sorted-decls.check
@@ -1,7 +1,3 @@
-value a
-value b
-value c
-method c
-method b
-method a
-constructor Foo$1
+value a
+value b
+value c
diff --git a/test/files/run/reflection-sorted-decls.scala b/test/files/run/reflection-sorted-decls.scala
index 242f17d9bb..5616e10b3b 100644
--- a/test/files/run/reflection-sorted-decls.scala
+++ b/test/files/run/reflection-sorted-decls.scala
@@ -3,6 +3,6 @@ object Test {
class Foo(val a: Int, val b: Int, val c: Int)
import scala.reflect.runtime.{currentMirror => cm}
val decls = cm.classSymbol(classOf[Foo]).typeSignature.declarations
- decls.sorted.toList foreach System.out.println
+ decls.sorted.toList.filter(!_.isMethod) foreach System.out.println
}
}
diff --git a/test/files/run/reflection-sorted-members.check b/test/files/run/reflection-sorted-members.check
index d58b691c42..c148e19e69 100644
--- a/test/files/run/reflection-sorted-members.check
+++ b/test/files/run/reflection-sorted-members.check
@@ -1,34 +1,4 @@
-value a
-value b
-value c
-method c
-method b
-method a
-constructor Foo$1
-value x
-method x
-constructor Bar$1
-method finalize
-method wait
-method wait
-method wait
-method equals
-method toString
-method hashCode
-method getClass
-method clone
-method notify
-method notifyAll
-constructor Object
-method eq
-method ne
-method ==
-method !=
-method ##
-method synchronized
-method $isInstanceOf
-method $asInstanceOf
-method ==
-method !=
-method isInstanceOf
-method asInstanceOf
+value a
+value b
+value c
+value x
diff --git a/test/files/run/reflection-sorted-members.scala b/test/files/run/reflection-sorted-members.scala
index 9980d79999..a8379234c0 100644
--- a/test/files/run/reflection-sorted-members.scala
+++ b/test/files/run/reflection-sorted-members.scala
@@ -6,6 +6,6 @@ object Test {
class Foo(val a: Int, val b: Int, val c: Int) extends Bar(a + b + c) with T1 with T2
import scala.reflect.runtime.{currentMirror => cm}
val members = cm.classSymbol(classOf[Foo]).typeSignature.members
- members.sorted.toList foreach System.out.println
+ members.sorted.toList.filter(!_.isMethod) foreach System.out.println
}
}
diff --git a/test/files/run/reflection-valueclasses-derived.check b/test/files/run/reflection-valueclasses-derived.check
index bfcfcade5e..3382d41556 100644
--- a/test/files/run/reflection-valueclasses-derived.check
+++ b/test/files/run/reflection-valueclasses-derived.check
@@ -1,3 +1,3 @@
-4
-class C
-C@2
+4
+class C
+C@2
diff --git a/test/files/run/reflection-valueclasses-standard.check b/test/files/run/reflection-valueclasses-standard.check
index 060ab55406..643c3d048c 100644
--- a/test/files/run/reflection-valueclasses-standard.check
+++ b/test/files/run/reflection-valueclasses-standard.check
@@ -1,27 +1,27 @@
-========byte========
-byte
-2
-========short========
-short
-2
-========int========
-int
-2
-========long========
-long
-2
-========float========
-float
-2.0
-========double========
-double
-2.0
-========char========
-char
-2
-========boolean========
-boolean
-true
-========void========
-void
-()
+========byte========
+byte
+2
+========short========
+short
+2
+========int========
+int
+2
+========long========
+long
+2
+========float========
+float
+2.0
+========double========
+double
+2.0
+========char========
+char
+2
+========boolean========
+boolean
+true
+========void========
+void
+()
diff --git a/test/files/run/reify-aliases.check b/test/files/run/reify-aliases.check
index 392dd59a1d..aa846b9bc6 100644
--- a/test/files/run/reify-aliases.check
+++ b/test/files/run/reify-aliases.check
@@ -1 +1 @@
-TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List())
+TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List())
diff --git a/test/files/run/reify-repl-fail-gracefully.check b/test/files/run/reify-repl-fail-gracefully.check
index 5a97983e54..1b0f3f2162 100644
--- a/test/files/run/reify-repl-fail-gracefully.check
+++ b/test/files/run/reify-repl-fail-gracefully.check
@@ -1,21 +1,21 @@
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala>
-
-scala> import language.experimental.macros
-import language.experimental.macros
-
-scala> import scala.reflect.runtime.universe._
-import scala.reflect.runtime.universe._
-
-scala>
-
-scala> reify
-<console>:12: error: macros cannot be partially applied
- reify
- ^
-
-scala>
-
-scala>
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import language.experimental.macros
+import language.experimental.macros
+
+scala> import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe._
+
+scala>
+
+scala> reify
+<console>:12: error: macros cannot be partially applied
+ reify
+ ^
+
+scala>
+
+scala>
diff --git a/test/files/run/reify-staticXXX.check b/test/files/run/reify-staticXXX.check
index 5762ec47d6..37102b29e1 100644
--- a/test/files/run/reify-staticXXX.check
+++ b/test/files/run/reify-staticXXX.check
@@ -1,24 +1,24 @@
-object
-object
-class
-class
-object > object
-object > object
-object > class
-object > class
-package > object
-package > object
-package > class
-package > class
-object
-object
-class
-class
-object > object
-object > object
-object > class
-object > class
-package > object
-package > object
-package > class
-package > class
+object
+object
+class
+class
+object > object
+object > object
+object > class
+object > class
+package > object
+package > object
+package > class
+package > class
+object
+object
+class
+class
+object > object
+object > object
+object > class
+object > class
+package > object
+package > object
+package > class
+package > class
diff --git a/test/files/run/reify_ann1a.check b/test/files/run/reify_ann1a.check
index 9bbb0ab87a..244be27aa7 100644
--- a/test/files/run/reify_ann1a.check
+++ b/test/files/run/reify_ann1a.check
@@ -1,30 +1,30 @@
-{
- @new ann(immutable.this.List.apply("1a")) @new ann(immutable.this.List.apply("1b")) class C[@new ann(immutable.this.List.apply("2a")) @new ann(immutable.this.List.apply("2b")) T >: Nothing <: Any] extends AnyRef {
- @new ann(immutable.this.List.apply("3a")) @new ann(immutable.this.List.apply("3b")) <paramaccessor> private[this] val x: T @ann(immutable.this.List.apply("4a")) @ann(immutable.this.List.apply("4b")) = _;
- def <init>(@new ann(immutable.this.List.apply("3a")) @new ann(immutable.this.List.apply("3b")) x: T @ann(immutable.this.List.apply("4a")) @ann(immutable.this.List.apply("4b"))) = {
- super.<init>();
- ()
- };
- @new ann(immutable.this.List.apply("5a")) @new ann(immutable.this.List.apply("5b")) def f(x: Int @ann(immutable.this.List.apply("6a")) @ann(immutable.this.List.apply("6b"))) = {
- @new ann(immutable.this.List.apply("7a")) @new ann(immutable.this.List.apply("7b")) val r = x.$plus(3): @ann(immutable.this.List.apply("8a")): @ann(immutable.this.List.apply("8b"));
- val s = (4: Int @ann(immutable.this.List.apply("9a")) @ann(immutable.this.List.apply("9b")));
- r.$plus(s)
- }
- };
- ()
-}
-{
- @ann(immutable.this.List.apply[String]("1a")) @ann(immutable.this.List.apply[String]("1b")) class C[@ann(immutable.this.List.apply[String]("2a")) @ann(immutable.this.List.apply[String]("2b")) T] extends AnyRef {
- @ann(immutable.this.List.apply[String]("3a")) @ann(immutable.this.List.apply[String]("3b")) <paramaccessor> private[this] val x: T @ann(immutable.this.List.apply[String]("4b")) @ann(immutable.this.List.apply[String]("4a")) = _;
- def <init>(@ann(immutable.this.List.apply[String]("3a")) @ann(immutable.this.List.apply[String]("3b")) x: T @ann(immutable.this.List.apply[String]("4b")) @ann(immutable.this.List.apply[String]("4a"))): C[T] = {
- C.super.<init>();
- ()
- };
- @ann(immutable.this.List.apply[String]("5a")) @ann(immutable.this.List.apply[String]("5b")) def f(x: Int @ann(immutable.this.List.apply[String]("6b")) @ann(immutable.this.List.apply[String]("6a"))): Int = {
- @ann(immutable.this.List.apply[String]("7a")) @ann(immutable.this.List.apply[String]("7b")) val r: Int @ann(immutable.this.List.apply[String]("8b")) @ann(immutable.this.List.apply[String]("8a")) = ((x.+(3): Int @ann(immutable.this.List.apply[String]("8a"))): Int @ann(immutable.this.List.apply[String]("8b")) @ann(immutable.this.List.apply[String]("8a")));
- val s: Int @ann(immutable.this.List.apply[String]("9b")) @ann(immutable.this.List.apply[String]("9a")) = (4: Int @ann(immutable.this.List.apply[String]("9b")) @ann(immutable.this.List.apply[String]("9a")));
- r.+(s)
- }
- };
- ()
-}
+{
+ @new ann(immutable.this.List.apply("1a")) @new ann(immutable.this.List.apply("1b")) class C[@new ann(immutable.this.List.apply("2a")) @new ann(immutable.this.List.apply("2b")) T >: Nothing <: Any] extends AnyRef {
+ @new ann(immutable.this.List.apply("3a")) @new ann(immutable.this.List.apply("3b")) <paramaccessor> private[this] val x: T @ann(immutable.this.List.apply("4a")) @ann(immutable.this.List.apply("4b")) = _;
+ def <init>(@new ann(immutable.this.List.apply("3a")) @new ann(immutable.this.List.apply("3b")) x: T @ann(immutable.this.List.apply("4a")) @ann(immutable.this.List.apply("4b"))) = {
+ super.<init>();
+ ()
+ };
+ @new ann(immutable.this.List.apply("5a")) @new ann(immutable.this.List.apply("5b")) def f(x: Int @ann(immutable.this.List.apply("6a")) @ann(immutable.this.List.apply("6b"))) = {
+ @new ann(immutable.this.List.apply("7a")) @new ann(immutable.this.List.apply("7b")) val r = x.$plus(3): @ann(immutable.this.List.apply("8a")): @ann(immutable.this.List.apply("8b"));
+ val s = (4: Int @ann(immutable.this.List.apply("9a")) @ann(immutable.this.List.apply("9b")));
+ r.$plus(s)
+ }
+ };
+ ()
+}
+{
+ @ann(immutable.this.List.apply[String]("1a")) @ann(immutable.this.List.apply[String]("1b")) class C[@ann(immutable.this.List.apply[String]("2a")) @ann(immutable.this.List.apply[String]("2b")) T] extends AnyRef {
+ @ann(immutable.this.List.apply[String]("3a")) @ann(immutable.this.List.apply[String]("3b")) <paramaccessor> private[this] val x: T @ann(immutable.this.List.apply[String]("4b")) @ann(immutable.this.List.apply[String]("4a")) = _;
+ def <init>(@ann(immutable.this.List.apply[String]("3a")) @ann(immutable.this.List.apply[String]("3b")) x: T @ann(immutable.this.List.apply[String]("4b")) @ann(immutable.this.List.apply[String]("4a"))): C[T] = {
+ C.super.<init>();
+ ()
+ };
+ @ann(immutable.this.List.apply[String]("5a")) @ann(immutable.this.List.apply[String]("5b")) def f(x: Int @ann(immutable.this.List.apply[String]("6b")) @ann(immutable.this.List.apply[String]("6a"))): Int = {
+ @ann(immutable.this.List.apply[String]("7a")) @ann(immutable.this.List.apply[String]("7b")) val r: Int @ann(immutable.this.List.apply[String]("8b")) @ann(immutable.this.List.apply[String]("8a")) = ((x.+(3): Int @ann(immutable.this.List.apply[String]("8a"))): Int @ann(immutable.this.List.apply[String]("8b")) @ann(immutable.this.List.apply[String]("8a")));
+ val s: Int @ann(immutable.this.List.apply[String]("9b")) @ann(immutable.this.List.apply[String]("9a")) = (4: Int @ann(immutable.this.List.apply[String]("9b")) @ann(immutable.this.List.apply[String]("9a")));
+ r.+(s)
+ }
+ };
+ ()
+}
diff --git a/test/files/run/reify_ann1a.scala b/test/files/run/reify_ann1a.scala
index 9f994fb2eb..c23048e463 100644
--- a/test/files/run/reify_ann1a.scala
+++ b/test/files/run/reify_ann1a.scala
@@ -24,5 +24,5 @@ object Test extends App {
println(ttree.toString)
// test 3: import and compile
- toolbox.runExpr(tree)
+ toolbox.eval(tree)
}
\ No newline at end of file
diff --git a/test/files/run/reify_ann1b.check b/test/files/run/reify_ann1b.check
index 4a51c02701..6a5f32a492 100644
--- a/test/files/run/reify_ann1b.check
+++ b/test/files/run/reify_ann1b.check
@@ -1,30 +1,30 @@
-{
- @new ann(bar = "1a") @new ann(bar = "1b") class C[@new ann(bar = "2a") @new ann(bar = "2b") T >: Nothing <: Any] extends AnyRef {
- @new ann(bar = "3a") @new ann(bar = "3b") <paramaccessor> private[this] val x: T @ann(bar = "4a") @ann(bar = "4b") = _;
- def <init>(@new ann(bar = "3a") @new ann(bar = "3b") x: T @ann(bar = "4a") @ann(bar = "4b")) = {
- super.<init>();
- ()
- };
- @new ann(bar = "5a") @new ann(bar = "5b") def f(x: Int @ann(bar = "6a") @ann(bar = "6b")) = {
- @new ann(bar = "7a") @new ann(bar = "7b") val r = x.$plus(3): @ann(bar = "8a"): @ann(bar = "8b");
- val s = (4: Int @ann(bar = "9a") @ann(bar = "9b"));
- r.$plus(s)
- }
- };
- ()
-}
-{
- @ann(bar = "1a") @ann(bar = "1b") class C[@ann(bar = "2a") @ann(bar = "2b") T] extends AnyRef {
- @ann(bar = "3a") @ann(bar = "3b") <paramaccessor> private[this] val x: T @ann(bar = "4b") @ann(bar = "4a") = _;
- def <init>(@ann(bar = "3a") @ann(bar = "3b") x: T @ann(bar = "4b") @ann(bar = "4a")): C[T] = {
- C.super.<init>();
- ()
- };
- @ann(bar = "5a") @ann(bar = "5b") def f(x: Int @ann(bar = "6b") @ann(bar = "6a")): Int = {
- @ann(bar = "7a") @ann(bar = "7b") val r: Int @ann(bar = "8b") @ann(bar = "8a") = ((x.+(3): Int @ann(bar = "8a")): Int @ann(bar = "8b") @ann(bar = "8a"));
- val s: Int @ann(bar = "9b") @ann(bar = "9a") = (4: Int @ann(bar = "9b") @ann(bar = "9a"));
- r.+(s)
- }
- };
- ()
-}
+{
+ @new ann(bar = "1a") @new ann(bar = "1b") class C[@new ann(bar = "2a") @new ann(bar = "2b") T >: Nothing <: Any] extends AnyRef {
+ @new ann(bar = "3a") @new ann(bar = "3b") <paramaccessor> private[this] val x: T @ann(bar = "4a") @ann(bar = "4b") = _;
+ def <init>(@new ann(bar = "3a") @new ann(bar = "3b") x: T @ann(bar = "4a") @ann(bar = "4b")) = {
+ super.<init>();
+ ()
+ };
+ @new ann(bar = "5a") @new ann(bar = "5b") def f(x: Int @ann(bar = "6a") @ann(bar = "6b")) = {
+ @new ann(bar = "7a") @new ann(bar = "7b") val r = x.$plus(3): @ann(bar = "8a"): @ann(bar = "8b");
+ val s = (4: Int @ann(bar = "9a") @ann(bar = "9b"));
+ r.$plus(s)
+ }
+ };
+ ()
+}
+{
+ @ann(bar = "1a") @ann(bar = "1b") class C[@ann(bar = "2a") @ann(bar = "2b") T] extends AnyRef {
+ @ann(bar = "3a") @ann(bar = "3b") <paramaccessor> private[this] val x: T @ann(bar = "4b") @ann(bar = "4a") = _;
+ def <init>(@ann(bar = "3a") @ann(bar = "3b") x: T @ann(bar = "4b") @ann(bar = "4a")): C[T] = {
+ C.super.<init>();
+ ()
+ };
+ @ann(bar = "5a") @ann(bar = "5b") def f(x: Int @ann(bar = "6b") @ann(bar = "6a")): Int = {
+ @ann(bar = "7a") @ann(bar = "7b") val r: Int @ann(bar = "8b") @ann(bar = "8a") = ((x.+(3): Int @ann(bar = "8a")): Int @ann(bar = "8b") @ann(bar = "8a"));
+ val s: Int @ann(bar = "9b") @ann(bar = "9a") = (4: Int @ann(bar = "9b") @ann(bar = "9a"));
+ r.+(s)
+ }
+ };
+ ()
+}
diff --git a/test/files/run/reify_ann1b.scala b/test/files/run/reify_ann1b.scala
index 3e0d3e0802..29ce6021a2 100644
--- a/test/files/run/reify_ann1b.scala
+++ b/test/files/run/reify_ann1b.scala
@@ -24,5 +24,5 @@ object Test extends App {
println(ttree.toString)
// test 3: import and compile
- toolbox.runExpr(tree)
+ toolbox.eval(tree)
}
\ No newline at end of file
diff --git a/test/files/run/reify_ann2a.check b/test/files/run/reify_ann2a.check
index 4d67bbf786..934af54802 100644
--- a/test/files/run/reify_ann2a.check
+++ b/test/files/run/reify_ann2a.check
@@ -1,44 +1,44 @@
-{
- class ann extends StaticAnnotation {
- <paramaccessor> private[this] val bar: List[String] = _;
- def <init>(bar: List[String]) = {
- super.<init>();
- ()
- }
- };
- @new ann(immutable.this.List.apply("1a")) @new ann(immutable.this.List.apply("1b")) class C[@new ann(immutable.this.List.apply("2a")) @new ann(immutable.this.List.apply("2b")) T >: Nothing <: Any] extends AnyRef {
- @new ann(immutable.this.List.apply("3a")) @new ann(immutable.this.List.apply("3b")) <paramaccessor> private[this] val x: T @ann(immutable.this.List.apply("4a")) @ann(immutable.this.List.apply("4b")) = _;
- def <init>(@new ann(immutable.this.List.apply("3a")) @new ann(immutable.this.List.apply("3b")) x: T @ann(immutable.this.List.apply("4a")) @ann(immutable.this.List.apply("4b"))) = {
- super.<init>();
- ()
- };
- @new ann(immutable.this.List.apply("5a")) @new ann(immutable.this.List.apply("5b")) def f(x: Int @ann(immutable.this.List.apply("6a")) @ann(immutable.this.List.apply("6b"))) = {
- @new ann(immutable.this.List.apply("7a")) @new ann(immutable.this.List.apply("7b")) val r = x.$plus(3): @ann(immutable.this.List.apply("8a")): @ann(immutable.this.List.apply("8b"));
- val s = (4: Int @ann(immutable.this.List.apply("9a")) @ann(immutable.this.List.apply("9b")));
- r.$plus(s)
- }
- };
- ()
-}
-{
- class ann extends scala.annotation.Annotation with scala.annotation.StaticAnnotation {
- <paramaccessor> private[this] val bar: List[String] = _;
- def <init>(bar: List[String]): ann = {
- ann.super.<init>();
- ()
- }
- };
- @ann(immutable.this.List.apply[String]("1a")) @ann(immutable.this.List.apply[String]("1b")) class C[@ann(immutable.this.List.apply[String]("2a")) @ann(immutable.this.List.apply[String]("2b")) T] extends AnyRef {
- @ann(immutable.this.List.apply[String]("3a")) @ann(immutable.this.List.apply[String]("3b")) <paramaccessor> private[this] val x: T @ann(immutable.this.List.apply[String]("4b")) @ann(immutable.this.List.apply[String]("4a")) = _;
- def <init>(@ann(immutable.this.List.apply[String]("3a")) @ann(immutable.this.List.apply[String]("3b")) x: T @ann(immutable.this.List.apply[String]("4b")) @ann(immutable.this.List.apply[String]("4a"))): C[T] = {
- C.super.<init>();
- ()
- };
- @ann(immutable.this.List.apply[String]("5a")) @ann(immutable.this.List.apply[String]("5b")) def f(x: Int @ann(immutable.this.List.apply[String]("6b")) @ann(immutable.this.List.apply[String]("6a"))): Int = {
- @ann(immutable.this.List.apply[String]("7a")) @ann(immutable.this.List.apply[String]("7b")) val r: Int @ann(immutable.this.List.apply[String]("8b")) @ann(immutable.this.List.apply[String]("8a")) = ((x.+(3): Int @ann(immutable.this.List.apply[String]("8a"))): Int @ann(immutable.this.List.apply[String]("8b")) @ann(immutable.this.List.apply[String]("8a")));
- val s: Int @ann(immutable.this.List.apply[String]("9b")) @ann(immutable.this.List.apply[String]("9a")) = (4: Int @ann(immutable.this.List.apply[String]("9b")) @ann(immutable.this.List.apply[String]("9a")));
- r.+(s)
- }
- };
- ()
-}
+{
+ class ann extends StaticAnnotation {
+ <paramaccessor> private[this] val bar: List[String] = _;
+ def <init>(bar: List[String]) = {
+ super.<init>();
+ ()
+ }
+ };
+ @new ann(immutable.this.List.apply("1a")) @new ann(immutable.this.List.apply("1b")) class C[@new ann(immutable.this.List.apply("2a")) @new ann(immutable.this.List.apply("2b")) T >: Nothing <: Any] extends AnyRef {
+ @new ann(immutable.this.List.apply("3a")) @new ann(immutable.this.List.apply("3b")) <paramaccessor> private[this] val x: T @ann(immutable.this.List.apply("4a")) @ann(immutable.this.List.apply("4b")) = _;
+ def <init>(@new ann(immutable.this.List.apply("3a")) @new ann(immutable.this.List.apply("3b")) x: T @ann(immutable.this.List.apply("4a")) @ann(immutable.this.List.apply("4b"))) = {
+ super.<init>();
+ ()
+ };
+ @new ann(immutable.this.List.apply("5a")) @new ann(immutable.this.List.apply("5b")) def f(x: Int @ann(immutable.this.List.apply("6a")) @ann(immutable.this.List.apply("6b"))) = {
+ @new ann(immutable.this.List.apply("7a")) @new ann(immutable.this.List.apply("7b")) val r = x.$plus(3): @ann(immutable.this.List.apply("8a")): @ann(immutable.this.List.apply("8b"));
+ val s = (4: Int @ann(immutable.this.List.apply("9a")) @ann(immutable.this.List.apply("9b")));
+ r.$plus(s)
+ }
+ };
+ ()
+}
+{
+ class ann extends scala.annotation.Annotation with scala.annotation.StaticAnnotation {
+ <paramaccessor> private[this] val bar: List[String] = _;
+ def <init>(bar: List[String]): ann = {
+ ann.super.<init>();
+ ()
+ }
+ };
+ @ann(immutable.this.List.apply[String]("1a")) @ann(immutable.this.List.apply[String]("1b")) class C[@ann(immutable.this.List.apply[String]("2a")) @ann(immutable.this.List.apply[String]("2b")) T] extends AnyRef {
+ @ann(immutable.this.List.apply[String]("3a")) @ann(immutable.this.List.apply[String]("3b")) <paramaccessor> private[this] val x: T @ann(immutable.this.List.apply[String]("4b")) @ann(immutable.this.List.apply[String]("4a")) = _;
+ def <init>(@ann(immutable.this.List.apply[String]("3a")) @ann(immutable.this.List.apply[String]("3b")) x: T @ann(immutable.this.List.apply[String]("4b")) @ann(immutable.this.List.apply[String]("4a"))): C[T] = {
+ C.super.<init>();
+ ()
+ };
+ @ann(immutable.this.List.apply[String]("5a")) @ann(immutable.this.List.apply[String]("5b")) def f(x: Int @ann(immutable.this.List.apply[String]("6b")) @ann(immutable.this.List.apply[String]("6a"))): Int = {
+ @ann(immutable.this.List.apply[String]("7a")) @ann(immutable.this.List.apply[String]("7b")) val r: Int @ann(immutable.this.List.apply[String]("8b")) @ann(immutable.this.List.apply[String]("8a")) = ((x.+(3): Int @ann(immutable.this.List.apply[String]("8a"))): Int @ann(immutable.this.List.apply[String]("8b")) @ann(immutable.this.List.apply[String]("8a")));
+ val s: Int @ann(immutable.this.List.apply[String]("9b")) @ann(immutable.this.List.apply[String]("9a")) = (4: Int @ann(immutable.this.List.apply[String]("9b")) @ann(immutable.this.List.apply[String]("9a")));
+ r.+(s)
+ }
+ };
+ ()
+}
diff --git a/test/files/run/reify_ann2a.scala b/test/files/run/reify_ann2a.scala
index 63a17ee192..53423e12c3 100644
--- a/test/files/run/reify_ann2a.scala
+++ b/test/files/run/reify_ann2a.scala
@@ -24,5 +24,5 @@ object Test extends App {
println(ttree.toString)
// test 3: import and compile
- toolbox.runExpr(tree)
+ toolbox.eval(tree)
}
\ No newline at end of file
diff --git a/test/files/run/reify_ann3.check b/test/files/run/reify_ann3.check
index 96b4cf9c4e..8caceb2696 100644
--- a/test/files/run/reify_ann3.check
+++ b/test/files/run/reify_ann3.check
@@ -1,21 +1,21 @@
-{
- class Tree[A >: Nothing <: Any, B >: Nothing <: Any] extends AnyRef {
- @new inline @getter() final <paramaccessor> val key: A = _;
- def <init>(key: A) = {
- super.<init>();
- ()
- }
- };
- ()
-}
-{
- class Tree[A, B] extends AnyRef {
- final <paramaccessor> private[this] val key: A = _;
- @inline @scala.annotation.meta.getter final <stable> <accessor> <paramaccessor> def key: A = Tree.this.key;
- def <init>(key: A): Tree[A,B] = {
- Tree.super.<init>();
- ()
- }
- };
- ()
-}
+{
+ class Tree[A >: Nothing <: Any, B >: Nothing <: Any] extends AnyRef {
+ @new inline @getter() final <paramaccessor> val key: A = _;
+ def <init>(key: A) = {
+ super.<init>();
+ ()
+ }
+ };
+ ()
+}
+{
+ class Tree[A, B] extends AnyRef {
+ final <paramaccessor> private[this] val key: A = _;
+ @inline @scala.annotation.meta.getter final <stable> <accessor> <paramaccessor> def key: A = Tree.this.key;
+ def <init>(key: A): Tree[A,B] = {
+ Tree.super.<init>();
+ ()
+ }
+ };
+ ()
+}
diff --git a/test/files/run/reify_ann3.scala b/test/files/run/reify_ann3.scala
index dbb6a1b443..4162fa532f 100644
--- a/test/files/run/reify_ann3.scala
+++ b/test/files/run/reify_ann3.scala
@@ -18,5 +18,5 @@ object Test extends App {
println(ttree.toString)
// test 3: import and compile
- toolbox.runExpr(tree)
+ toolbox.eval(tree)
}
\ No newline at end of file
diff --git a/test/files/run/reify_ann4.check b/test/files/run/reify_ann4.check
index 6f853053d2..8bf5fe3242 100644
--- a/test/files/run/reify_ann4.check
+++ b/test/files/run/reify_ann4.check
@@ -1,32 +1,32 @@
-{
- class D extends StaticAnnotation {
- def <init>() = {
- super.<init>();
- ()
- }
- };
- class C extends AnyRef {
- def <init>() = {
- super.<init>();
- ()
- }
- };
- val c1 = new C @D();
- ()
-}
-{
- class D extends scala.annotation.Annotation with scala.annotation.StaticAnnotation {
- def <init>(): D = {
- D.super.<init>();
- ()
- }
- };
- class C extends AnyRef {
- def <init>(): C = {
- C.super.<init>();
- ()
- }
- };
- val c1: C = new C @D();
- ()
-}
+{
+ class D extends StaticAnnotation {
+ def <init>() = {
+ super.<init>();
+ ()
+ }
+ };
+ class C extends AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ }
+ };
+ val c1 = new C @D();
+ ()
+}
+{
+ class D extends scala.annotation.Annotation with scala.annotation.StaticAnnotation {
+ def <init>(): D = {
+ D.super.<init>();
+ ()
+ }
+ };
+ class C extends AnyRef {
+ def <init>(): C = {
+ C.super.<init>();
+ ()
+ }
+ };
+ val c1: C = new C @D();
+ ()
+}
diff --git a/test/files/run/reify_ann4.scala b/test/files/run/reify_ann4.scala
index b4845d1586..0aedb77b5e 100644
--- a/test/files/run/reify_ann4.scala
+++ b/test/files/run/reify_ann4.scala
@@ -22,5 +22,5 @@ object Test extends App {
println(ttree.toString)
// test 3: import and compile
- toolbox.runExpr(tree)
+ toolbox.eval(tree)
}
\ No newline at end of file
diff --git a/test/files/run/reify_ann5.check b/test/files/run/reify_ann5.check
index d443c601a0..1ec0457e54 100644
--- a/test/files/run/reify_ann5.check
+++ b/test/files/run/reify_ann5.check
@@ -1,22 +1,22 @@
-{
- class C extends AnyRef {
- @new inline @beanGetter() @new BeanProperty() <paramaccessor> val x: Int = _;
- def <init>(x: Int) = {
- super.<init>();
- ()
- }
- };
- ()
-}
-{
- class C extends AnyRef {
- @scala.beans.BeanProperty <paramaccessor> private[this] val x: Int = _;
- <stable> <accessor> <paramaccessor> def x: Int = C.this.x;
- def <init>(x: Int): C = {
- C.super.<init>();
- ()
- };
- @inline @scala.annotation.meta.beanGetter def getX(): Int = C.this.x
- };
- ()
-}
+{
+ class C extends AnyRef {
+ @new inline @beanGetter() @new BeanProperty() <paramaccessor> val x: Int = _;
+ def <init>(x: Int) = {
+ super.<init>();
+ ()
+ }
+ };
+ ()
+}
+{
+ class C extends AnyRef {
+ @scala.beans.BeanProperty <paramaccessor> private[this] val x: Int = _;
+ <stable> <accessor> <paramaccessor> def x: Int = C.this.x;
+ def <init>(x: Int): C = {
+ C.super.<init>();
+ ()
+ };
+ @inline @scala.annotation.meta.beanGetter def getX(): Int = C.this.x
+ };
+ ()
+}
diff --git a/test/files/run/reify_ann5.scala b/test/files/run/reify_ann5.scala
index 0ae8d317ce..d27be3b6d5 100644
--- a/test/files/run/reify_ann5.scala
+++ b/test/files/run/reify_ann5.scala
@@ -19,5 +19,5 @@ object Test extends App {
println(ttree.toString)
// test 3: import and compile
- toolbox.runExpr(tree)
+ toolbox.eval(tree)
}
\ No newline at end of file
diff --git a/test/files/run/reify_classfileann_a.check b/test/files/run/reify_classfileann_a.check
index 6f17b26158..0c919020a8 100644
--- a/test/files/run/reify_classfileann_a.check
+++ b/test/files/run/reify_classfileann_a.check
@@ -1,18 +1,18 @@
-{
- @new ann(bar = "1", quux = Array("2", "3"), baz = new ann(bar = "4")) class C extends AnyRef {
- def <init>() = {
- super.<init>();
- ()
- }
- };
- ()
-}
-{
- @ann(bar = "1", quux = ["2", "3"], baz = ann(bar = "4")) class C extends AnyRef {
- def <init>(): C = {
- C.super.<init>();
- ()
- }
- };
- ()
-}
+{
+ @new ann(bar = "1", quux = Array("2", "3"), baz = new ann(bar = "4")) class C extends AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ }
+ };
+ ()
+}
+{
+ @ann(bar = "1", quux = ["2", "3"], baz = ann(bar = "4")) class C extends AnyRef {
+ def <init>(): C = {
+ C.super.<init>();
+ ()
+ }
+ };
+ ()
+}
diff --git a/test/files/run/reify_classfileann_a.scala b/test/files/run/reify_classfileann_a.scala
index 0abb7b8154..1d51688e78 100644
--- a/test/files/run/reify_classfileann_a.scala
+++ b/test/files/run/reify_classfileann_a.scala
@@ -18,5 +18,5 @@ object Test extends App {
println(ttree.toString)
// test 3: import and compile
- toolbox.runExpr(tree)
+ toolbox.eval(tree)
}
\ No newline at end of file
diff --git a/test/files/run/reify_classfileann_b.check b/test/files/run/reify_classfileann_b.check
index 33270e0035..c204fa8dc0 100644
--- a/test/files/run/reify_classfileann_b.check
+++ b/test/files/run/reify_classfileann_b.check
@@ -1,20 +1,20 @@
-{
- class C extends AnyRef {
- def <init>() = {
- super.<init>();
- ()
- };
- def x: Int = 2: @ann(bar = "1",quux = Array("2", "3"),baz = new ann(bar = "4"))
- };
- ()
-}
-{
- class C extends AnyRef {
- def <init>(): C = {
- C.super.<init>();
- ()
- };
- def x: Int = (2: Int(2) @ann(bar = "1", quux = ["2", "3"], baz = ann(bar = "4")))
- };
- ()
-}
+{
+ class C extends AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ };
+ def x: Int = 2: @ann(bar = "1",quux = Array("2", "3"),baz = new ann(bar = "4"))
+ };
+ ()
+}
+{
+ class C extends AnyRef {
+ def <init>(): C = {
+ C.super.<init>();
+ ()
+ };
+ def x: Int = (2: Int(2) @ann(bar = "1", quux = ["2", "3"], baz = ann(bar = "4")))
+ };
+ ()
+}
diff --git a/test/files/run/reify_classfileann_b.scala b/test/files/run/reify_classfileann_b.scala
index 5ceb652a8b..ef19e9240b 100644
--- a/test/files/run/reify_classfileann_b.scala
+++ b/test/files/run/reify_classfileann_b.scala
@@ -22,5 +22,5 @@ object Test extends App {
println(ttree.toString)
// test 3: import and compile
- toolbox.runExpr(tree)
+ toolbox.eval(tree)
}
\ No newline at end of file
diff --git a/test/files/run/reify_closure1.scala b/test/files/run/reify_closure1.scala
index ce68975acc..af24a4b1e4 100644
--- a/test/files/run/reify_closure1.scala
+++ b/test/files/run/reify_closure1.scala
@@ -10,7 +10,7 @@ object Test extends App {
}}
val toolbox = cm.mkToolBox()
- val dyn = toolbox.runExpr(fun.tree)
+ val dyn = toolbox.eval(fun.tree)
dyn.asInstanceOf[Int => Int]
}
diff --git a/test/files/run/reify_closure2a.scala b/test/files/run/reify_closure2a.scala
index cb53e7ffa8..7a2cdb5e17 100644
--- a/test/files/run/reify_closure2a.scala
+++ b/test/files/run/reify_closure2a.scala
@@ -10,7 +10,7 @@ object Test extends App {
}}
val toolbox = cm.mkToolBox()
- val dyn = toolbox.runExpr(fun.tree)
+ val dyn = toolbox.eval(fun.tree)
dyn.asInstanceOf[Int => Int]
}
diff --git a/test/files/run/reify_closure3a.scala b/test/files/run/reify_closure3a.scala
index cf8c161afa..cb17c89501 100644
--- a/test/files/run/reify_closure3a.scala
+++ b/test/files/run/reify_closure3a.scala
@@ -12,7 +12,7 @@ object Test extends App {
}}
val toolbox = cm.mkToolBox()
- val dyn = toolbox.runExpr(fun.tree)
+ val dyn = toolbox.eval(fun.tree)
dyn.asInstanceOf[Int => Int]
}
diff --git a/test/files/run/reify_closure4a.scala b/test/files/run/reify_closure4a.scala
index 1521295f16..23436e0763 100644
--- a/test/files/run/reify_closure4a.scala
+++ b/test/files/run/reify_closure4a.scala
@@ -12,7 +12,7 @@ object Test extends App {
}}
val toolbox = cm.mkToolBox()
- val dyn = toolbox.runExpr(fun.tree)
+ val dyn = toolbox.eval(fun.tree)
dyn.asInstanceOf[Int => Int]
}
diff --git a/test/files/run/reify_closure5a.scala b/test/files/run/reify_closure5a.scala
index 84c2c08727..6b5089a4e5 100644
--- a/test/files/run/reify_closure5a.scala
+++ b/test/files/run/reify_closure5a.scala
@@ -10,7 +10,7 @@ object Test extends App {
}}
val toolbox = cm.mkToolBox()
- val dyn = toolbox.runExpr(fun.tree)
+ val dyn = toolbox.eval(fun.tree)
dyn.asInstanceOf[Int => Int]
}
diff --git a/test/files/run/reify_closure6.scala b/test/files/run/reify_closure6.scala
index d007e80899..cba035132d 100644
--- a/test/files/run/reify_closure6.scala
+++ b/test/files/run/reify_closure6.scala
@@ -17,7 +17,7 @@ object Test extends App {
}}
val toolbox = cm.mkToolBox()
- val dyn = toolbox.runExpr(fun.tree)
+ val dyn = toolbox.eval(fun.tree)
dyn.asInstanceOf[Int => Int]
}
diff --git a/test/files/run/reify_closure7.scala b/test/files/run/reify_closure7.scala
index 3d4956a724..2a7ce25e88 100644
--- a/test/files/run/reify_closure7.scala
+++ b/test/files/run/reify_closure7.scala
@@ -19,7 +19,7 @@ object Test extends App {
if (clo == null) {
val toolbox = cm.mkToolBox()
- val dyn = toolbox.runExpr(fun.tree)
+ val dyn = toolbox.eval(fun.tree)
clo = dyn.asInstanceOf[Int => Int]
}
diff --git a/test/files/run/reify_closure8a.scala b/test/files/run/reify_closure8a.scala
index 8db3d38241..f303a7511c 100644
--- a/test/files/run/reify_closure8a.scala
+++ b/test/files/run/reify_closure8a.scala
@@ -9,7 +9,7 @@ object Test extends App {
}
val toolbox = cm.mkToolBox()
- val dyn = toolbox.runExpr(new Foo(10).fun.tree)
+ val dyn = toolbox.eval(new Foo(10).fun.tree)
val foo = dyn.asInstanceOf[Int]
println(foo)
} \ No newline at end of file
diff --git a/test/files/run/reify_closure8b.check b/test/files/run/reify_closure8b.check
index 585e6d5740..5a7863ff15 100644
--- a/test/files/run/reify_closure8b.check
+++ b/test/files/run/reify_closure8b.check
@@ -1,3 +1,3 @@
-scala.tools.reflect.ToolBoxError: reflective compilation has failed:
-
-value y is not a member of Test.Foo
+scala.tools.reflect.ToolBoxError: reflective compilation has failed:
+
+value y is not a member of Test.Foo
diff --git a/test/files/run/reify_closure8b.scala b/test/files/run/reify_closure8b.scala
index 9cdc0e997f..c693cb490e 100644
--- a/test/files/run/reify_closure8b.scala
+++ b/test/files/run/reify_closure8b.scala
@@ -11,7 +11,7 @@ object Test extends App {
}
try {
- val dyn = cm.mkToolBox().runExpr(new Foo(10).fun.tree)
+ val dyn = cm.mkToolBox().eval(new Foo(10).fun.tree)
val foo = dyn.asInstanceOf[Int]
println(foo)
} catch {
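
As the reify_closure8b.check hunk above shows, a failed reflective compilation surfaces as scala.tools.reflect.ToolBoxError. A hedged sketch of catching it around the renamed eval (EvalErrorDemo and the snippet string are illustrative, not part of this patch):

  import scala.reflect.runtime.{currentMirror => cm}
  import scala.tools.reflect.{ToolBox, ToolBoxError}

  object EvalErrorDemo extends App {
    val toolbox = cm.mkToolBox()
    val bad = toolbox.parse("undefinedName + 1")   // parses, but cannot be compiled
    try toolbox.eval(bad)
    catch { case e: ToolBoxError => println(e.getMessage) }  // starts with "reflective compilation has failed"
  }
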
diff --git a/test/files/run/reify_closures10.scala b/test/files/run/reify_closures10.scala
index 2d9b833851..a2740c8362 100644
--- a/test/files/run/reify_closures10.scala
+++ b/test/files/run/reify_closures10.scala
@@ -9,5 +9,5 @@ object Test extends App {
val code = reify{println(x + y); x + y}
val toolbox = cm.mkToolBox()
- println(toolbox.runExpr(code.tree))
+ println(toolbox.eval(code.tree))
} \ No newline at end of file
diff --git a/test/files/run/reify_copypaste1.check b/test/files/run/reify_copypaste1.check
index 49a25ea881..b204f9c454 100644
--- a/test/files/run/reify_copypaste1.check
+++ b/test/files/run/reify_copypaste1.check
@@ -1,2 +1,2 @@
-List(1, 2)
-
+List(1, 2)
+
diff --git a/test/files/run/reify_copypaste1.scala b/test/files/run/reify_copypaste1.scala
index 7eaa4fa7b5..c597b7af19 100644
--- a/test/files/run/reify_copypaste1.scala
+++ b/test/files/run/reify_copypaste1.scala
@@ -11,9 +11,9 @@ object Test extends App {
val toolBox = currentMirror.mkToolBox(options = "-Yreify-copypaste")
val reify = Select(Select(Select(Select(Ident(ScalaPackage), newTermName("reflect")), newTermName("runtime")), newTermName("universe")), newTermName("reify"))
val reifee = Block(List(ValDef(Modifiers(LAZY), newTermName("x"), TypeTree(), Apply(Ident(ListModule), List(Literal(Constant(1)), Literal(Constant(2)))))), Ident(newTermName("x")))
- toolBox.runExpr(Apply(reify, List(reifee)))
- val Block(List(tpeCopypaste), exprCopypaste @ ModuleDef(_, _, Template(_, _, (_ :: stats) :+ expr))) = toolBox.parseExpr(output.toString())
+ toolBox.eval(Apply(reify, List(reifee)))
+ val Block(List(tpeCopypaste), exprCopypaste @ ModuleDef(_, _, Template(_, _, (_ :: stats) :+ expr))) = toolBox.parse(output.toString())
output.reset()
- toolBox.runExpr(Block(stats, expr))
+ toolBox.eval(Block(stats, expr))
stdout.println(output.toString)
} \ No newline at end of file
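
reify_copypaste1.scala picks up both renames at once: parseExpr becomes parse and runExpr becomes eval. A small sketch chaining the two, assuming the same 2.10 toolbox API (ParseEvalDemo is an illustrative name):

  import scala.reflect.runtime.{currentMirror => cm}
  import scala.tools.reflect.ToolBox

  object ParseEvalDemo extends App {
    val toolBox = cm.mkToolBox()
    val tree = toolBox.parse("List(1, 2).sum")   // parse replaces parseExpr: source text to untyped tree
    println(toolBox.eval(tree))                  // eval replaces runExpr: compile and run; prints 3
  }
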
diff --git a/test/files/run/reify_getter.scala b/test/files/run/reify_getter.scala
index 26767603a0..cb04ddffde 100644
--- a/test/files/run/reify_getter.scala
+++ b/test/files/run/reify_getter.scala
@@ -13,6 +13,6 @@ object Test extends App {
}
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/run/reify_metalevel_breach_+0_refers_to_1.scala b/test/files/run/reify_metalevel_breach_+0_refers_to_1.scala
index 8ea92c511b..76f935ecd2 100644
--- a/test/files/run/reify_metalevel_breach_+0_refers_to_1.scala
+++ b/test/files/run/reify_metalevel_breach_+0_refers_to_1.scala
@@ -13,6 +13,6 @@ object Test extends App {
};
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/run/reify_metalevel_breach_-1_refers_to_0_a.scala b/test/files/run/reify_metalevel_breach_-1_refers_to_0_a.scala
index 7ff4f84de0..e7c5cb71c1 100644
--- a/test/files/run/reify_metalevel_breach_-1_refers_to_0_a.scala
+++ b/test/files/run/reify_metalevel_breach_-1_refers_to_0_a.scala
@@ -11,6 +11,6 @@ object Test extends App {
val code = reify{outer.eval.eval}
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/run/reify_metalevel_breach_-1_refers_to_0_b.scala b/test/files/run/reify_metalevel_breach_-1_refers_to_0_b.scala
index 7f1f9d8478..770fcccd15 100644
--- a/test/files/run/reify_metalevel_breach_-1_refers_to_0_b.scala
+++ b/test/files/run/reify_metalevel_breach_-1_refers_to_0_b.scala
@@ -16,6 +16,6 @@ object Test extends App {
}
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/run/reify_metalevel_breach_-1_refers_to_1.scala b/test/files/run/reify_metalevel_breach_-1_refers_to_1.scala
index 65e0931b6e..32e7e9003b 100644
--- a/test/files/run/reify_metalevel_breach_-1_refers_to_1.scala
+++ b/test/files/run/reify_metalevel_breach_-1_refers_to_1.scala
@@ -13,6 +13,6 @@ object Test extends App {
};
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/run/reify_nested_inner_refers_to_global.check b/test/files/run/reify_nested_inner_refers_to_global.check
index 7ff4c83d37..5ef4ff4d04 100644
--- a/test/files/run/reify_nested_inner_refers_to_global.check
+++ b/test/files/run/reify_nested_inner_refers_to_global.check
@@ -1 +1 @@
-evaluated = 2
+evaluated = 2
diff --git a/test/files/run/reify_nested_inner_refers_to_global.scala b/test/files/run/reify_nested_inner_refers_to_global.scala
index f45c1daed9..877222f5bf 100644
--- a/test/files/run/reify_nested_inner_refers_to_global.scala
+++ b/test/files/run/reify_nested_inner_refers_to_global.scala
@@ -12,6 +12,6 @@ object Test extends App {
}
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/run/reify_nested_inner_refers_to_local.scala b/test/files/run/reify_nested_inner_refers_to_local.scala
index 4a3d8bb02b..703474e07e 100644
--- a/test/files/run/reify_nested_inner_refers_to_local.scala
+++ b/test/files/run/reify_nested_inner_refers_to_local.scala
@@ -12,6 +12,6 @@ object Test extends App {
};
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/run/reify_nested_outer_refers_to_global.check b/test/files/run/reify_nested_outer_refers_to_global.check
index 7ff4c83d37..5ef4ff4d04 100644
--- a/test/files/run/reify_nested_outer_refers_to_global.check
+++ b/test/files/run/reify_nested_outer_refers_to_global.check
@@ -1 +1 @@
-evaluated = 2
+evaluated = 2
diff --git a/test/files/run/reify_nested_outer_refers_to_global.scala b/test/files/run/reify_nested_outer_refers_to_global.scala
index b628975e59..e40c569ce6 100644
--- a/test/files/run/reify_nested_outer_refers_to_global.scala
+++ b/test/files/run/reify_nested_outer_refers_to_global.scala
@@ -14,6 +14,6 @@ object Test extends App {
}
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/run/reify_nested_outer_refers_to_local.check b/test/files/run/reify_nested_outer_refers_to_local.check
index 7ff4c83d37..5ef4ff4d04 100644
--- a/test/files/run/reify_nested_outer_refers_to_local.check
+++ b/test/files/run/reify_nested_outer_refers_to_local.check
@@ -1 +1 @@
-evaluated = 2
+evaluated = 2
diff --git a/test/files/run/reify_nested_outer_refers_to_local.scala b/test/files/run/reify_nested_outer_refers_to_local.scala
index 80564fa9a2..12147c51da 100644
--- a/test/files/run/reify_nested_outer_refers_to_local.scala
+++ b/test/files/run/reify_nested_outer_refers_to_local.scala
@@ -14,6 +14,6 @@ object Test extends App {
};
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/run/reify_newimpl_11.check b/test/files/run/reify_newimpl_11.check
index 21bd28667e..2f5cb581e6 100644
--- a/test/files/run/reify_newimpl_11.check
+++ b/test/files/run/reify_newimpl_11.check
@@ -1,2 +1,2 @@
-scala.tools.reflect.ToolBoxError: reflective toolbox has failed:
-unresolved free type variables (namely: T defined by C in reify_newimpl_11.scala:6:11). have you forgot to use TypeTag annotations for type parameters external to a reifee? if you have troubles tracking free type variables, consider using -Xlog-free-types
+scala.tools.reflect.ToolBoxError: reflective toolbox has failed:
+unresolved free type variables (namely: T defined by C in reify_newimpl_11.scala:6:11). have you forgot to use TypeTag annotations for type parameters external to a reifee? if you have troubles tracking free type variables, consider using -Xlog-free-types
diff --git a/test/files/run/reify_newimpl_13.check b/test/files/run/reify_newimpl_13.check
index e28732bd6a..d518cd7b84 100644
--- a/test/files/run/reify_newimpl_13.check
+++ b/test/files/run/reify_newimpl_13.check
@@ -1,2 +1,2 @@
-scala.tools.reflect.ToolBoxError: reflective toolbox has failed:
-unresolved free type variables (namely: T defined by C in reify_newimpl_13.scala:7:13). have you forgot to use TypeTag annotations for type parameters external to a reifee? if you have troubles tracking free type variables, consider using -Xlog-free-types
+scala.tools.reflect.ToolBoxError: reflective toolbox has failed:
+unresolved free type variables (namely: T defined by C in reify_newimpl_13.scala:7:13). have you forgot to use TypeTag annotations for type parameters external to a reifee? if you have troubles tracking free type variables, consider using -Xlog-free-types
diff --git a/test/files/run/reify_newimpl_18.check b/test/files/run/reify_newimpl_18.check
index a7029974a4..c23af69b08 100644
--- a/test/files/run/reify_newimpl_18.check
+++ b/test/files/run/reify_newimpl_18.check
@@ -1 +1 @@
-List(2)
+List(2)
diff --git a/test/files/run/reify_newimpl_19.check b/test/files/run/reify_newimpl_19.check
index 1d96992869..8b8652f92c 100644
--- a/test/files/run/reify_newimpl_19.check
+++ b/test/files/run/reify_newimpl_19.check
@@ -1,2 +1,2 @@
-scala.tools.reflect.ToolBoxError: reflective toolbox has failed:
-unresolved free type variables (namely: T defined by C in reify_newimpl_19.scala:7:10). have you forgot to use TypeTag annotations for type parameters external to a reifee? if you have troubles tracking free type variables, consider using -Xlog-free-types
+scala.tools.reflect.ToolBoxError: reflective toolbox has failed:
+unresolved free type variables (namely: T defined by C in reify_newimpl_19.scala:7:10). have you forgot to use TypeTag annotations for type parameters external to a reifee? if you have troubles tracking free type variables, consider using -Xlog-free-types
diff --git a/test/files/run/reify_newimpl_20.check b/test/files/run/reify_newimpl_20.check
index a7029974a4..c23af69b08 100644
--- a/test/files/run/reify_newimpl_20.check
+++ b/test/files/run/reify_newimpl_20.check
@@ -1 +1 @@
-List(2)
+List(2)
diff --git a/test/files/run/reify_newimpl_21.check b/test/files/run/reify_newimpl_21.check
index a7029974a4..c23af69b08 100644
--- a/test/files/run/reify_newimpl_21.check
+++ b/test/files/run/reify_newimpl_21.check
@@ -1 +1 @@
-List(2)
+List(2)
diff --git a/test/files/run/reify_newimpl_22.check b/test/files/run/reify_newimpl_22.check
index ebcf42f52f..dcb3e2889b 100644
--- a/test/files/run/reify_newimpl_22.check
+++ b/test/files/run/reify_newimpl_22.check
@@ -1,29 +1,29 @@
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala>
-
-scala> import scala.reflect.runtime.universe._
-import scala.reflect.runtime.universe._
-
-scala> import scala.tools.reflect.ToolBox
-import scala.tools.reflect.ToolBox
-
-scala> import scala.tools.reflect.Eval
-import scala.tools.reflect.Eval
-
-scala> {
- val x = 2
- val code = reify {
- x
- }
- println(code.eval)
-}
-<console>:15: free term: Ident(newTermName("x")) defined by res0 in <console>:14:21
- val code = reify {
- ^
-2
-
-scala>
-
-scala>
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe._
+
+scala> import scala.tools.reflect.ToolBox
+import scala.tools.reflect.ToolBox
+
+scala> import scala.tools.reflect.Eval
+import scala.tools.reflect.Eval
+
+scala> {
+ val x = 2
+ val code = reify {
+ x
+ }
+ println(code.eval)
+}
+<console>:15: free term: Ident(newTermName("x")) defined by res0 in <console>:14:21
+ val code = reify {
+ ^
+2
+
+scala>
+
+scala>
diff --git a/test/files/run/reify_newimpl_23.check b/test/files/run/reify_newimpl_23.check
index c8ed424c86..882124657e 100644
--- a/test/files/run/reify_newimpl_23.check
+++ b/test/files/run/reify_newimpl_23.check
@@ -1,28 +1,28 @@
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala>
-
-scala> import scala.reflect.runtime.universe._
-import scala.reflect.runtime.universe._
-
-scala> import scala.tools.reflect.ToolBox
-import scala.tools.reflect.ToolBox
-
-scala> import scala.tools.reflect.Eval
-import scala.tools.reflect.Eval
-
-scala> def foo[T]{
- val code = reify {
- List[T]()
- }
- println(code.eval)
-}
-<console>:13: free type: Ident(newTypeName("T")) defined by foo in <console>:12:16
- val code = reify {
- ^
-foo: [T]=> Unit
-
-scala>
-
-scala>
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe._
+
+scala> import scala.tools.reflect.ToolBox
+import scala.tools.reflect.ToolBox
+
+scala> import scala.tools.reflect.Eval
+import scala.tools.reflect.Eval
+
+scala> def foo[T]{
+ val code = reify {
+ List[T]()
+ }
+ println(code.eval)
+}
+<console>:13: free type: Ident(newTypeName("T")) defined by foo in <console>:12:16
+ val code = reify {
+ ^
+foo: [T]=> Unit
+
+scala>
+
+scala>
diff --git a/test/files/run/reify_newimpl_25.check b/test/files/run/reify_newimpl_25.check
index a688a98fbc..d1028b94c7 100644
--- a/test/files/run/reify_newimpl_25.check
+++ b/test/files/run/reify_newimpl_25.check
@@ -1,19 +1,19 @@
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala>
-
-scala> {
- import scala.reflect.runtime.universe._
- val x = "2"
- val tt = implicitly[TypeTag[x.type]]
- println(tt)
-}
-<console>:11: free term: Ident(newTermName("x")) defined by res0 in <console>:10:21
- val tt = implicitly[TypeTag[x.type]]
- ^
-TypeTag[x.type]
-
-scala>
-
-scala>
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> {
+ import scala.reflect.runtime.universe._
+ val x = "2"
+ val tt = implicitly[TypeTag[x.type]]
+ println(tt)
+}
+<console>:11: free term: Ident(newTermName("x")) defined by res0 in <console>:10:21
+ val tt = implicitly[TypeTag[x.type]]
+ ^
+TypeTag[x.type]
+
+scala>
+
+scala>
diff --git a/test/files/run/reify_newimpl_26.check b/test/files/run/reify_newimpl_26.check
index c006cb7ffb..347f6365aa 100644
--- a/test/files/run/reify_newimpl_26.check
+++ b/test/files/run/reify_newimpl_26.check
@@ -1,21 +1,21 @@
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala>
-
-scala> def foo[T]{
- import scala.reflect.runtime.universe._
- val tt = implicitly[AbsTypeTag[List[T]]]
- println(tt)
-}
-<console>:9: free type: Ident(newTypeName("T")) defined by foo in <console>:7:16
- val tt = implicitly[AbsTypeTag[List[T]]]
- ^
-foo: [T]=> Unit
-
-scala> foo[Int]
-AbsTypeTag[scala.List[T]]
-
-scala>
-
-scala>
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> def foo[T]{
+ import scala.reflect.runtime.universe._
+ val tt = implicitly[WeakTypeTag[List[T]]]
+ println(tt)
+}
+<console>:9: free type: Ident(newTypeName("T")) defined by foo in <console>:7:16
+ val tt = implicitly[WeakTypeTag[List[T]]]
+ ^
+foo: [T]=> Unit
+
+scala> foo[Int]
+WeakTypeTag[scala.List[T]]
+
+scala>
+
+scala>
diff --git a/test/files/run/reify_newimpl_26.scala b/test/files/run/reify_newimpl_26.scala
index a12d8a2970..af74d60e8b 100644
--- a/test/files/run/reify_newimpl_26.scala
+++ b/test/files/run/reify_newimpl_26.scala
@@ -5,7 +5,7 @@ object Test extends ReplTest {
def code = """
def foo[T]{
import scala.reflect.runtime.universe._
- val tt = implicitly[AbsTypeTag[List[T]]]
+ val tt = implicitly[WeakTypeTag[List[T]]]
println(tt)
}
foo[Int]
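
The reify_newimpl_26 updates track another rename in the 2.10 reflection API: AbsTypeTag is now WeakTypeTag, the tag that tolerates unresolved type parameters by reifying them as free types. A sketch under that assumption (WeakTagDemo, describe and insideGeneric are illustrative names):

  import scala.reflect.runtime.universe._

  object WeakTagDemo extends App {
    def describe[T: WeakTypeTag] = weakTypeOf[T].toString

    def insideGeneric[T] = describe[List[T]]   // T has no TypeTag here; a weak tag still materializes

    println(describe[List[Int]])   // List[Int]
    println(insideGeneric[Int])    // List[T], with T left as a free type
  }
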
diff --git a/test/files/run/reify_newimpl_27.check b/test/files/run/reify_newimpl_27.check
index a7029974a4..c23af69b08 100644
--- a/test/files/run/reify_newimpl_27.check
+++ b/test/files/run/reify_newimpl_27.check
@@ -1 +1 @@
-List(2)
+List(2)
diff --git a/test/files/run/reify_newimpl_29.check b/test/files/run/reify_newimpl_29.check
index a7029974a4..c23af69b08 100644
--- a/test/files/run/reify_newimpl_29.check
+++ b/test/files/run/reify_newimpl_29.check
@@ -1 +1 @@
-List(2)
+List(2)
diff --git a/test/files/run/reify_newimpl_30.check b/test/files/run/reify_newimpl_30.check
index a7029974a4..c23af69b08 100644
--- a/test/files/run/reify_newimpl_30.check
+++ b/test/files/run/reify_newimpl_30.check
@@ -1 +1 @@
-List(2)
+List(2)
diff --git a/test/files/run/reify_newimpl_31.check b/test/files/run/reify_newimpl_31.check
index a7029974a4..c23af69b08 100644
--- a/test/files/run/reify_newimpl_31.check
+++ b/test/files/run/reify_newimpl_31.check
@@ -1 +1 @@
-List(2)
+List(2)
diff --git a/test/files/run/reify_newimpl_33.check b/test/files/run/reify_newimpl_33.check
index a7029974a4..c23af69b08 100644
--- a/test/files/run/reify_newimpl_33.check
+++ b/test/files/run/reify_newimpl_33.check
@@ -1 +1 @@
-List(2)
+List(2)
diff --git a/test/files/run/reify_newimpl_35.check b/test/files/run/reify_newimpl_35.check
index f8306003b5..5545e6e005 100644
--- a/test/files/run/reify_newimpl_35.check
+++ b/test/files/run/reify_newimpl_35.check
@@ -1,17 +1,17 @@
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala>
-
-scala> import scala.reflect.runtime.universe._
-import scala.reflect.runtime.universe._
-
-scala> def foo[T: TypeTag] = reify{List[T]()}
-foo: [T](implicit evidence$1: reflect.runtime.universe.TypeTag[T])reflect.runtime.universe.Expr[List[T]]
-
-scala> println(foo)
-Expr[List[Nothing]](immutable.this.Nil)
-
-scala>
-
-scala>
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe._
+
+scala> def foo[T: TypeTag] = reify{List[T]()}
+foo: [T](implicit evidence$1: reflect.runtime.universe.TypeTag[T])reflect.runtime.universe.Expr[List[T]]
+
+scala> println(foo)
+Expr[List[Nothing]](immutable.this.Nil)
+
+scala>
+
+scala>
diff --git a/test/files/run/reify_newimpl_36.check b/test/files/run/reify_newimpl_36.check
index 2f562a182f..d81cc0710e 100644
--- a/test/files/run/reify_newimpl_36.check
+++ b/test/files/run/reify_newimpl_36.check
@@ -1 +1 @@
-42
+42
diff --git a/test/files/run/reify_newimpl_37.check b/test/files/run/reify_newimpl_37.check
index 2f562a182f..d81cc0710e 100644
--- a/test/files/run/reify_newimpl_37.check
+++ b/test/files/run/reify_newimpl_37.check
@@ -1 +1 @@
-42
+42
diff --git a/test/files/run/reify_newimpl_38.check b/test/files/run/reify_newimpl_38.check
index 2f562a182f..d81cc0710e 100644
--- a/test/files/run/reify_newimpl_38.check
+++ b/test/files/run/reify_newimpl_38.check
@@ -1 +1 @@
-42
+42
diff --git a/test/files/run/reify_newimpl_39.check b/test/files/run/reify_newimpl_39.check
index 2f562a182f..d81cc0710e 100644
--- a/test/files/run/reify_newimpl_39.check
+++ b/test/files/run/reify_newimpl_39.check
@@ -1 +1 @@
-42
+42
diff --git a/test/files/run/reify_newimpl_40.check b/test/files/run/reify_newimpl_40.check
index 94c5a65fe0..cc0001ab3b 100644
--- a/test/files/run/reify_newimpl_40.check
+++ b/test/files/run/reify_newimpl_40.check
@@ -1 +1 @@
-74088
+74088
diff --git a/test/files/run/reify_newimpl_45.scala b/test/files/run/reify_newimpl_45.scala
index cbae0839b2..2a6c68d441 100644
--- a/test/files/run/reify_newimpl_45.scala
+++ b/test/files/run/reify_newimpl_45.scala
@@ -9,7 +9,7 @@ object Test extends App {
println(code.tree.freeTypes)
val T = code.tree.freeTypes(0)
val tree = code.tree.substituteSymbols(List(T), List(definitions.StringClass))
- cm.mkToolBox().runExpr(tree)
+ cm.mkToolBox().eval(tree)
}
new C[String]
diff --git a/test/files/run/reify_printf.scala b/test/files/run/reify_printf.scala
index 07e99781e3..272856b962 100644
--- a/test/files/run/reify_printf.scala
+++ b/test/files/run/reify_printf.scala
@@ -14,7 +14,7 @@ object Test extends App {
val toolbox = cm.mkToolBox()
val tree = tree_printf(reify("hello %s").tree, reify("world").tree)
- val evaluated = toolbox.runExpr(tree)
+ val evaluated = toolbox.eval(tree)
assert(output.toString() == "hello world", output.toString() +" == hello world")
/*
diff --git a/test/files/run/reify_typerefs_1a.scala b/test/files/run/reify_typerefs_1a.scala
index 53033e210c..2e961f171d 100644
--- a/test/files/run/reify_typerefs_1a.scala
+++ b/test/files/run/reify_typerefs_1a.scala
@@ -13,6 +13,6 @@ object Test extends App {
};
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/run/reify_typerefs_1b.scala b/test/files/run/reify_typerefs_1b.scala
index 12604454ed..88bb864820 100644
--- a/test/files/run/reify_typerefs_1b.scala
+++ b/test/files/run/reify_typerefs_1b.scala
@@ -13,6 +13,6 @@ object Test extends App {
};
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/run/reify_typerefs_2a.scala b/test/files/run/reify_typerefs_2a.scala
index ffc3dfc942..3a1db1d80f 100644
--- a/test/files/run/reify_typerefs_2a.scala
+++ b/test/files/run/reify_typerefs_2a.scala
@@ -15,6 +15,6 @@ object Test extends App {
};
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/run/reify_typerefs_2b.scala b/test/files/run/reify_typerefs_2b.scala
index f5d1633d79..50082aa8d2 100644
--- a/test/files/run/reify_typerefs_2b.scala
+++ b/test/files/run/reify_typerefs_2b.scala
@@ -15,6 +15,6 @@ object Test extends App {
};
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/run/reify_typerefs_3a.scala b/test/files/run/reify_typerefs_3a.scala
index 67b2c2d8aa..682d6f01ac 100644
--- a/test/files/run/reify_typerefs_3a.scala
+++ b/test/files/run/reify_typerefs_3a.scala
@@ -15,6 +15,6 @@ object Test extends App {
};
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/run/reify_typerefs_3b.scala b/test/files/run/reify_typerefs_3b.scala
index 41a0a667e2..c85072f55f 100644
--- a/test/files/run/reify_typerefs_3b.scala
+++ b/test/files/run/reify_typerefs_3b.scala
@@ -15,6 +15,6 @@ object Test extends App {
};
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/run/repl-power.check b/test/files/run/repl-power.check
index c509434116..9d63ecde94 100644
--- a/test/files/run/repl-power.check
+++ b/test/files/run/repl-power.check
@@ -1,32 +1,32 @@
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> :power
-** Power User mode enabled - BEEP WHIR GYVE **
-** :phase has been set to 'typer'. **
-** scala.tools.nsc._ has been imported **
-** global._, definitions._ also imported **
-** Try :help, :vals, power.<tab> **
-
-scala> // guarding against "error: reference to global is ambiguous"
-
-scala> global.emptyValDef // "it is imported twice in the same scope by ..."
-res0: $r.global.emptyValDef.type = private val _ = _
-
-scala> val tp = ArrayClass[scala.util.Random] // magic with tags
-tp: $r.global.Type = Array[scala.util.Random]
-
-scala> tp.memberType(Array_apply) // evidence
-res1: $r.global.Type = (i: Int)scala.util.Random
-
-scala> val m = LIT(10) MATCH (CASE(LIT(5)) ==> FALSE, DEFAULT ==> TRUE) // treedsl
-m: $r.treedsl.global.Match =
-10 match {
- case 5 => false
- case _ => true
-}
-
-scala> typed(m).tpe // typed is in scope
-res2: $r.treedsl.global.Type = Boolean
-
-scala>
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> :power
+** Power User mode enabled - BEEP WHIR GYVE **
+** :phase has been set to 'typer'. **
+** scala.tools.nsc._ has been imported **
+** global._, definitions._ also imported **
+** Try :help, :vals, power.<tab> **
+
+scala> // guarding against "error: reference to global is ambiguous"
+
+scala> global.emptyValDef // "it is imported twice in the same scope by ..."
+res0: $r.global.emptyValDef.type = private val _ = _
+
+scala> val tp = ArrayClass[scala.util.Random] // magic with tags
+tp: $r.global.Type = Array[scala.util.Random]
+
+scala> tp.memberType(Array_apply) // evidence
+res1: $r.global.Type = (i: Int)scala.util.Random
+
+scala> val m = LIT(10) MATCH (CASE(LIT(5)) ==> FALSE, DEFAULT ==> TRUE) // treedsl
+m: $r.treedsl.global.Match =
+10 match {
+ case 5 => false
+ case _ => true
+}
+
+scala> typed(m).tpe // typed is in scope
+res2: $r.treedsl.global.Type = Boolean
+
+scala>
diff --git a/test/files/run/richWrapperEquals.scala b/test/files/run/richWrapperEquals.scala
index 44beb133b3..4a43617cde 100644
--- a/test/files/run/richWrapperEquals.scala
+++ b/test/files/run/richWrapperEquals.scala
@@ -1,10 +1,6 @@
object Test {
def main(args: Array[String]): Unit = {
- assert(intWrapper(5) == 5)
- assert(5 == intWrapper(5))
assert(5 == (5: java.lang.Integer))
assert((5: java.lang.Integer) == 5)
- assert((5: java.lang.Integer) == intWrapper(5))
- assert(intWrapper(5) == (5: java.lang.Integer))
}
}
diff --git a/test/files/run/runtimeEval2.check b/test/files/run/runtimeEval2.check
index 78c6baefdd..0cfbf08886 100644
--- a/test/files/run/runtimeEval2.check
+++ b/test/files/run/runtimeEval2.check
@@ -1 +1 @@
-2
+2
diff --git a/test/files/run/showraw_mods.check b/test/files/run/showraw_mods.check
index 6fc3ffe051..7fca027614 100644
--- a/test/files/run/showraw_mods.check
+++ b/test/files/run/showraw_mods.check
@@ -1 +1 @@
-Block(List(ClassDef(Modifiers(ABSTRACT | DEFAULTPARAM/TRAIT), newTypeName("C"), List(), Template(List(Ident(newTypeName("AnyRef"))), emptyValDef, List(DefDef(Modifiers(), newTermName("$init$"), List(), List(List()), TypeTree(), Block(List(), Literal(Constant(())))), ValDef(Modifiers(PRIVATE | LOCAL), newTermName("x"), TypeTree(), Literal(Constant(2))), ValDef(Modifiers(MUTABLE), newTermName("y"), TypeTree(), Select(This(newTypeName("C")), newTermName("x"))), ValDef(Modifiers(LAZY), newTermName("z"), TypeTree(), Select(This(newTypeName("C")), newTermName("y"))))))), Literal(Constant(())))
+Block(List(ClassDef(Modifiers(ABSTRACT | DEFAULTPARAM/TRAIT), newTypeName("C"), List(), Template(List(Ident(newTypeName("AnyRef"))), emptyValDef, List(DefDef(Modifiers(), newTermName("$init$"), List(), List(List()), TypeTree(), Block(List(), Literal(Constant(())))), ValDef(Modifiers(PRIVATE | LOCAL), newTermName("x"), TypeTree(), Literal(Constant(2))), ValDef(Modifiers(MUTABLE), newTermName("y"), TypeTree(), Select(This(newTypeName("C")), newTermName("x"))), ValDef(Modifiers(LAZY), newTermName("z"), TypeTree(), Select(This(newTypeName("C")), newTermName("y"))))))), Literal(Constant(())))
diff --git a/test/files/run/showraw_tree.check b/test/files/run/showraw_tree.check
index d3a1fddf48..dca272684e 100644
--- a/test/files/run/showraw_tree.check
+++ b/test/files/run/showraw_tree.check
@@ -1,2 +1,2 @@
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Ident(newTypeName("String")), Ident(newTypeName("String"))))), nme.CONSTRUCTOR), List())
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Ident(newTypeName("String")), Ident(newTypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Ident(newTypeName("String")), Ident(newTypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Ident(newTypeName("String")), Ident(newTypeName("String"))))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_ids.check b/test/files/run/showraw_tree_ids.check
index 0c189130cf..d25599c7fc 100644
--- a/test/files/run/showraw_tree_ids.check
+++ b/test/files/run/showraw_tree_ids.check
@@ -1,2 +1,2 @@
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#<id>), List(Ident(newTypeName("String")#<id>), Ident(newTypeName("String")#<id>)))), nme.CONSTRUCTOR), List())
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#<id>), List(Ident(newTypeName("String")#<id>), Ident(newTypeName("String")#<id>)))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#<id>), List(Ident(newTypeName("String")#<id>), Ident(newTypeName("String")#<id>)))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#<id>), List(Ident(newTypeName("String")#<id>), Ident(newTypeName("String")#<id>)))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_kinds.check b/test/files/run/showraw_tree_kinds.check
index 7b4ab456b0..d0d4cd0058 100644
--- a/test/files/run/showraw_tree_kinds.check
+++ b/test/files/run/showraw_tree_kinds.check
@@ -1,2 +1,2 @@
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#CLS), List(Ident(newTypeName("String")#TPE), Ident(newTypeName("String")#TPE)))), nme.CONSTRUCTOR), List())
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#CLS), List(Ident(newTypeName("String")#TPE), Ident(newTypeName("String")#TPE)))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#CLS), List(Ident(newTypeName("String")#TPE), Ident(newTypeName("String")#TPE)))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#CLS), List(Ident(newTypeName("String")#TPE), Ident(newTypeName("String")#TPE)))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_types_ids.check b/test/files/run/showraw_tree_types_ids.check
index 39e68dc881..92ee45a1e6 100644
--- a/test/files/run/showraw_tree_types_ids.check
+++ b/test/files/run/showraw_tree_types_ids.check
@@ -1,10 +1,10 @@
-Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#<id>), List(TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>)), TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>)))))), nme.CONSTRUCTOR#<id>), List())
-[1] TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List(TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List())))
-[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List(TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List()))))
-[3] TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List())
-[4] TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List())
-Apply[5](Select[6](New[5](TypeTree[5]().setOriginal(AppliedTypeTree(Ident[7](scala.collection.mutable.HashMap#<id>), List(TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>)), TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>)))))), nme.CONSTRUCTOR#<id>), List())
-[4] TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List())
-[5] TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List(TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List())))
-[6] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List(TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List()))))
-[7] TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List())
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#<id>), List(TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>)), TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>)))))), nme.CONSTRUCTOR#<id>), List())
+[1] TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List(TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List(TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List()))))
+[3] TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List())
+[4] TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List())
+Apply[5](Select[6](New[5](TypeTree[5]().setOriginal(AppliedTypeTree(Ident[7](scala.collection.mutable.HashMap#<id>), List(TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>)), TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>)))))), nme.CONSTRUCTOR#<id>), List())
+[4] TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List())
+[5] TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List(TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List())))
+[6] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List(TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(ThisType(scala.Predef#<id>), newTypeName("String")#<id>, List()))))
+[7] TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List())
diff --git a/test/files/run/showraw_tree_types_typed.check b/test/files/run/showraw_tree_types_typed.check
index 0a41f5185c..c6c20409dc 100644
--- a/test/files/run/showraw_tree_types_typed.check
+++ b/test/files/run/showraw_tree_types_typed.check
@@ -1,10 +1,10 @@
-Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap), List(TypeTree[4]().setOriginal(Ident[4](newTypeName("String"))), TypeTree[4]().setOriginal(Ident[4](newTypeName("String"))))))), nme.CONSTRUCTOR), List())
-[1] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(ThisType(scala.Predef), newTypeName("String"), List()), TypeRef(ThisType(scala.Predef), newTypeName("String"), List())))
-[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(ThisType(scala.Predef), newTypeName("String"), List()), TypeRef(ThisType(scala.Predef), newTypeName("String"), List()))))
-[3] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List())
-[4] TypeRef(ThisType(scala.Predef), newTypeName("String"), List())
-Apply[5](Select[6](New[5](TypeTree[5]().setOriginal(AppliedTypeTree(Ident[7](scala.collection.mutable.HashMap), List(TypeTree[4]().setOriginal(Ident[4](newTypeName("String"))), TypeTree[4]().setOriginal(Ident[4](newTypeName("String"))))))), nme.CONSTRUCTOR), List())
-[4] TypeRef(ThisType(scala.Predef), newTypeName("String"), List())
-[5] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(ThisType(scala.Predef), newTypeName("String"), List()), TypeRef(ThisType(scala.Predef), newTypeName("String"), List())))
-[6] MethodType(List(), TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(ThisType(scala.Predef), newTypeName("String"), List()), TypeRef(ThisType(scala.Predef), newTypeName("String"), List()))))
-[7] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List())
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap), List(TypeTree[4]().setOriginal(Ident[4](newTypeName("String"))), TypeTree[4]().setOriginal(Ident[4](newTypeName("String"))))))), nme.CONSTRUCTOR), List())
+[1] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(ThisType(scala.Predef), newTypeName("String"), List()), TypeRef(ThisType(scala.Predef), newTypeName("String"), List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(ThisType(scala.Predef), newTypeName("String"), List()), TypeRef(ThisType(scala.Predef), newTypeName("String"), List()))))
+[3] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List())
+[4] TypeRef(ThisType(scala.Predef), newTypeName("String"), List())
+Apply[5](Select[6](New[5](TypeTree[5]().setOriginal(AppliedTypeTree(Ident[7](scala.collection.mutable.HashMap), List(TypeTree[4]().setOriginal(Ident[4](newTypeName("String"))), TypeTree[4]().setOriginal(Ident[4](newTypeName("String"))))))), nme.CONSTRUCTOR), List())
+[4] TypeRef(ThisType(scala.Predef), newTypeName("String"), List())
+[5] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(ThisType(scala.Predef), newTypeName("String"), List()), TypeRef(ThisType(scala.Predef), newTypeName("String"), List())))
+[6] MethodType(List(), TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(ThisType(scala.Predef), newTypeName("String"), List()), TypeRef(ThisType(scala.Predef), newTypeName("String"), List()))))
+[7] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List())
diff --git a/test/files/run/showraw_tree_types_untyped.check b/test/files/run/showraw_tree_types_untyped.check
index d3a1fddf48..dca272684e 100644
--- a/test/files/run/showraw_tree_types_untyped.check
+++ b/test/files/run/showraw_tree_types_untyped.check
@@ -1,2 +1,2 @@
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Ident(newTypeName("String")), Ident(newTypeName("String"))))), nme.CONSTRUCTOR), List())
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Ident(newTypeName("String")), Ident(newTypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Ident(newTypeName("String")), Ident(newTypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Ident(newTypeName("String")), Ident(newTypeName("String"))))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_ultimate.check b/test/files/run/showraw_tree_ultimate.check
index 400b736704..46ccd4df8f 100644
--- a/test/files/run/showraw_tree_ultimate.check
+++ b/test/files/run/showraw_tree_ultimate.check
@@ -1,10 +1,10 @@
-Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#<id>#CLS), List(TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>#TPE)), TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>#TPE)))))), nme.CONSTRUCTOR#<id>#PCTOR), List())
-[1] TypeRef(ThisType(scala.collection.immutable#<id>#PK), scala.collection.immutable.HashMap#<id>#CLS, List(TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List()), TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List())))
-[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#<id>#PK), scala.collection.immutable.HashMap#<id>#CLS, List(TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List()), TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List()))))
-[3] TypeRef(ThisType(scala.collection.immutable#<id>#PK), scala.collection.immutable.HashMap#<id>#CLS, List())
-[4] TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List())
-Apply[5](Select[6](New[5](TypeTree[5]().setOriginal(AppliedTypeTree(Ident[7](scala.collection.mutable.HashMap#<id>#CLS), List(TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>#TPE)), TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>#TPE)))))), nme.CONSTRUCTOR#<id>#CTOR), List())
-[4] TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List())
-[5] TypeRef(ThisType(scala.collection.mutable#<id>#PK), scala.collection.mutable.HashMap#<id>#CLS, List(TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List()), TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List())))
-[6] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#<id>#PK), scala.collection.mutable.HashMap#<id>#CLS, List(TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List()), TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List()))))
-[7] TypeRef(ThisType(scala.collection.mutable#<id>#PK), scala.collection.mutable.HashMap#<id>#CLS, List())
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#<id>#CLS), List(TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>#TPE)), TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>#TPE)))))), nme.CONSTRUCTOR#<id>#PCTOR), List())
+[1] TypeRef(ThisType(scala.collection.immutable#<id>#PK), scala.collection.immutable.HashMap#<id>#CLS, List(TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List()), TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#<id>#PK), scala.collection.immutable.HashMap#<id>#CLS, List(TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List()), TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List()))))
+[3] TypeRef(ThisType(scala.collection.immutable#<id>#PK), scala.collection.immutable.HashMap#<id>#CLS, List())
+[4] TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List())
+Apply[5](Select[6](New[5](TypeTree[5]().setOriginal(AppliedTypeTree(Ident[7](scala.collection.mutable.HashMap#<id>#CLS), List(TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>#TPE)), TypeTree[4]().setOriginal(Ident[4](newTypeName("String")#<id>#TPE)))))), nme.CONSTRUCTOR#<id>#CTOR), List())
+[4] TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List())
+[5] TypeRef(ThisType(scala.collection.mutable#<id>#PK), scala.collection.mutable.HashMap#<id>#CLS, List(TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List()), TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List())))
+[6] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#<id>#PK), scala.collection.mutable.HashMap#<id>#CLS, List(TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List()), TypeRef(ThisType(scala.Predef#<id>#MODC), newTypeName("String")#<id>#TPE, List()))))
+[7] TypeRef(ThisType(scala.collection.mutable#<id>#PK), scala.collection.mutable.HashMap#<id>#CLS, List())
diff --git a/test/files/run/static-annot/field.scala b/test/files/run/static-annot/field.scala
deleted file mode 100644
index a7d8158321..0000000000
--- a/test/files/run/static-annot/field.scala
+++ /dev/null
@@ -1,243 +0,0 @@
-
-
-
-import java.lang.reflect.Modifier
-import annotation.static
-import reflect._
-
-
-
-/* TEST 1 */
-
-/* A @static-annotated field in the companion object should yield
- * a static field in its companion class.
- */
-object Foo {
- @static val bar = 17
-}
-
-
-class Foo
-
-
-trait Check {
- def checkStatic(cls: Class[_]) {
- cls.getDeclaredFields.find(_.getName == "bar") match {
- case Some(f) => assert(Modifier.isStatic(f.getModifiers), "no static modifier")
- case None => assert(false, "no static field bar in class")
- }
- }
-
- def test(): Unit
-}
-
-
-object Test1 extends Check {
- def test() {
- checkStatic(classOf[Foo])
- assert(Foo.bar == 17, "Companion object field should be 17.")
- }
-}
-
-
-/* TEST 2 */
-
-class Foo2
-
-
-/** The order of declaring the class and its companion is inverted now. */
-object Foo2 {
- @static val bar = 199
-}
-
-
-object Test2 extends Check {
- def test() {
- checkStatic(Class.forName("Foo3"))
- assert(Foo3.bar == 1984, "Companion object field should be 1984.")
- }
-}
-
-
-/* TEST 3 */
-
-/** The case where there is no explicit companion class */
-object Foo3 {
- @static val bar = 1984
-}
-
-
-object Test3 extends Check {
- def test() {
- checkStatic(Class.forName("Foo3"))
- assert(Foo3.bar == 1984, "Companion object field should be 1984.")
- }
-}
-
-
-/* TEST 4 */
-
-/** We want to be able to generate atomic reference field updaters on the companion object
- * so that they are created only once per class declaration, but we want them to actually
- * be initialize __in the static initializer of the class itself__.
- * This is extremely important, because otherwise the creation of the ARFU fails, since it uses
- * trickery to detect the caller and compare it to the owner of the field being modified.
- * Previously, this used to be circumvented through the use of Java base classes. A pain.
- */
-class ArfuTarget {
- @volatile var strfield = ArfuTarget.STR
-
- def CAS(ov: String, nv: String): Boolean = {
- ArfuTarget.arfu.compareAndSet(this, ov, nv)
- }
-}
-
-
-object ArfuTarget {
- @static val arfu = java.util.concurrent.atomic.AtomicReferenceFieldUpdater.newUpdater(classOf[ArfuTarget], classOf[String], "strfield")
- val STR = "Some string"
-}
-
-
-object Test4 extends Check {
- def checkArfu() {
- val at = new ArfuTarget
- assert(at.strfield == ArfuTarget.STR)
- at.CAS(ArfuTarget.STR, null)
- assert(at.strfield == null)
- }
-
- def test() {
- checkArfu()
- }
-}
-
-
-/* TEST 5 */
-
-/** Although our main use-case is to use final static fields, we should be able to use non-final too.
- * Here we set the static field of the class by using the setters in the companion object.
- * It is legal to do so using the reference to `Foo` directly (in which case the callsites
- * are rewritten to access the static field directly), or through an interface `Var` (in
- * which case the getter and the setter for `field` access the static field in `Var`).
- */
-trait Var {
- var field: Int
-}
-
-object VarHolder extends Var {
- @static var field = 1
-}
-
-
-object Test5 extends Check {
- def test() {
- assert(VarHolder.field == 1)
- VarHolder.field = 2
- assert(VarHolder.field == 2)
- val vh: Var = VarHolder
- vh.field = 3
- assert(vh.field == 3)
- }
-}
-
-
-/* TEST 6 */
-
-/** Here we test flattening the static ctor body and changing the owners of local definitions. */
-object Foo6 {
- var companionField = 101
- @static val staticField = {
- val intermediate = companionField + 1
- intermediate * 2
- }
-}
-
-
-object Test6 extends Check {
- def test() {
- assert(Foo6.staticField == 204)
- }
-}
-
-
-
-/* TEST 7 */
-
-/** Here we test objects nested in top-level objects */
-object Foo7 {
- object AndHisFriend {
- @static val bar = "string"
- }
- class AndHisFriend
-}
-
-
-object Test7 extends Check {
- def test() {
- checkStatic(classOf[Foo7.AndHisFriend])
- assert(Foo7.AndHisFriend.bar == "string")
- }
-}
-
-
-
-/* TEST 8 */
-
-object Foo8 {
- @static val field = 7
-
- val function: () => Int = () => {
- field + 1
- }
-
- val anon = new Runnable {
- def run() {
- assert(field == 7, "runnable asserting field is 7")
- }
- }
-
- @static var mutable = 10
-
- val mutation: () => Unit = () => {
- mutable += 1
- }
-}
-
-object Test8 {
- def test() {
- assert(Foo8.function() == 8, "function must return 8")
- Foo8.anon.run()
- assert(Foo8.mutable == 10, "mutable is 10")
- Foo8.mutation()
- assert(Foo8.mutable == 11, "mutable is 11")
- Foo8.mutation()
- assert(Foo8.mutable == 12, "mutable is 12")
- }
-}
-
-
-
-
-/* main */
-
-object Test {
-
- def main(args: Array[String]) {
- Test1.test()
- Test2.test()
- Test3.test()
- Test4.test()
- Test5.test()
- Test6.test()
- Test7.test()
- Test8.test()
- }
-
-}
-
-
-
-
-
-
diff --git a/test/files/run/stream-stack-overflow-filter-map.scala b/test/files/run/stream-stack-overflow-filter-map.scala
new file mode 100644
index 0000000000..f3a9dd49cb
--- /dev/null
+++ b/test/files/run/stream-stack-overflow-filter-map.scala
@@ -0,0 +1,44 @@
+import collection.generic.{FilterMonadic, CanBuildFrom}
+
+object Test extends App {
+ def mapSucc[Repr, That](s: FilterMonadic[Int, Repr])(implicit cbf: CanBuildFrom[Repr, Int, That]) = s map (_ + 1)
+ def flatMapId[T, Repr, That](s: FilterMonadic[T, Repr])(implicit cbf: CanBuildFrom[Repr, T, That]) = s flatMap (Seq(_))
+
+ def testStreamPred(s: Stream[Int])(p: Int => Boolean) {
+ val res1 = s withFilter p
+ val res2 = s filter p
+
+ val expected = s.toSeq filter p
+
+ val fMapped1 = flatMapId(res1)
+ val fMapped2 = flatMapId(res2)
+ assert(fMapped1 == fMapped2)
+ assert(fMapped1.toSeq == expected)
+
+ val mapped1 = mapSucc(res1)
+ val mapped2 = mapSucc(res2)
+ assert(mapped1 == mapped2)
+ assert(mapped1.toSeq == (expected map (_ + 1)))
+
+ assert((res1 map identity).toSeq == res2.toSeq)
+ }
+
+ def testStream(s: Stream[Int]) {
+ testStreamPred(s)(_ => false)
+ testStreamPred(s)(_ => true)
+ testStreamPred(s)(_ % 2 == 0)
+ testStreamPred(s)(_ % 3 == 0)
+ }
+
+ //Reduced version of the test case - either invocation used to cause a stack
+ //overflow before commit 80b3f433e5536d086806fa108ccdfacf10719cc2.
+ val resFMap = (1 to 10000).toStream withFilter (_ => false) flatMap (Seq(_))
+ val resMap = (1 to 10000).toStream withFilter (_ => false) map (_ + 1)
+
+ //Complete test case for withFilter + map/flatMap, as requested by @axel22.
+ for (j <- (0 to 3) :+ 10000) {
+ val stream = (1 to j).toStream
+ assert(stream.toSeq == (1 to j).toSeq)
+ testStream(stream)
+ }
+}
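
The new stream-stack-overflow-filter-map test guards the laziness of Stream#withFilter: building a filtered view of a long stream must not blow the stack even when the predicate rejects everything. A compact usage sketch in the spirit of the "reduced version" above (StreamFilterDemo is an illustrative name):

  object StreamFilterDemo extends App {
    // withFilter is lazy, so constructing this no longer overflows the stack
    // even though the predicate rejects all 10000 elements.
    val res = (1 to 10000).toStream withFilter (_ => false) map (_ + 1)
    println(res.toSeq.isEmpty)   // true
  }
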
diff --git a/test/files/run/t1195-new.check b/test/files/run/t1195-new.check
index 4dd1661d1b..e0c9ac07ff 100644
--- a/test/files/run/t1195-new.check
+++ b/test/files/run/t1195-new.check
@@ -1,6 +1,6 @@
-Bar.type, underlying = <: scala.runtime.AbstractFunction1[Int,Bar] with Serializable{case def unapply(x$0: Bar): Option[Int]} with Singleton
-Bar, underlying = <: Product with Serializable{val x: Int; def copy(x: Int): Bar; def copy$default$1: Int; def _1: Int}
-Product with Serializable, underlying = Product with Serializable
-Bar.type, underlying = <: scala.runtime.AbstractFunction1[Int,Bar] with Serializable{case def unapply(x$0: Bar): Option[Int]} with Singleton
-Bar, underlying = <: Product with Serializable{val x: Int; def copy(x: Int): Bar; def copy$default$1: Int; def _1: Int}
-Product with Serializable, underlying = Product with Serializable
+Bar.type, underlying = <: scala.runtime.AbstractFunction1[Int,Bar] with Serializable{case def unapply(x$0: Bar): Option[Int]} with Singleton
+Bar, underlying = <: Product with Serializable{val x: Int; def copy(x: Int): Bar; def copy$default$1: Int; def _1: Int}
+Product with Serializable, underlying = Product with Serializable
+Bar.type, underlying = <: scala.runtime.AbstractFunction1[Int,Bar] with Serializable{case def unapply(x$0: Bar): Option[Int]} with Singleton
+Bar, underlying = <: Product with Serializable{val x: Int; def copy(x: Int): Bar; def copy$default$1: Int; def _1: Int}
+Product with Serializable, underlying = Product with Serializable
diff --git a/test/files/run/t1195-new.scala b/test/files/run/t1195-new.scala
index 4edfb5073f..0f62b140c9 100644
--- a/test/files/run/t1195-new.scala
+++ b/test/files/run/t1195-new.scala
@@ -9,7 +9,7 @@ object Test {
val g1 = g()
val h1 = h()
- def m[T: TypeTag](x: T) = println(typeOf[T] + ", underlying = " + typeOf[T].typeSymbol.typeSignature)
+ def m[T: WeakTypeTag](x: T) = println(weakTypeOf[T] + ", underlying = " + weakTypeOf[T].typeSymbol.typeSignature)
def main(args: Array[String]): Unit = {
m(f)
diff --git a/test/files/run/t1195-old.check b/test/files/run/t1195-old.check
index eb60eceb17..d023bc91f7 100644
--- a/test/files/run/t1195-old.check
+++ b/test/files/run/t1195-old.check
@@ -1,6 +1,6 @@
-_ <: scala.runtime.AbstractFunction1[Int, _ <: Object with scala.Product with scala.Serializable] with scala.Serializable with java.lang.Object
-_ <: Object with scala.Product with scala.Serializable
-Object with scala.Product with scala.Serializable
-_ <: scala.runtime.AbstractFunction1[Int, _ <: Object with scala.Product with scala.Serializable] with scala.Serializable with java.lang.Object
-_ <: Object with scala.Product with scala.Serializable
-Object with scala.Product with scala.Serializable
+_ <: scala.runtime.AbstractFunction1[Int, _ <: Object with scala.Product with scala.Serializable] with scala.Serializable with java.lang.Object
+_ <: Object with scala.Product with scala.Serializable
+Object with scala.Product with scala.Serializable
+_ <: scala.runtime.AbstractFunction1[Int, _ <: Object with scala.Product with scala.Serializable] with scala.Serializable with java.lang.Object
+_ <: Object with scala.Product with scala.Serializable
+Object with scala.Product with scala.Serializable
diff --git a/test/files/run/t1987b.check b/test/files/run/t1987b.check
new file mode 100644
index 0000000000..68d4b10e12
--- /dev/null
+++ b/test/files/run/t1987b.check
@@ -0,0 +1 @@
+ok!
diff --git a/test/files/run/t1987b/PullIteratees.scala b/test/files/run/t1987b/PullIteratees.scala
new file mode 100644
index 0000000000..a5a3e65d8f
--- /dev/null
+++ b/test/files/run/t1987b/PullIteratees.scala
@@ -0,0 +1,17 @@
+package scales.xml
+
+trait PullType
+class QName
+trait RetUrn[T]
+
+/**
+ * Iteratees related to pull parsing
+ */
+trait PullIteratees {
+ /**
+ * Without the overload it doesn't trigger the CCE, even though it's
+ * not used
+ */
+ def iterate(path: List[QName], xml: String): RetUrn[String] = null
+ def iterate(path: List[QName], xml: Iterator[PullType]): RetUrn[String] = null
+}
diff --git a/test/files/run/t1987b/a.scala b/test/files/run/t1987b/a.scala
new file mode 100644
index 0000000000..c1be5fe3e0
--- /dev/null
+++ b/test/files/run/t1987b/a.scala
@@ -0,0 +1,6 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ scales.xml.CCE_Test.main(args)
+ println("ok!")
+ }
+}
diff --git a/test/files/run/t1987b/cce_test.scala b/test/files/run/t1987b/cce_test.scala
new file mode 100644
index 0000000000..4f9acf0264
--- /dev/null
+++ b/test/files/run/t1987b/cce_test.scala
@@ -0,0 +1,15 @@
+package scales.xml
+//import scales.xml._ // using another package and importing doesn't CCE
+
+object CCE_Test {
+ def main(args: Array[String]): Unit = {
+ // without the import it doesn't trigger the CCE
+ import scaley.funny._
+
+ val pull = null.asInstanceOf[Iterator[PullType]]
+ val LogEntries = null.asInstanceOf[List[QName]]
+ // fully qualify with scales.xml. and it won't trigger it
+ iterate(LogEntries,
+ pull)
+ }
+}
diff --git a/test/files/run/t1987b/pkg1.scala b/test/files/run/t1987b/pkg1.scala
new file mode 100644
index 0000000000..6e749fc6b3
--- /dev/null
+++ b/test/files/run/t1987b/pkg1.scala
@@ -0,0 +1,4 @@
+package scaley
+
+package object funny {
+}
diff --git a/test/files/run/t1987b/pkg2.scala b/test/files/run/t1987b/pkg2.scala
new file mode 100644
index 0000000000..38056a199e
--- /dev/null
+++ b/test/files/run/t1987b/pkg2.scala
@@ -0,0 +1,3 @@
+package scales
+
+package object xml extends PullIteratees
diff --git a/test/files/run/t2296a.check b/test/files/run/t2296a.check
deleted file mode 100644
index f75aec9d81..0000000000
--- a/test/files/run/t2296a.check
+++ /dev/null
@@ -1,2 +0,0 @@
-J.foo()
-J.foo()
diff --git a/test/files/run/t2296b.check b/test/files/run/t2296b.check
deleted file mode 100644
index f75aec9d81..0000000000
--- a/test/files/run/t2296b.check
+++ /dev/null
@@ -1,2 +0,0 @@
-J.foo()
-J.foo()
diff --git a/test/files/run/t2386-new.check b/test/files/run/t2386-new.check
index 98e226f946..8ed0ffd7d8 100644
--- a/test/files/run/t2386-new.check
+++ b/test/files/run/t2386-new.check
@@ -1,2 +1,2 @@
-a(0) = Array(1, 2)
-a(1) = Array("a", "b")
+a(0) = Array(1, 2)
+a(1) = Array("a", "b")
diff --git a/test/files/run/t2886.check b/test/files/run/t2886.check
index 8d97a82799..ce31bc7408 100644
--- a/test/files/run/t2886.check
+++ b/test/files/run/t2886.check
@@ -1,5 +1,5 @@
-((x: String) => {
- val x$1 = x;
- val x$2 = x;
- Test.this.test(x$2, x$1)
-})
+((x: String) => {
+ val x$1 = x;
+ val x$2 = x;
+ Test.this.test(x$2, x$1)
+})
diff --git a/test/files/run/t3507-new.check b/test/files/run/t3507-new.check
index b02c40fc26..208e093004 100644
--- a/test/files/run/t3507-new.check
+++ b/test/files/run/t3507-new.check
@@ -1 +1 @@
-_1.b.c.type
+_1.b.c.type
diff --git a/test/files/run/t4110-new.check b/test/files/run/t4110-new.check
index 5cbfb27f3e..c0f646c5f6 100644
--- a/test/files/run/t4110-new.check
+++ b/test/files/run/t4110-new.check
@@ -1,2 +1,2 @@
-Test.A with Test.B
-Test.A with Test.B
+Test.A with Test.B
+Test.A with Test.B
diff --git a/test/files/run/t4216.check b/test/files/run/t4216.check
index 6f2684f42d..091e55a0c7 100644
--- a/test/files/run/t4216.check
+++ b/test/files/run/t4216.check
@@ -1,37 +1,37 @@
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> import scala.reflect.ClassTag
-import scala.reflect.ClassTag
-
-scala> def f[A: ClassTag](a: A) = java.util.Arrays.asList(Array(a): _*)
-f: [A](a: A)(implicit evidence$1: scala.reflect.ClassTag[A])java.util.List[A]
-
-scala> f(".")
-res0: java.util.List[String] = [.]
-
-scala> f(0)
-res1: java.util.List[Int] = [0]
-
-scala> def i(a: Int) = java.util.Arrays.asList(Array(a): _*)
-i: (a: Int)java.util.List[Int]
-
-scala> i(0)
-res2: java.util.List[Int] = [0]
-
-scala> def o(a: Any) = java.util.Arrays.asList(Array(a): _*)
-o: (a: Any)java.util.List[Any]
-
-scala> o(".")
-res3: java.util.List[Any] = [.]
-
-scala> class V(val a: Int) extends AnyVal
-defined class V
-
-scala> f(new V(0))
-res4: java.util.List[V] = [V@0]
-
-scala> o(new V(0))
-res5: java.util.List[Any] = [V@0]
-
-scala>
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> import scala.reflect.ClassTag
+import scala.reflect.ClassTag
+
+scala> def f[A: ClassTag](a: A) = java.util.Arrays.asList(Array(a): _*)
+f: [A](a: A)(implicit evidence$1: scala.reflect.ClassTag[A])java.util.List[A]
+
+scala> f(".")
+res0: java.util.List[String] = [.]
+
+scala> f(0)
+res1: java.util.List[Int] = [0]
+
+scala> def i(a: Int) = java.util.Arrays.asList(Array(a): _*)
+i: (a: Int)java.util.List[Int]
+
+scala> i(0)
+res2: java.util.List[Int] = [0]
+
+scala> def o(a: Any) = java.util.Arrays.asList(Array(a): _*)
+o: (a: Any)java.util.List[Any]
+
+scala> o(".")
+res3: java.util.List[Any] = [.]
+
+scala> class V(val a: Int) extends AnyVal
+defined class V
+
+scala> f(new V(0))
+res4: java.util.List[V] = [V@0]
+
+scala> o(new V(0))
+res5: java.util.List[Any] = [V@0]
+
+scala>
diff --git a/test/files/run/t4813.scala b/test/files/run/t4813.scala
new file mode 100644
index 0000000000..6d48ca8758
--- /dev/null
+++ b/test/files/run/t4813.scala
@@ -0,0 +1,37 @@
+import collection.mutable._
+import reflect._
+
+
+object Test extends App {
+ def runTest[T, U](col: T)(clone: T => U)(mod: T => Unit)(implicit ct: ClassTag[T]): Unit = {
+ val cloned = clone(col)
+ assert(cloned == col, s"cloned should be equal to original. $cloned != $col")
+ mod(col)
+ assert(cloned != col, s"cloned should not change when the original is modified: $ct")
+ }
+
+ // Seqs
+ runTest(ArrayBuffer(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+ runTest(ArraySeq(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+ runTest(Buffer(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+ runTest(DoubleLinkedList(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+ runTest(IndexedSeq(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+ runTest(LinearSeq(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+ runTest(LinkedList(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+ runTest(ListBuffer(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+ runTest(MutableList(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+ runTest(Queue(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+ runTest(Stack(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+
+ // Sets
+ runTest(BitSet(1,2,3))(_.clone) { buf => buf add 4 }
+ runTest(HashSet(1,2,3))(_.clone) { buf => buf add 4 }
+ runTest(Set(1,2,3))(_.clone) { buf => buf add 4 }
+ runTest(SortedSet(1,2,3))(_.clone) { buf => buf add 4 }
+ runTest(TreeSet(1,2,3))(_.clone) { buf => buf add 4 }
+
+ // Maps
+ runTest(HashMap(1->1,2->2,3->3))(_.clone) { buf => buf put (4,4) }
+ runTest(WeakHashMap(1->1,2->2,3->3))(_.clone) { buf => buf put (4,4) }
+}
+
diff --git a/test/files/run/t4835.check b/test/files/run/t4835.check
index 531c3d7bb6..0987722d5f 100644
--- a/test/files/run/t4835.check
+++ b/test/files/run/t4835.check
@@ -1,7 +1,7 @@
--1 0 1 2 3 4 5 6 7 8 9
--1 1 3 5 7 9 11 13 15 17 19
-1 1
-2 1 2
-2 1 A 2
-3 1 2 3
-3 1 A 2 B 3
+-1 0 1 2 3 4 5 6 7 8 9
+-1 1 3 5 7 9 11 13 15 17 19
+1 1
+2 1 2
+2 1 A 2
+3 1 2 3
+3 1 A 2 B 3
diff --git a/test/files/run/t4835.scala b/test/files/run/t4835.scala
index 50d161be40..c964e42e7c 100644
--- a/test/files/run/t4835.scala
+++ b/test/files/run/t4835.scala
@@ -1,38 +1,38 @@
-/*
- * Test case for SI-4835. This tests confirm that the fix
- * doesn't break laziness. To test memory consumption,
- * I need to confirm that OutOfMemoryError doesn't occur.
- * I could create such tests. However, such tests consume
- * too much time and memory.
- */
-object Test {
- private final val INFINITE = -1
- def testStreamIterator(num: Int, stream: Stream[Int]): Unit = {
- val iter = stream.iterator
- print(num)
- // if num == -1, then steram is infinite sequence
- if (num == INFINITE) {
- for(i <- 0 until 10) {
- print(" " + iter.next())
- }
- } else {
- while(iter.hasNext) {
- print(" " + iter.next())
- }
- }
- println()
- }
-
- def main(args: Array[String]): Unit = {
- import Stream.{from, cons, empty}
- testStreamIterator(INFINITE, from(0))
- testStreamIterator(INFINITE, from(0).filter(_ % 2 == 1))
- testStreamIterator(1, Stream(1))
- testStreamIterator(2, Stream(1, 2))
- //Stream with side effect
- testStreamIterator(2, cons(1, cons({ print(" A"); 2}, empty)))
- testStreamIterator(3, Stream(1, 2, 3))
- //Stream with side effect
- testStreamIterator(3, cons(1, cons({ print(" A"); 2}, cons({ print(" B"); 3}, Stream.empty))))
- }
-}
+/*
+ * Test case for SI-4835. These tests confirm that the fix
+ * doesn't break laziness. To test memory consumption,
+ * I need to confirm that OutOfMemoryError doesn't occur.
+ * I could create such tests. However, such tests consume
+ * too much time and memory.
+ */
+object Test {
+ private final val INFINITE = -1
+ def testStreamIterator(num: Int, stream: Stream[Int]): Unit = {
+ val iter = stream.iterator
+ print(num)
+ // if num == -1, then the stream is an infinite sequence
+ if (num == INFINITE) {
+ for(i <- 0 until 10) {
+ print(" " + iter.next())
+ }
+ } else {
+ while(iter.hasNext) {
+ print(" " + iter.next())
+ }
+ }
+ println()
+ }
+
+ def main(args: Array[String]): Unit = {
+ import Stream.{from, cons, empty}
+ testStreamIterator(INFINITE, from(0))
+ testStreamIterator(INFINITE, from(0).filter(_ % 2 == 1))
+ testStreamIterator(1, Stream(1))
+ testStreamIterator(2, Stream(1, 2))
+ //Stream with side effect
+ testStreamIterator(2, cons(1, cons({ print(" A"); 2}, empty)))
+ testStreamIterator(3, Stream(1, 2, 3))
+ //Stream with side effect
+ testStreamIterator(3, cons(1, cons({ print(" A"); 2}, cons({ print(" B"); 3}, Stream.empty))))
+ }
+}
diff --git a/test/files/run/t4935.check b/test/files/run/t4935.check
index ef0493b275..ce01362503 100644
--- a/test/files/run/t4935.check
+++ b/test/files/run/t4935.check
@@ -1 +1 @@
-hello
+hello
diff --git a/test/files/run/t4935.scala b/test/files/run/t4935.scala
index 18631e2041..5940355b9b 100644
--- a/test/files/run/t4935.scala
+++ b/test/files/run/t4935.scala
@@ -1,9 +1,9 @@
-object Test extends App {
- for (i <- 0 to 1) {
- val a = Foo
- }
-}
-
-object Foo {
- println("hello")
-}
+object Test extends App {
+ for (i <- 0 to 1) {
+ val a = Foo
+ }
+}
+
+object Foo {
+ println("hello")
+}
diff --git a/test/files/run/t5064.check b/test/files/run/t5064.check
new file mode 100644
index 0000000000..077006abd9
--- /dev/null
+++ b/test/files/run/t5064.check
@@ -0,0 +1,25 @@
+[12] T5064.super.<init>()
+[12] T5064.super.<init>
+[12] this
+[16:23] immutable.this.List.apply(scala.this.Predef.wrapIntArray(Array[Int]{1}))
+[16:20] immutable.this.List.apply
+<16:20> immutable.this.List
+<16:20> immutable.this
+[16:23] scala.this.Predef.wrapIntArray(Array[Int]{1})
+[20] scala.this.Predef.wrapIntArray
+[20] scala.this.Predef
+[20] scala.this
+[26:32] collection.this.Seq.apply(scala.this.Predef.wrapIntArray(Array[Int]{1}))
+[26:29] collection.this.Seq.apply
+<26:29> collection.this.Seq
+<26:29> collection.this
+[26:32] scala.this.Predef.wrapIntArray(Array[Int]{1})
+[29] scala.this.Predef.wrapIntArray
+[29] scala.this.Predef
+[29] scala.this
+[35:39] immutable.this.List
+<35:39> immutable.this
+[42:45] collection.this.Seq
+<42:45> collection.this
+[48:51] immutable.this.Nil
+<48:51> immutable.this
diff --git a/test/files/run/t5064.scala b/test/files/run/t5064.scala
new file mode 100644
index 0000000000..35f0951765
--- /dev/null
+++ b/test/files/run/t5064.scala
@@ -0,0 +1,23 @@
+import scala.tools.partest._
+
+object Test extends CompilerTest {
+ import global._
+ override def extraSettings = super.extraSettings + " -Yrangepos"
+ override def sources = List(
+ """|class T5064 {
+ | List(1)
+ | Seq(1)
+ | List
+ | Seq
+ | Nil
+ |}""".stripMargin
+ )
+ def check(source: String, unit: CompilationUnit) {
+ for (ClassDef(_, _, _, Template(_, _, stats)) <- unit.body ; stat <- stats ; t <- stat) {
+ t match {
+ case _: Select | _: Apply | _: This => println("%-15s %s".format(t.pos.show, t))
+ case _ =>
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t5224.check b/test/files/run/t5224.check
index ffeb7b43ce..e15c1c90eb 100644
--- a/test/files/run/t5224.check
+++ b/test/files/run/t5224.check
@@ -1,9 +1,9 @@
-{
- @new Foo(bar = "qwe") class C extends AnyRef {
- def <init>() = {
- super.<init>();
- ()
- }
- };
- ()
-}
+{
+ @new Foo(bar = "qwe") class C extends AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ }
+ };
+ ()
+}
diff --git a/test/files/run/t5225_1.check b/test/files/run/t5225_1.check
index 40db2468b1..1a47aacfd0 100644
--- a/test/files/run/t5225_1.check
+++ b/test/files/run/t5225_1.check
@@ -1,4 +1,4 @@
-{
- @new transient() @new volatile() var x = 2;
- ()
-}
+{
+ @new transient() @new volatile() var x = 2;
+ ()
+}
diff --git a/test/files/run/t5225_2.check b/test/files/run/t5225_2.check
index 8cd2ddc1a4..8ed54a14bb 100644
--- a/test/files/run/t5225_2.check
+++ b/test/files/run/t5225_2.check
@@ -1,4 +1,4 @@
-{
- def foo(@new cloneable() x: Int) = "";
- ()
-}
+{
+ def foo(@new cloneable() x: Int) = "";
+ ()
+}
diff --git a/test/files/run/t5229_2.scala b/test/files/run/t5229_2.scala
index 75d7204911..f059b09772 100644
--- a/test/files/run/t5229_2.scala
+++ b/test/files/run/t5229_2.scala
@@ -13,6 +13,6 @@ object Test extends App {
};
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/run/t5230.scala b/test/files/run/t5230.scala
index 5cd67766b4..f6a7817c0b 100644
--- a/test/files/run/t5230.scala
+++ b/test/files/run/t5230.scala
@@ -13,6 +13,6 @@ object Test extends App {
};
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/run/t5256a.check b/test/files/run/t5256a.check
index 518663b3da..7e60139db3 100644
--- a/test/files/run/t5256a.check
+++ b/test/files/run/t5256a.check
@@ -1,6 +1,6 @@
-class A
-A
+class A
+A
Object {
- def <init>: <?>
- def foo: <?>
-}
+ def <init>(): A
+ def foo: Nothing
+}
diff --git a/test/files/run/t5256b.check b/test/files/run/t5256b.check
index d6015f2743..a80df6eb30 100644
--- a/test/files/run/t5256b.check
+++ b/test/files/run/t5256b.check
@@ -1,6 +1,6 @@
-class A
-Test.A
+class A
+Test.A
Object {
- def <init>: <?>
- def foo: <?>
-}
+ def <init>(): Test.A
+ def foo: Nothing
+}
diff --git a/test/files/run/t5256c.check b/test/files/run/t5256c.check
index 16bfc6c9d0..7fcd0eb722 100644
--- a/test/files/run/t5256c.check
+++ b/test/files/run/t5256c.check
@@ -1,6 +1,6 @@
-class A$1
-Test.A$1
+class A$1
+Test.A$1
java.lang.Object {
def foo(): Nothing
def <init>(): A$1
-}
+}
diff --git a/test/files/run/t5256d.check b/test/files/run/t5256d.check
index dd32c05a93..9742ae572e 100644
--- a/test/files/run/t5256d.check
+++ b/test/files/run/t5256d.check
@@ -1,32 +1,32 @@
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala>
-
-scala> import scala.reflect.runtime.universe._
-import scala.reflect.runtime.universe._
-
-scala> import scala.reflect.runtime.{currentMirror => cm}
-import scala.reflect.runtime.{currentMirror=>cm}
-
-scala> class A { def foo = ??? }
-defined class A
-
-scala> val c = cm.classSymbol(classOf[A])
-c: reflect.runtime.universe.ClassSymbol = class A
-
-scala> println(c)
-class A
-
-scala> println(c.fullName)
-$line8.$read.$iw.$iw.$iw.$iw.A
-
-scala> println(c.typeSignature)
-java.lang.Object {
- def <init>: <?>
- def foo: <?>
-}
-
-scala>
-
-scala>
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe._
+
+scala> import scala.reflect.runtime.{currentMirror => cm}
+import scala.reflect.runtime.{currentMirror=>cm}
+
+scala> class A { def foo = ??? }
+defined class A
+
+scala> val c = cm.classSymbol(classOf[A])
+c: reflect.runtime.universe.ClassSymbol = class A
+
+scala> println(c)
+class A
+
+scala> println(c.fullName)
+$line8.$read.$iw.$iw.$iw.$iw.A
+
+scala> println(c.typeSignature)
+java.lang.Object {
+ def <init>(): A
+ def foo: scala.Nothing
+}
+
+scala>
+
+scala>
diff --git a/test/files/run/t5256e.check b/test/files/run/t5256e.check
index 6c6de90acc..011115720c 100644
--- a/test/files/run/t5256e.check
+++ b/test/files/run/t5256e.check
@@ -1,6 +1,6 @@
class A
Test.C.A
Object {
- def <init>: <?>
- def foo: <?>
+ def <init>(): C.this.A
+ def foo: Nothing
}
diff --git a/test/files/run/t5256f.check b/test/files/run/t5256f.check
index c840793fd5..e0fec85596 100644
--- a/test/files/run/t5256f.check
+++ b/test/files/run/t5256f.check
@@ -1,12 +1,12 @@
class A1
Test.A1
Object {
- def <init>: <?>
- def foo: <?>
+ def <init>(): Test.A1
+ def foo: Nothing
}
class A2
Test.A2
Object {
- def <init>: <?>
- def foo: <?>
+ def <init>(): Test.this.A2
+ def foo: Nothing
}
diff --git a/test/files/run/t5256g.check b/test/files/run/t5256g.check
index 5aac899694..c9c8d6e63d 100644
--- a/test/files/run/t5256g.check
+++ b/test/files/run/t5256g.check
@@ -1,3 +1,3 @@
-anonymous class $anon$1
-Test.$anon$1
-A with B{def <init>(): A with B}
+anonymous class $anon$1
+Test.$anon$1
+A with B{def <init>(): A with B}
diff --git a/test/files/run/t5256h.check b/test/files/run/t5256h.check
index 2a6b292486..1b23a71a4c 100644
--- a/test/files/run/t5256h.check
+++ b/test/files/run/t5256h.check
@@ -1,7 +1,7 @@
-anonymous class $anon$1
-Test.$anon$1
+anonymous class $anon$1
+Test.$anon$1
java.lang.Object {
final private val x: Int
def x(): Int
def <init>(): java.lang.Object{def x(): Int}
-}
+}
diff --git a/test/files/run/t5266_1.scala b/test/files/run/t5266_1.scala
index ee7ea6d335..7bf73ac988 100644
--- a/test/files/run/t5266_1.scala
+++ b/test/files/run/t5266_1.scala
@@ -10,6 +10,6 @@ object Test extends App {
};
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/run/t5266_2.scala b/test/files/run/t5266_2.scala
index ca16f656ee..9b33910d00 100644
--- a/test/files/run/t5266_2.scala
+++ b/test/files/run/t5266_2.scala
@@ -11,6 +11,6 @@ object Test extends App {
};
val toolbox = cm.mkToolBox()
- val evaluated = toolbox.runExpr(code.tree)
+ val evaluated = toolbox.eval(code.tree)
println("evaluated = " + evaluated)
} \ No newline at end of file
diff --git a/test/files/run/t5271_1.check b/test/files/run/t5271_1.check
index 5245173228..544b4d2762 100644
--- a/test/files/run/t5271_1.check
+++ b/test/files/run/t5271_1.check
@@ -1,12 +1,12 @@
-{
- case class C extends Product with Serializable {
- <caseaccessor> <paramaccessor> val foo: Int = _;
- <caseaccessor> <paramaccessor> val bar: Int = _;
- def <init>(foo: Int, bar: Int) = {
- super.<init>();
- ()
- }
- };
- ()
-}
-()
+{
+ case class C extends Product with Serializable {
+ <caseaccessor> <paramaccessor> val foo: Int = _;
+ <caseaccessor> <paramaccessor> val bar: Int = _;
+ def <init>(foo: Int, bar: Int) = {
+ super.<init>();
+ ()
+ }
+ };
+ ()
+}
+()
diff --git a/test/files/run/t5271_2.check b/test/files/run/t5271_2.check
index 0765b3a6a4..585331be65 100644
--- a/test/files/run/t5271_2.check
+++ b/test/files/run/t5271_2.check
@@ -1,14 +1,14 @@
-{
- case class C extends Product with Serializable {
- <caseaccessor> <paramaccessor> val foo: Int = _;
- <caseaccessor> <paramaccessor> val bar: Int = _;
- def <init>(foo: Int, bar: Int) = {
- super.<init>();
- ()
- }
- };
- val c = C.apply(2, 2);
- scala.this.Predef.println(c.foo.$times(c.bar))
-}
-4
-()
+{
+ case class C extends Product with Serializable {
+ <caseaccessor> <paramaccessor> val foo: Int = _;
+ <caseaccessor> <paramaccessor> val bar: Int = _;
+ def <init>(foo: Int, bar: Int) = {
+ super.<init>();
+ ()
+ }
+ };
+ val c = C.apply(2, 2);
+ scala.this.Predef.println(c.foo.$times(c.bar))
+}
+4
+()
diff --git a/test/files/run/t5271_3.check b/test/files/run/t5271_3.check
index 3cfedbacd7..b02acd21f9 100644
--- a/test/files/run/t5271_3.check
+++ b/test/files/run/t5271_3.check
@@ -1,21 +1,21 @@
-{
- object C extends AnyRef {
- def <init>() = {
- super.<init>();
- ()
- };
- def qwe = 4
- };
- case class C extends Product with Serializable {
- <caseaccessor> <paramaccessor> val foo: Int = _;
- <caseaccessor> <paramaccessor> val bar: Int = _;
- def <init>(foo: Int, bar: Int) = {
- super.<init>();
- ()
- }
- };
- val c = C.apply(2, 2);
- scala.this.Predef.println(c.foo.$times(c.bar).$eq$eq(C.qwe))
-}
-true
-()
+{
+ object C extends AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ };
+ def qwe = 4
+ };
+ case class C extends Product with Serializable {
+ <caseaccessor> <paramaccessor> val foo: Int = _;
+ <caseaccessor> <paramaccessor> val bar: Int = _;
+ def <init>(foo: Int, bar: Int) = {
+ super.<init>();
+ ()
+ }
+ };
+ val c = C.apply(2, 2);
+ scala.this.Predef.println(c.foo.$times(c.bar).$eq$eq(C.qwe))
+}
+true
+()
diff --git a/test/files/run/t5334_1.scala b/test/files/run/t5334_1.scala
index 2b6418990a..3aeb7e4437 100644
--- a/test/files/run/t5334_1.scala
+++ b/test/files/run/t5334_1.scala
@@ -11,5 +11,5 @@ object Test extends App {
};
val toolbox = cm.mkToolBox()
- println(toolbox.runExpr(code.tree))
+ println(toolbox.eval(code.tree))
} \ No newline at end of file
diff --git a/test/files/run/t5334_2.scala b/test/files/run/t5334_2.scala
index 815f78f951..64ee1e0acd 100644
--- a/test/files/run/t5334_2.scala
+++ b/test/files/run/t5334_2.scala
@@ -11,5 +11,5 @@ object Test extends App {
};
val toolbox = cm.mkToolBox()
- println(toolbox.runExpr(code.tree))
+ println(toolbox.eval(code.tree))
} \ No newline at end of file
diff --git a/test/files/run/t5356.check b/test/files/run/t5356.check
index 21c4aef07b..7522e7ea74 100644
--- a/test/files/run/t5356.check
+++ b/test/files/run/t5356.check
@@ -1,6 +1,6 @@
-1 scala.runtime.RichInt
-1 scala.runtime.RichInt
+1 java.lang.Integer
+1 java.lang.Integer
1 scala.math.BigInt
-1 scala.runtime.RichDouble
-1 scala.runtime.RichFloat
+1 java.lang.Double
+1 java.lang.Float
1
diff --git a/test/files/run/t5356.scala b/test/files/run/t5356.scala
index f7696c6088..ec17e036ad 100644
--- a/test/files/run/t5356.scala
+++ b/test/files/run/t5356.scala
@@ -1,12 +1,12 @@
object Test {
- def f(x: { def toInt: Int }) = println(x.toInt + " " + x.getClass.getName)
-
+ def f(x: Any { def toInt: Int }) = println(x.toInt + " " + x.getClass.getName)
+
def main(args: Array[String]): Unit = {
f(1)
f(1.toInt)
f(BigInt(1))
f(1d)
f(1f)
- println((1: { def toInt: Int }).toInt)
+ println((1: (Any { def toInt: Int })).toInt)
}
}
diff --git a/test/files/run/t5418a.check b/test/files/run/t5418a.check
new file mode 100644
index 0000000000..527022936d
--- /dev/null
+++ b/test/files/run/t5418a.check
@@ -0,0 +1 @@
+Expr[Class[_ <: java.lang.Object]](new Object().getClass())
diff --git a/test/files/run/t5418a.scala b/test/files/run/t5418a.scala
new file mode 100644
index 0000000000..90bc542be6
--- /dev/null
+++ b/test/files/run/t5418a.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ println(scala.reflect.runtime.universe.reify(new Object().getClass))
+} \ No newline at end of file
diff --git a/test/files/run/t5418b.check b/test/files/run/t5418b.check
new file mode 100644
index 0000000000..48d82a2aae
--- /dev/null
+++ b/test/files/run/t5418b.check
@@ -0,0 +1,2 @@
+new Object().getClass()
+TypeRef(ThisType(java.lang), java.lang.Class, List(TypeRef(NoPrefix, newTypeName("?0"), List())))
diff --git a/test/files/run/t5418b.scala b/test/files/run/t5418b.scala
new file mode 100644
index 0000000000..08e8bb163b
--- /dev/null
+++ b/test/files/run/t5418b.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val untyped = reify(new Object().getClass).tree
+ val typed = tb.typeCheck(untyped)
+ println(typed)
+ println(showRaw(typed.tpe))
+} \ No newline at end of file
diff --git a/test/files/run/t5419.check b/test/files/run/t5419.check
index 50751b168e..a9c0f262e3 100644
--- a/test/files/run/t5419.check
+++ b/test/files/run/t5419.check
@@ -1 +1 @@
-5: @Foo.asInstanceOf[Int]
+5: @Foo.asInstanceOf[Int]
diff --git a/test/files/run/t5423.scala b/test/files/run/t5423.scala
index 9b8ba090fa..c1632126b2 100644
--- a/test/files/run/t5423.scala
+++ b/test/files/run/t5423.scala
@@ -7,5 +7,5 @@ final class table extends annotation.StaticAnnotation
object Test extends App {
val s = cm.classSymbol(classOf[A])
- println(s.getAnnotations)
+ println(s.annotations)
} \ No newline at end of file
diff --git a/test/files/run/t5680.check b/test/files/run/t5680.check
index 9fec3b6505..0d825ab7d0 100644
--- a/test/files/run/t5680.check
+++ b/test/files/run/t5680.check
@@ -1,3 +1,3 @@
-[Lscala.runtime.BoxedUnit
-()
-()
+[Lscala.runtime.BoxedUnit
+()
+()
diff --git a/test/files/run/t5704.check b/test/files/run/t5704.check
index 0f6c84d2ec..102e3209c6 100644
--- a/test/files/run/t5704.check
+++ b/test/files/run/t5704.check
@@ -1 +1 @@
-String
+String
diff --git a/test/files/run/t5713.check b/test/files/run/t5713.check
index d3e9348123..1419eb9d79 100644
--- a/test/files/run/t5713.check
+++ b/test/files/run/t5713.check
@@ -1 +1 @@
-err
+err
diff --git a/test/files/run/t5770.check b/test/files/run/t5770.check
new file mode 100644
index 0000000000..f00c965d83
--- /dev/null
+++ b/test/files/run/t5770.check
@@ -0,0 +1,10 @@
+1
+2
+3
+4
+5
+6
+7
+8
+9
+10
diff --git a/test/files/run/t5770.scala b/test/files/run/t5770.scala
new file mode 100644
index 0000000000..b6c9236844
--- /dev/null
+++ b/test/files/run/t5770.scala
@@ -0,0 +1,25 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect._
+
+object Test extends App {
+ var i = 0
+ val action = reify { i += 1; println(i) }.tree
+
+ val tb1 = cm.mkToolBox()
+ tb1.eval(action)
+ tb1.eval(action)
+ tb1.eval(action)
+ tb1.frontEnd.reset()
+ tb1.eval(action)
+ tb1.eval(action)
+
+ val tb2 = cm.mkToolBox()
+ tb2.eval(action)
+ tb2.frontEnd.reset()
+ tb2.eval(action)
+ tb2.eval(action)
+ tb2.frontEnd.reset()
+ tb2.eval(action)
+ tb2.eval(action)
+}
diff --git a/test/files/run/t5816.check b/test/files/run/t5816.check
index 920b64a50d..8e58ace9b4 100644
--- a/test/files/run/t5816.check
+++ b/test/files/run/t5816.check
@@ -1 +1 @@
-5.+(Test.this.y)
+5.+(Test.this.y)
diff --git a/test/files/run/t5881.check b/test/files/run/t5881.check
index 477fb935a8..f4aeec680a 100644
--- a/test/files/run/t5881.check
+++ b/test/files/run/t5881.check
@@ -1,2 +1,2 @@
-ClassTag[class scala.collection.immutable.List]
-ClassTag[class scala.collection.immutable.List]
+scala.collection.immutable.List
+scala.collection.immutable.List
diff --git a/test/files/run/t5940.scala b/test/files/run/t5940.scala
new file mode 100644
index 0000000000..147ff38256
--- /dev/null
+++ b/test/files/run/t5940.scala
@@ -0,0 +1,41 @@
+import scala.tools.partest._
+
+object Test extends DirectTest {
+ def code = ???
+
+ def macros_1 = """
+ import scala.reflect.macros.Context
+
+ object Impls {
+ def impl(c: Context) = c.literalUnit
+ }
+
+ object Macros {
+ //import Impls._
+ def impl(c: Context) = c.literalUnit
+ def foo = macro impl
+ }
+ """
+ def compileMacros() = {
+ val classpath = List(sys.props("partest.lib"), sys.props("partest.reflect")) mkString sys.props("path.separator")
+ compileString(newCompiler("-language:experimental.macros", "-cp", classpath, "-d", testOutput.path))(macros_1)
+ }
+
+ def test_2 = """
+ object Test extends App {
+ println(Macros.foo)
+ }
+ """
+ def compileTest() = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(test_2)
+ }
+
+ def show(): Unit = {
+ log("Compiling Macros_1...")
+ if (compileMacros()) {
+ log("Compiling Test_2...")
+ if (compileTest()) log("Success!") else log("Failed...")
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t5942.check b/test/files/run/t5942.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/run/t5942.check
diff --git a/test/files/run/t5942.scala b/test/files/run/t5942.scala
new file mode 100644
index 0000000000..44a8be93f6
--- /dev/null
+++ b/test/files/run/t5942.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect._
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ tb.parse("def x = {}")
+ try { tb.parse("def x = {") } catch { case _ => }
+ tb.parse("def x = {}")
+}
diff --git a/test/files/run/t5943a1.check b/test/files/run/t5943a1.check
new file mode 100644
index 0000000000..9f4d160af8
--- /dev/null
+++ b/test/files/run/t5943a1.check
@@ -0,0 +1 @@
+scala.this.Predef.intWrapper(1).to(3).map[Int, scala.collection.immutable.IndexedSeq[Int]](((x$1: Int) => x$1.+(1)))(immutable.this.IndexedSeq.canBuildFrom[Int])
diff --git a/test/files/run/t5943a1.scala b/test/files/run/t5943a1.scala
new file mode 100644
index 0000000000..00f4afa808
--- /dev/null
+++ b/test/files/run/t5943a1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val expr = tb.parse("1 to 3 map (_+1)")
+ println(tb.typeCheck(expr))
+} \ No newline at end of file
diff --git a/test/files/run/t5943a2.check b/test/files/run/t5943a2.check
new file mode 100644
index 0000000000..29ad79c3ce
--- /dev/null
+++ b/test/files/run/t5943a2.check
@@ -0,0 +1 @@
+Vector(2, 3, 4)
diff --git a/test/files/run/t5943a2.scala b/test/files/run/t5943a2.scala
new file mode 100644
index 0000000000..fda800852d
--- /dev/null
+++ b/test/files/run/t5943a2.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val expr = tb.parse("1 to 3 map (_+1)")
+ println(tb.eval(expr))
+} \ No newline at end of file
diff --git a/test/files/run/t6052.scala b/test/files/run/t6052.scala
new file mode 100644
index 0000000000..385d5390d3
--- /dev/null
+++ b/test/files/run/t6052.scala
@@ -0,0 +1,21 @@
+
+
+
+
+
+
+
+object Test extends App {
+ def seqarr(i: Int) = Array[Int]() ++ (0 until i)
+ def pararr(i: Int) = seqarr(i).par
+
+ def check[T](i: Int, f: Int => T) {
+ val gseq = seqarr(i).toSeq.groupBy(f)
+ val gpar = pararr(i).groupBy(f)
+ assert(gseq == gpar, (gseq, gpar))
+ }
+
+ for (i <- 0 until 20) check(i, _ > 0)
+ for (i <- 0 until 20) check(i, _ % 2)
+ for (i <- 0 until 20) check(i, _ % 4)
+}
diff --git a/test/files/run/t6086-repl.check b/test/files/run/t6086-repl.check
index f868aa18d0..97f20bd66b 100644
--- a/test/files/run/t6086-repl.check
+++ b/test/files/run/t6086-repl.check
@@ -1,12 +1,12 @@
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala>
-
-scala> case class X(s: String)
-defined class X
-
-scala> scala.reflect.runtime.universe.typeOf[X]
-res0: reflect.runtime.universe.Type = X
-
-scala>
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> case class X(s: String)
+defined class X
+
+scala> scala.reflect.runtime.universe.typeOf[X]
+res0: reflect.runtime.universe.Type = X
+
+scala>
diff --git a/test/files/run/t6086-vanilla.check b/test/files/run/t6086-vanilla.check
index fd66be08d0..62d8fe9f6d 100644
--- a/test/files/run/t6086-vanilla.check
+++ b/test/files/run/t6086-vanilla.check
@@ -1 +1 @@
-X
+X
diff --git a/test/files/run/t6114.scala b/test/files/run/t6114.scala
new file mode 100644
index 0000000000..cb880ece00
--- /dev/null
+++ b/test/files/run/t6114.scala
@@ -0,0 +1,61 @@
+object Test extends App {
+ def testList = {
+ val list = new java.util.ArrayList[Int]
+ list.add(1)
+ list.add(2)
+ list.add(3)
+ import scala.collection.JavaConverters._
+ val next = list.asScala ++ List(4,5,6)
+ assert(next != list.asScala)
+
+ val raw = list.asScala
+ val cloned = raw.clone
+ list.add(1)
+ assert(raw != cloned)
+ }
+ def testSet = {
+ val set = new java.util.HashSet[Int]
+ set.add(1)
+ set.add(2)
+ set.add(3)
+ import scala.collection.JavaConverters._
+ val next = set.asScala ++ Set(4,5,6)
+ assert(next != set.asScala)
+
+ val raw = set.asScala
+ val cloned = raw.clone
+ set.add(4)
+ assert(raw != cloned)
+ }
+ def testMap = {
+ val map = new java.util.HashMap[Int,Int]
+ map.put(1,1)
+ map.put(2,2)
+ map.put(3,3)
+ import scala.collection.JavaConverters._
+ val next = map.asScala ++ Map(4->4,5->5,6->6)
+ assert(next != map.asScala)
+
+ val raw = map.asScala
+ val cloned = raw.clone
+ map.put(4,4)
+ assert(raw != cloned)
+ }
+
+ def testCollection = {
+ val list: java.util.Collection[Int] = new java.util.ArrayDeque[Int]
+ list.add(1)
+ list.add(2)
+ list.add(3)
+ import scala.collection.JavaConverters._
+ val next = list.asScala ++ List(4,5,6)
+ assert(next != list.asScala)
+
+ // Note: Clone is hidden at this level, so no overridden cloning.
+ }
+
+ testList
+ testSet
+ testMap
+ testCollection
+}
diff --git a/test/files/run/t6188.check b/test/files/run/t6188.check
index 1af3932ecd..5d64afc47b 100644
--- a/test/files/run/t6188.check
+++ b/test/files/run/t6188.check
@@ -1 +1 @@
-Failure(java.lang.Exception: this is an exception)
+Failure(java.lang.Exception: this is an exception)
diff --git a/test/files/run/t6197.check b/test/files/run/t6197.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/run/t6197.check
diff --git a/test/files/run/t6197.scala b/test/files/run/t6197.scala
new file mode 100644
index 0000000000..5ab4b002d7
--- /dev/null
+++ b/test/files/run/t6197.scala
@@ -0,0 +1,21 @@
+import scala.collection.immutable._
+
+object Test extends App {
+
+ // test that a HashTrieSet with one leaf element is not created!
+ val x = HashSet.empty + 1 + 2
+ if(x.getClass.getSimpleName != "HashTrieSet")
+ println("A hash set containing two non-colliding values should be a HashTrieSet")
+
+ val y = x - 1
+ if(y.getClass.getSimpleName != "HashSet1")
+ println("A hash set containing one element should always use HashSet1")
+
+ // it is pretty hard to test the case where a HashTrieSet has one element which
+ // is itself of type HashTrieSet. That is because the improve hash function makes it very difficult
+ // to find keys that will have hashes that are close together.
+ //
+ // However, it is also not necessary. Removing the ability of a HashTrieSet to have
+ // one child of type HashTrieSet completely breaks the HashSet, so that many other
+ // tests fail
+}
diff --git a/test/files/run/t6198.check b/test/files/run/t6198.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/run/t6198.check
diff --git a/test/files/run/t6198.scala b/test/files/run/t6198.scala
new file mode 100644
index 0000000000..5aa8f1c1cf
--- /dev/null
+++ b/test/files/run/t6198.scala
@@ -0,0 +1,24 @@
+import scala.collection.immutable._
+
+object Test extends App {
+ // test that ListSet.tail does not use a builder
+ // we can't test for O(1) behavior, so the best we can do is to
+ // check that ls.tail always returns the same instance
+ val ls = ListSet.empty[Int] + 1 + 2
+
+ if(ls.tail ne ls.tail)
+ println("ListSet.tail should not use a builder!")
+
+ // class that always causes hash collisions
+ case class Collision(value:Int) { override def hashCode = 0 }
+
+ // create a set that should have a collision
+ val x = HashSet.empty + Collision(0) + Collision(1)
+ if(x.getClass.getSimpleName != "HashSetCollision1")
+ println("HashSet of size >1 with collisions should use HashSetCollision")
+
+ // remove the collision again by removing all but one element
+ val y = x - Collision(0)
+ if(y.getClass.getSimpleName != "HashSet1")
+ println("HashSet of size 1 should use HashSet1" + y.getClass)
+}
diff --git a/test/files/run/t6199-mirror.check b/test/files/run/t6199-mirror.check
index ec969b5b93..6a452c185a 100644
--- a/test/files/run/t6199-mirror.check
+++ b/test/files/run/t6199-mirror.check
@@ -1 +1 @@
-()
+()
diff --git a/test/files/run/t6199-toolbox.check b/test/files/run/t6199-toolbox.check
index ec969b5b93..6a452c185a 100644
--- a/test/files/run/t6199-toolbox.check
+++ b/test/files/run/t6199-toolbox.check
@@ -1 +1 @@
-()
+()
diff --git a/test/files/run/t6199-toolbox.scala b/test/files/run/t6199-toolbox.scala
index 14670f8e21..89015f5878 100644
--- a/test/files/run/t6199-toolbox.scala
+++ b/test/files/run/t6199-toolbox.scala
@@ -4,5 +4,5 @@ import scala.tools.reflect.ToolBox
object Test extends App {
val tb = cm.mkToolBox()
- println(tb.runExpr(Literal(Constant(()))))
+ println(tb.eval(Literal(Constant(()))))
} \ No newline at end of file
diff --git a/test/files/run/t6220.scala b/test/files/run/t6220.scala
new file mode 100644
index 0000000000..834b692f43
--- /dev/null
+++ b/test/files/run/t6220.scala
@@ -0,0 +1,92 @@
+import scala.collection.immutable._
+
+object Test extends App {
+
+ // finds an int x such that improve(x) differs in the first bit from improve(0),
+ // which is the worst case for the HashTrieSet
+ def findWorstCaseInts() {
+ // copy of improve from HashSet
+ def improve(hcode: Int) = {
+ var h: Int = hcode + ~(hcode << 9)
+ h = h ^ (h >>> 14)
+ h = h + (h << 4)
+ h ^ (h >>> 10)
+ }
+
+ // find two hashes which have a large separation
+ val x = 0
+ var y = 1
+ val ix = improve(x)
+ while(y!=0 && improve(y)!=ix+(1<<31))
+ y+=1
+ printf("%s %s %x %x\n",x,y,improve(x), improve(y))
+ }
+ // this is not done every test run since it would slow down ant test.suite too much.
+ // findWorstCaseInts()
+
+ // two numbers that are immediately adjacent when fed through HashSet.improve
+ val h0 = 0
+ val h1 = 1270889724
+
+ // h is the hashcode, i is ignored for the hashcode but relevant for equality
+ case class Collision(h:Int, i:Int) {
+ override def hashCode = h
+ }
+ val a = Collision(h0,0)
+ val b = Collision(h0,1)
+ val c = Collision(h1,0)
+
+ // create a HashSetCollision1
+ val x = HashSet(a) + b
+ if(x.getClass.getSimpleName != "HashSetCollision1")
+ println("x should be a collision")
+ StructureTests.validate(x)
+ // StructureTests.printStructure(x)
+ require(x.size==2 && x.contains(a) && x.contains(b))
+
+ // go from a HashSetCollision1 to a HashTrieSet with maximum depth
+ val y = x + c
+ if(y.getClass.getSimpleName != "HashTrieSet")
+ println("y should be a HashTrieSet")
+ StructureTests.validate(y)
+ // StructureTests.printStructure(y)
+ require(y.size==3 && y.contains(a) && y.contains(b) && y.contains(c))
+
+ // go from a HashSet1 directly to a HashTrieSet with maximum depth
+ val z = HashSet(a) + c
+ if(z.getClass.getSimpleName != "HashTrieSet")
+ println("z should be a HashTrieSet")
+ StructureTests.validate(z)
+ // StructureTests.printStructure(z)
+ require(z.size == 2 && z.contains(a) && z.contains(c))
+}
+
+package scala.collection.immutable {
+ object StructureTests {
+ def printStructure(x:HashSet[_], prefix:String="") {
+ x match {
+ case m:HashSet.HashTrieSet[_] =>
+ println(prefix+m.getClass.getSimpleName + " " + m.size)
+ m.elems.foreach(child => printStructure(child, prefix + " "))
+ case m:HashSet.HashSetCollision1[_] =>
+ println(prefix+m.getClass.getSimpleName + " " + m.ks.size)
+ case m:HashSet.HashSet1[_] =>
+ println(prefix+m.getClass.getSimpleName + " " + m.head)
+ case _ =>
+ println(prefix+"empty")
+ }
+ }
+
+ def validate(x:HashSet[_]) {
+ x match {
+ case m:HashSet.HashTrieSet[_] =>
+ require(m.elems.size>1 || (m.elems.size==1 && m.elems(0).isInstanceOf[HashSet.HashTrieSet[_]]))
+ m.elems.foreach(validate _)
+ case m:HashSet.HashSetCollision1[_] =>
+ require(m.ks.size>1)
+ case m:HashSet.HashSet1[_] =>
+ case _ =>
+ }
+ }
+ }
+}
diff --git a/test/files/run/t6223.check b/test/files/run/t6223.check
new file mode 100644
index 0000000000..90ec019407
--- /dev/null
+++ b/test/files/run/t6223.check
@@ -0,0 +1,4 @@
+bar
+bar$mcI$sp
+bar$mIc$sp
+bar$mIcI$sp
diff --git a/test/files/run/t6223.scala b/test/files/run/t6223.scala
new file mode 100644
index 0000000000..4ab7c832e6
--- /dev/null
+++ b/test/files/run/t6223.scala
@@ -0,0 +1,11 @@
+class Foo[@specialized(Int) A](a:A) {
+ def bar[@specialized(Int) B](f:A => B) = new Foo(f(a))
+}
+
+object Test {
+ def main(args:Array[String]) {
+ val f = new Foo(333)
+ val ms = f.getClass().getDeclaredMethods()
+ ms.foreach(m => println(m.getName))
+ }
+}
diff --git a/test/files/run/t6246.check b/test/files/run/t6246.check
new file mode 100644
index 0000000000..9532185ead
--- /dev/null
+++ b/test/files/run/t6246.check
@@ -0,0 +1,90 @@
+runtimeClass = byte, toString = Byte
+true
+true
+true
+false
+true
+false
+false
+false
+false
+runtimeClass = short, toString = Short
+true
+true
+true
+false
+true
+false
+false
+false
+false
+runtimeClass = char, toString = Char
+true
+true
+true
+false
+true
+false
+false
+false
+false
+runtimeClass = int, toString = Int
+true
+true
+true
+false
+true
+false
+false
+false
+false
+runtimeClass = long, toString = Long
+true
+true
+true
+false
+true
+false
+false
+false
+false
+runtimeClass = float, toString = Float
+true
+true
+true
+false
+true
+false
+false
+false
+false
+runtimeClass = double, toString = Double
+true
+true
+true
+false
+true
+false
+false
+false
+false
+runtimeClass = void, toString = Unit
+true
+true
+true
+false
+true
+false
+false
+false
+false
+runtimeClass = boolean, toString = Boolean
+true
+true
+true
+false
+true
+false
+false
+false
+false \ No newline at end of file
diff --git a/test/files/run/t6246.scala b/test/files/run/t6246.scala
new file mode 100644
index 0000000000..28765e1adf
--- /dev/null
+++ b/test/files/run/t6246.scala
@@ -0,0 +1,26 @@
+import scala.reflect.{ClassTag, classTag}
+
+object Test extends App {
+ def testValueClass(tag: ClassTag[_]) {
+ println(s"runtimeClass = ${tag.runtimeClass}, toString = ${tag.toString}")
+ println(tag <:< tag)
+ println(tag <:< ClassTag.AnyVal)
+ println(tag <:< ClassTag.Any)
+ println(tag <:< ClassTag.Nothing)
+ println(ClassTag.Nothing <:< tag)
+ println(tag <:< ClassTag.Null)
+ println(ClassTag.Null <:< tag)
+ println(tag <:< ClassTag.Object)
+ println(ClassTag.Object <:< tag)
+ }
+
+ testValueClass(ClassTag.Byte)
+ testValueClass(ClassTag.Short)
+ testValueClass(ClassTag.Char)
+ testValueClass(ClassTag.Int)
+ testValueClass(ClassTag.Long)
+ testValueClass(ClassTag.Float)
+ testValueClass(ClassTag.Double)
+ testValueClass(ClassTag.Unit)
+ testValueClass(ClassTag.Boolean)
+} \ No newline at end of file
diff --git a/test/files/run/t6260.check b/test/files/run/t6260.check
new file mode 100644
index 0000000000..54f98a10f0
--- /dev/null
+++ b/test/files/run/t6260.check
@@ -0,0 +1 @@
+Box(abcabc)
diff --git a/test/files/run/t6260.scala b/test/files/run/t6260.scala
new file mode 100644
index 0000000000..cfe9e1e640
--- /dev/null
+++ b/test/files/run/t6260.scala
@@ -0,0 +1,12 @@
+class Box[X <: CharSequence](val x: X) extends AnyVal {
+ def map[Y <: CharSequence](f: X => Y): Box[Y] =
+ ((bx: Box[X]) => new Box(f(bx.x)))(this)
+ override def toString = s"Box($x)"
+}
+
+object Test {
+ def main(args: Array[String]) {
+ val g = (x: String) => x + x
+ println(new Box("abc") map g)
+ }
+}
diff --git a/test/files/run/t6261.scala b/test/files/run/t6261.scala
new file mode 100644
index 0000000000..b4463256c9
--- /dev/null
+++ b/test/files/run/t6261.scala
@@ -0,0 +1,130 @@
+import scala.collection.immutable._
+
+object Test extends App {
+
+ def test0() {
+ val m=ListMap(1->2,3->4)
+ if(m.tail ne m.tail)
+ println("ListMap.tail uses a builder, so it is not O(1)")
+ }
+
+ def test1() {
+ // test that a HashTrieMap with one leaf element is not created!
+ val x = HashMap.empty + (1->1) + (2->2)
+ if(x.getClass.getSimpleName != "HashTrieMap")
+ println("A hash map containing two non-colliding values should be a HashTrieMap")
+
+ val y = x - 1
+ if(y.getClass.getSimpleName != "HashMap1")
+ println("A hash map containing one element should always use HashMap1")
+ }
+
+ def test2() {
+ // class that always causes hash collisions
+ case class Collision(value:Int) { override def hashCode = 0 }
+
+ // create a map that should have a collision
+ val x = HashMap.empty + (Collision(0)->0) + (Collision(1) ->0)
+ if(x.getClass.getSimpleName != "HashMapCollision1")
+ println("HashMap of size >1 with collisions should use HashMapCollision")
+
+ // remove the collision again by removing all but one element
+ val y = x - Collision(0)
+ if(y.getClass.getSimpleName != "HashMap1")
+ println("HashMap of size 1 should use HashMap1" + y.getClass)
+ }
+ def test3() {
+ // finds an int x such that improve(x) differs in the first bit from improve(0),
+ // which is the worst case for the HashTrieSet
+ def findWorstCaseInts() {
+ // copy of improve from HashSet
+ def improve(hcode: Int) = {
+ var h: Int = hcode + ~(hcode << 9)
+ h = h ^ (h >>> 14)
+ h = h + (h << 4)
+ h ^ (h >>> 10)
+ }
+
+ // find two hashes which have a large separation
+ val x = 0
+ var y = 1
+ val ix = improve(x)
+ while(y!=0 && improve(y)!=ix+(1<<31))
+ y+=1
+ printf("%s %s %x %x\n",x,y,improve(x), improve(y))
+ }
+ // this is not done every test run since it would slow down ant test.suite too much.
+ // findWorstCaseInts()
+
+ // two numbers that are immediately adjacent when fed through HashSet.improve
+ val h0 = 0
+ val h1 = 1270889724
+
+ // h is the hashcode, i is ignored for the hashcode but relevant for equality
+ case class Collision(h:Int, i:Int) {
+ override def hashCode = h
+ }
+ val a = Collision(h0,0)->0
+ val b = Collision(h0,1)->0
+ val c = Collision(h1,0)->0
+
+ // create a HashMapCollision1
+ val x = HashMap(a) + b
+ if(x.getClass.getSimpleName != "HashMapCollision1")
+ println("x should be a HashMapCollision")
+ StructureTests.validate(x)
+ //StructureTests.printStructure(x)
+ require(x.size==2 && x.contains(a._1) && x.contains(b._1))
+
+ // go from a HashMapCollision1 to a HashTrieMap with maximum depth
+ val y = x + c
+ if(y.getClass.getSimpleName != "HashTrieMap")
+ println("y should be a HashTrieMap")
+ StructureTests.validate(y)
+ // StructureTests.printStructure(y)
+ require(y.size==3 && y.contains(a._1) && y.contains(b._1) && y.contains(c._1))
+
+ // go from a HashMap1 directly to a HashTrieMap with maximum depth
+ val z = HashMap(a) + c
+ if(z.getClass.getSimpleName != "HashTrieMap")
+ println("z should be a HashTrieMap")
+ StructureTests.validate(z)
+ // StructureTests.printStructure(z)
+ require(z.size == 2 && z.contains(a._1) && z.contains(c._1))
+ }
+ test0()
+ test1()
+ test2()
+ test3()
+}
+
+
+package scala.collection.immutable {
+ object StructureTests {
+ def printStructure(x:HashMap[_,_], prefix:String="") {
+ x match {
+ case m:HashMap.HashTrieMap[_,_] =>
+ println(prefix+m.getClass.getSimpleName + " " + m.size)
+ m.elems.foreach(child => printStructure(child, prefix + " "))
+ case m:HashMap.HashMapCollision1[_,_] =>
+ println(prefix+m.getClass.getSimpleName + " " + m.kvs.size)
+ case m:HashMap.HashMap1[_,_] =>
+ println(prefix+m.getClass.getSimpleName + " " + m.head)
+ case _ =>
+ println(prefix+"empty")
+ }
+ }
+
+ def validate(x:HashMap[_,_]) {
+ x match {
+ case m:HashMap.HashTrieMap[_,_] =>
+ require(m.elems.size>1 || (m.elems.size==1 && m.elems(0).isInstanceOf[HashMap.HashTrieMap[_,_]]))
+ m.elems.foreach(validate _)
+ case m:HashMap.HashMapCollision1[_,_] =>
+ require(m.kvs.size>1)
+ case m:HashMap.HashMap1[_,_] =>
+ case _ =>
+ }
+ }
+ }
+}
diff --git a/test/files/run/t6271.scala b/test/files/run/t6271.scala
new file mode 100644
index 0000000000..8ebf7ad8b5
--- /dev/null
+++ b/test/files/run/t6271.scala
@@ -0,0 +1,32 @@
+object Test extends App {
+ def filterIssue = {
+ val viewed : Iterable[Iterable[Int]] = List(List(0).view).view
+ val filtered = viewed flatMap { x => List( x filter (_ > 0) ) }
+ filtered.iterator.toIterable.flatten
+ }
+ def takenIssue = {
+ val viewed : Iterable[Iterable[Int]] = List(List(0).view).view
+ val filtered = viewed flatMap { x => List( x take 0 ) }
+ filtered.iterator.toIterable.flatten
+ }
+ def droppedIssue = {
+ val viewed : Iterable[Iterable[Int]] = List(List(0).view).view
+ val filtered = viewed flatMap { x => List( x drop 1 ) }
+ filtered.iterator.toIterable.flatten
+ }
+ def flatMappedIssue = {
+ val viewed : Iterable[Iterable[Int]] = List(List(0).view).view
+ val filtered = viewed flatMap { x => List( x flatMap (_ => List()) ) }
+ filtered.iterator.toIterable.flatten
+ }
+ def slicedIssue = {
+ val viewed : Iterable[Iterable[Int]] = List(List(0).view).view
+ val filtered = viewed flatMap { x => List( x slice (2,3) ) }
+ filtered.iterator.toIterable.flatten
+ }
+ filterIssue
+ takenIssue
+ droppedIssue
+ flatMappedIssue
+ slicedIssue
+}
diff --git a/test/files/run/t6272.check b/test/files/run/t6272.check
new file mode 100644
index 0000000000..f00c965d83
--- /dev/null
+++ b/test/files/run/t6272.check
@@ -0,0 +1,10 @@
+1
+2
+3
+4
+5
+6
+7
+8
+9
+10
diff --git a/test/files/run/t6272.scala b/test/files/run/t6272.scala
new file mode 100644
index 0000000000..174436919b
--- /dev/null
+++ b/test/files/run/t6272.scala
@@ -0,0 +1,62 @@
+// x1, x2, and x3 resulted in: symbol variable bitmap$0 does not exist in A.<init>
+object A {
+
+ try {
+ lazy val x1 = 1
+ println(x1)
+ sys.error("!")
+ } catch {
+ case _: Throwable =>
+ lazy val x2 = 2
+ println(x2)
+ } finally {
+ lazy val x3 = 3
+ println(x3)
+ }
+
+ if ("".isEmpty) {
+ lazy val x4 = 4
+ println(x4)
+ }
+
+ var b = true
+ while(b) {
+ lazy val x5 = 5
+ println(x5)
+ b = false
+ }
+
+
+ def method {
+ try {
+ lazy val x6 = 6
+ println(x6)
+ sys.error("!")
+ } catch {
+ case _: Throwable =>
+ lazy val x7 = 7
+ println(x7)
+ } finally {
+ lazy val x8 = 8
+ println(x8)
+ }
+
+ if ("".isEmpty) {
+ lazy val x9 = 9
+ println(x9)
+ }
+
+ var b = true
+ while(b) {
+ lazy val x10 = 10
+ println(x10)
+ b = false
+ }
+ }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ A.method
+ }
+}
diff --git a/test/files/run/t6273.check b/test/files/run/t6273.check
new file mode 100644
index 0000000000..c1c18daac2
--- /dev/null
+++ b/test/files/run/t6273.check
@@ -0,0 +1,19 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> val y = 55
+y: Int = 55
+
+scala> val x = s"""
+ y = $y
+"""
+x: String =
+"
+ y = 55
+"
+
+scala>
+
+scala>
diff --git a/test/files/run/t6273.scala b/test/files/run/t6273.scala
new file mode 100644
index 0000000000..ed0fd452e0
--- /dev/null
+++ b/test/files/run/t6273.scala
@@ -0,0 +1,11 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def tq = "\"\"\""
+ def code = s"""
+val y = 55
+val x = s$tq
+ y = $$y
+$tq
+ """
+}
diff --git a/test/files/run/t6277.check b/test/files/run/t6277.check
new file mode 100644
index 0000000000..f32a5804e2
--- /dev/null
+++ b/test/files/run/t6277.check
@@ -0,0 +1 @@
+true \ No newline at end of file
diff --git a/test/files/run/t6277.scala b/test/files/run/t6277.scala
new file mode 100644
index 0000000000..41feee8a8a
--- /dev/null
+++ b/test/files/run/t6277.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ locally {
+ val sym = typeOf[List[_]].typeSymbol.asClass
+ val q = sym.isSealed
+ println(q)
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t6287.check b/test/files/run/t6287.check
new file mode 100644
index 0000000000..a86ecbee42
--- /dev/null
+++ b/test/files/run/t6287.check
@@ -0,0 +1,3 @@
+Vector(2, 3, 4)
+Vector(2, 3, 4)
+Vector(2, 3, 4)
diff --git a/test/files/run/t6287.scala b/test/files/run/t6287.scala
new file mode 100644
index 0000000000..0c75d1081b
--- /dev/null
+++ b/test/files/run/t6287.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect._
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val t1 = tb.parse("1 to 3 map (_+1)")
+ println(tb.eval(t1))
+ println(tb.eval(t1))
+ println(tb.eval(t1))
+} \ No newline at end of file
diff --git a/test/files/run/t6290.scala b/test/files/run/t6290.scala
new file mode 100644
index 0000000000..9d05db0d18
--- /dev/null
+++ b/test/files/run/t6290.scala
@@ -0,0 +1,4 @@
+object Test {
+ implicit val foo = language.dynamics
+ def main(args: Array[String]): Unit = ()
+}
diff --git a/test/files/run/t6292.scala b/test/files/run/t6292.scala
new file mode 100644
index 0000000000..51e31f95fc
--- /dev/null
+++ b/test/files/run/t6292.scala
@@ -0,0 +1,18 @@
+ import scala.collection.mutable.DoubleLinkedList
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ cloneAndtest(DoubleLinkedList[Int]())
+ cloneAndtest(DoubleLinkedList[Int](1))
+ cloneAndtest(DoubleLinkedList[Int](1,2,3,4))
+ }
+
+ def cloneAndtest(l: DoubleLinkedList[Int]): Unit =
+ testSame(l, l.clone.asInstanceOf[DoubleLinkedList[Int]])
+
+ def testSame(one: DoubleLinkedList[Int], two: DoubleLinkedList[Int]): Unit = {
+ def msg = s" for ${one} and ${two} !"
+ assert(one.size == two.size, s"Cloned sizes are not the same $msg!")
+ assert(one == two, s"Cloned lists are not equal $msg")
+ }
+}
diff --git a/test/files/run/t6318_derived.check b/test/files/run/t6318_derived.check
new file mode 100644
index 0000000000..926f2a4ba2
--- /dev/null
+++ b/test/files/run/t6318_derived.check
@@ -0,0 +1,3 @@
+Some(X)
+true
+Some(X)
diff --git a/test/files/run/t6318_derived.scala b/test/files/run/t6318_derived.scala
new file mode 100644
index 0000000000..ccdc18daee
--- /dev/null
+++ b/test/files/run/t6318_derived.scala
@@ -0,0 +1,15 @@
+import scala.reflect.{ClassTag, classTag}
+
+object Test extends App {
+ def test[T: ClassTag](x: T) {
+ println(classTag[T].runtimeClass.isAssignableFrom(x.getClass))
+ println(classTag[T].unapply(x))
+ }
+
+ class X(val x: Int) extends AnyVal { override def toString = "X" }
+ val x = new X(1)
+ // the commented line crashes because of SI-6326
+ //println(classTag[X].runtimeClass.isAssignableFrom(x.getClass))
+ println(classTag[X].unapply(x))
+ test(x)
+} \ No newline at end of file
diff --git a/test/files/run/t6318_primitives.check b/test/files/run/t6318_primitives.check
new file mode 100644
index 0000000000..b330f91276
--- /dev/null
+++ b/test/files/run/t6318_primitives.check
@@ -0,0 +1,36 @@
+true
+Some(1)
+false
+None
+true
+Some(1)
+false
+None
+true
+Some()
+false
+None
+true
+Some(1)
+false
+None
+true
+Some(1)
+false
+None
+true
+Some(1.0)
+false
+None
+true
+Some(1.0)
+false
+None
+true
+Some(true)
+false
+None
+true
+Some(())
+false
+None
diff --git a/test/files/run/t6318_primitives.scala b/test/files/run/t6318_primitives.scala
new file mode 100644
index 0000000000..30f27120b3
--- /dev/null
+++ b/test/files/run/t6318_primitives.scala
@@ -0,0 +1,71 @@
+import scala.reflect.{ClassTag, classTag}
+
+object Test extends App {
+ def test[T: ClassTag](x: T) {
+ println(classTag[T].runtimeClass.isAssignableFrom(x.getClass))
+ println(classTag[T].unapply(x))
+ }
+
+ {
+ val x = 1.toByte
+ println(ClassTag.Byte.runtimeClass.isAssignableFrom(x.getClass))
+ println(ClassTag.Byte.unapply(x))
+ test(x)
+ }
+
+ {
+ val x = 1.toShort
+ println(ClassTag.Short.runtimeClass.isAssignableFrom(x.getClass))
+ println(ClassTag.Short.unapply(x))
+ test(x)
+ }
+
+ {
+ val x = 1.toChar
+ println(ClassTag.Char.runtimeClass.isAssignableFrom(x.getClass))
+ println(ClassTag.Char.unapply(x))
+ test(x)
+ }
+
+ {
+ val x = 1.toInt
+ println(ClassTag.Int.runtimeClass.isAssignableFrom(x.getClass))
+ println(ClassTag.Int.unapply(x))
+ test(x)
+ }
+
+ {
+ val x = 1.toLong
+ println(ClassTag.Long.runtimeClass.isAssignableFrom(x.getClass))
+ println(ClassTag.Long.unapply(x))
+ test(x)
+ }
+
+ {
+ val x = 1.toFloat
+ println(ClassTag.Float.runtimeClass.isAssignableFrom(x.getClass))
+ println(ClassTag.Float.unapply(x))
+ test(x)
+ }
+
+ {
+ val x = 1.toDouble
+ println(ClassTag.Double.runtimeClass.isAssignableFrom(x.getClass))
+ println(ClassTag.Double.unapply(x))
+ test(x)
+ }
+
+ {
+ val x = true
+ println(ClassTag.Boolean.runtimeClass.isAssignableFrom(x.getClass))
+ println(ClassTag.Boolean.unapply(x))
+ test(x)
+ }
+
+ {
+ val x = ()
+ println(ClassTag.Unit.runtimeClass.isAssignableFrom(x.getClass))
+ println(ClassTag.Unit.unapply(x))
+ test(x)
+ }
+} \ No newline at end of file
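
The checks above exercise ClassTag.unapply on boxed primitives. As a minimal standalone sketch (not part of this patch; the object and method names are illustrative only), a ClassTag-bounded type pattern is the usual client of that unapply:

import scala.reflect.ClassTag

object ClassTagPatternSketch extends App {
  // A type pattern on an abstract type T compiles down to ClassTag.unapply when a
  // ClassTag[T] is in scope, so boxed primitives such as java.lang.Integer should
  // match Int once unapply handles them (which is what the test above verifies).
  def firstOfType[T: ClassTag](xs: List[Any]): Option[T] =
    xs.collectFirst { case x: T => x }

  println(firstOfType[Int](List("a", 2, 3.0)))  // expected: Some(2)
  println(firstOfType[String](List(1, 2, 3)))   // expected: None
}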
diff --git a/test/files/run/t6323b.check b/test/files/run/t6323b.check
new file mode 100644
index 0000000000..d6b1d1fd90
--- /dev/null
+++ b/test/files/run/t6323b.check
@@ -0,0 +1 @@
+cannot reflect value a, because it's a member of a weak type Test
diff --git a/test/files/run/t6323b.scala b/test/files/run/t6323b.scala
new file mode 100644
index 0000000000..f530ac3ecc
--- /dev/null
+++ b/test/files/run/t6323b.scala
@@ -0,0 +1,21 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => m}
+import scala.reflect.runtime.{universe => u}
+
+object Test extends App {
+ locally {
+ try {
+ case class Test(a:String,b:List[Int])
+
+ val lookAtMe = m.reflect(Test("a",List(5)))
+ val value = u.weakTypeOf[Test]
+ val members = value.members
+ val member = value.members.filter(_.name.encoded == "a")
+ val aAccessor = lookAtMe.reflectMethod(member.head.asMethod)
+ val thisShouldBeA = aAccessor.apply()
+ println(thisShouldBeA)
+ } catch {
+ case ScalaReflectionException(msg) => println(msg)
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t6327.check b/test/files/run/t6327.check
new file mode 100644
index 0000000000..f7bacac931
--- /dev/null
+++ b/test/files/run/t6327.check
@@ -0,0 +1,4 @@
+A
+A
+A
+A
diff --git a/test/files/run/t6327.scala b/test/files/run/t6327.scala
new file mode 100644
index 0000000000..7683101f14
--- /dev/null
+++ b/test/files/run/t6327.scala
@@ -0,0 +1,22 @@
+import language._
+
+object Test extends App {
+
+ case class R[+T](s: String) { def x() = println(s) }
+
+ // Implicits in contention; StringR is nested to avoid ambiguity
+ object R { implicit val StringR = R[String]("A") }
+ implicit val Default = R[Any]("B")
+
+ class B() extends Dynamic {
+ def selectDynamic[T](f: String)(implicit r: R[T]): Unit = r.x()
+ }
+
+ val b = new B()
+
+ // These should all produce the same output, but they don't
+ b.selectDynamic[String]("baz")
+ b.baz[String]
+ val c = b.selectDynamic[String]("baz")
+ val d = b.baz[String]
+}
diff --git a/test/files/run/t6329_repl.check b/test/files/run/t6329_repl.check
new file mode 100644
index 0000000000..8663184bde
--- /dev/null
+++ b/test/files/run/t6329_repl.check
@@ -0,0 +1,13 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> classManifest[List[_]]
+warning: there were 1 deprecation warnings; re-run with -deprecation for details
+res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[Any]
+
+scala> scala.reflect.classTag[List[_]]
+res1: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List
+
+scala>
diff --git a/test/files/run/t6329_repl.scala b/test/files/run/t6329_repl.scala
new file mode 100644
index 0000000000..add6d64962
--- /dev/null
+++ b/test/files/run/t6329_repl.scala
@@ -0,0 +1,8 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ |classManifest[List[_]]
+ |scala.reflect.classTag[List[_]]
+ |""".stripMargin
+}
diff --git a/test/files/run/t6329_repl_bug.check b/test/files/run/t6329_repl_bug.check
new file mode 100644
index 0000000000..8663184bde
--- /dev/null
+++ b/test/files/run/t6329_repl_bug.check
@@ -0,0 +1,13 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> classManifest[List[_]]
+warning: there were 1 deprecation warnings; re-run with -deprecation for details
+res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[Any]
+
+scala> scala.reflect.classTag[List[_]]
+res1: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List
+
+scala>
diff --git a/test/files/run/t6329_repl_bug.pending b/test/files/run/t6329_repl_bug.pending
new file mode 100644
index 0000000000..9997d1771e
--- /dev/null
+++ b/test/files/run/t6329_repl_bug.pending
@@ -0,0 +1,10 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ |import scala.reflect.runtime.universe._
+ |import scala.reflect.runtime._
+ |classManifest[List[_]]
+ |scala.reflect.classTag[List[_]]
+ |""".stripMargin
+}
diff --git a/test/files/run/t6329_vanilla.check b/test/files/run/t6329_vanilla.check
new file mode 100644
index 0000000000..8282afaeba
--- /dev/null
+++ b/test/files/run/t6329_vanilla.check
@@ -0,0 +1,2 @@
+scala.collection.immutable.List[Any]
+scala.collection.immutable.List
diff --git a/test/files/run/t6329_vanilla.scala b/test/files/run/t6329_vanilla.scala
new file mode 100644
index 0000000000..a31cd5c72e
--- /dev/null
+++ b/test/files/run/t6329_vanilla.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ println(classManifest[List[_]])
+ println(scala.reflect.classTag[List[_]])
+} \ No newline at end of file
diff --git a/test/files/run/t6329_vanilla_bug.check b/test/files/run/t6329_vanilla_bug.check
new file mode 100644
index 0000000000..8282afaeba
--- /dev/null
+++ b/test/files/run/t6329_vanilla_bug.check
@@ -0,0 +1,2 @@
+scala.collection.immutable.List[Any]
+scala.collection.immutable.List
diff --git a/test/files/run/t6329_vanilla_bug.pending b/test/files/run/t6329_vanilla_bug.pending
new file mode 100644
index 0000000000..404f90bf6e
--- /dev/null
+++ b/test/files/run/t6329_vanilla_bug.pending
@@ -0,0 +1,7 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime._
+
+object Test extends App {
+ println(classManifest[List[_]])
+ println(scala.reflect.classTag[List[_]])
+} \ No newline at end of file
diff --git a/test/files/run/t6331.check b/test/files/run/t6331.check
new file mode 100644
index 0000000000..9bf3f7823a
--- /dev/null
+++ b/test/files/run/t6331.check
@@ -0,0 +1,23 @@
+ () == ()
+ true == true
+ true != false
+ false != true
+ 0.toByte == 0.toByte
+ 0.toByte != 1.toByte
+ 0.toShort == 0.toShort
+ 0.toShort != 1.toShort
+ 0 == 0
+ 0 != 1
+ 0L == 0L
+ 0L != 1L
+ 0.0f == 0.0f
+ 0.0f != -0.0f
+ -0.0f != 0.0f
+ NaNf == NaNf
+ 0.0d == 0.0d
+ 0.0d != -0.0d
+ -0.0d != 0.0d
+ NaNd == NaNd
+ 0 != 0.0d
+ 0 != 0L
+ 0.0d != 0.0f
diff --git a/test/files/run/t6331.scala b/test/files/run/t6331.scala
new file mode 100644
index 0000000000..4e43a7686e
--- /dev/null
+++ b/test/files/run/t6331.scala
@@ -0,0 +1,71 @@
+import scala.tools.partest._
+import java.io._
+import scala.tools.nsc._
+import scala.tools.nsc.util.CommandLineParser
+import scala.tools.nsc.{Global, Settings, CompilerCommand}
+import scala.tools.nsc.reporters.ConsoleReporter
+
+// Test of Constant#equals, which must account for floating point intricacies.
+object Test extends DirectTest {
+
+ override def code = ""
+
+ override def show() {
+ val global = newCompiler()
+ import global._
+
+ def check(c1: Any, c2: Any): Unit = {
+ val const1 = Constant(c1)
+ val const2 = Constant(c2)
+ val equal = const1 == const2
+ def show(a: Any) = "" + a + (a match {
+ case _: Byte => ".toByte"
+ case _: Short => ".toShort"
+ case _: Long => "L"
+ case _: Float => "f"
+ case _: Double => "d"
+ case _ => ""
+ })
+ val op = if (equal) "==" else "!="
+ println(f"${show(c1)}%12s $op ${show(c2)}")
+
+ val hash1 = const1.hashCode
+ val hash2 = const2.hashCode
+ val hashesEqual = hash1 == hash2
+ val hashBroken = equal && !hashesEqual
+ if (hashBroken) println(f"$hash1%12s != $hash2 // hash codes differ for equal objects!!")
+ }
+
+ check((), ())
+
+ check(true, true)
+ check(true, false)
+ check(false, true)
+
+ check(0.toByte, 0.toByte)
+ check(0.toByte, 1.toByte)
+
+ check(0.toShort, 0.toShort)
+ check(0.toShort, 1.toShort)
+
+ check(0, 0)
+ check(0, 1)
+
+ check(0L, 0L)
+ check(0L, 1L)
+
+ check(0f, 0f)
+ check(0f, -0f)
+ check(-0f, 0f)
+ check(Float.NaN, Float.NaN)
+
+ check(0d, 0d)
+ check(0d, -0d)
+ check(-0d, 0d)
+ check(Double.NaN, Double.NaN)
+
+ check(0, 0d)
+ check(0, 0L)
+ check(0d, 0f)
+ }
+}
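
For readers of the check file above: Constant#equals has to diverge from primitive == on floating-point zeros and NaN. A minimal standalone sketch (not part of this patch) of the two behaviours it must reconcile:

object FloatEqualitySketch extends App {
  // primitive comparison: the two zeros are equal, NaN is not equal to itself
  println(0.0 == -0.0)               // true
  println(Double.NaN == Double.NaN)  // false

  // boxed comparison distinguishes the zeros and unifies NaN, which matches the
  // expected Constant output above (0.0d != -0.0d, NaNd == NaNd)
  println(java.lang.Double.valueOf(0.0).equals(java.lang.Double.valueOf(-0.0)))               // false
  println(java.lang.Double.valueOf(Double.NaN).equals(java.lang.Double.valueOf(Double.NaN)))  // true
}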
diff --git a/test/files/run/t6331b.check b/test/files/run/t6331b.check
new file mode 100644
index 0000000000..6ca09e3814
--- /dev/null
+++ b/test/files/run/t6331b.check
@@ -0,0 +1,30 @@
+trace> if (Test.this.t)
+ -0.0
+else
+ 0.0
+res: Double = -0.0
+
+trace> if (Test.this.t)
+ 0.0
+else
+ -0.0
+res: Double = 0.0
+
+trace> Test.this.intercept.apply[Any](if (scala.this.Predef.???)
+ -0.0
+else
+ 0.0)
+res: Any = class scala.NotImplementedError
+
+trace> Test.this.intercept.apply[Any](if (scala.this.Predef.???)
+ 0.0
+else
+ 0.0)
+res: Any = class scala.NotImplementedError
+
+trace> Test.this.intercept.apply[Any](if (scala.this.Predef.???)
+ ()
+else
+ ())
+res: Any = class scala.NotImplementedError
+
diff --git a/test/files/run/t6331b.scala b/test/files/run/t6331b.scala
new file mode 100644
index 0000000000..f966abea51
--- /dev/null
+++ b/test/files/run/t6331b.scala
@@ -0,0 +1,20 @@
+import scala.tools.partest._
+import java.io._
+import scala.tools.nsc._
+import scala.tools.nsc.util.CommandLineParser
+import scala.tools.nsc.{Global, Settings, CompilerCommand}
+import scala.tools.nsc.reporters.ConsoleReporter
+
+import scala.tools.partest.trace
+import scala.util.control.Exception._
+
+
+object Test extends App {
+ def intercept = allCatch.withApply(_.getClass)
+ val t: Boolean = true
+ trace(if (t) -0d else 0d)
+ trace(if (t) 0d else -0d)
+ trace(intercept(if (???) -0d else 0d))
+ trace(intercept(if (???) 0d else 0d))
+ trace(intercept(if (???) () else ()))
+}
diff --git a/test/files/run/t6333.scala b/test/files/run/t6333.scala
new file mode 100644
index 0000000000..266d95ce69
--- /dev/null
+++ b/test/files/run/t6333.scala
@@ -0,0 +1,29 @@
+object Test extends App {
+ import util.Try
+
+ val a = "apple"
+ def fail: String = throw new Exception("Fail!")
+ def argh: Try[String] = throw new Exception("Argh!")
+
+ // No throw tests
+ def tryMethods(expr: => String): Unit = {
+ Try(expr) orElse argh
+ Try(expr).transform(_ => argh, _ => argh)
+ Try(expr).recoverWith { case e if (a == fail) => Try(a) }
+ Try(expr).recoverWith { case _ => argh }
+ Try(expr).getOrElse(a)
+ // TODO - Fail getOrElse?
+ Try(expr) orElse argh
+ Try(expr) orElse Try(a)
+ Try(expr) map (_ => fail)
+ Try(expr) map (_ => a)
+ Try(expr) flatMap (_ => argh)
+ Try(expr) flatMap (_ => Try(a))
+ Try(expr) filter (_ => throw new Exception("O NOES"))
+ Try(expr) filter (_ => true)
+ Try(expr) recover { case _ => fail }
+ Try(expr).failed
+ }
+ tryMethods(a)
+ tryMethods(fail)
+}
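
The test above asserts that none of the Try combinators let an exception escape. A minimal standalone sketch (not part of this patch; values are illustrative) of that capture-and-carry behaviour:

import scala.util.Try

object TryNoThrowSketch extends App {
  // Try(...) captures non-fatal exceptions instead of propagating them
  val failed: Try[String] = Try(throw new Exception("boom"))

  println(failed.isFailure)                         // true
  println(failed.map(_.length).isFailure)           // true: map on a Failure neither runs nor throws
  println(failed.getOrElse("fallback"))             // fallback
  println(Try("ok").recover { case _ => "never" })  // Success(ok): recover is a no-op on Success
}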
diff --git a/test/files/run/t6337a.scala b/test/files/run/t6337a.scala
new file mode 100644
index 0000000000..f5490f5cf0
--- /dev/null
+++ b/test/files/run/t6337a.scala
@@ -0,0 +1,16 @@
+object Test {
+ def main(args: Array[String]) {
+ val x = X(XX(3))
+ assert(x.q.x.x + 9 == 13)
+ }
+}
+trait Q extends Any {
+ def x: Int
+ def inc: XX
+}
+case class X(val x: Q) extends AnyVal {
+ def q = X(x.inc)
+}
+case class XX(val x: Int) extends AnyVal with Q {
+ def inc = XX(x + 1)
+}
diff --git a/test/files/run/t6344.check b/test/files/run/t6344.check
new file mode 100644
index 0000000000..5ac04d0320
--- /dev/null
+++ b/test/files/run/t6344.check
@@ -0,0 +1,132 @@
+C0
+public int C0.v1(int)
+public <A> int C0.v1(int)
+public int C0.v3()
+public <A> int C0.v3()
+public int C0.v4(int,scala.collection.immutable.List)
+public <A> int C0.v4(int,scala.collection.immutable.List<Val<A>>)
+public scala.collection.immutable.List C0.v2()
+public <A> scala.collection.immutable.List<Val<A>> C0.v2()
+
+C1
+public java.lang.Object C1.v1(java.lang.Object)
+public <A> java.lang.Object C1.v1(java.lang.Object)
+public java.lang.Object C1.v3()
+public <A> java.lang.Object C1.v3()
+public java.lang.Object C1.v4(java.lang.Object,scala.collection.immutable.List)
+public <A> java.lang.Object C1.v4(java.lang.Object,scala.collection.immutable.List<java.lang.Object>)
+public scala.collection.immutable.List C1.v2()
+public <A> scala.collection.immutable.List<java.lang.Object> C1.v2()
+
+C2
+public java.lang.String C2.v1(java.lang.String)
+public <A> java.lang.String C2.v1(java.lang.String)
+public java.lang.String C2.v3()
+public <A> java.lang.String C2.v3()
+public java.lang.String C2.v4(java.lang.String,scala.collection.immutable.List)
+public <A> java.lang.String C2.v4(java.lang.String,scala.collection.immutable.List<java.lang.String>)
+public scala.collection.immutable.List C2.v2()
+public <A> scala.collection.immutable.List<java.lang.String> C2.v2()
+
+C3
+public java.lang.Object C3.v1(java.lang.Object)
+public A C3.v1(A)
+public java.lang.Object C3.v3()
+public A C3.v3()
+public java.lang.Object C3.v4(java.lang.Object,scala.collection.immutable.List)
+public A C3.v4(A,scala.collection.immutable.List<A>)
+public java.lang.Object C3.x()
+public A C3.x()
+public scala.collection.immutable.List C3.v2()
+public scala.collection.immutable.List<A> C3.v2()
+
+C4
+public java.lang.Integer C4.v1(java.lang.Integer)
+public int C4.v1(int)
+public java.lang.Integer C4.v3()
+public int C4.v3()
+public java.lang.Integer C4.v4(java.lang.Integer,scala.collection.immutable.List)
+public int C4.v4(int,scala.collection.immutable.List<ValA<java.lang.Object>>)
+public scala.collection.immutable.List C4.v2()
+public scala.collection.immutable.List<ValA<java.lang.Object>> C4.v2()
+
+C4B
+public java.lang.String C4B.v1(java.lang.String)
+public java.lang.String C4B.v1(java.lang.String)
+public java.lang.String C4B.v3()
+public java.lang.String C4B.v3()
+public java.lang.String C4B.v4(java.lang.String,scala.collection.immutable.List)
+public java.lang.String C4B.v4(java.lang.String,scala.collection.immutable.List<java.lang.String>)
+public scala.collection.immutable.List C4B.v2()
+public scala.collection.immutable.List<java.lang.String> C4B.v2()
+
+C5
+public double C5.f2(int,java.lang.Object,java.lang.String,double)
+public double C5.f2(int,java.lang.Object,java.lang.String,double)
+public int C5.f3(java.lang.Integer)
+public int C5.f3(int)
+public int C5.f4(java.lang.Integer)
+public int C5.f4(int)
+public int C5.f5(java.lang.Integer)
+public int C5.f5(int)
+public java.lang.Object C5.f1(int,java.lang.Object,java.lang.String,java.lang.Object)
+public <A> A C5.f1(int,java.lang.Object,java.lang.String,A)
+
+C6
+public java.lang.Object C6.f1(int,java.lang.Object,java.lang.String,java.lang.Object)
+public A C6.f1(int,java.lang.Object,java.lang.String,A)
+
+C7
+public java.lang.Integer C7.f1(int,java.lang.Object,java.lang.String,java.lang.Integer)
+public int C7.f1(int,java.lang.Object,java.lang.String,int)
+public java.lang.Object C7.f1(int,java.lang.Object,java.lang.String,java.lang.Object)
+public java.lang.Object C7.f1(int,java.lang.Object,java.lang.String,java.lang.Object)
+
+Gen
+public abstract Gen Gen.plus(Gen,Gen)
+public abstract Gen<A> Gen.plus(Gen<A>,Gen<A>)
+public abstract java.lang.Object Gen.x()
+public abstract A Gen.x()
+
+ValueInt
+public Gen ValueInt.plus(Gen,Gen)
+public Gen<java.lang.Object> ValueInt.plus(Gen<java.lang.Object>,Gen<java.lang.Object>)
+public boolean ValueInt.equals(java.lang.Object)
+public boolean ValueInt.equals(java.lang.Object)
+public int ValueInt.hashCode()
+public int ValueInt.hashCode()
+public int ValueInt.iplus(int,int)
+public int ValueInt.iplus(int,int)
+public int ValueInt.x()
+public int ValueInt.x()
+public java.lang.Object ValueInt.x()
+public java.lang.Object ValueInt.x()
+public static Gen ValueInt.extension$plus(int,Gen,Gen)
+public static Gen<java.lang.Object> ValueInt.extension$plus(int,Gen<java.lang.Object>,Gen<java.lang.Object>)
+public static boolean ValueInt.extension$equals(int,java.lang.Object)
+public static boolean ValueInt.extension$equals(int,java.lang.Object)
+public static int ValueInt.extension$hashCode(int)
+public static int ValueInt.extension$hashCode(int)
+public static int ValueInt.extension$iplus(int,int,int)
+public static int ValueInt.extension$iplus(int,int,int)
+
+RefInt
+public Gen RefInt.plus(Gen,Gen)
+public Gen<java.lang.Object> RefInt.plus(Gen<java.lang.Object>,Gen<java.lang.Object>)
+public RefInt RefInt.rplus(RefInt,RefInt)
+public RefInt RefInt.rplus(RefInt,RefInt)
+public int RefInt.x()
+public int RefInt.x()
+public java.lang.Object RefInt.x()
+public java.lang.Object RefInt.x()
+
+RefInteger
+public Gen RefInteger.plus(Gen,Gen)
+public Gen<java.lang.Integer> RefInteger.plus(Gen<java.lang.Integer>,Gen<java.lang.Integer>)
+public RefInteger RefInteger.bplus(RefInteger,RefInteger)
+public RefInteger RefInteger.bplus(RefInteger,RefInteger)
+public java.lang.Integer RefInteger.x()
+public java.lang.Integer RefInteger.x()
+public java.lang.Object RefInteger.x()
+public java.lang.Object RefInteger.x()
+
diff --git a/test/files/run/t6344.scala b/test/files/run/t6344.scala
new file mode 100644
index 0000000000..6f82e4ba51
--- /dev/null
+++ b/test/files/run/t6344.scala
@@ -0,0 +1,106 @@
+import scala.reflect.{ClassTag, classTag}
+import java.lang.Integer
+
+trait Gen[A] extends Any {
+ def x: A
+ def plus(x1: Gen[A], x2: Gen[A]): Gen[A]
+}
+class ValueInt(val x: Int) extends AnyVal with Gen[Int] {
+ // Gen<java.lang.Object> ValueInt.extension$plus(int,Gen<java.lang.Object>,Gen<java.lang.Object>)
+ def plus(x1: Gen[Int], x2: Gen[Int]): Gen[Int] = new ValueInt(x + x1.x + x2.x)
+ // int ValueInt.extension$iplus(int,int,int)
+ def iplus(x1: ValueInt, x2: ValueInt): ValueInt = new ValueInt(x + x1.x + x2.x)
+}
+class RefInt(val x: Int) extends AnyRef with Gen[Int] {
+ def plus(x1: Gen[Int], x2: Gen[Int]): Gen[Int] = new RefInt(x + x1.x + x2.x)
+ def rplus(x1: RefInt, x2: RefInt): RefInt = new RefInt(x + x1.x + x2.x)
+}
+class RefInteger(val x: java.lang.Integer) extends AnyRef with Gen[Integer] {
+ def plus(x1: Gen[Integer], x2: Gen[Integer]): Gen[Integer] = new RefInteger(x + x1.x + x2.x)
+ def bplus(x1: RefInteger, x2: RefInteger): RefInteger = new RefInteger(x + x1.x + x2.x)
+}
+
+class Val[Q](val value: Int) extends AnyVal
+class ValAny[Q](val value: Any) extends AnyVal
+class ValStr[Q](val value: String) extends AnyVal
+class ValA[Q](val value: Q) extends AnyVal {
+ def f: Q = ???
+}
+class ValB[Q, Q0 <: Q](val value: Q) extends AnyVal {
+ def f: Q0 = ???
+}
+
+class C0 {
+ def v1[A](in: Val[A]) = in
+ def v2[A]: List[Val[A]] = Nil
+ def v3[A]: Val[A] = new Val[A](0)
+ def v4[A <: String](x: Val[A], ys: List[Val[A]]) = ys.head
+}
+class C1 {
+ def v1[A](in: ValAny[A]) = in
+ def v2[A]: List[ValAny[A]] = Nil
+ def v3[A]: ValAny[A] = new ValAny[A]("")
+ def v4[A <: String](x: ValAny[A], ys: List[ValAny[A]]) = ys.head
+}
+class C2 {
+ def v1[A](in: ValStr[A]) = in
+ def v2[A]: List[ValStr[A]] = Nil
+ def v3[A]: ValStr[A] = new ValStr[A]("")
+ def v4[A <: String](x: ValStr[A], ys: List[ValStr[A]]) = ys.head
+}
+class C3[A](val x: A) {
+ def v1(in: ValA[A]) = in
+ def v2: List[ValA[A]] = Nil
+ def v3: ValA[A] = new ValA[A](x)
+ def v4(x: ValA[A], ys: List[ValA[A]]) = ys.head
+}
+class C4 {
+ def v1(in: ValA[Int]) = in
+ def v2: List[ValA[Int]] = Nil
+ def v3: ValA[Int] = new ValA(1)
+ def v4(x: ValA[Int], ys: List[ValA[Int]]) = ys.head
+}
+class C4B {
+ def v1(in: ValA[String]) = in
+ def v2: List[ValA[String]] = Nil
+ def v3: ValA[String] = new ValA("")
+ def v4(x: ValA[String], ys: List[ValA[String]]) = ys.head
+}
+class C5 {
+ def f1[A](x1: Val[A], x2: ValAny[A], x3: ValStr[A], x4: ValA[A]) = x4
+ def f2(x1: Int, x2: Any, x3: String, x4: Double) = x4
+ def f3(x: ValA[Int]) = x.f
+ def f4(x: ValB[Int, Int]) = x.f
+ def f5(x: ValB[Int, _ <: Int]) = x.f
+}
+class C6[A] {
+ def f1(x1: Val[A], x2: ValAny[A], x3: ValStr[A], x4: ValA[A]) = x4
+}
+class C7 extends C6[Int] {
+ override def f1(x1: Val[Int], x2: ValAny[Int], x3: ValStr[Int], x4: ValA[Int]) =
+ super.f1(x1, x2, x3, x4)
+}
+
+object Test {
+ def show[A: ClassTag] = {
+ println(classTag[A].runtimeClass.getName)
+ classTag[A].runtimeClass.getDeclaredMethods.toList.sortBy(_.toString).flatMap(m => List(m.toString, m.toGenericString)) foreach println
+ println("")
+ }
+
+ def main(args: Array[String]): Unit = {
+ show[C0]
+ show[C1]
+ show[C2]
+ show[C3[_]]
+ show[C4]
+ show[C4B]
+ show[C5]
+ show[C6[_]]
+ show[C7]
+ show[Gen[_]]
+ show[ValueInt]
+ show[RefInt]
+ show[RefInteger]
+ }
+}
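
The check file for this test lists every method twice, once via Method#toString (erased signature) and once via Method#toGenericString (generic signature). A minimal standalone sketch (not part of this patch; Box is an illustrative class, not one from the test) of the difference between the two views:

class Box[A](val value: A) {
  def get: A = value
}

object SignatureSketch extends App {
  // erased vs. generic Java signatures, printed the same way show[A] does above
  classOf[Box[_]].getDeclaredMethods.toList.sortBy(_.toString).foreach { m =>
    println(m.toString)        // e.g. public java.lang.Object Box.get()
    println(m.toGenericString) // e.g. public A Box.get()
  }
}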
diff --git a/test/files/run/t6353.check b/test/files/run/t6353.check
new file mode 100644
index 0000000000..5676bed245
--- /dev/null
+++ b/test/files/run/t6353.check
@@ -0,0 +1 @@
+applyDynamic(apply)(9)
diff --git a/test/files/run/t6353.scala b/test/files/run/t6353.scala
new file mode 100644
index 0000000000..112241a3c4
--- /dev/null
+++ b/test/files/run/t6353.scala
@@ -0,0 +1,12 @@
+import language.dynamics
+
+object Test extends App {
+ val x = new X(3)
+ val y = x(9)
+ class X(i: Int) extends Dynamic {
+ def applyDynamic(name: String)(in: Int): Int = {
+ println(s"applyDynamic($name)($in)")
+ i + in
+ }
+ }
+}
diff --git a/test/files/run/t6392a.check b/test/files/run/t6392a.check
new file mode 100644
index 0000000000..6a452c185a
--- /dev/null
+++ b/test/files/run/t6392a.check
@@ -0,0 +1 @@
+()
diff --git a/test/files/run/t6392a.scala b/test/files/run/t6392a.scala
new file mode 100644
index 0000000000..3a4f9fd0a5
--- /dev/null
+++ b/test/files/run/t6392a.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val c = tb.parse("object C")
+ println(tb.eval(c))
+} \ No newline at end of file
diff --git a/test/files/run/t6392b.check b/test/files/run/t6392b.check
new file mode 100644
index 0000000000..e9c7ecaa34
--- /dev/null
+++ b/test/files/run/t6392b.check
@@ -0,0 +1 @@
+ModuleDef(Modifiers(), newTermName("C"), Template(List(Select(Ident(scala#PK), newTypeName("AnyRef")#TPE)), emptyValDef, List(DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(newTypeName("C")), tpnme.EMPTY), nme.CONSTRUCTOR#PCTOR), List())), Literal(Constant(())))))))
diff --git a/test/files/run/t6392b.scala b/test/files/run/t6392b.scala
new file mode 100644
index 0000000000..f69a5aaf45
--- /dev/null
+++ b/test/files/run/t6392b.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val c = tb.parse("object C")
+ println(showRaw(tb.typeCheck(c), printKinds = true))
+} \ No newline at end of file
diff --git a/test/files/run/t6394a.check b/test/files/run/t6394a.check
new file mode 100644
index 0000000000..2a02d41ce2
--- /dev/null
+++ b/test/files/run/t6394a.check
@@ -0,0 +1 @@
+TEST
diff --git a/test/files/run/t6394a.flags b/test/files/run/t6394a.flags
new file mode 100644
index 0000000000..cd66464f2f
--- /dev/null
+++ b/test/files/run/t6394a.flags
@@ -0,0 +1 @@
+-language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/t6394a/Macros_1.scala b/test/files/run/t6394a/Macros_1.scala
new file mode 100644
index 0000000000..3d39d3e40a
--- /dev/null
+++ b/test/files/run/t6394a/Macros_1.scala
@@ -0,0 +1,12 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c:Context): c.Expr[Any] = {
+ import c.universe._
+
+ val selfTree = This(c.enclosingClass.symbol.asModule.moduleClass)
+ c.Expr[AnyRef](selfTree)
+ }
+
+ def foo: Any = macro impl
+} \ No newline at end of file
diff --git a/test/files/run/t6394a/Test_2.scala b/test/files/run/t6394a/Test_2.scala
new file mode 100644
index 0000000000..75e84f0e38
--- /dev/null
+++ b/test/files/run/t6394a/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ println(Macros.foo)
+ override def toString = "TEST"
+} \ No newline at end of file
diff --git a/test/files/run/t6394b.check b/test/files/run/t6394b.check
new file mode 100644
index 0000000000..2a02d41ce2
--- /dev/null
+++ b/test/files/run/t6394b.check
@@ -0,0 +1 @@
+TEST
diff --git a/test/files/run/t6394b.flags b/test/files/run/t6394b.flags
new file mode 100644
index 0000000000..cd66464f2f
--- /dev/null
+++ b/test/files/run/t6394b.flags
@@ -0,0 +1 @@
+-language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/t6394b/Macros_1.scala b/test/files/run/t6394b/Macros_1.scala
new file mode 100644
index 0000000000..5d93e1cda8
--- /dev/null
+++ b/test/files/run/t6394b/Macros_1.scala
@@ -0,0 +1,12 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c:Context): c.Expr[Any] = {
+ import c.universe._
+
+ val selfTree = This(tpnme.EMPTY)
+ c.Expr[AnyRef](selfTree)
+ }
+
+ def foo: Any = macro impl
+} \ No newline at end of file
diff --git a/test/files/run/t6394b/Test_2.scala b/test/files/run/t6394b/Test_2.scala
new file mode 100644
index 0000000000..75e84f0e38
--- /dev/null
+++ b/test/files/run/t6394b/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ println(Macros.foo)
+ override def toString = "TEST"
+} \ No newline at end of file
diff --git a/test/files/run/t6410.check b/test/files/run/t6410.check
new file mode 100644
index 0000000000..051fe4995a
--- /dev/null
+++ b/test/files/run/t6410.check
@@ -0,0 +1,2 @@
+ParMap(0 -> 4, 1 -> 5)
+ParMap(0 -> 4, 1 -> 5) \ No newline at end of file
diff --git a/test/files/run/t6410.scala b/test/files/run/t6410.scala
new file mode 100644
index 0000000000..2a001b47ab
--- /dev/null
+++ b/test/files/run/t6410.scala
@@ -0,0 +1,9 @@
+
+
+
+object Test extends App {
+ val x = collection.parallel.mutable.ParArray.range(1,10) groupBy { _ % 2 } mapValues { _.size }
+ println(x)
+ val y = collection.parallel.immutable.ParVector.range(1,10) groupBy { _ % 2 } mapValues { _.size }
+ println(y)
+} \ No newline at end of file
diff --git a/test/files/run/test-cpp.check b/test/files/run/test-cpp.check
index a7163edb5f..bfea438c60 100644
--- a/test/files/run/test-cpp.check
+++ b/test/files/run/test-cpp.check
@@ -1,65 +1,65 @@
-37c37
-< locals: value args, value x, value y
----
-> locals: value args
-42,43d41
-< 52 CONSTANT(2)
-< 52 STORE_LOCAL(value x)
-45,46d42
-< 53 LOAD_LOCAL(value x)
-< 53 STORE_LOCAL(value y)
-49c45
-< 54 LOAD_LOCAL(value y)
----
-> 54 CONSTANT(2)
-92c88
-< locals: value args, value x, value y
----
-> locals: value args, value x
-101,102d96
-< 82 LOAD_LOCAL(value x)
-< 82 STORE_LOCAL(value y)
-105c99
-< 83 LOAD_LOCAL(value y)
----
-> 83 LOAD_LOCAL(value x)
-135c129
-< locals: value args, value x, value y
----
-> locals: value args
-140,141d133
-< 66 THIS(TestAliasChainDerefThis)
-< 66 STORE_LOCAL(value x)
-143,144d134
-< 67 LOAD_LOCAL(value x)
-< 67 STORE_LOCAL(value y)
-147c137
-< 68 LOAD_LOCAL(value y)
----
-> 68 THIS(Object)
-176c166
-< locals: value x, value y
----
-> locals: value x
-181,182d170
-< 29 LOAD_LOCAL(value x)
-< 29 STORE_LOCAL(value y)
-185c173
-< 30 LOAD_LOCAL(value y)
----
-> 30 LOAD_LOCAL(value x)
-223,224d210
-< 97 LOAD_LOCAL(variable x)
-< 97 STORE_LOCAL(variable y)
-227c213
-< 98 LOAD_LOCAL(variable y)
----
-> 98 LOAD_LOCAL(variable x)
-233,234d218
-< 101 LOAD_LOCAL(variable y)
-< 101 STORE_LOCAL(variable x)
-236c220
-< 102 LOAD_LOCAL(variable x)
----
-> 102 LOAD_LOCAL(variable y)
-
+37c37
+< locals: value args, value x, value y
+---
+> locals: value args
+42,43d41
+< 52 CONSTANT(2)
+< 52 STORE_LOCAL(value x)
+45,46d42
+< 53 LOAD_LOCAL(value x)
+< 53 STORE_LOCAL(value y)
+49c45
+< 54 LOAD_LOCAL(value y)
+---
+> 54 CONSTANT(2)
+92c88
+< locals: value args, value x, value y
+---
+> locals: value args, value x
+101,102d96
+< 82 LOAD_LOCAL(value x)
+< 82 STORE_LOCAL(value y)
+105c99
+< 83 LOAD_LOCAL(value y)
+---
+> 83 LOAD_LOCAL(value x)
+135c129
+< locals: value args, value x, value y
+---
+> locals: value args
+140,141d133
+< 66 THIS(TestAliasChainDerefThis)
+< 66 STORE_LOCAL(value x)
+143,144d134
+< 67 LOAD_LOCAL(value x)
+< 67 STORE_LOCAL(value y)
+147c137
+< 68 LOAD_LOCAL(value y)
+---
+> 68 THIS(Object)
+176c166
+< locals: value x, value y
+---
+> locals: value x
+181,182d170
+< 29 LOAD_LOCAL(value x)
+< 29 STORE_LOCAL(value y)
+185c173
+< 30 LOAD_LOCAL(value y)
+---
+> 30 LOAD_LOCAL(value x)
+223,224d210
+< 97 LOAD_LOCAL(variable x)
+< 97 STORE_LOCAL(variable y)
+227c213
+< 98 LOAD_LOCAL(variable y)
+---
+> 98 LOAD_LOCAL(variable x)
+233,234d218
+< 101 LOAD_LOCAL(variable y)
+< 101 STORE_LOCAL(variable x)
+236c220
+< 102 LOAD_LOCAL(variable x)
+---
+> 102 LOAD_LOCAL(variable y)
+
diff --git a/test/files/run/toolbox_console_reporter.check b/test/files/run/toolbox_console_reporter.check
index e69de29bb2..1395c68740 100644
--- a/test/files/run/toolbox_console_reporter.check
+++ b/test/files/run/toolbox_console_reporter.check
@@ -0,0 +1,8 @@
+hello
+============compiler console=============
+warning: method foo in object Utils is deprecated: test
+
+=========================================
+============compiler messages============
+Info(NoPosition,method foo in object Utils is deprecated: test,WARNING)
+=========================================
diff --git a/test/files/run/toolbox_console_reporter.scala b/test/files/run/toolbox_console_reporter.scala
index c5b788550e..d672ccb9cb 100644
--- a/test/files/run/toolbox_console_reporter.scala
+++ b/test/files/run/toolbox_console_reporter.scala
@@ -1,16 +1,29 @@
import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.{ToolBox, mkConsoleFrontEnd}
object Test extends App {
- // todo. cannot test this unfortunately, because ConsoleFrontEnd grabs Console.out too early
- // todo. and isn't affected by Console.setOut employed by partest to intercept output
+ val oldErr = Console.err;
+ val baos = new java.io.ByteArrayOutputStream();
+ Console.setErr(new java.io.PrintStream(baos));
+ try {
+ val toolbox = cm.mkToolBox(frontEnd = mkConsoleFrontEnd(), options = "-deprecation")
+ toolbox.eval(reify{
+ object Utils {
+ @deprecated("test", "2.10.0")
+ def foo { println("hello") }
+ }
- //val toolbox = mkToolBox(frontEnd = mkConsoleFrontEnd(), options = "-deprecation")
- //toolbox.runExpr(reify{
- // object Utils {
- // @deprecated("test", "2.10.0")
- // def foo { println("hello") }
- // }
- //
- // Utils.foo
- //})
+ Utils.foo
+ }.tree)
+ println("============compiler console=============")
+ println(baos.toString);
+ println("=========================================")
+ println("============compiler messages============")
+ toolbox.frontEnd.infos.foreach(println(_))
+ println("=========================================")
+ } finally {
+ Console.setErr(oldErr);
+ }
} \ No newline at end of file
diff --git a/test/files/run/toolbox_default_reporter_is_silent.check b/test/files/run/toolbox_default_reporter_is_silent.check
index ef0493b275..ce01362503 100644
--- a/test/files/run/toolbox_default_reporter_is_silent.check
+++ b/test/files/run/toolbox_default_reporter_is_silent.check
@@ -1 +1 @@
-hello
+hello
diff --git a/test/files/run/toolbox_default_reporter_is_silent.scala b/test/files/run/toolbox_default_reporter_is_silent.scala
index 5f3269b6fa..4bd7a646b0 100644
--- a/test/files/run/toolbox_default_reporter_is_silent.scala
+++ b/test/files/run/toolbox_default_reporter_is_silent.scala
@@ -5,7 +5,7 @@ import scala.tools.reflect.ToolBox
object Test extends App {
val toolbox = cm.mkToolBox()
- toolbox.runExpr(reify{
+ toolbox.eval(reify{
object Utils {
@deprecated("test", "2.10.0")
def foo { println("hello") }
diff --git a/test/files/run/toolbox_silent_reporter.scala b/test/files/run/toolbox_silent_reporter.scala
index 915734e6ad..03b1d6defa 100644
--- a/test/files/run/toolbox_silent_reporter.scala
+++ b/test/files/run/toolbox_silent_reporter.scala
@@ -1,11 +1,11 @@
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{universe => ru}
import scala.reflect.runtime.{currentMirror => cm}
-import scala.tools.reflect.ToolBox
+import scala.tools.reflect.{ToolBox, mkSilentFrontEnd}
object Test extends App {
- val toolbox = cm.mkToolBox(options = "-deprecation")
- toolbox.runExpr(reify{
+ val toolbox = cm.mkToolBox(options = "-deprecation", frontEnd = mkSilentFrontEnd())
+ toolbox.eval(reify{
object Utils {
@deprecated("test", "2.10.0")
def foo { println("hello") }
diff --git a/test/files/run/toolbox_typecheck_implicitsdisabled.check b/test/files/run/toolbox_typecheck_implicitsdisabled.check
index e2f2dbe4b9..db64e118ca 100644
--- a/test/files/run/toolbox_typecheck_implicitsdisabled.check
+++ b/test/files/run/toolbox_typecheck_implicitsdisabled.check
@@ -1,5 +1,5 @@
-{
- import scala.Predef._;
- scala.Predef.any2ArrowAssoc[Int](1).->[Int](2)
-}
-scala.tools.reflect.ToolBoxError: reflective typecheck has failed: value -> is not a member of Int
+{
+ import scala.Predef._;
+ scala.Predef.any2ArrowAssoc[Int](1).->[Int](2)
+}
+scala.tools.reflect.ToolBoxError: reflective typecheck has failed: value -> is not a member of Int
diff --git a/test/files/run/toolbox_typecheck_inferimplicitvalue.check b/test/files/run/toolbox_typecheck_inferimplicitvalue.check
index 23ba536aff..ec17b4203b 100644
--- a/test/files/run/toolbox_typecheck_inferimplicitvalue.check
+++ b/test/files/run/toolbox_typecheck_inferimplicitvalue.check
@@ -1 +1 @@
-C.MC
+C.MC
diff --git a/test/files/run/toolbox_typecheck_macrosdisabled.check b/test/files/run/toolbox_typecheck_macrosdisabled.check
index 9cf101c69d..688f37927c 100644
--- a/test/files/run/toolbox_typecheck_macrosdisabled.check
+++ b/test/files/run/toolbox_typecheck_macrosdisabled.check
@@ -1,32 +1,41 @@
-{
- val $u: ru.type = ru;
- val $m: $u.Mirror = ru.rootMirror;
- $u.Expr.apply[Int(2)]($m, {
- final class $treecreator1 extends TreeCreator {
- def <init>(): $treecreator1 = {
- $treecreator1.super.<init>();
- ()
- };
- def apply[U <: scala.reflect.base.Universe with Singleton]($m$untyped: scala.reflect.base.MirrorOf[U]): U#Tree = {
- val $u: U = $m$untyped.universe;
- val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
- $u.Literal.apply($u.Constant.apply(2))
- }
- };
- new $treecreator1()
- })($u.TypeTag.apply[Int(2)]($m, {
- final class $typecreator2 extends TypeCreator {
- def <init>(): $typecreator2 = {
- $typecreator2.super.<init>();
- ()
- };
- def apply[U <: scala.reflect.base.Universe with Singleton]($m$untyped: scala.reflect.base.MirrorOf[U]): U#Type = {
- val $u: U = $m$untyped.universe;
- val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
- $u.ConstantType.apply($u.Constant.apply(2))
- }
- };
- new $typecreator2()
- }))
-}
-ru.reify[Int](2)
+{
+ val $u: ru.type = ru;
+ val $m: $u.Mirror = ru.runtimeMirror({
+ final class $anon extends scala.AnyRef {
+ def <init>(): anonymous class $anon = {
+ $anon.super.<init>();
+ ()
+ };
+ ()
+ };
+ new $anon()
+}.getClass().getClassLoader());
+ $u.Expr.apply[Int(2)]($m, {
+ final class $treecreator1 extends TreeCreator {
+ def <init>(): $treecreator1 = {
+ $treecreator1.super.<init>();
+ ()
+ };
+ def apply[U <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Tree = {
+ val $u: U = $m$untyped.universe;
+ val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
+ $u.Literal.apply($u.Constant.apply(2))
+ }
+ };
+ new $treecreator1()
+ })($u.TypeTag.apply[Int(2)]($m, {
+ final class $typecreator2 extends TypeCreator {
+ def <init>(): $typecreator2 = {
+ $typecreator2.super.<init>();
+ ()
+ };
+ def apply[U <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = {
+ val $u: U = $m$untyped.universe;
+ val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
+ $u.ConstantType.apply($u.Constant.apply(2))
+ }
+ };
+ new $typecreator2()
+ }))
+}
+ru.reify[Int](2)
diff --git a/test/files/run/toolbox_typecheck_macrosdisabled.scala b/test/files/run/toolbox_typecheck_macrosdisabled.scala
index bcbd637e02..51eb63f294 100644
--- a/test/files/run/toolbox_typecheck_macrosdisabled.scala
+++ b/test/files/run/toolbox_typecheck_macrosdisabled.scala
@@ -3,12 +3,17 @@ import scala.reflect.runtime.{universe => ru}
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
+// Note: If this test starts failing and you don't know why, you may have
+// accidentally changed the way type tags reify. If so, validate that your
+// changes are accurate and update the check file.
+
object Test extends App {
val toolbox = cm.mkToolBox()
val rupkg = cm.staticModule("scala.reflect.runtime.package")
val rusym = build.selectTerm(rupkg, "universe")
val NullaryMethodType(rutpe) = rusym.typeSignature
- val ru = build.newFreeTerm("ru", rutpe, scala.reflect.runtime.universe)
+ val ru = build.newFreeTerm("ru", scala.reflect.runtime.universe)
+ build.setTypeSignature(ru, rutpe)
val tree1 = Apply(Select(Ident(ru), newTermName("reify")), List(Literal(Constant(2))))
val ttree1 = toolbox.typeCheck(tree1, withMacrosDisabled = false)
@@ -17,4 +22,4 @@ object Test extends App {
val tree2 = Apply(Select(Ident(ru), newTermName("reify")), List(Literal(Constant(2))))
val ttree2 = toolbox.typeCheck(tree2, withMacrosDisabled = true)
println(ttree2)
-} \ No newline at end of file
+}
diff --git a/test/files/run/toolbox_typecheck_macrosdisabled2.check b/test/files/run/toolbox_typecheck_macrosdisabled2.check
index d344e33180..f5c9b6eeab 100644
--- a/test/files/run/toolbox_typecheck_macrosdisabled2.check
+++ b/test/files/run/toolbox_typecheck_macrosdisabled2.check
@@ -1,32 +1,41 @@
-{
- val $u: ru.type = ru;
- val $m: $u.Mirror = ru.rootMirror;
- $u.Expr.apply[Array[Int]]($m, {
- final class $treecreator1 extends TreeCreator {
- def <init>(): $treecreator1 = {
- $treecreator1.super.<init>();
- ()
- };
- def apply[U <: scala.reflect.base.Universe with Singleton]($m$untyped: scala.reflect.base.MirrorOf[U]): U#Tree = {
- val $u: U = $m$untyped.universe;
- val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
- $u.Apply.apply($u.Select.apply($u.Select.apply($u.build.Ident($m.staticPackage("scala")), $u.newTermName("Array")), $u.newTermName("apply")), scala.collection.immutable.List.apply[$u.Literal]($u.Literal.apply($u.Constant.apply(2))))
- }
- };
- new $treecreator1()
- })($u.TypeTag.apply[Array[Int]]($m, {
- final class $typecreator2 extends TypeCreator {
- def <init>(): $typecreator2 = {
- $typecreator2.super.<init>();
- ()
- };
- def apply[U <: scala.reflect.base.Universe with Singleton]($m$untyped: scala.reflect.base.MirrorOf[U]): U#Type = {
- val $u: U = $m$untyped.universe;
- val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
- $u.TypeRef.apply($u.ThisType.apply($m.staticPackage("scala").asModule.moduleClass), $m.staticClass("scala.Array"), scala.collection.immutable.List.apply[$u.Type]($m.staticClass("scala.Int").asType.toTypeConstructor))
- }
- };
- new $typecreator2()
- }))
-}
-ru.reify[Array[Int]](scala.Array.apply(2))
+{
+ val $u: ru.type = ru;
+ val $m: $u.Mirror = ru.runtimeMirror({
+ final class $anon extends scala.AnyRef {
+ def <init>(): anonymous class $anon = {
+ $anon.super.<init>();
+ ()
+ };
+ ()
+ };
+ new $anon()
+}.getClass().getClassLoader());
+ $u.Expr.apply[Array[Int]]($m, {
+ final class $treecreator1 extends TreeCreator {
+ def <init>(): $treecreator1 = {
+ $treecreator1.super.<init>();
+ ()
+ };
+ def apply[U <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Tree = {
+ val $u: U = $m$untyped.universe;
+ val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
+ $u.Apply.apply($u.Select.apply($u.Select.apply($u.build.Ident($m.staticPackage("scala")), $u.newTermName("Array")), $u.newTermName("apply")), scala.collection.immutable.List.apply[$u.Literal]($u.Literal.apply($u.Constant.apply(2))))
+ }
+ };
+ new $treecreator1()
+ })($u.TypeTag.apply[Array[Int]]($m, {
+ final class $typecreator2 extends TypeCreator {
+ def <init>(): $typecreator2 = {
+ $typecreator2.super.<init>();
+ ()
+ };
+ def apply[U <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = {
+ val $u: U = $m$untyped.universe;
+ val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
+ $u.TypeRef.apply($u.ThisType.apply($m.staticPackage("scala").asModule.moduleClass), $m.staticClass("scala.Array"), scala.collection.immutable.List.apply[$u.Type]($m.staticClass("scala.Int").asType.toTypeConstructor))
+ }
+ };
+ new $typecreator2()
+ }))
+}
+ru.reify[Array[Int]](scala.Array.apply(2))
diff --git a/test/files/run/toolbox_typecheck_macrosdisabled2.scala b/test/files/run/toolbox_typecheck_macrosdisabled2.scala
index 51493ffc1c..74fd09d9fd 100644
--- a/test/files/run/toolbox_typecheck_macrosdisabled2.scala
+++ b/test/files/run/toolbox_typecheck_macrosdisabled2.scala
@@ -3,12 +3,17 @@ import scala.reflect.runtime.{universe => ru}
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
+// Note: If this test starts failing and you don't know why, you may have
+// accidentally changed the way type tags reify. If so, validate that your
+// changes are accurate and update the check file.
+
object Test extends App {
val toolbox = cm.mkToolBox()
val rupkg = cm.staticModule("scala.reflect.runtime.package")
val rusym = build.selectTerm(rupkg, "universe")
val NullaryMethodType(rutpe) = rusym.typeSignature
- val ru = build.newFreeTerm("ru", rutpe, scala.reflect.runtime.universe)
+ val ru = build.newFreeTerm("ru", scala.reflect.runtime.universe)
+ build.setTypeSignature(ru, rutpe)
val tree1 = Apply(Select(Ident(ru), newTermName("reify")), List(Apply(Select(Ident(newTermName("scala")), newTermName("Array")), List(Literal(Constant(2))))))
val ttree1 = toolbox.typeCheck(tree1, withMacrosDisabled = false)
diff --git a/test/files/run/try-catch-unify.check b/test/files/run/try-catch-unify.check
new file mode 100644
index 0000000000..67a8c64a33
--- /dev/null
+++ b/test/files/run/try-catch-unify.check
@@ -0,0 +1,4 @@
+Failure(java.lang.NumberFormatException: For input string: "Hi")
+Success(5.0)
+O NOES
+Failure(java.lang.NumberFormatException: For input string: "Hi")
diff --git a/test/files/run/try-catch-unify.scala b/test/files/run/try-catch-unify.scala
new file mode 100644
index 0000000000..8cb14d060e
--- /dev/null
+++ b/test/files/run/try-catch-unify.scala
@@ -0,0 +1,16 @@
+import util._
+
+import control.Exception._
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ println(catching(classOf[NumberFormatException]) withTry ("Hi".toDouble))
+ println(catching(classOf[NumberFormatException]) withTry ("5".toDouble))
+ try {
+ catching(classOf[NumberFormatException]) withTry (sys.error("O NOES"))
+ } catch {
+ case t => println(t.getMessage)
+ }
+ println(nonFatalCatch withTry ("Hi".toDouble))
+ }
+}
diff --git a/test/files/run/typetags_core.check b/test/files/run/typetags_core.check
index 8d20e099c4..980b4719bf 100644
--- a/test/files/run/typetags_core.check
+++ b/test/files/run/typetags_core.check
@@ -1,30 +1,30 @@
-true
-TypeTag[Byte]
-true
-TypeTag[Short]
-true
-TypeTag[Char]
-true
-TypeTag[Int]
-true
-TypeTag[Long]
-true
-TypeTag[Float]
-true
-TypeTag[Double]
-true
-TypeTag[Boolean]
-true
-TypeTag[Unit]
-true
-TypeTag[Any]
-true
-TypeTag[AnyVal]
-true
-TypeTag[AnyRef]
-true
-TypeTag[java.lang.Object]
-true
-TypeTag[Null]
-true
-TypeTag[Nothing]
+true
+TypeTag[Byte]
+true
+TypeTag[Short]
+true
+TypeTag[Char]
+true
+TypeTag[Int]
+true
+TypeTag[Long]
+true
+TypeTag[Float]
+true
+TypeTag[Double]
+true
+TypeTag[Boolean]
+true
+TypeTag[Unit]
+true
+TypeTag[Any]
+true
+TypeTag[AnyVal]
+true
+TypeTag[AnyRef]
+true
+TypeTag[java.lang.Object]
+true
+TypeTag[Null]
+true
+TypeTag[Nothing]
diff --git a/test/files/run/typetags_serialize.check b/test/files/run/typetags_serialize.check
index 1b898250fb..f79436ea5d 100644
--- a/test/files/run/typetags_serialize.check
+++ b/test/files/run/typetags_serialize.check
@@ -1,2 +1,2 @@
-java.io.NotSerializableException: scala.reflect.base.TypeTags$PredefTypeCreator
-java.io.NotSerializableException: Test$$typecreator1$1
+java.io.NotSerializableException: scala.reflect.api.TypeTags$PredefTypeCreator
+java.io.NotSerializableException: Test$$typecreator1$1
diff --git a/test/files/run/typetags_serialize.scala b/test/files/run/typetags_serialize.scala
index 3917b69a93..3c842e6cc9 100644
--- a/test/files/run/typetags_serialize.scala
+++ b/test/files/run/typetags_serialize.scala
@@ -1,5 +1,6 @@
import java.io._
import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
import scala.reflect.runtime.{currentMirror => cm}
object Test extends App {
@@ -13,7 +14,7 @@ object Test extends App {
val fin = new ByteArrayInputStream(fout.toByteArray)
val in = new ObjectInputStream(fin)
- val retag = in.readObject().asInstanceOf[scala.reflect.basis.TypeTag[_]].in(cm)
+ val retag = in.readObject().asInstanceOf[ru.TypeTag[_]].in(cm)
in.close()
fin.close()
diff --git a/test/files/run/typetags_without_scala_reflect_manifest_lookup.check b/test/files/run/typetags_without_scala_reflect_manifest_lookup.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/run/typetags_without_scala_reflect_manifest_lookup.check
diff --git a/test/files/run/typetags_without_scala_reflect_manifest_lookup.scala b/test/files/run/typetags_without_scala_reflect_manifest_lookup.scala
new file mode 100644
index 0000000000..6fd3d2dc2b
--- /dev/null
+++ b/test/files/run/typetags_without_scala_reflect_manifest_lookup.scala
@@ -0,0 +1,29 @@
+import scala.tools.partest._
+import scala.tools.nsc.Settings
+
+object Test extends DirectTest {
+ override def extraSettings = "-cp " + sys.props("partest.lib") + " -d \"" + testOutput.path + "\""
+
+ def code = """
+ object Test extends App {
+ // manifest lookup also involves type tag lookup
+ // because we support manifest <-> typetag convertability
+ //
+ // however when scala-reflect.jar (the home of type tags) is not on the classpath
+ // we need to omit the type tag lookup, because we lack the necessary symbols
+ // to do implicit search and tag materialization
+ // (such missing symbols are e.g. ApiUniverseClass and TypeTagsClass)
+ //
+ // the test case you're looking at checks exactly this
+ // we establish a classpath that only includes scala-library.jar
+ // and then force scalac to perform implicit search for a manifest
+ // if type tag lookup is not disabled, the compiler will crash
+ // if it is disabled, then the compilation will succeed
+ // http://groups.google.com/group/scala-internals/browse_thread/thread/166ce4b71b7c46bb
+ def foo[T: Manifest] = ()
+ foo[List[Int]]
+ }
+ """
+
+ def show = compile()
+} \ No newline at end of file
diff --git a/test/files/run/typetags_without_scala_reflect_typetag_lookup.check b/test/files/run/typetags_without_scala_reflect_typetag_lookup.check
new file mode 100644
index 0000000000..53df68cfc2
--- /dev/null
+++ b/test/files/run/typetags_without_scala_reflect_typetag_lookup.check
@@ -0,0 +1,3 @@
+newSource1:9: error: could not find implicit value for evidence parameter of type reflect.runtime.package.universe.TypeTag[Int]
+ Library.foo[Int]
+ ^
diff --git a/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala b/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala
new file mode 100644
index 0000000000..e51ecdb180
--- /dev/null
+++ b/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala
@@ -0,0 +1,45 @@
+import scala.tools.partest._
+
+object Test extends DirectTest {
+ def code = ???
+
+ def library = """
+ import scala.reflect.runtime.universe._
+
+ object Library {
+ def foo[T: TypeTag] = ()
+ }
+ """
+ def compileLibrary() = {
+ val classpath = List(sys.props("partest.lib"), sys.props("partest.reflect")) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(library)
+ }
+
+ def app = """
+ object Test extends App {
+ // tries to materialize a type tag not having scala-reflect.jar on the classpath
+ // even though it's easy to materialize a type tag of Int, this line will fail
+ // because materialization involves classes from scala-reflect.jar
+ //
+ // in this test we make sure that the compiler doesn't crash
+ // but just displays several missing class file errors and an unavailable implicit message
+ Library.foo[Int]
+ }
+ """
+ def compileApp() = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(app)
+ }
+
+ def show(): Unit = {
+ val prevErr = System.err
+ val baos = new java.io.ByteArrayOutputStream();
+ System.setErr(new java.io.PrintStream(baos));
+ compileLibrary();
+ compileApp();
+ // we should get bad symbolic reference errors, because we're trying to call a method that can't be unpickled
+ // but we don't know the number of these errors and their order, so I just ignore them all
+ baos.toString.split("\n") filter (!_.startsWith("error: bad symbolic reference")) foreach println
+ System.setErr(prevErr)
+ }
+} \ No newline at end of file
diff --git a/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check
new file mode 100644
index 0000000000..729c0715df
--- /dev/null
+++ b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check
@@ -0,0 +1,3 @@
+newSource1:9: error: No Manifest available for App.this.T.
+ manifest[T]
+ ^
diff --git a/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala
new file mode 100644
index 0000000000..e984127583
--- /dev/null
+++ b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala
@@ -0,0 +1,46 @@
+import scala.tools.partest._
+
+object Test extends DirectTest {
+ def code = ???
+
+ def library = """
+ import scala.reflect.runtime.universe._
+
+ trait Library {
+ type T
+ implicit val tt: TypeTag[T]
+ }
+ """
+ def compileLibrary() = {
+ val classpath = List(sys.props("partest.lib"), sys.props("partest.reflect")) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(library)
+ }
+
+ def app = """
+ trait App extends Library {
+ // tries to create a manifest from a type tag without having scala-reflect.jar on the classpath
+ // even though it's possible to convert a type tag into a manifest, this will fail
+ // because conversion requires classes from scala-reflect.jar
+ //
+ // in this test we make sure that the compiler doesn't crash
+ // but just displays several missing class file errors and an unavailable implicit message
+ manifest[T]
+ }
+ """
+ def compileApp() = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(app)
+ }
+
+ def show(): Unit = {
+ val prevErr = System.err
+ val baos = new java.io.ByteArrayOutputStream();
+ System.setErr(new java.io.PrintStream(baos));
+ compileLibrary();
+ compileApp();
+ // we should get bad symbolic reference errors, because we're trying to use an implicit that can't be unpickled
+ // but we don't know the number of these errors and their order, so I just ignore them all
+ baos.toString.split("\n") filter (!_.startsWith("error: bad symbolic reference")) foreach println
+ System.setErr(prevErr)
+ }
+} \ No newline at end of file
diff --git a/test/files/run/valueclasses-classmanifest-basic.check b/test/files/run/valueclasses-classmanifest-basic.check
index 554c75e074..bc56c4d894 100644
--- a/test/files/run/valueclasses-classmanifest-basic.check
+++ b/test/files/run/valueclasses-classmanifest-basic.check
@@ -1 +1 @@
-Foo
+Foo
diff --git a/test/files/run/valueclasses-classmanifest-existential.check b/test/files/run/valueclasses-classmanifest-existential.check
index e9fc6e27ea..4577aacc0e 100644
--- a/test/files/run/valueclasses-classmanifest-existential.check
+++ b/test/files/run/valueclasses-classmanifest-existential.check
@@ -1 +1 @@
-Foo[<?>]
+Foo[<?>]
diff --git a/test/files/run/valueclasses-classmanifest-generic.check b/test/files/run/valueclasses-classmanifest-generic.check
index 1418c5cff9..c6be42d550 100644
--- a/test/files/run/valueclasses-classmanifest-generic.check
+++ b/test/files/run/valueclasses-classmanifest-generic.check
@@ -1 +1 @@
-Foo[java.lang.String]
+Foo[java.lang.String]
diff --git a/test/files/run/valueclasses-classtag-basic.check b/test/files/run/valueclasses-classtag-basic.check
index 0c13986b32..bc56c4d894 100644
--- a/test/files/run/valueclasses-classtag-basic.check
+++ b/test/files/run/valueclasses-classtag-basic.check
@@ -1 +1 @@
-ClassTag[class Foo]
+Foo
diff --git a/test/files/run/valueclasses-classtag-existential.check b/test/files/run/valueclasses-classtag-existential.check
index 95e94e7aee..9e2b9e1da7 100644
--- a/test/files/run/valueclasses-classtag-existential.check
+++ b/test/files/run/valueclasses-classtag-existential.check
@@ -1 +1 @@
-ClassTag[class java.lang.Object]
+Object
diff --git a/test/files/run/valueclasses-classtag-generic.check b/test/files/run/valueclasses-classtag-generic.check
index 0c13986b32..bc56c4d894 100644
--- a/test/files/run/valueclasses-classtag-generic.check
+++ b/test/files/run/valueclasses-classtag-generic.check
@@ -1 +1 @@
-ClassTag[class Foo]
+Foo
diff --git a/test/files/run/valueclasses-manifest-basic.check b/test/files/run/valueclasses-manifest-basic.check
index 554c75e074..bc56c4d894 100644
--- a/test/files/run/valueclasses-manifest-basic.check
+++ b/test/files/run/valueclasses-manifest-basic.check
@@ -1 +1 @@
-Foo
+Foo
diff --git a/test/files/run/valueclasses-manifest-existential.check b/test/files/run/valueclasses-manifest-existential.check
index fdce051039..f91a575ea7 100644
--- a/test/files/run/valueclasses-manifest-existential.check
+++ b/test/files/run/valueclasses-manifest-existential.check
@@ -1 +1 @@
-Foo[_ <: Any]
+Foo[_ <: Any]
diff --git a/test/files/run/valueclasses-manifest-generic.check b/test/files/run/valueclasses-manifest-generic.check
index 1418c5cff9..c6be42d550 100644
--- a/test/files/run/valueclasses-manifest-generic.check
+++ b/test/files/run/valueclasses-manifest-generic.check
@@ -1 +1 @@
-Foo[java.lang.String]
+Foo[java.lang.String]
diff --git a/test/files/run/valueclasses-pavlov.check b/test/files/run/valueclasses-pavlov.check
new file mode 100644
index 0000000000..b112e5507e
--- /dev/null
+++ b/test/files/run/valueclasses-pavlov.check
@@ -0,0 +1,2 @@
+box1: ok
+box2: ok
diff --git a/test/files/run/valueclasses-pavlov.scala b/test/files/run/valueclasses-pavlov.scala
new file mode 100644
index 0000000000..e73897f653
--- /dev/null
+++ b/test/files/run/valueclasses-pavlov.scala
@@ -0,0 +1,26 @@
+trait Foo extends Any {
+ def box1(x: Box1): String
+ def box2(x: Box2): String
+}
+
+class Box1(val value: String) extends AnyVal
+
+class Box2(val value: String) extends AnyVal with Foo {
+ def box1(x: Box1) = "box1: ok"
+ def box2(x: Box2) = "box2: ok"
+}
+
+class C(x: String) {
+ def this() = this("")
+}
+
+object Test {
+
+ def main(args: Array[String]) {
+ val b1 = new Box1("")
+ val b2 = new Box2("")
+ val f: Foo = b2
+ println(f.box1(b1))
+ println(f.box2(b2))
+ }
+}
diff --git a/test/files/run/valueclasses-typetag-basic.check b/test/files/run/valueclasses-typetag-basic.check
index 554c75e074..bc56c4d894 100644
--- a/test/files/run/valueclasses-typetag-basic.check
+++ b/test/files/run/valueclasses-typetag-basic.check
@@ -1 +1 @@
-Foo
+Foo
diff --git a/test/files/run/valueclasses-typetag-existential.check b/test/files/run/valueclasses-typetag-existential.check
index 0efa24a45f..d166a13fdc 100644
--- a/test/files/run/valueclasses-typetag-existential.check
+++ b/test/files/run/valueclasses-typetag-existential.check
@@ -1 +1 @@
-Foo[_]
+Foo[_]
diff --git a/test/files/run/valueclasses-typetag-generic.check b/test/files/run/valueclasses-typetag-generic.check
index fce2e64f79..534d1b37c4 100644
--- a/test/files/run/valueclasses-typetag-generic.check
+++ b/test/files/run/valueclasses-typetag-generic.check
@@ -1 +1 @@
-Foo[String]
+Foo[String]
diff --git a/test/files/run/virtpatmat_typetag.check b/test/files/run/virtpatmat_typetag.check
index f9800b84d0..cac9d9a4d6 100644
--- a/test/files/run/virtpatmat_typetag.check
+++ b/test/files/run/virtpatmat_typetag.check
@@ -1,10 +1,10 @@
-1 is not a ClassTag[int]; it's a class java.lang.Integer
-1 is a ClassTag[class java.lang.Integer]
-1 is not a ClassTag[class java.lang.String]; it's a class java.lang.Integer
-true is a ClassTag[class java.lang.Object]
-woele is a ClassTag[class java.lang.String]
-1 is not a ClassTag[int]; it's a class java.lang.Integer
-1 is a ClassTag[class java.lang.Integer]
-1 is not a ClassTag[class java.lang.String]; it's a class java.lang.Integer
-true is a ClassTag[class java.lang.Object]
-woele is a ClassTag[class java.lang.String]
+1 is not a Int; it's a class java.lang.Integer
+1 is a java.lang.Integer
+1 is not a java.lang.String; it's a class java.lang.Integer
+true is a Any
+woele is a java.lang.String
+1 is not a Int; it's a class java.lang.Integer
+1 is a java.lang.Integer
+1 is not a java.lang.String; it's a class java.lang.Integer
+true is a Any
+woele is a java.lang.String
diff --git a/test/files/scalacheck/duration.scala b/test/files/scalacheck/duration.scala
new file mode 100644
index 0000000000..5e93638614
--- /dev/null
+++ b/test/files/scalacheck/duration.scala
@@ -0,0 +1,69 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+import math._
+import concurrent.duration.Duration.fromNanos
+
+object Test extends Properties("Division of Duration by Long") {
+
+ val weightedLong =
+ frequency(
+ 1 -> choose(-128L, 127L),
+ 1 -> (arbitrary[Byte] map (_.toLong << 8)),
+ 1 -> (arbitrary[Byte] map (_.toLong << 16)),
+ 1 -> (arbitrary[Byte] map (_.toLong << 24)),
+ 1 -> (arbitrary[Byte] map (_.toLong << 32)),
+ 1 -> (arbitrary[Byte] map (_.toLong << 40)),
+ 1 -> (arbitrary[Byte] map (_.toLong << 48)),
+ 1 -> (choose(-127L, 127L) map (_ << 56))
+ )
+
+ val genTwoSmall = for {
+ a <- weightedLong
+ b <- choose(-(Long.MaxValue / max(1, abs(a))), Long.MaxValue / max(1, abs(a)))
+ } yield (a, b)
+
+ val genTwoLarge = for {
+ a <- weightedLong
+ b <- arbitrary[Long] suchThat (b => (abs(b) > Long.MaxValue / max(1, abs(a))))
+ } yield (a, b)
+
+ val genClose = for {
+ a <- weightedLong
+ if a != 0
+ b <- choose(Long.MaxValue / a - 10, Long.MaxValue / a + 10)
+ } yield (a, b)
+
+ val genBorderline =
+ frequency(
+ 1 -> (Long.MinValue, 0L),
+ 1 -> (Long.MinValue, 1L),
+ 1 -> (Long.MinValue, -1L),
+ 1 -> (0L, Long.MinValue),
+ 1 -> (1L, Long.MinValue),
+ 1 -> (-1L, Long.MinValue),
+ 90 -> genClose
+ )
+
+ def mul(a: Long, b: Long): Long = {
+ (fromNanos(a) * b).toNanos
+ }
+
+ property("without overflow") = forAll(genTwoSmall) { case (a, b) =>
+ a * b == mul(a, b)
+ }
+
+ property("with overflow") = forAll(genTwoLarge) { case (a, b) =>
+ try { mul(a, b); false } catch { case _: IllegalArgumentException => true }
+ }
+
+ property("on overflow edge cases") = forAll(genBorderline) { case (a, b) =>
+ val shouldFit =
+ a != Long.MinValue && // must fail due to illegal duration length
+ (b != Long.MinValue || a == 0) && // Long factor may only be MinValue if the duration is zero, otherwise the result will be illegal
+ (abs(b) <= Long.MaxValue / max(1, abs(a))) // check the rest against the “safe” division method
+ try { mul(a, b); shouldFit }
+ catch { case _: IllegalArgumentException => !shouldFit }
+ }
+}
diff --git a/test/files/scalacheck/redblacktree.scala b/test/files/scalacheck/redblacktree.scala
index e2609fa200..bc7f92aa1b 100644
--- a/test/files/scalacheck/redblacktree.scala
+++ b/test/files/scalacheck/redblacktree.scala
@@ -205,6 +205,45 @@ package scala.collection.immutable.redblacktree {
filteredTree == keysIterator(newTree).toList
}
}
+
+ object TestDrop extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = drop(tree, parm)
+
+ property("drop") = forAll(genInput) { case (tree, parm, newTree) =>
+ iterator(tree).drop(parm).toList == iterator(newTree).toList
+ }
+ }
+
+ object TestTake extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = take(tree, parm)
+
+ property("take") = forAll(genInput) { case (tree, parm, newTree) =>
+ iterator(tree).take(parm).toList == iterator(newTree).toList
+ }
+ }
+
+ object TestSlice extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override type ModifyParm = (Int, Int)
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = for {
+ from <- choose(0, iterator(tree).size)
+ to <- choose(from, iterator(tree).size)
+ } yield (from, to)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = slice(tree, parm._1, parm._2)
+
+ property("slice") = forAll(genInput) { case (tree, parm, newTree) =>
+ iterator(tree).slice(parm._1, parm._2).toList == iterator(newTree).toList
+ }
+ }
}
object Test extends Properties("RedBlackTree") {
@@ -213,4 +252,7 @@ object Test extends Properties("RedBlackTree") {
include(TestModify)
include(TestDelete)
include(TestRange)
+ include(TestDrop)
+ include(TestTake)
+ include(TestSlice)
}
diff --git a/test/files/pos/t2868.cmds b/test/flaky/pos/t2868.cmds
index ed8124a9e0..ed8124a9e0 100644
--- a/test/files/pos/t2868.cmds
+++ b/test/flaky/pos/t2868.cmds
diff --git a/test/files/pos/t2868/Jann.java b/test/flaky/pos/t2868/Jann.java
index f5b68de7b0..f5b68de7b0 100644
--- a/test/files/pos/t2868/Jann.java
+++ b/test/flaky/pos/t2868/Jann.java
diff --git a/test/files/pos/t2868/Nest.java b/test/flaky/pos/t2868/Nest.java
index 53652291ad..53652291ad 100644
--- a/test/files/pos/t2868/Nest.java
+++ b/test/flaky/pos/t2868/Nest.java
diff --git a/test/files/pos/t2868/pick_1.scala b/test/flaky/pos/t2868/pick_1.scala
index a211687432..a211687432 100644
--- a/test/files/pos/t2868/pick_1.scala
+++ b/test/flaky/pos/t2868/pick_1.scala
diff --git a/test/files/pos/t2868/t2868_src_2.scala b/test/flaky/pos/t2868/t2868_src_2.scala
index f11ef0fae2..f11ef0fae2 100644
--- a/test/files/pos/t2868/t2868_src_2.scala
+++ b/test/flaky/pos/t2868/t2868_src_2.scala
diff --git a/test/osgi/src/BasicLibrary.scala b/test/osgi/src/BasicLibrary.scala
new file mode 100644
index 0000000000..6618f02102
--- /dev/null
+++ b/test/osgi/src/BasicLibrary.scala
@@ -0,0 +1,37 @@
+package tools.test.osgi
+package libonly
+
+import org.junit.Assert._
+import org.ops4j.pax.exam.CoreOptions._
+
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.ops4j.pax.exam
+import org.ops4j.pax.exam.junit.{
+ Configuration,
+ ExamReactorStrategy,
+ JUnit4TestRunner
+}
+import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactorFactory
+import org.ops4j.pax.swissbox.framework.ServiceLookup
+import org.osgi.framework.BundleContext
+
+
+
+@RunWith(classOf[JUnit4TestRunner])
+@ExamReactorStrategy(Array(classOf[AllConfinedStagedReactorFactory]))
+class BasicLibraryTest extends ScalaOsgiHelper {
+ @Configuration
+ def config(): Array[exam.Option] =
+ justCoreLibraryOptions
+
+ @Test
+ def everythingLoads(): Unit = {
+ // Note - This tests sun.misc usage.
+ import scala.concurrent._
+ import scala.concurrent.duration.Duration.Inf
+ import ExecutionContext.Implicits._
+ val x = Future(2) map (_ + 1)
+ assertEquals(3, Await.result(x, Inf))
+ }
+}
diff --git a/test/osgi/src/BasicReflection.scala b/test/osgi/src/BasicReflection.scala
new file mode 100644
index 0000000000..8a0a05d531
--- /dev/null
+++ b/test/osgi/src/BasicReflection.scala
@@ -0,0 +1,66 @@
+package tools.test.osgi
+package reflection
+package basic
+
+import org.junit.Assert._
+import org.ops4j.pax.exam.CoreOptions._
+
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.ops4j.pax.exam
+import org.ops4j.pax.exam.junit.{
+ Configuration,
+ ExamReactorStrategy,
+ JUnit4TestRunner
+}
+import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactorFactory
+import org.ops4j.pax.swissbox.framework.ServiceLookup
+import org.osgi.framework.BundleContext
+
+
+class C {
+ val f1 = 2
+ var f2 = 3
+
+ def m1 = 4
+ def m2() = 5
+ def m3[T >: String <: Int]: T = ???
+ def m4[A[_], B <: A[Int]](x: A[B])(implicit y: Int) = ???
+ def m5(x: => Int, y: Int*): String = ???
+
+ class C
+ object M
+
+ override def toString = "an instance of C"
+}
+object M
+
+
+@RunWith(classOf[JUnit4TestRunner])
+@ExamReactorStrategy(Array(classOf[AllConfinedStagedReactorFactory]))
+class BasicReflectionTest extends ScalaOsgiHelper {
+
+ @Configuration
+ def config(): Array[exam.Option] =
+ justReflectionOptions
+
+ // Ensure Pax-exam requires C/M in our module
+ def dummy = {
+ new C
+ M.toString
+ }
+
+ @Test
+ def basicMirrorThroughOsgi(): Unit = {
+ // Note for now just assert that we can do this stuff.
+ import scala.reflect.runtime.universe._
+ val cm = runtimeMirror(classOf[C].getClassLoader)
+ val im = cm.reflect(new C)
+ assertEquals("Unable to reflect field name!",
+ "value f1",
+ im.reflectField(typeOf[C].member(newTermName("f1")).asTerm).symbol.toString)
+ assertEquals("Unable to reflect value!",
+ 2,
+ im.reflectField(typeOf[C].member(newTermName("f1")).asTerm).get)
+ }
+}
diff --git a/test/osgi/src/BasicTest.scala b/test/osgi/src/BasicTest.scala
new file mode 100644
index 0000000000..109b7b911a
--- /dev/null
+++ b/test/osgi/src/BasicTest.scala
@@ -0,0 +1,33 @@
+package tools.test.osgi
+
+import org.junit.Assert._
+import org.ops4j.pax.exam.CoreOptions._
+
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.ops4j.pax.exam
+import org.ops4j.pax.exam.junit.{
+ Configuration,
+ ExamReactorStrategy,
+ JUnit4TestRunner
+}
+import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactorFactory
+import org.ops4j.pax.swissbox.framework.ServiceLookup
+import org.osgi.framework.BundleContext
+
+
+
+
+
+@RunWith(classOf[JUnit4TestRunner])
+@ExamReactorStrategy(Array(classOf[AllConfinedStagedReactorFactory]))
+class BasicTest extends ScalaOsgiHelper {
+ @Configuration
+ def config(): Array[exam.Option] = {
+ // TODO - Find scala bundles.
+ standardOptions
+ }
+
+ @Test
+ def everythingLoads(): Unit = ()
+}
diff --git a/test/osgi/src/ReflectionToolboxTest.scala b/test/osgi/src/ReflectionToolboxTest.scala
new file mode 100644
index 0000000000..bb48078e95
--- /dev/null
+++ b/test/osgi/src/ReflectionToolboxTest.scala
@@ -0,0 +1,49 @@
+package tools.test.osgi
+package reflection
+package toolbox
+
+import org.junit.Assert._
+import org.ops4j.pax.exam.CoreOptions._
+
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.ops4j.pax.exam
+import org.ops4j.pax.exam.junit.{
+ Configuration,
+ ExamReactorStrategy,
+ JUnit4TestRunner
+}
+import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactorFactory
+import org.ops4j.pax.swissbox.framework.ServiceLookup
+import org.osgi.framework.BundleContext
+
+
+class C {
+ val f1 = 2
+}
+
+@RunWith(classOf[JUnit4TestRunner])
+@ExamReactorStrategy(Array(classOf[AllConfinedStagedReactorFactory]))
+class ReflectionToolBoxTest extends ScalaOsgiHelper {
+
+ @Configuration
+ def config(): Array[exam.Option] =
+ standardOptions
+
+
+ @Test
+ def basicMirrorThroughOsgi(): Unit = {
+ // Note - this tries to make sure when pulling a toolbox, we get the compiler.
+ import scala.reflect.runtime.universe._
+ import scala.tools.reflect.ToolBox
+ val cm = runtimeMirror(classOf[C].getClassLoader)
+ val tb = cm.mkToolBox()
+ val im = cm.reflect(new C)
+ val tree = tb.parse("1 to 3 map (_+1)")
+ val eval = tb.eval(tree)
+ assertEquals(Vector(2, 3, 4), eval)
+ assertEquals("Evaluate expression using local class.",
+ 2,
+ tb.eval(tb.parse("(new tools.test.osgi.reflection.toolbox.C).f1")))
+ }
+}
diff --git a/test/osgi/src/ScalaOsgiHelper.scala b/test/osgi/src/ScalaOsgiHelper.scala
new file mode 100644
index 0000000000..bcdc5c0df1
--- /dev/null
+++ b/test/osgi/src/ScalaOsgiHelper.scala
@@ -0,0 +1,36 @@
+package tools.test.osgi
+
+import org.ops4j.pax.exam.CoreOptions._
+import org.ops4j.pax.exam
+import java.io.File
+
+trait ScalaOsgiHelper {
+
+ private def allBundleFiles = {
+ def bundleLocation = new File(sys.props.getOrElse("scala.bundle.dir", "build/osgi"))
+ bundleLocation.listFiles filter (_.getName endsWith ".jar")
+ }
+
+ private def filteredBundleFiles(names: String*): Array[exam.Option] =
+ for(bundle <- allBundleFiles; if names exists (bundle.getName contains))
+ yield makeBundle(bundle)
+
+ private def makeBundle(file: File): exam.Option =
+ bundle(file.toURI.toASCIIString)
+
+ def standardOptions: Array[exam.Option] = {
+ val bundles = (allBundleFiles map makeBundle)
+ bundles ++ Array[exam.Option](felix(), equinox(), junitBundles())
+ }
+
+ def justReflectionOptions: Array[exam.Option] = {
+ val bundles = filteredBundleFiles("scala-library", "scala-reflect")
+ bundles ++ Array[exam.Option](felix(), equinox(), junitBundles())
+ }
+
+ def justCoreLibraryOptions: Array[exam.Option] = {
+ val bundles = filteredBundleFiles("scala-library")
+ bundles ++ Array[exam.Option](felix(), equinox(), junitBundles())
+ }
+
+}
diff --git a/test/partest.bat b/test/partest.bat
index 4c97a53122..b64347ce13 100755
--- a/test/partest.bat
+++ b/test/partest.bat
@@ -1,104 +1,104 @@
-@echo off
-
-rem ##########################################################################
-rem # Scala code runner 2.9.1.final
-rem ##########################################################################
-rem # (c) 2002-2011 LAMP/EPFL
-rem #
-rem # This is free software; see the distribution for copying conditions.
-rem # There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A
-rem # PARTICULAR PURPOSE.
-rem ##########################################################################
-
-rem We adopt the following conventions:
-rem - System/user environment variables start with a letter
-rem - Local batch variables start with an underscore ('_')
-
-if "%OS%"=="Windows_NT" (
- @setlocal
- call :set_home
- set _ARGS=%*
-) else (
- set _SCALA_HOME="%SCALA_HOME%"
- rem The following line tests SCALA_HOME instead of _SCALA_HOME, because
- rem the above change to _SCALA_HOME is not visible within this block.
- if "%SCALA_HOME%"=="" goto error1
- call :set_args
-)
-
-rem We use the value of the JAVACMD environment variable if defined
-set _JAVACMD=%JAVACMD%
-if "%_JAVACMD%"=="" set _JAVACMD=java
-
-rem We use the value of the JAVACCMD environment variable if defined
-set _JAVACCMD=%JAVACCMD%
-if "%_JAVACCMD%"=="" set _JAVACCMD=javac
-
-rem We use the value of the JAVA_OPTS environment variable if defined
-set _JAVA_OPTS=%JAVA_OPTS%
-if "%_JAVA_OPTS%"=="" set _JAVA_OPTS=-Xmx1024M -Xms64M
-
-rem We use the value of the SCALAC_OPTS environment variable if defined
-set _SCALAC_OPTS=%SCALAC_OPTS%
-if "%_SCALAC_OPTS%"=="" set _SCALAC_OPTS=-deprecation
-
-set _EXTENSION_CLASSPATH=
-if "%_EXTENSION_CLASSPATH%"=="" (
- if exist "%_SCALA_HOME%\lib\scala-partest.jar" (
- for %%f in ("%_SCALA_HOME%\lib\*") do call :add_cpath "%%f"
- if "%OS%"=="Windows_NT" (
- for /d %%f in ("%_SCALA_HOME%\lib\*") do call :add_cpath "%%f"
- )
- ) else if exist "%_SCALA_HOME%\build\pack\lib\scala-partest.jar" (
- for %%f in ("%_SCALA_HOME%\build\pack\lib\*") do call :add_cpath "%%f"
- if "%OS%"=="Windows_NT" (
- for /d %%f in ("%_SCALA_HOME%\build\pack\lib\*") do call :add_cpath "%%f"
- )
- )
-)
-
-set _PROPS=-Dscala.home="%_SCALA_HOME%" -Dpartest.javacmd="%_JAVACMD%" -Dpartest.java_options="%_JAVA_OPTS%" -Dpartest.scalac_options="%_SCALAC_OPTS%" -Dpartest.javac_cmd="%_JAVACCMD%"
-
-rem echo %_JAVACMD% %_JAVA_OPTS% %_PROPS% -cp "%_EXTENSION_CLASSPATH%" scala.tools.partest.nest.NestRunner %_ARGS%
-%_JAVACMD% %_JAVA_OPTS% %_PROPS% -cp "%_EXTENSION_CLASSPATH%" scala.tools.partest.nest.NestRunner %_ARGS%
-goto end
-
-rem ##########################################################################
-rem # subroutines
-
-:add_cpath
- if "%_EXTENSION_CLASSPATH%"=="" (
- set _EXTENSION_CLASSPATH=%~1
- ) else (
- set _EXTENSION_CLASSPATH=%_EXTENSION_CLASSPATH%;%~1
- )
-goto :eof
-
-rem Variable "%~dps0" works on WinXP SP2 or newer
-rem (see http://support.microsoft.com/?kbid=833431)
-rem set _SCALA_HOME=%~dps0..
-:set_home
- set _BIN_DIR=
- for %%i in (%~sf0) do set _BIN_DIR=%_BIN_DIR%%%~dpsi
- set _SCALA_HOME=%_BIN_DIR%..
-goto :eof
-
-:set_args
- set _ARGS=
- :loop
- rem Argument %1 may contain quotes so we use parentheses here
- if (%1)==() goto :eof
- set _ARGS=%_ARGS% %1
- shift
- goto loop
-
-rem ##########################################################################
-rem # errors
-
-:error1
-echo ERROR: environment variable SCALA_HOME is undefined. It should point to your installation directory.
-goto end
-
-:end
-if "%OS%"=="Windows_NT" @endlocal
-exit /b %errorlevel%
+@echo off
+
+rem ##########################################################################
+rem # Scala code runner 2.9.1.final
+rem ##########################################################################
+rem # (c) 2002-2011 LAMP/EPFL
+rem #
+rem # This is free software; see the distribution for copying conditions.
+rem # There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A
+rem # PARTICULAR PURPOSE.
+rem ##########################################################################
+
+rem We adopt the following conventions:
+rem - System/user environment variables start with a letter
+rem - Local batch variables start with an underscore ('_')
+
+if "%OS%"=="Windows_NT" (
+ @setlocal
+ call :set_home
+ set _ARGS=%*
+) else (
+ set _SCALA_HOME="%SCALA_HOME%"
+ rem The following line tests SCALA_HOME instead of _SCALA_HOME, because
+ rem the above change to _SCALA_HOME is not visible within this block.
+ if "%SCALA_HOME%"=="" goto error1
+ call :set_args
+)
+
+rem We use the value of the JAVACMD environment variable if defined
+set _JAVACMD=%JAVACMD%
+if "%_JAVACMD%"=="" set _JAVACMD=java
+
+rem We use the value of the JAVACCMD environment variable if defined
+set _JAVACCMD=%JAVACCMD%
+if "%_JAVACCMD%"=="" set _JAVACCMD=javac
+
+rem We use the value of the JAVA_OPTS environment variable if defined
+set _JAVA_OPTS=%JAVA_OPTS%
+if "%_JAVA_OPTS%"=="" set _JAVA_OPTS=-Xmx1024M -Xms64M
+
+rem We use the value of the SCALAC_OPTS environment variable if defined
+set _SCALAC_OPTS=%SCALAC_OPTS%
+if "%_SCALAC_OPTS%"=="" set _SCALAC_OPTS=-deprecation
+
+set _EXTENSION_CLASSPATH=
+if "%_EXTENSION_CLASSPATH%"=="" (
+ if exist "%_SCALA_HOME%\lib\scala-partest.jar" (
+ for %%f in ("%_SCALA_HOME%\lib\*") do call :add_cpath "%%f"
+ if "%OS%"=="Windows_NT" (
+ for /d %%f in ("%_SCALA_HOME%\lib\*") do call :add_cpath "%%f"
+ )
+ ) else if exist "%_SCALA_HOME%\build\pack\lib\scala-partest.jar" (
+ for %%f in ("%_SCALA_HOME%\build\pack\lib\*") do call :add_cpath "%%f"
+ if "%OS%"=="Windows_NT" (
+ for /d %%f in ("%_SCALA_HOME%\build\pack\lib\*") do call :add_cpath "%%f"
+ )
+ )
+)
+
+set _PROPS=-Dscala.home="%_SCALA_HOME%" -Dpartest.javacmd="%_JAVACMD%" -Dpartest.java_options="%_JAVA_OPTS%" -Dpartest.scalac_options="%_SCALAC_OPTS%" -Dpartest.javac_cmd="%_JAVACCMD%"
+
+rem echo %_JAVACMD% %_JAVA_OPTS% %_PROPS% -cp "%_EXTENSION_CLASSPATH%" scala.tools.partest.nest.NestRunner %_ARGS%
+%_JAVACMD% %_JAVA_OPTS% %_PROPS% -cp "%_EXTENSION_CLASSPATH%" scala.tools.partest.nest.NestRunner %_ARGS%
+goto end
+
+rem ##########################################################################
+rem # subroutines
+
+:add_cpath
+ if "%_EXTENSION_CLASSPATH%"=="" (
+ set _EXTENSION_CLASSPATH=%~1
+ ) else (
+ set _EXTENSION_CLASSPATH=%_EXTENSION_CLASSPATH%;%~1
+ )
+goto :eof
+
+rem Variable "%~dps0" works on WinXP SP2 or newer
+rem (see http://support.microsoft.com/?kbid=833431)
+rem set _SCALA_HOME=%~dps0..
+:set_home
+ set _BIN_DIR=
+ for %%i in (%~sf0) do set _BIN_DIR=%_BIN_DIR%%%~dpsi
+ set _SCALA_HOME=%_BIN_DIR%..
+goto :eof
+
+:set_args
+ set _ARGS=
+ :loop
+ rem Argument %1 may contain quotes so we use parentheses here
+ if (%1)==() goto :eof
+ set _ARGS=%_ARGS% %1
+ shift
+ goto loop
+
+rem ##########################################################################
+rem # errors
+
+:error1
+echo ERROR: environment variable SCALA_HOME is undefined. It should point to your installation directory.
+goto end
+
+:end
+if "%OS%"=="Windows_NT" @endlocal
+exit /b %errorlevel%
diff --git a/test/pending/neg/macro-invalidusage-badbounds-b.check b/test/pending/neg/macro-invalidusage-badbounds-b.check
new file mode 100644
index 0000000000..277f407d38
--- /dev/null
+++ b/test/pending/neg/macro-invalidusage-badbounds-b.check
@@ -0,0 +1,4 @@
+Macros_Test_2.scala:7: error: type arguments [Int] do not conform to macro method foo's type parameter bounds [U <: String]
+ foo[Int]
+ ^
+one error found
diff --git a/test/pending/neg/macro-invalidusage-badbounds-b.flags b/test/pending/neg/macro-invalidusage-badbounds-b.flags
new file mode 100644
index 0000000000..cd66464f2f
--- /dev/null
+++ b/test/pending/neg/macro-invalidusage-badbounds-b.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-badbounds/Impls_1.scala b/test/pending/neg/macro-invalidusage-badbounds-b/Impls_1.scala
index 89020de7dd..89020de7dd 100644
--- a/test/files/neg/macro-invalidusage-badbounds/Impls_1.scala
+++ b/test/pending/neg/macro-invalidusage-badbounds-b/Impls_1.scala
diff --git a/test/pending/neg/macro-invalidusage-badbounds-b/Macros_Test_2.scala b/test/pending/neg/macro-invalidusage-badbounds-b/Macros_Test_2.scala
new file mode 100644
index 0000000000..3139599108
--- /dev/null
+++ b/test/pending/neg/macro-invalidusage-badbounds-b/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo[U <: String] = macro Impls.foo[U]
+}
+
+object Test extends App {
+ import Macros._
+ foo[Int]
+}
\ No newline at end of file
diff --git a/test/pending/neg/reify_packed.check b/test/pending/neg/reify_packed.check
index adba330d56..f26b902896 100644
--- a/test/pending/neg/reify_packed.check
+++ b/test/pending/neg/reify_packed.check
@@ -1,4 +1,4 @@
-reify_packed.scala:6: error: implementation restriction: cannot reify block of type List[_$1] that involves a type declared inside the block being reified. consider casting the return value to a suitable type.
- reify {
- ^
-one error found
+reify_packed.scala:6: error: implementation restriction: cannot reify block of type List[_$1] that involves a type declared inside the block being reified. consider casting the return value to a suitable type.
+ reify {
+ ^
+one error found
diff --git a/test/pending/neg/reify_packed.scala b/test/pending/neg/reify_packed.scala
index 2004e031d5..7bdaa41915 100644
--- a/test/pending/neg/reify_packed.scala
+++ b/test/pending/neg/reify_packed.scala
@@ -11,5 +11,5 @@ object Test extends App {
};
val toolbox = cm.mkToolBox()
- println(toolbox.runExpr(code.tree))
+ println(toolbox.eval(code.tree))
}
\ No newline at end of file
diff --git a/test/pending/pos/t3943/Outer_1.java b/test/pending/pos/t3943/Outer_1.java
new file mode 100644
index 0000000000..56c8cc7f85
--- /dev/null
+++ b/test/pending/pos/t3943/Outer_1.java
@@ -0,0 +1,14 @@
+public class Outer_1<E> {
+ abstract class Inner {
+ abstract public void foo(E e);
+ }
+}
+
+class Child extends Outer_1<String> {
+ // the implicit prefix for Inner is Outer<E> instead of Outer<String>
+ public Inner getInner() {
+ return new Inner() {
+ public void foo(String e) { System.out.println("meh "+e); }
+ };
+ }
+}
diff --git a/test/pending/pos/t3943/test_2.scala b/test/pending/pos/t3943/test_2.scala
new file mode 100644
index 0000000000..a19db8b226
--- /dev/null
+++ b/test/pending/pos/t3943/test_2.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+ val x: Child = new Child
+ x.getInner.foo("meh")
+// ^
+// error: type mismatch;
+// found : java.lang.String("meh")
+// required: E
+}
diff --git a/test/pending/run/macro-expand-implicit-macro-defeats-type-inference.check b/test/pending/run/macro-expand-implicit-macro-defeats-type-inference.check
index 5af32f38e0..e7cb9c367b 100644
--- a/test/pending/run/macro-expand-implicit-macro-defeats-type-inference.check
+++ b/test/pending/run/macro-expand-implicit-macro-defeats-type-inference.check
@@ -1,6 +1,6 @@
-openImplicits are: List()
-enclosingImplicits are: List((List[Int],scala.this.Predef.implicitly[List[Int]]))
-typetag is: TypeTag[Nothing]
-openImplicits are: List()
-enclosingImplicits are: List((List[String],Test.this.bar[String]))
-typetag is: TypeTag[Nothing]
+openImplicits are: List()
+enclosingImplicits are: List((List[Int],scala.this.Predef.implicitly[List[Int]]))
+typetag is: TypeTag[Nothing]
+openImplicits are: List()
+enclosingImplicits are: List((List[String],Test.this.bar[String]))
+typetag is: TypeTag[Nothing]
diff --git a/test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Impls_1.scala b/test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Impls_1.scala
index 26d4a45fee..15bcb581c8 100644
--- a/test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Impls_1.scala
+++ b/test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Impls_1.scala
@@ -1,7 +1,7 @@
import scala.reflect.macros.Context
object Impls {
- def foo[T: c.AbsTypeTag](c: Context): c.Expr[List[T]] = c.universe.reify {
+ def foo[T: c.WeakTypeTag](c: Context): c.Expr[List[T]] = c.universe.reify {
println("openImplicits are: " + c.literal(c.openImplicits.toString).splice)
println("enclosingImplicits are: " + c.literal(c.enclosingImplicits.toString).splice)
println("typetag is: " + c.literal(c.tag[T].toString).splice)
diff --git a/test/pending/run/macro-expand-tparams-prefix-e1/Impls_1.scala b/test/pending/run/macro-expand-tparams-prefix-e1/Impls_1.scala
index d6ebb907e5..26de70cc12 100644
--- a/test/pending/run/macro-expand-tparams-prefix-e1/Impls_1.scala
+++ b/test/pending/run/macro-expand-tparams-prefix-e1/Impls_1.scala
@@ -1,11 +1,11 @@
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def foo[T, U: c.AbsTypeTag, V](c: Ctx)(implicit T: c.AbsTypeTag[T], V: c.AbsTypeTag[V]): c.Expr[Unit] = {
+ def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
import c.universe._
Block(List(
Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString)))),
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.AbsTypeTag[U]].toString)))),
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(V.toString))))),
Literal(Constant(())))
}
diff --git a/test/pending/run/macro-expand-tparams-prefix-f1/Impls_1.scala b/test/pending/run/macro-expand-tparams-prefix-f1/Impls_1.scala
index d6ebb907e5..26de70cc12 100644
--- a/test/pending/run/macro-expand-tparams-prefix-f1/Impls_1.scala
+++ b/test/pending/run/macro-expand-tparams-prefix-f1/Impls_1.scala
@@ -1,11 +1,11 @@
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def foo[T, U: c.AbsTypeTag, V](c: Ctx)(implicit T: c.AbsTypeTag[T], V: c.AbsTypeTag[V]): c.Expr[Unit] = {
+ def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
import c.universe._
Block(List(
Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString)))),
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.AbsTypeTag[U]].toString)))),
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(V.toString))))),
Literal(Constant(())))
}
diff --git a/test/pending/run/macro-reify-array/Macros_1.scala b/test/pending/run/macro-reify-array/Macros_1.scala
index 99006c548a..f970be5caa 100644
--- a/test/pending/run/macro-reify-array/Macros_1.scala
+++ b/test/pending/run/macro-reify-array/Macros_1.scala
@@ -4,7 +4,7 @@ object Macros {
def foo[T](s: String) = macro Impls.foo[T]
object Impls {
- def foo[T: c.AbsTypeTag](c: Ctx)(s: c.Expr[T]) = c.universe.reify {
+ def foo[T: c.WeakTypeTag](c: Ctx)(s: c.Expr[T]) = c.universe.reify {
Array(s.splice)
}
}
diff --git a/test/pending/run/macro-reify-tagful-b/Macros_1.scala b/test/pending/run/macro-reify-tagful-b/Macros_1.scala
index a14187e8a7..59dbe7157b 100644
--- a/test/pending/run/macro-reify-tagful-b/Macros_1.scala
+++ b/test/pending/run/macro-reify-tagful-b/Macros_1.scala
@@ -4,7 +4,7 @@ object Macros {
def foo[T](s: T) = macro Impls.foo[List[T]]
object Impls {
- def foo[T: c.AbsTypeTag](c: Ctx)(s: c.Expr[T]) = c.universe.reify {
+ def foo[T: c.WeakTypeTag](c: Ctx)(s: c.Expr[T]) = c.universe.reify {
List(s.splice)
}
}
diff --git a/test/pending/run/macro-reify-tagless-b.check b/test/pending/run/macro-reify-tagless-b.check
index 49acd94ad6..61ebb4e547 100644
--- a/test/pending/run/macro-reify-tagless-b.check
+++ b/test/pending/run/macro-reify-tagless-b.check
@@ -1,3 +1,3 @@
error: macro must not return an expr that contains free type variables (namely: T). have you forgot to use c.TypeTag annotations for type parameters external to a reifee?
-
-java.lang.Error: reflective compilation has failed
+
+java.lang.Error: reflective compilation has failed
diff --git a/test/pending/run/macro-reify-tagless-b/Test_2.scala b/test/pending/run/macro-reify-tagless-b/Test_2.scala
index 4649963d05..ebd35ffe47 100644
--- a/test/pending/run/macro-reify-tagless-b/Test_2.scala
+++ b/test/pending/run/macro-reify-tagless-b/Test_2.scala
@@ -9,5 +9,5 @@ object Test extends App {
val rhs = Apply(Select(Ident("Macros"), newTermName("foo")), List(Literal(Constant("hello world"))))
val list = ValDef(NoMods, newTermName("list"), tpt, rhs)
val tree = Block(list, Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Ident(list.name))))
- println(cm.mkToolBox().runExpr(tree))
+ println(cm.mkToolBox().eval(tree))
}
diff --git a/test/pending/run/macro-reify-typetag-hktypeparams-notags.check b/test/pending/run/macro-reify-typetag-hktypeparams-notags.check
index db8a19d5f7..53acc9184c 100644
--- a/test/pending/run/macro-reify-typetag-hktypeparams-notags.check
+++ b/test/pending/run/macro-reify-typetag-hktypeparams-notags.check
@@ -1,2 +1,2 @@
-TypeTag(C[T])
-TypeTag(List[C[T]])
+TypeTag(C[T])
+TypeTag(List[C[T]])
diff --git a/test/pending/run/reflection-mem-eval.scala b/test/pending/run/reflection-mem-eval.scala
new file mode 100644
index 0000000000..9045c44cd6
--- /dev/null
+++ b/test/pending/run/reflection-mem-eval.scala
@@ -0,0 +1,26 @@
+import scala.tools.partest.MemoryTest
+
+trait A { type T <: A }
+trait B { type T <: B }
+
+object Test extends MemoryTest {
+ lazy val tb = {
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.tools.reflect.ToolBox
+ cm.mkToolBox()
+ }
+
+ override def maxDelta = 10
+ override def calcsPerIter = 3
+ override def calc() {
+ var snippet = """
+ trait A { type T <: A }
+ trait B { type T <: B }
+ def foo[T](x: List[T]) = x
+ foo(List(new A {}, new B {}))
+ """.trim
+ snippet = snippet + "\n" + (List.fill(50)(snippet.split("\n").last) mkString "\n")
+ tb.eval(tb.parse(snippet))
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_closure2b.scala b/test/pending/run/reify_closure2b.scala
index 565bb03b2f..0f126c8c91 100644
--- a/test/pending/run/reify_closure2b.scala
+++ b/test/pending/run/reify_closure2b.scala
@@ -12,7 +12,7 @@ object Test extends App {
}
val toolbox = cm.mkToolBox()
- val dyn = toolbox.runExpr(new Foo(y).fun.tree)
+ val dyn = toolbox.eval(new Foo(y).fun.tree)
dyn.asInstanceOf[Int => Int]
}
diff --git a/test/pending/run/reify_closure3b.scala b/test/pending/run/reify_closure3b.scala
index 0d806b148b..54ac52ba0b 100644
--- a/test/pending/run/reify_closure3b.scala
+++ b/test/pending/run/reify_closure3b.scala
@@ -14,7 +14,7 @@ object Test extends App {
}
val toolbox = cm.mkToolBox()
- val dyn = toolbox.runExpr(new Foo(y).fun.tree)
+ val dyn = toolbox.eval(new Foo(y).fun.tree)
dyn.asInstanceOf[Int => Int]
}
diff --git a/test/pending/run/reify_closure4b.scala b/test/pending/run/reify_closure4b.scala
index 1a349de072..34f707e092 100644
--- a/test/pending/run/reify_closure4b.scala
+++ b/test/pending/run/reify_closure4b.scala
@@ -14,7 +14,7 @@ object Test extends App {
}
val toolbox = cm.mkToolBox()
- val dyn = toolbox.runExpr(new Foo(y).fun.tree)
+ val dyn = toolbox.eval(new Foo(y).fun.tree)
dyn.asInstanceOf[Int => Int]
}
diff --git a/test/pending/run/reify_closure5b.scala b/test/pending/run/reify_closure5b.scala
index 3e5e1bd328..0e506bf7b5 100644
--- a/test/pending/run/reify_closure5b.scala
+++ b/test/pending/run/reify_closure5b.scala
@@ -12,7 +12,7 @@ object Test extends App {
}
val toolbox = cm.mkToolBox()
- val dyn = toolbox.runExpr(new Foo(ys).fun.tree)
+ val dyn = toolbox.eval(new Foo(ys).fun.tree)
dyn.asInstanceOf[Int => Int]
}
diff --git a/test/pending/run/reify_closure9a.scala b/test/pending/run/reify_closure9a.scala
index dddfa3f6c2..f39ff1e2f3 100644
--- a/test/pending/run/reify_closure9a.scala
+++ b/test/pending/run/reify_closure9a.scala
@@ -10,7 +10,7 @@ object Test extends App {
}
val toolbox = cm.mkToolBox()
- val dyn = toolbox.runExpr(new Foo(y).fun.tree)
+ val dyn = toolbox.eval(new Foo(y).fun.tree)
dyn.asInstanceOf[Int]
}
diff --git a/test/pending/run/reify_closure9b.scala b/test/pending/run/reify_closure9b.scala
index df9db9b806..a6920b4e02 100644
--- a/test/pending/run/reify_closure9b.scala
+++ b/test/pending/run/reify_closure9b.scala
@@ -10,7 +10,7 @@ object Test extends App {
}
val toolbox = cm.mkToolBox()
- val dyn = toolbox.runExpr(new Foo(y).fun.tree)
+ val dyn = toolbox.eval(new Foo(y).fun.tree)
dyn.asInstanceOf[Int]
}
diff --git a/test/pending/run/reify_closures11.scala b/test/pending/run/reify_closures11.scala
index 4c21033cbc..9156208b40 100644
--- a/test/pending/run/reify_closures11.scala
+++ b/test/pending/run/reify_closures11.scala
@@ -10,7 +10,7 @@ object Test extends App {
}
val toolbox = cm.mkToolBox()
- val dyn = toolbox.runExpr(fun().tree)
+ val dyn = toolbox.eval(fun().tree)
val foo = dyn.asInstanceOf[Int]
println(foo)
}
\ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_09c.scala b/test/pending/run/reify_newimpl_09c.scala
index e2f4a4923a..6bde36328e 100644
--- a/test/pending/run/reify_newimpl_09c.scala
+++ b/test/pending/run/reify_newimpl_09c.scala
@@ -14,7 +14,7 @@ object Test extends App {
val code = foo[Int]
println(code.tree.freeTypes)
val W = code.tree.freeTypes(2)
- cm.mkToolBox().runExpr(code.tree, Map(W -> definitions.IntTpe))
+ cm.mkToolBox().eval(code.tree, Map(W -> definitions.IntTpe))
println(code.eval)
}
}
\ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_46.scala b/test/pending/run/reify_newimpl_46.scala
index 239c53953b..d063be0486 100644
--- a/test/pending/run/reify_newimpl_46.scala
+++ b/test/pending/run/reify_newimpl_46.scala
@@ -8,7 +8,7 @@ object Test extends App {
val code = reify{val x: T[String] = null; println("ima worx"); x}.tree
println(code.freeTypes)
val T = code.freeTypes(0)
- cm.mkToolBox().runExpr(code, Map(T -> definitions.ListClass.asType))
+ cm.mkToolBox().eval(code, Map(T -> definitions.ListClass.asType))
}
new C[List]
diff --git a/test/pending/run/reify_newimpl_53.scala b/test/pending/run/reify_newimpl_53.scala
index a73a0b94cb..54fa4bec1d 100644
--- a/test/pending/run/reify_newimpl_53.scala
+++ b/test/pending/run/reify_newimpl_53.scala
@@ -11,7 +11,7 @@ object Test extends App {
}.tree
println(code.freeTypes)
val T = code.freeTypes(0)
- cm.mkToolBox().runExpr(code, Map(T -> definitions.StringClass.asType))
+ cm.mkToolBox().eval(code, Map(T -> definitions.StringClass.asType))
}
new C[String]
diff --git a/test/pending/run/t5943b1.scala b/test/pending/run/t5943b1.scala
new file mode 100644
index 0000000000..0d54718753
--- /dev/null
+++ b/test/pending/run/t5943b1.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+// pending until https://issues.scala-lang.org/browse/SI-6393 is fixed
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val expr = tb.parse("math.sqrt(4.0)")
+ println(tb.typeCheck(expr))
+}
\ No newline at end of file
diff --git a/test/pending/run/t5943b2.scala b/test/pending/run/t5943b2.scala
new file mode 100644
index 0000000000..85299d9f12
--- /dev/null
+++ b/test/pending/run/t5943b2.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+// pending until https://issues.scala-lang.org/browse/SI-6393 is fixed
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val expr = tb.parse("math.sqrt(4.0)")
+ println(tb.eval(expr))
+}
\ No newline at end of file
diff --git a/test/scaladoc/resources/implicits-known-type-classes-res.scala b/test/scaladoc/resources/implicits-known-type-classes-res.scala
index 9ad652947d..77c91aafce 100644
--- a/test/scaladoc/resources/implicits-known-type-classes-res.scala
+++ b/test/scaladoc/resources/implicits-known-type-classes-res.scala
@@ -6,7 +6,8 @@ package scala.test.scaladoc.implicits.typeclasses {
class A[T]
object A {
import language.implicitConversions
- import scala.reflect.{ClassTag, TypeTag}
+ import scala.reflect.ClassTag
+ import scala.reflect.runtime.universe.TypeTag
implicit def convertNumeric [T: Numeric] (a: A[T]) = new B(implicitly[Numeric[T]])
implicit def convertIntegral [T: Integral] (a: A[T]) = new B(implicitly[Integral[T]])
implicit def convertFractional [T: Fractional] (a: A[T]) = new B(implicitly[Fractional[T]])
diff --git a/test/scaladoc/resources/links.scala b/test/scaladoc/resources/links.scala
index 679d0b0dce..09a52a4334 100644
--- a/test/scaladoc/resources/links.scala
+++ b/test/scaladoc/resources/links.scala
@@ -34,24 +34,29 @@ package scala.test.scaladoc.links {
/**
* Links to the trait:
- * - [[scala.test.scaladoc.links.Target!.T trait Target -> type T]]
- * - [[test.scaladoc.links.Target!.S trait Target -> type S]]
- * - [[scaladoc.links.Target!.foo(Int)* trait Target -> def foo]]
- * - [[links.Target!.bar trait Target -> def bar]]
- * - [[[[Target!.foo[A[_[_]]]* trait Target -> def foo with 3 nested tparams]]]] (should exercise nested parens)
- * - [[Target$.T object Target -> type T]]
- * - [[Target$.S object Target -> type S]]
- * - [[Target$.foo(Str* object Target -> def foo]]
- * - [[Target$.bar object Target -> def bar]]
- * - [[[[Target$.foo[A[_[_]]]* trait Target -> def foo with 3 nested tparams]]]] (should exercise nested parens)
- * - [[Target.onlyInObject object Target -> def foo]] (should find the object)
- * - [[Target$.C object Target -> class C]] (should link directly to C, not as a member)
- * - [[Target!.C trait Target -> class C]] (should link directly to C, not as a member)
- * - [[Target$.baz(links\.C)* object Target -> def baz]] (should use dots in prefix)
- * - [[Target!.baz(links\.C)* trait Target -> def baz]] (should use dots in prefix)
- * - [[localMethod object TEST -> localMethod]] (should use the current template to resolve link instead of inTpl, that's the package)
+ * - [[scala.test.scaladoc.links.Target$ object Test]]
+ * - [[scala.test package scala.test]]
+ * - [[scala.test.scaladoc.links.Target!.T trait Target -> type T]]
+ * - [[test.scaladoc.links.Target!.S trait Target -> type S]]
+ * - [[scaladoc.links.Target!.foo(i:Int)* trait Target -> def foo]]
+ * - [[links.Target!.bar trait Target -> def bar]]
+ * - [[[[Target!.foo[A[_[_]]]* trait Target -> def foo with 3 nested tparams]]]] (should exercise nested parens)
+ * - [[Target$.T object Target -> type T]]
+ * - [[Target$.S object Target -> type S]]
+ * - [[Target$.foo(z:Str* object Target -> def foo]]
+ * - [[Target$.bar object Target -> def bar]]
+ * - [[[[Target$.foo[A[_[_]]]* trait Target -> def foo with 3 nested tparams]]]] (should exercise nested parens)
+ * - [[Target.onlyInObject object Target -> def foo]] (should find the object)
+ * - [[Target$.C object Target -> class C]] (should link directly to C, not as a member)
+ * - [[Target!.C trait Target -> class C]] (should link directly to C, not as a member)
+ * - [[Target$.baz(c:scala\.test\.scaladoc\.links\.C)* object Target -> def baz]] (should use dots in prefix)
+ * - [[Target!.baz(c:scala\.test\.scaladoc\.links\.C)* trait Target -> def baz]] (should use dots in prefix)
+ * - [[localMethod object TEST -> localMethod]] (should use the current template to resolve link instead of inTpl, that's the package)
+ * - [[#localMethod object TEST -> localMethod]] (should exercise Java-style links to empty members)
+ * - [[ImOutside class ImOutside (check correct lookup in EmptyPackage)]]
*/
object TEST {
def localMethod = 3
}
}
+class ImOutside
\ No newline at end of file
diff --git a/test/scaladoc/run/implicits-known-type-classes.scala b/test/scaladoc/run/implicits-known-type-classes.scala
index 9f4ca372b0..471a1a219d 100644
--- a/test/scaladoc/run/implicits-known-type-classes.scala
+++ b/test/scaladoc/run/implicits-known-type-classes.scala
@@ -24,7 +24,7 @@ object Test extends ScaladocModelTest {
val A = base._class("A")
- for (conversion <- A.conversions if !conversion.isCommonConversion) {
+ for (conversion <- A.conversions if !conversion.isHiddenConversion) {
assert(conversion.constraints.length == 1, conversion.constraints.length + " == 1 (in " + conversion + ")")
assert(conversion.constraints.head.isInstanceOf[KnownTypeClassConstraint],
conversion.constraints.head + " is not a known type class constraint!")
diff --git a/test/scaladoc/run/links.scala b/test/scaladoc/run/links.scala
index 40ce6368ce..de359539cf 100644
--- a/test/scaladoc/run/links.scala
+++ b/test/scaladoc/run/links.scala
@@ -22,7 +22,7 @@ object Test extends ScaladocModelTest {
val memberLinks = countLinks(TEST.comment.get, _.link.isInstanceOf[LinkToMember])
val templateLinks = countLinks(TEST.comment.get, _.link.isInstanceOf[LinkToTpl])
- assert(memberLinks == 14, memberLinks + " == 14 (the member links in object TEST)")
- assert(templateLinks == 2, templateLinks + " == 2 (the template links in object TEST)")
+ assert(memberLinks == 15, memberLinks + " == 15 (the member links in object TEST)")
+ assert(templateLinks == 5, templateLinks + " == 5 (the template links in object TEST)")
}
}
\ No newline at end of file
diff --git a/tools/binary-repo-lib.sh b/tools/binary-repo-lib.sh
index 64f62a103d..4fe6dd67a0 100755
--- a/tools/binary-repo-lib.sh
+++ b/tools/binary-repo-lib.sh
@@ -3,7 +3,8 @@
# Library to push and pull binary artifacts from a remote repository using CURL.
-remote_urlbase="http://typesafe.artifactoryonline.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap"
+remote_urlget="http://repo.typesafe.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap"
+remote_urlpush="http://typesafe.artifactoryonline.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap"
libraryJar="$(pwd)/lib/scala-library.jar"
desired_ext=".desired.sha1"
push_jar="$(pwd)/tools/push.jar"
@@ -35,8 +36,8 @@ curlUpload() {
local data=$2
local user=$3
local password=$4
- local url="${remote_urlbase}/${remote_location}"
- java -jar $push_jar "$data" "$remote_location" "$user" "$password"
+ local url="${remote_urlpush}/${remote_location}"
+ java -jar $push_jar "$data" "$url" "$user" "$password"
if (( $? != 0 )); then
echo "Error uploading $data to $url"
echo "$url"
@@ -77,7 +78,7 @@ pushJarFile() {
pushd $jar_dir >/dev/null
local version=$(makeJarSha $jar_name)
local remote_uri=${version}${jar#$basedir}
- echo " Pushing to ${remote_urlbase}/${remote_uri} ..."
+ echo " Pushing to ${remote_urlpush}/${remote_uri} ..."
echo " $curl"
curlUpload $remote_uri $jar_name $user $pw
echo " Making new sha1 file ...."
@@ -136,7 +137,7 @@ pushJarFiles() {
local user=$2
local password=$3
# TODO - ignore target/ and build/
- local jarFiles="$(find ${basedir}/lib -name "*.jar") $(find ${basedir}/test/files -name "*.jar")"
+ local jarFiles="$(find ${basedir}/lib -name "*.jar") $(find ${basedir}/test/files -name "*.jar") $(find ${basedir}/tools -name "*.jar")"
local changed="no"
for jar in $jarFiles; do
local valid=$(isJarFileValid $jar)
@@ -188,7 +189,9 @@ pullJarFileToCache() {
rm -f "$cache_loc"
fi
if [[ ! -f "$cache_loc" ]]; then
- curlDownload $cache_loc ${remote_urlbase}/${uri}
+ # Note: After we follow up with JFrog, we should check the more stable raw file server first
+ # before hitting the more flaky artifactory.
+ curlDownload $cache_loc ${remote_urlget}/${uri}
if test "$(checkJarSha "$cache_loc" "$sha")" != "OK"; then
echo "Trouble downloading $uri. Please try pull-binary-libs again when your internet connection is stable."
exit 2
diff --git a/tools/class-dump b/tools/class-dump
new file mode 100755
index 0000000000..06a7e5acbc
--- /dev/null
+++ b/tools/class-dump
@@ -0,0 +1,6 @@
+#!/bin/sh
+#
+
+JAVAP_OPTS="-private"
+
+[[ -n "$1" ]] && ( cd "$(dirname "$1")" && javap $JAVAP_OPTS "$(basename "${1%%.class}")" )
diff --git a/tools/get-scala-commit-date.bat b/tools/get-scala-commit-date.bat
index 2a75073633..e169de1b04 100644
--- a/tools/get-scala-commit-date.bat
+++ b/tools/get-scala-commit-date.bat
@@ -1,9 +1,9 @@
-@echo off
-for %%X in (bash.exe) do (set FOUND=%%~$PATH:X)
-if defined FOUND (
- bash "%~dp0\get-scala-commit-date"
-) else (
- rem echo this script does not work with cmd.exe. please, install bash
- echo unknown
- exit 1
+@echo off
+for %%X in (bash.exe) do (set FOUND=%%~$PATH:X)
+if defined FOUND (
+ bash "%~dp0\get-scala-commit-date"
+) else (
+ rem echo this script does not work with cmd.exe. please, install bash
+ echo unknown
+ exit 1
)
\ No newline at end of file
diff --git a/tools/get-scala-commit-sha.bat b/tools/get-scala-commit-sha.bat
index 390e2d99d0..1eaffc0a15 100644
--- a/tools/get-scala-commit-sha.bat
+++ b/tools/get-scala-commit-sha.bat
@@ -1,9 +1,9 @@
-@echo off
-for %%X in (bash.exe) do (set FOUND=%%~$PATH:X)
-if defined FOUND (
- bash "%~dp0\get-scala-commit-sha"
-) else (
- rem echo this script does not work with cmd.exe. please, install bash
- echo unknown
- exit 1
+@echo off
+for %%X in (bash.exe) do (set FOUND=%%~$PATH:X)
+if defined FOUND (
+ bash "%~dp0\get-scala-commit-sha"
+) else (
+ rem echo this script does not work with cmd.exe. please, install bash
+ echo unknown
+ exit 1
)
\ No newline at end of file
diff --git a/tools/jar-dump b/tools/jar-dump
new file mode 100755
index 0000000000..166441b330
--- /dev/null
+++ b/tools/jar-dump
@@ -0,0 +1,4 @@
+#!/bin/sh
+#
+
+jar tf "$1" | sort
diff --git a/tools/push.jar.desired.sha1 b/tools/push.jar.desired.sha1
index 53d566f525..63e6a47372 100644
--- a/tools/push.jar.desired.sha1
+++ b/tools/push.jar.desired.sha1
@@ -1 +1 @@
-de5d3eb21a732e4bce44c283ccfbd1ed94bfeaed ?push.jar
+a1883f4304d5aa65e1f6ee6aad5900c62dd81079 ?push.jar