-rw-r--r--CONTRIBUTING.md2
-rw-r--r--README.rst13
-rwxr-xr-x[-rw-r--r--]build.xml465
-rw-r--r--gitignore.SAMPLE7
-rw-r--r--lib/forkjoin.jar.desired.sha12
-rw-r--r--lib/jline.jar.desired.sha12
-rw-r--r--lib/scala-compiler-src.jar.desired.sha12
-rw-r--r--lib/scala-compiler.jar.desired.sha12
-rw-r--r--lib/scala-library-src.jar.desired.sha12
-rw-r--r--lib/scala-library.jar.desired.sha12
-rw-r--r--lib/scala-reflect-src.jar.desired.sha12
-rw-r--r--lib/scala-reflect.jar.desired.sha12
-rw-r--r--project/Build.scala334
-rw-r--r--project/Layers.scala116
-rw-r--r--project/Packaging.scala129
-rw-r--r--project/Partest.scala140
-rw-r--r--project/Release.scala30
-rw-r--r--project/RemoteDependencies.scala53
-rw-r--r--project/Sametest.scala63
-rw-r--r--project/ScalaBuildKeys.scala23
-rw-r--r--project/ScalaToolRunner.scala21
-rw-r--r--project/ShaResolve.scala148
-rw-r--r--project/Testing.scala41
-rw-r--r--project/VerifyClassLoad.scala46
-rw-r--r--project/Versions.scala142
-rw-r--r--project/plugins.sbt9
-rw-r--r--project/project/Build.scala7
-rw-r--r--src/actors/scala/actors/remote/TcpService.scala29
-rw-r--r--src/actors/scala/actors/scheduler/ForkJoinScheduler.scala2
-rw-r--r--src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala2
-rw-r--r--src/actors/scala/actors/scheduler/TerminationService.scala2
-rw-r--r--src/actors/scala/actors/scheduler/ThreadPoolConfig.scala5
-rw-r--r--src/asm/scala/tools/asm/tree/InsnList.java8
-rw-r--r--src/build/maven/maven-deploy.xml6
-rw-r--r--src/build/maven/scala-actors-pom.xml3
-rw-r--r--src/build/maven/scala-library-pom.xml3
-rw-r--r--src/build/maven/scala-reflect-pom.xml4
-rw-r--r--src/build/maven/scala-swing-pom.xml3
-rw-r--r--src/build/pack.xml4
-rw-r--r--src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala33
-rw-r--r--src/compiler/scala/reflect/macros/compiler/Errors.scala113
-rw-r--r--src/compiler/scala/reflect/macros/compiler/Resolvers.scala91
-rw-r--r--src/compiler/scala/reflect/macros/compiler/Validators.scala193
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Aliases.scala (renamed from src/compiler/scala/reflect/macros/runtime/Aliases.scala)6
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Context.scala29
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Enclosures.scala36
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Evals.scala (renamed from src/compiler/scala/reflect/macros/runtime/Evals.scala)4
-rw-r--r--src/compiler/scala/reflect/macros/contexts/ExprUtils.scala (renamed from src/compiler/scala/reflect/macros/runtime/ExprUtils.scala)2
-rw-r--r--src/compiler/scala/reflect/macros/contexts/FrontEnds.scala (renamed from src/compiler/scala/reflect/macros/runtime/FrontEnds.scala)4
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Infrastructure.scala (renamed from src/compiler/scala/reflect/macros/runtime/Infrastructure.scala)2
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Names.scala (renamed from src/compiler/scala/reflect/macros/runtime/Names.scala)2
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Parsers.scala (renamed from src/compiler/scala/reflect/macros/runtime/Parsers.scala)2
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Reifiers.scala (renamed from src/compiler/scala/reflect/macros/runtime/Reifiers.scala)2
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Synthetics.scala (renamed from src/compiler/scala/reflect/macros/runtime/Synthetics.scala)21
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Traces.scala (renamed from src/compiler/scala/reflect/macros/runtime/Traces.scala)2
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Typers.scala (renamed from src/compiler/scala/reflect/macros/runtime/Typers.scala)27
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Context.scala29
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Enclosures.scala36
-rw-r--r--src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala31
-rw-r--r--src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala74
-rw-r--r--src/compiler/scala/reflect/macros/runtime/ScalaReflectionRuntimes.scala33
-rw-r--r--src/compiler/scala/reflect/macros/runtime/package.scala5
-rw-r--r--src/compiler/scala/reflect/macros/util/Helpers.scala71
-rw-r--r--src/compiler/scala/reflect/macros/util/Traces.scala2
-rw-r--r--src/compiler/scala/reflect/reify/Reifier.scala23
-rw-r--r--src/compiler/scala/reflect/reify/Taggers.scala2
-rw-r--r--src/compiler/scala/reflect/reify/package.scala8
-rw-r--r--src/compiler/scala/reflect/reify/phases/Metalevels.scala8
-rw-r--r--src/compiler/scala/reflect/reify/phases/Reshape.scala2
-rw-r--r--src/compiler/scala/reflect/reify/utils/Extractors.scala2
-rw-r--r--src/compiler/scala/tools/ant/Same.scala3
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Break.scala3
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Make.scala3
-rw-r--r--src/compiler/scala/tools/ant/sabbus/ScalacFork.scala3
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Use.scala3
-rw-r--r--src/compiler/scala/tools/ant/templates/tool-windows.tmpl5
-rw-r--r--src/compiler/scala/tools/cmd/Demo.scala3
-rw-r--r--src/compiler/scala/tools/cmd/Interpolation.scala3
-rw-r--r--src/compiler/scala/tools/cmd/gen/AnyVals.scala29
-rw-r--r--src/compiler/scala/tools/cmd/package.scala3
-rw-r--r--src/compiler/scala/tools/nsc/CompileClient.scala5
-rw-r--r--src/compiler/scala/tools/nsc/CompileSocket.scala8
-rw-r--r--src/compiler/scala/tools/nsc/Driver.scala3
-rw-r--r--src/compiler/scala/tools/nsc/Global.scala91
-rw-r--r--src/compiler/scala/tools/nsc/Main.scala2
-rw-r--r--src/compiler/scala/tools/nsc/MainTokenMetric.scala3
-rw-r--r--src/compiler/scala/tools/nsc/ScriptRunner.scala3
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/DocComments.scala5
-rw-r--r--src/compiler/scala/tools/nsc/ast/Printers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala3
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeDSL.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeInfo.scala8
-rw-r--r--src/compiler/scala/tools/nsc/ast/Trees.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala274
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Scanners.scala9
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala92
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala15
-rw-r--r--src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala23
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala35
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/GenICode.scala21
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala3
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Members.scala5
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala9
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala8
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala3
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala3
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala3
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala66
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala109
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala25
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala6
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala54
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala2
-rw-r--r--src/compiler/scala/tools/nsc/io/Jar.scala2
-rw-r--r--src/compiler/scala/tools/nsc/io/Lexer.scala4
-rw-r--r--src/compiler/scala/tools/nsc/io/PrettyWriter.scala2
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaParsers.scala52
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaScanners.scala2
-rw-r--r--src/compiler/scala/tools/nsc/package.scala4
-rw-r--r--src/compiler/scala/tools/nsc/plugins/Plugin.scala11
-rw-r--r--src/compiler/scala/tools/nsc/plugins/Plugins.scala5
-rw-r--r--src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala3
-rw-r--r--src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala3
-rw-r--r--src/compiler/scala/tools/nsc/settings/MutableSettings.scala3
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaSettings.scala8
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaVersion.scala51
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala15
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala640
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala185
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala3
-rw-r--r--src/compiler/scala/tools/nsc/transform/AddInterfaces.scala6
-rw-r--r--src/compiler/scala/tools/nsc/transform/CleanUp.scala352
-rw-r--r--src/compiler/scala/tools/nsc/transform/Constructors.scala215
-rw-r--r--src/compiler/scala/tools/nsc/transform/Erasure.scala68
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala7
-rw-r--r--src/compiler/scala/tools/nsc/transform/LambdaLift.scala78
-rw-r--r--src/compiler/scala/tools/nsc/transform/LazyVals.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/Mixin.scala19
-rw-r--r--src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala151
-rw-r--r--src/compiler/scala/tools/nsc/transform/TailCalls.scala246
-rw-r--r--src/compiler/scala/tools/nsc/transform/UnCurry.scala42
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/Logic.scala42
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala10
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala14
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala22
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala8
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala8
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/Solving.scala1
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Checkable.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala3
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala286
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Contexts.scala790
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Duplicators.scala10
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala196
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Infer.scala947
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Macros.scala696
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala4
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala30
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala59
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala101
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala29
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala45
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala48
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Tags.scala6
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala28
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala4
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala1622
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Unapplies.scala5
-rw-r--r--src/compiler/scala/tools/nsc/util/ClassPath.scala23
-rw-r--r--src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala3
-rw-r--r--src/compiler/scala/tools/nsc/util/ShowPickled.scala5
-rw-r--r--src/compiler/scala/tools/nsc/util/TreeSet.scala20
-rw-r--r--src/compiler/scala/tools/nsc/util/package.scala13
-rw-r--r--src/compiler/scala/tools/reflect/FastTrack.scala3
-rw-r--r--src/compiler/scala/tools/reflect/MacroImplementations.scala80
-rw-r--r--src/compiler/scala/tools/reflect/ToolBoxFactory.scala24
-rw-r--r--src/compiler/scala/tools/reflect/WrappedProperties.scala1
-rw-r--r--src/compiler/scala/tools/util/PathResolver.scala154
-rw-r--r--src/compiler/scala/tools/util/SocketServer.scala3
-rw-r--r--src/compiler/scala/tools/util/VerifyClass.scala2
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala32
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala8
-rw-r--r--src/eclipse/README.md3
-rw-r--r--src/eclipse/test-junit/.classpath12
-rw-r--r--src/eclipse/test-junit/.project35
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java2605
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java356
-rw-r--r--src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java20
-rw-r--r--src/interactive/scala/tools/nsc/interactive/CompilerControl.scala10
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Global.scala44
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Picklers.scala2
-rw-r--r--src/interactive/scala/tools/nsc/interactive/REPL.scala5
-rw-r--r--src/interactive/scala/tools/nsc/interactive/RangePositions.scala5
-rw-r--r--src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala3
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/Tester.scala5
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala9
-rw-r--r--src/jline/build.sbt10
-rwxr-xr-xsrc/jline/manual-test.sh5
-rw-r--r--src/jline/project/build.properties1
-rw-r--r--src/jline/project/plugins.sbt3
-rw-r--r--src/jline/project/plugins/build.sbt5
-rw-r--r--src/library/scala/Boolean.scala6
-rw-r--r--src/library/scala/Byte.scala4
-rw-r--r--src/library/scala/Char.scala4
-rw-r--r--src/library/scala/Double.scala6
-rw-r--r--src/library/scala/Float.scala4
-rw-r--r--src/library/scala/Int.scala4
-rw-r--r--src/library/scala/Long.scala4
-rw-r--r--src/library/scala/LowPriorityImplicits.scala95
-rw-r--r--src/library/scala/Option.scala4
-rw-r--r--src/library/scala/PartialFunction.scala2
-rw-r--r--src/library/scala/Predef.scala80
-rw-r--r--src/library/scala/Short.scala4
-rw-r--r--src/library/scala/StringContext.scala2
-rw-r--r--src/library/scala/Unit.scala3
-rw-r--r--src/library/scala/annotation/cloneable.scala15
-rw-r--r--src/library/scala/collection/BitSet.scala3
-rw-r--r--src/library/scala/collection/BitSetLike.scala19
-rw-r--r--src/library/scala/collection/BufferedIterator.scala3
-rw-r--r--src/library/scala/collection/CustomParallelizable.scala3
-rw-r--r--src/library/scala/collection/DefaultMap.scala3
-rw-r--r--src/library/scala/collection/GenIterable.scala3
-rw-r--r--src/library/scala/collection/GenIterableLike.scala3
-rw-r--r--src/library/scala/collection/GenIterableView.scala3
-rw-r--r--src/library/scala/collection/GenIterableViewLike.scala3
-rw-r--r--src/library/scala/collection/GenMap.scala3
-rw-r--r--src/library/scala/collection/GenMapLike.scala3
-rw-r--r--src/library/scala/collection/GenSeq.scala3
-rw-r--r--src/library/scala/collection/GenSeqLike.scala3
-rw-r--r--src/library/scala/collection/GenSeqView.scala3
-rw-r--r--src/library/scala/collection/GenSeqViewLike.scala3
-rw-r--r--src/library/scala/collection/GenSet.scala3
-rw-r--r--src/library/scala/collection/GenSetLike.scala3
-rw-r--r--src/library/scala/collection/GenTraversable.scala3
-rw-r--r--src/library/scala/collection/GenTraversableLike.scala3
-rw-r--r--src/library/scala/collection/GenTraversableOnce.scala3
-rw-r--r--src/library/scala/collection/GenTraversableView.scala3
-rw-r--r--src/library/scala/collection/GenTraversableViewLike.scala3
-rw-r--r--src/library/scala/collection/IndexedSeq.scala3
-rw-r--r--src/library/scala/collection/IndexedSeqLike.scala3
-rw-r--r--src/library/scala/collection/Iterable.scala3
-rw-r--r--src/library/scala/collection/IterableProxy.scala3
-rw-r--r--src/library/scala/collection/IterableProxyLike.scala3
-rw-r--r--src/library/scala/collection/IterableView.scala3
-rw-r--r--src/library/scala/collection/IterableViewLike.scala3
-rw-r--r--src/library/scala/collection/JavaConversions.scala48
-rwxr-xr-xsrc/library/scala/collection/JavaConverters.scala16
-rw-r--r--src/library/scala/collection/LinearSeq.scala3
-rw-r--r--src/library/scala/collection/LinearSeqLike.scala3
-rwxr-xr-xsrc/library/scala/collection/LinearSeqOptimized.scala3
-rw-r--r--src/library/scala/collection/Map.scala3
-rw-r--r--src/library/scala/collection/MapLike.scala4
-rw-r--r--src/library/scala/collection/MapProxy.scala3
-rw-r--r--src/library/scala/collection/MapProxyLike.scala3
-rw-r--r--src/library/scala/collection/Parallel.scala3
-rw-r--r--src/library/scala/collection/Parallelizable.scala3
-rw-r--r--src/library/scala/collection/Searching.scala8
-rw-r--r--src/library/scala/collection/Seq.scala3
-rw-r--r--src/library/scala/collection/SeqExtractors.scala3
-rw-r--r--src/library/scala/collection/SeqProxy.scala3
-rw-r--r--src/library/scala/collection/SeqProxyLike.scala3
-rw-r--r--src/library/scala/collection/SeqView.scala3
-rw-r--r--src/library/scala/collection/SeqViewLike.scala3
-rw-r--r--src/library/scala/collection/Set.scala4
-rw-r--r--src/library/scala/collection/SetLike.scala4
-rw-r--r--src/library/scala/collection/SetProxy.scala3
-rw-r--r--src/library/scala/collection/SetProxyLike.scala3
-rw-r--r--src/library/scala/collection/SortedMap.scala3
-rw-r--r--src/library/scala/collection/SortedMapLike.scala3
-rw-r--r--src/library/scala/collection/SortedSet.scala3
-rw-r--r--src/library/scala/collection/SortedSetLike.scala3
-rw-r--r--src/library/scala/collection/Traversable.scala3
-rw-r--r--src/library/scala/collection/TraversableLike.scala3
-rw-r--r--src/library/scala/collection/TraversableOnce.scala3
-rw-r--r--src/library/scala/collection/TraversableProxy.scala3
-rw-r--r--src/library/scala/collection/TraversableProxyLike.scala3
-rw-r--r--src/library/scala/collection/TraversableView.scala3
-rw-r--r--src/library/scala/collection/TraversableViewLike.scala3
-rw-r--r--src/library/scala/collection/concurrent/BasicNode.java11
-rw-r--r--src/library/scala/collection/concurrent/CNodeBase.java18
-rw-r--r--src/library/scala/collection/concurrent/Gen.java9
-rw-r--r--src/library/scala/collection/concurrent/INodeBase.java20
-rw-r--r--src/library/scala/collection/concurrent/MainNode.java21
-rw-r--r--src/library/scala/collection/concurrent/Map.scala3
-rw-r--r--src/library/scala/collection/concurrent/TrieMap.scala16
-rw-r--r--src/library/scala/collection/convert/DecorateAsJava.scala3
-rw-r--r--src/library/scala/collection/convert/DecorateAsScala.scala3
-rw-r--r--src/library/scala/collection/convert/Decorators.scala3
-rw-r--r--src/library/scala/collection/convert/WrapAsJava.scala3
-rw-r--r--src/library/scala/collection/convert/WrapAsScala.scala3
-rw-r--r--src/library/scala/collection/convert/Wrappers.scala3
-rw-r--r--src/library/scala/collection/convert/package.scala3
-rw-r--r--src/library/scala/collection/generic/BitOperations.scala3
-rw-r--r--src/library/scala/collection/generic/BitSetFactory.scala3
-rw-r--r--src/library/scala/collection/generic/CanBuildFrom.scala3
-rw-r--r--src/library/scala/collection/generic/CanCombineFrom.scala3
-rw-r--r--src/library/scala/collection/generic/ClassTagTraversableFactory.scala3
-rw-r--r--src/library/scala/collection/generic/Clearable.scala3
-rwxr-xr-xsrc/library/scala/collection/generic/FilterMonadic.scala5
-rw-r--r--src/library/scala/collection/generic/GenMapFactory.scala3
-rw-r--r--src/library/scala/collection/generic/GenSeqFactory.scala3
-rw-r--r--src/library/scala/collection/generic/GenSetFactory.scala3
-rw-r--r--src/library/scala/collection/generic/GenTraversableFactory.scala3
-rw-r--r--src/library/scala/collection/generic/GenericClassTagCompanion.scala3
-rw-r--r--src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala3
-rw-r--r--src/library/scala/collection/generic/GenericCompanion.scala3
-rw-r--r--src/library/scala/collection/generic/GenericOrderedCompanion.scala3
-rw-r--r--src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala3
-rw-r--r--src/library/scala/collection/generic/GenericParCompanion.scala4
-rw-r--r--src/library/scala/collection/generic/GenericParTemplate.scala4
-rw-r--r--src/library/scala/collection/generic/GenericSeqCompanion.scala3
-rw-r--r--src/library/scala/collection/generic/GenericSetTemplate.scala3
-rw-r--r--src/library/scala/collection/generic/GenericTraversableTemplate.scala57
-rw-r--r--src/library/scala/collection/generic/Growable.scala9
-rwxr-xr-xsrc/library/scala/collection/generic/HasNewBuilder.scala3
-rw-r--r--src/library/scala/collection/generic/HasNewCombiner.scala4
-rw-r--r--src/library/scala/collection/generic/ImmutableMapFactory.scala3
-rw-r--r--src/library/scala/collection/generic/ImmutableSetFactory.scala3
-rw-r--r--src/library/scala/collection/generic/ImmutableSortedMapFactory.scala3
-rw-r--r--src/library/scala/collection/generic/ImmutableSortedSetFactory.scala3
-rw-r--r--src/library/scala/collection/generic/IndexedSeqFactory.scala3
-rw-r--r--src/library/scala/collection/generic/IsSeqLike.scala5
-rw-r--r--src/library/scala/collection/generic/IsTraversableLike.scala3
-rw-r--r--src/library/scala/collection/generic/IsTraversableOnce.scala3
-rw-r--r--src/library/scala/collection/generic/IterableForwarder.scala4
-rw-r--r--src/library/scala/collection/generic/MapFactory.scala3
-rw-r--r--src/library/scala/collection/generic/MutableMapFactory.scala3
-rw-r--r--src/library/scala/collection/generic/MutableSetFactory.scala3
-rw-r--r--src/library/scala/collection/generic/MutableSortedSetFactory.scala3
-rw-r--r--src/library/scala/collection/generic/OrderedTraversableFactory.scala3
-rw-r--r--src/library/scala/collection/generic/ParFactory.scala4
-rw-r--r--src/library/scala/collection/generic/ParMapFactory.scala4
-rw-r--r--src/library/scala/collection/generic/ParSetFactory.scala4
-rw-r--r--src/library/scala/collection/generic/SeqFactory.scala3
-rw-r--r--src/library/scala/collection/generic/SeqForwarder.scala4
-rw-r--r--src/library/scala/collection/generic/SetFactory.scala3
-rw-r--r--src/library/scala/collection/generic/Shrinkable.scala3
-rw-r--r--src/library/scala/collection/generic/Signalling.scala4
-rw-r--r--src/library/scala/collection/generic/Sizing.scala4
-rw-r--r--src/library/scala/collection/generic/SliceInterval.scala3
-rw-r--r--src/library/scala/collection/generic/Sorted.scala3
-rw-r--r--src/library/scala/collection/generic/SortedMapFactory.scala3
-rw-r--r--src/library/scala/collection/generic/SortedSetFactory.scala3
-rw-r--r--src/library/scala/collection/generic/Subtractable.scala3
-rw-r--r--src/library/scala/collection/generic/TraversableFactory.scala3
-rw-r--r--src/library/scala/collection/generic/TraversableForwarder.scala4
-rw-r--r--src/library/scala/collection/generic/package.scala3
-rw-r--r--src/library/scala/collection/immutable/BitSet.scala3
-rwxr-xr-xsrc/library/scala/collection/immutable/DefaultMap.scala3
-rw-r--r--src/library/scala/collection/immutable/HashMap.scala4
-rw-r--r--src/library/scala/collection/immutable/IndexedSeq.scala3
-rw-r--r--src/library/scala/collection/immutable/Iterable.scala3
-rw-r--r--src/library/scala/collection/immutable/LinearSeq.scala3
-rw-r--r--src/library/scala/collection/immutable/List.scala6
-rw-r--r--src/library/scala/collection/immutable/ListMap.scala42
-rw-r--r--src/library/scala/collection/immutable/ListSet.scala3
-rw-r--r--src/library/scala/collection/immutable/Map.scala3
-rw-r--r--src/library/scala/collection/immutable/MapLike.scala3
-rw-r--r--src/library/scala/collection/immutable/MapProxy.scala3
-rw-r--r--src/library/scala/collection/immutable/NumericRange.scala3
-rw-r--r--src/library/scala/collection/immutable/PagedSeq.scala3
-rw-r--r--src/library/scala/collection/immutable/Queue.scala3
-rw-r--r--src/library/scala/collection/immutable/Range.scala10
-rw-r--r--src/library/scala/collection/immutable/Seq.scala3
-rw-r--r--src/library/scala/collection/immutable/Set.scala3
-rw-r--r--src/library/scala/collection/immutable/SetProxy.scala3
-rw-r--r--src/library/scala/collection/immutable/SortedMap.scala3
-rw-r--r--src/library/scala/collection/immutable/SortedSet.scala3
-rw-r--r--src/library/scala/collection/immutable/Stack.scala3
-rw-r--r--src/library/scala/collection/immutable/Stream.scala11
-rw-r--r--src/library/scala/collection/immutable/StreamView.scala3
-rw-r--r--src/library/scala/collection/immutable/StreamViewLike.scala3
-rw-r--r--src/library/scala/collection/immutable/StringLike.scala24
-rw-r--r--src/library/scala/collection/immutable/StringOps.scala3
-rw-r--r--src/library/scala/collection/immutable/Traversable.scala3
-rw-r--r--src/library/scala/collection/immutable/TreeMap.scala3
-rw-r--r--src/library/scala/collection/immutable/TreeSet.scala3
-rw-r--r--src/library/scala/collection/immutable/TrieIterator.scala3
-rw-r--r--src/library/scala/collection/immutable/WrappedString.scala3
-rw-r--r--src/library/scala/collection/mutable/AVLTree.scala1
-rw-r--r--src/library/scala/collection/mutable/ArrayBuffer.scala3
-rw-r--r--src/library/scala/collection/mutable/ArrayBuilder.scala3
-rw-r--r--src/library/scala/collection/mutable/ArrayLike.scala3
-rw-r--r--src/library/scala/collection/mutable/ArraySeq.scala3
-rw-r--r--src/library/scala/collection/mutable/BitSet.scala8
-rw-r--r--src/library/scala/collection/mutable/Buffer.scala3
-rw-r--r--src/library/scala/collection/mutable/BufferLike.scala3
-rw-r--r--src/library/scala/collection/mutable/BufferProxy.scala3
-rw-r--r--src/library/scala/collection/mutable/Cloneable.scala3
-rw-r--r--src/library/scala/collection/mutable/DefaultEntry.scala3
-rw-r--r--src/library/scala/collection/mutable/DefaultMapModel.scala3
-rw-r--r--src/library/scala/collection/mutable/DoubleLinkedList.scala3
-rw-r--r--src/library/scala/collection/mutable/DoubleLinkedListLike.scala3
-rw-r--r--src/library/scala/collection/mutable/FlatHashTable.scala5
-rw-r--r--src/library/scala/collection/mutable/GrowingBuilder.scala3
-rw-r--r--src/library/scala/collection/mutable/HashEntry.scala3
-rw-r--r--src/library/scala/collection/mutable/HashMap.scala5
-rw-r--r--src/library/scala/collection/mutable/HashSet.scala3
-rw-r--r--src/library/scala/collection/mutable/HashTable.scala3
-rw-r--r--src/library/scala/collection/mutable/History.scala3
-rw-r--r--src/library/scala/collection/mutable/ImmutableMapAdaptor.scala3
-rw-r--r--src/library/scala/collection/mutable/ImmutableSetAdaptor.scala3
-rw-r--r--src/library/scala/collection/mutable/IndexedSeq.scala3
-rw-r--r--src/library/scala/collection/mutable/IndexedSeqLike.scala3
-rwxr-xr-xsrc/library/scala/collection/mutable/IndexedSeqOptimized.scala3
-rw-r--r--src/library/scala/collection/mutable/IndexedSeqView.scala3
-rw-r--r--src/library/scala/collection/mutable/Iterable.scala3
-rw-r--r--src/library/scala/collection/mutable/LazyBuilder.scala3
-rw-r--r--src/library/scala/collection/mutable/LinearSeq.scala3
-rw-r--r--src/library/scala/collection/mutable/LinkedEntry.scala3
-rw-r--r--src/library/scala/collection/mutable/LinkedHashMap.scala3
-rw-r--r--src/library/scala/collection/mutable/LinkedHashSet.scala3
-rw-r--r--src/library/scala/collection/mutable/LinkedList.scala3
-rw-r--r--src/library/scala/collection/mutable/LinkedListLike.scala3
-rw-r--r--src/library/scala/collection/mutable/ListBuffer.scala5
-rw-r--r--src/library/scala/collection/mutable/ListMap.scala3
-rw-r--r--src/library/scala/collection/mutable/Map.scala5
-rw-r--r--src/library/scala/collection/mutable/MapBuilder.scala3
-rw-r--r--src/library/scala/collection/mutable/MapLike.scala5
-rw-r--r--src/library/scala/collection/mutable/MapProxy.scala3
-rw-r--r--src/library/scala/collection/mutable/MultiMap.scala3
-rw-r--r--src/library/scala/collection/mutable/MutableList.scala3
-rw-r--r--src/library/scala/collection/mutable/ObservableBuffer.scala3
-rw-r--r--src/library/scala/collection/mutable/ObservableMap.scala3
-rw-r--r--src/library/scala/collection/mutable/ObservableSet.scala3
-rw-r--r--src/library/scala/collection/mutable/PriorityQueue.scala5
-rw-r--r--src/library/scala/collection/mutable/PriorityQueueProxy.scala3
-rw-r--r--src/library/scala/collection/mutable/Publisher.scala3
-rw-r--r--src/library/scala/collection/mutable/Queue.scala3
-rw-r--r--src/library/scala/collection/mutable/QueueProxy.scala3
-rw-r--r--src/library/scala/collection/mutable/RevertibleHistory.scala3
-rw-r--r--src/library/scala/collection/mutable/Seq.scala3
-rw-r--r--src/library/scala/collection/mutable/SeqLike.scala3
-rw-r--r--src/library/scala/collection/mutable/Set.scala3
-rw-r--r--src/library/scala/collection/mutable/SetBuilder.scala3
-rw-r--r--src/library/scala/collection/mutable/SetLike.scala3
-rw-r--r--src/library/scala/collection/mutable/SetProxy.scala3
-rw-r--r--src/library/scala/collection/mutable/SortedSet.scala3
-rw-r--r--src/library/scala/collection/mutable/Stack.scala3
-rw-r--r--src/library/scala/collection/mutable/StackProxy.scala3
-rw-r--r--src/library/scala/collection/mutable/StringBuilder.scala11
-rw-r--r--src/library/scala/collection/mutable/Subscriber.scala3
-rw-r--r--src/library/scala/collection/mutable/SynchronizedBuffer.scala3
-rw-r--r--src/library/scala/collection/mutable/SynchronizedMap.scala5
-rw-r--r--src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala3
-rw-r--r--src/library/scala/collection/mutable/SynchronizedQueue.scala3
-rw-r--r--src/library/scala/collection/mutable/SynchronizedSet.scala3
-rw-r--r--src/library/scala/collection/mutable/SynchronizedStack.scala3
-rw-r--r--src/library/scala/collection/mutable/Traversable.scala3
-rw-r--r--src/library/scala/collection/mutable/TreeSet.scala3
-rw-r--r--src/library/scala/collection/mutable/Undoable.scala3
-rw-r--r--src/library/scala/collection/mutable/UnrolledBuffer.scala3
-rw-r--r--src/library/scala/collection/mutable/WeakHashMap.scala3
-rw-r--r--src/library/scala/collection/mutable/WrappedArray.scala3
-rw-r--r--src/library/scala/collection/mutable/WrappedArrayBuilder.scala3
-rw-r--r--src/library/scala/collection/parallel/Combiner.scala15
-rw-r--r--src/library/scala/collection/parallel/ParIterable.scala3
-rw-r--r--src/library/scala/collection/parallel/ParIterableLike.scala7
-rw-r--r--src/library/scala/collection/parallel/ParIterableView.scala3
-rw-r--r--src/library/scala/collection/parallel/ParIterableViewLike.scala3
-rw-r--r--src/library/scala/collection/parallel/ParMap.scala3
-rw-r--r--src/library/scala/collection/parallel/ParMapLike.scala3
-rw-r--r--src/library/scala/collection/parallel/ParSeq.scala29
-rw-r--r--src/library/scala/collection/parallel/ParSeqLike.scala3
-rw-r--r--src/library/scala/collection/parallel/ParSeqView.scala3
-rw-r--r--src/library/scala/collection/parallel/ParSeqViewLike.scala3
-rw-r--r--src/library/scala/collection/parallel/ParSet.scala50
-rw-r--r--src/library/scala/collection/parallel/ParSetLike.scala3
-rw-r--r--src/library/scala/collection/parallel/PreciseSplitter.scala3
-rw-r--r--src/library/scala/collection/parallel/RemainsIterator.scala45
-rw-r--r--src/library/scala/collection/parallel/Splitter.scala3
-rw-r--r--src/library/scala/collection/parallel/TaskSupport.scala3
-rw-r--r--src/library/scala/collection/parallel/Tasks.scala15
-rw-r--r--src/library/scala/collection/parallel/immutable/ParHashMap.scala3
-rw-r--r--src/library/scala/collection/parallel/immutable/ParHashSet.scala3
-rw-r--r--src/library/scala/collection/parallel/immutable/ParIterable.scala28
-rw-r--r--src/library/scala/collection/parallel/immutable/ParMap.scala3
-rw-r--r--src/library/scala/collection/parallel/immutable/ParRange.scala5
-rw-r--r--src/library/scala/collection/parallel/immutable/ParSeq.scala3
-rw-r--r--src/library/scala/collection/parallel/immutable/ParSet.scala3
-rw-r--r--src/library/scala/collection/parallel/immutable/ParVector.scala3
-rw-r--r--src/library/scala/collection/parallel/immutable/package.scala3
-rw-r--r--src/library/scala/collection/parallel/mutable/LazyCombiner.scala3
-rw-r--r--src/library/scala/collection/parallel/mutable/ParArray.scala28
-rw-r--r--src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala3
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashMap.scala3
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashSet.scala3
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashTable.scala3
-rw-r--r--src/library/scala/collection/parallel/mutable/ParIterable.scala28
-rw-r--r--src/library/scala/collection/parallel/mutable/ParMap.scala40
-rw-r--r--src/library/scala/collection/parallel/mutable/ParMapLike.scala3
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSeq.scala20
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSet.scala7
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSetLike.scala3
-rw-r--r--src/library/scala/collection/parallel/mutable/ParTrieMap.scala3
-rw-r--r--src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala3
-rw-r--r--src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala3
-rw-r--r--src/library/scala/collection/parallel/mutable/package.scala3
-rw-r--r--src/library/scala/collection/parallel/package.scala7
-rw-r--r--src/library/scala/collection/script/Location.scala3
-rw-r--r--src/library/scala/collection/script/Message.scala3
-rw-r--r--src/library/scala/collection/script/Scriptable.scala3
-rw-r--r--src/library/scala/compat/Platform.scala5
-rw-r--r--src/library/scala/concurrent/ExecutionContext.scala4
-rw-r--r--src/library/scala/concurrent/Future.scala50
-rw-r--r--src/library/scala/concurrent/FutureTaskRunner.scala2
-rw-r--r--src/library/scala/concurrent/ManagedBlocker.scala2
-rw-r--r--src/library/scala/concurrent/Promise.scala6
-rw-r--r--src/library/scala/concurrent/SyncVar.scala2
-rw-r--r--src/library/scala/concurrent/TaskRunner.scala2
-rw-r--r--src/library/scala/concurrent/ThreadPoolRunner.scala2
-rw-r--r--src/library/scala/concurrent/duration/Duration.scala41
-rw-r--r--src/library/scala/concurrent/duration/package.scala14
-rw-r--r--src/library/scala/concurrent/impl/ExecutionContextImpl.scala32
-rw-r--r--src/library/scala/concurrent/impl/Future.scala2
-rw-r--r--src/library/scala/concurrent/impl/Promise.scala85
-rw-r--r--src/library/scala/concurrent/package.scala4
-rw-r--r--src/library/scala/deprecatedInheritance.scala3
-rw-r--r--src/library/scala/io/BufferedSource.scala2
-rw-r--r--src/library/scala/io/Codec.scala4
-rw-r--r--src/library/scala/io/Position.scala3
-rw-r--r--src/library/scala/io/Source.scala3
-rw-r--r--src/library/scala/math/BigDecimal.scala21
-rw-r--r--src/library/scala/math/BigInt.scala7
-rw-r--r--src/library/scala/math/Equiv.scala3
-rw-r--r--src/library/scala/math/Fractional.scala3
-rw-r--r--src/library/scala/math/Integral.scala3
-rw-r--r--src/library/scala/math/Numeric.scala21
-rw-r--r--src/library/scala/math/Ordered.scala3
-rw-r--r--src/library/scala/math/PartialOrdering.scala3
-rw-r--r--src/library/scala/math/PartiallyOrdered.scala3
-rw-r--r--src/library/scala/math/ScalaNumber.java2
-rw-r--r--src/library/scala/math/ScalaNumericConversions.scala3
-rw-r--r--src/library/scala/math/package.scala36
-rw-r--r--src/library/scala/reflect/ClassManifestDeprecatedApis.scala3
-rw-r--r--src/library/scala/reflect/Manifest.scala3
-rwxr-xr-xsrc/library/scala/reflect/NameTransformer.scala8
-rw-r--r--src/library/scala/reflect/NoManifest.scala3
-rw-r--r--src/library/scala/reflect/OptManifest.scala3
-rw-r--r--src/library/scala/reflect/package.scala16
-rw-r--r--src/library/scala/runtime/AbstractPartialFunction.scala5
-rw-r--r--src/library/scala/runtime/BooleanRef.java3
-rw-r--r--src/library/scala/runtime/Boxed.scala4
-rw-r--r--src/library/scala/runtime/ByteRef.java3
-rw-r--r--src/library/scala/runtime/CharRef.java3
-rw-r--r--src/library/scala/runtime/DoubleRef.java3
-rw-r--r--src/library/scala/runtime/FloatRef.java3
-rw-r--r--src/library/scala/runtime/IntRef.java3
-rw-r--r--src/library/scala/runtime/LongRef.java3
-rw-r--r--src/library/scala/runtime/MethodCache.scala4
-rw-r--r--src/library/scala/runtime/NonLocalReturnControl.scala4
-rw-r--r--src/library/scala/runtime/Nothing$.scala4
-rw-r--r--src/library/scala/runtime/Null$.scala4
-rw-r--r--src/library/scala/runtime/ObjectRef.java3
-rw-r--r--src/library/scala/runtime/RichBoolean.scala4
-rw-r--r--src/library/scala/runtime/RichByte.scala4
-rw-r--r--src/library/scala/runtime/RichChar.scala4
-rw-r--r--src/library/scala/runtime/RichException.scala3
-rw-r--r--src/library/scala/runtime/RichFloat.scala8
-rw-r--r--src/library/scala/runtime/RichInt.scala4
-rw-r--r--src/library/scala/runtime/RichLong.scala4
-rw-r--r--src/library/scala/runtime/RichShort.scala4
-rw-r--r--src/library/scala/runtime/ScalaNumberProxy.scala3
-rw-r--r--src/library/scala/runtime/ScalaRunTime.scala21
-rw-r--r--src/library/scala/runtime/ShortRef.java3
-rw-r--r--src/library/scala/runtime/StringAdd.scala4
-rw-r--r--src/library/scala/runtime/StringFormat.scala4
-rw-r--r--src/library/scala/runtime/Tuple2Zipped.scala4
-rw-r--r--src/library/scala/runtime/Tuple3Zipped.scala4
-rwxr-xr-xsrc/library/scala/runtime/VolatileBooleanRef.java3
-rwxr-xr-xsrc/library/scala/runtime/VolatileByteRef.java3
-rwxr-xr-xsrc/library/scala/runtime/VolatileCharRef.java3
-rwxr-xr-xsrc/library/scala/runtime/VolatileDoubleRef.java3
-rwxr-xr-xsrc/library/scala/runtime/VolatileFloatRef.java3
-rwxr-xr-xsrc/library/scala/runtime/VolatileIntRef.java3
-rwxr-xr-xsrc/library/scala/runtime/VolatileLongRef.java3
-rwxr-xr-xsrc/library/scala/runtime/VolatileObjectRef.java3
-rwxr-xr-xsrc/library/scala/runtime/VolatileShortRef.java3
-rw-r--r--src/library/scala/runtime/WorksheetSupport.scala2
-rw-r--r--src/library/scala/sys/BooleanProp.scala3
-rw-r--r--src/library/scala/sys/PropImpl.scala3
-rw-r--r--src/library/scala/sys/ShutdownHookThread.scala3
-rw-r--r--src/library/scala/sys/SystemProperties.scala3
-rw-r--r--src/library/scala/sys/process/BasicIO.scala31
-rw-r--r--src/library/scala/sys/process/Process.scala3
-rw-r--r--src/library/scala/sys/process/ProcessBuilder.scala3
-rw-r--r--src/library/scala/sys/process/ProcessBuilderImpl.scala3
-rw-r--r--src/library/scala/sys/process/ProcessIO.scala3
-rw-r--r--src/library/scala/sys/process/ProcessImpl.scala7
-rw-r--r--src/library/scala/sys/process/ProcessLogger.scala3
-rw-r--r--src/library/scala/util/DynamicVariable.scala3
-rw-r--r--src/library/scala/util/Either.scala3
-rw-r--r--src/library/scala/util/MurmurHash.scala6
-rw-r--r--src/library/scala/util/Properties.scala24
-rw-r--r--src/library/scala/util/Random.scala3
-rw-r--r--src/library/scala/util/Try.scala3
-rw-r--r--src/library/scala/util/control/Breaks.scala3
-rw-r--r--src/library/scala/util/control/ControlThrowable.scala3
-rw-r--r--src/library/scala/util/control/Exception.scala3
-rw-r--r--src/library/scala/util/control/NonFatal.scala3
-rw-r--r--src/library/scala/util/control/TailCalls.scala3
-rw-r--r--src/library/scala/util/hashing/ByteswapHashing.scala13
-rw-r--r--src/library/scala/util/hashing/Hashing.scala3
-rw-r--r--src/library/scala/util/hashing/MurmurHash3.scala5
-rw-r--r--src/library/scala/util/hashing/package.scala9
-rw-r--r--src/library/scala/util/logging/ConsoleLogger.scala26
-rw-r--r--src/library/scala/util/logging/Logged.scala33
-rw-r--r--src/library/scala/util/matching/Regex.scala17
-rw-r--r--src/library/scala/util/parsing/ast/AbstractSyntax.scala32
-rw-r--r--src/library/scala/util/parsing/ast/Binders.scala347
-rw-r--r--src/library/scala/util/parsing/combinator/ImplicitConversions.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/JavaTokenParsers.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/PackratParsers.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/Parsers.scala9
-rw-r--r--src/library/scala/util/parsing/combinator/RegexParsers.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/lexical/Lexical.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/lexical/Scanners.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/lexical/StdLexical.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/testing/RegexTest.scala27
-rw-r--r--src/library/scala/util/parsing/combinator/testing/Tester.scala44
-rw-r--r--src/library/scala/util/parsing/combinator/token/StdTokens.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/token/Tokens.scala3
-rw-r--r--src/library/scala/util/parsing/input/CharArrayReader.scala3
-rw-r--r--src/library/scala/util/parsing/input/CharSequenceReader.scala3
-rw-r--r--src/library/scala/util/parsing/input/NoPosition.scala3
-rw-r--r--src/library/scala/util/parsing/input/OffsetPosition.scala3
-rw-r--r--src/library/scala/util/parsing/input/PagedSeqReader.scala3
-rw-r--r--src/library/scala/util/parsing/input/Position.scala3
-rw-r--r--src/library/scala/util/parsing/input/Positional.scala3
-rw-r--r--src/library/scala/util/parsing/input/Reader.scala3
-rw-r--r--src/library/scala/util/parsing/input/StreamReader.scala5
-rw-r--r--src/library/scala/util/parsing/json/JSON.scala4
-rw-r--r--src/library/scala/util/parsing/json/Lexer.scala4
-rw-r--r--src/library/scala/util/parsing/json/Parser.scala10
-rw-r--r--src/library/scala/xml/Atom.scala3
-rw-r--r--src/library/scala/xml/Attribute.scala3
-rw-r--r--src/library/scala/xml/Comment.scala3
-rw-r--r--src/library/scala/xml/Document.scala3
-rwxr-xr-xsrc/library/scala/xml/Elem.scala3
-rw-r--r--src/library/scala/xml/EntityRef.scala3
-rw-r--r--src/library/scala/xml/Equality.scala3
-rw-r--r--src/library/scala/xml/Group.scala3
-rw-r--r--src/library/scala/xml/MalformedAttributeException.scala3
-rw-r--r--src/library/scala/xml/MetaData.scala3
-rw-r--r--src/library/scala/xml/NamespaceBinding.scala3
-rwxr-xr-xsrc/library/scala/xml/Node.scala3
-rw-r--r--src/library/scala/xml/NodeBuffer.scala3
-rw-r--r--src/library/scala/xml/NodeSeq.scala3
-rw-r--r--src/library/scala/xml/Null.scala3
-rw-r--r--src/library/scala/xml/PCData.scala3
-rw-r--r--src/library/scala/xml/PrefixedAttribute.scala3
-rwxr-xr-xsrc/library/scala/xml/PrettyPrinter.scala3
-rw-r--r--src/library/scala/xml/ProcInstr.scala3
-rw-r--r--src/library/scala/xml/QNode.scala3
-rw-r--r--src/library/scala/xml/SpecialNode.scala3
-rw-r--r--src/library/scala/xml/Text.scala3
-rw-r--r--src/library/scala/xml/TextBuffer.scala3
-rw-r--r--src/library/scala/xml/TopScope.scala3
-rw-r--r--src/library/scala/xml/TypeSymbol.scala3
-rw-r--r--src/library/scala/xml/Unparsed.scala3
-rw-r--r--src/library/scala/xml/UnprefixedAttribute.scala3
-rwxr-xr-xsrc/library/scala/xml/Utility.scala3
-rwxr-xr-xsrc/library/scala/xml/XML.scala3
-rw-r--r--src/library/scala/xml/Xhtml.scala3
-rw-r--r--src/library/scala/xml/dtd/ContentModel.scala5
-rw-r--r--src/library/scala/xml/dtd/ContentModelParser.scala3
-rw-r--r--src/library/scala/xml/dtd/DTD.scala3
-rw-r--r--src/library/scala/xml/dtd/Decl.scala3
-rw-r--r--src/library/scala/xml/dtd/DocType.scala3
-rw-r--r--src/library/scala/xml/dtd/ElementValidator.scala3
-rw-r--r--src/library/scala/xml/dtd/ExternalID.scala3
-rw-r--r--src/library/scala/xml/dtd/Scanner.scala3
-rw-r--r--src/library/scala/xml/dtd/Tokens.scala3
-rw-r--r--src/library/scala/xml/dtd/ValidationException.scala3
-rw-r--r--src/library/scala/xml/dtd/impl/Base.scala3
-rw-r--r--src/library/scala/xml/dtd/impl/BaseBerrySethi.scala3
-rw-r--r--src/library/scala/xml/dtd/impl/DetWordAutom.scala3
-rw-r--r--src/library/scala/xml/dtd/impl/Inclusion.scala3
-rw-r--r--src/library/scala/xml/dtd/impl/NondetWordAutom.scala5
-rw-r--r--src/library/scala/xml/dtd/impl/PointedHedgeExp.scala3
-rw-r--r--src/library/scala/xml/dtd/impl/SubsetConstruction.scala3
-rw-r--r--src/library/scala/xml/dtd/impl/SyntaxError.scala3
-rw-r--r--src/library/scala/xml/dtd/impl/WordBerrySethi.scala3
-rw-r--r--src/library/scala/xml/dtd/impl/WordExp.scala3
-rwxr-xr-xsrc/library/scala/xml/factory/Binder.scala3
-rw-r--r--src/library/scala/xml/factory/LoggedNodeFactory.scala13
-rw-r--r--src/library/scala/xml/factory/NodeFactory.scala3
-rw-r--r--src/library/scala/xml/factory/XMLLoader.scala3
-rw-r--r--src/library/scala/xml/include/CircularIncludeException.scala3
-rw-r--r--src/library/scala/xml/include/UnavailableResourceException.scala3
-rw-r--r--src/library/scala/xml/include/XIncludeException.scala3
-rw-r--r--src/library/scala/xml/include/sax/EncodingHeuristics.scala3
-rw-r--r--src/library/scala/xml/include/sax/XIncludeFilter.scala3
-rw-r--r--src/library/scala/xml/include/sax/XIncluder.scala5
-rwxr-xr-xsrc/library/scala/xml/parsing/ConstructingHandler.scala3
-rw-r--r--src/library/scala/xml/parsing/ConstructingParser.scala3
-rwxr-xr-xsrc/library/scala/xml/parsing/DefaultMarkupHandler.scala3
-rw-r--r--src/library/scala/xml/parsing/ExternalSources.scala3
-rw-r--r--src/library/scala/xml/parsing/FactoryAdapter.scala3
-rw-r--r--src/library/scala/xml/parsing/FatalError.scala3
-rwxr-xr-xsrc/library/scala/xml/parsing/MarkupHandler.scala11
-rwxr-xr-xsrc/library/scala/xml/parsing/MarkupParser.scala3
-rw-r--r--src/library/scala/xml/parsing/MarkupParserCommon.scala3
-rw-r--r--src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala3
-rw-r--r--src/library/scala/xml/parsing/TokenTests.scala3
-rw-r--r--src/library/scala/xml/parsing/ValidatingMarkupHandler.scala21
-rw-r--r--src/library/scala/xml/parsing/XhtmlEntities.scala3
-rw-r--r--src/library/scala/xml/parsing/XhtmlParser.scala3
-rw-r--r--src/library/scala/xml/persistent/CachedFileStorage.scala12
-rw-r--r--src/library/scala/xml/persistent/Index.scala3
-rw-r--r--src/library/scala/xml/persistent/SetStorage.scala3
-rw-r--r--src/library/scala/xml/pull/XMLEvent.scala3
-rwxr-xr-xsrc/library/scala/xml/pull/XMLEventReader.scala3
-rw-r--r--src/library/scala/xml/pull/package.scala3
-rw-r--r--src/library/scala/xml/transform/BasicTransformer.scala3
-rw-r--r--src/library/scala/xml/transform/RewriteRule.scala3
-rw-r--r--src/library/scala/xml/transform/RuleTransformer.scala5
-rw-r--r--src/partest/scala/tools/partest/BytecodeTest.scala35
-rw-r--r--src/partest/scala/tools/partest/CompilerTest.scala2
-rw-r--r--src/partest/scala/tools/partest/DirectTest.scala60
-rw-r--r--src/partest/scala/tools/partest/IcodeTest.scala4
-rw-r--r--src/partest/scala/tools/partest/PartestDefaults.scala13
-rw-r--r--src/partest/scala/tools/partest/PartestTask.scala341
-rw-r--r--src/partest/scala/tools/partest/TestKinds.scala67
-rw-r--r--src/partest/scala/tools/partest/TestState.scala65
-rw-r--r--src/partest/scala/tools/partest/nest/AntRunner.scala25
-rw-r--r--src/partest/scala/tools/partest/nest/CompileManager.scala182
-rw-r--r--src/partest/scala/tools/partest/nest/ConsoleFileManager.scala33
-rw-r--r--src/partest/scala/tools/partest/nest/ConsoleRunner.scala332
-rw-r--r--src/partest/scala/tools/partest/nest/DirectCompiler.scala105
-rw-r--r--src/partest/scala/tools/partest/nest/DirectRunner.scala62
-rw-r--r--src/partest/scala/tools/partest/nest/FileManager.scala48
-rw-r--r--src/partest/scala/tools/partest/nest/NestUI.scala130
-rw-r--r--src/partest/scala/tools/partest/nest/PathSettings.scala17
-rw-r--r--src/partest/scala/tools/partest/nest/ReflectiveRunner.scala2
-rw-r--r--src/partest/scala/tools/partest/nest/Runner.scala901
-rw-r--r--src/partest/scala/tools/partest/nest/RunnerManager.scala764
-rw-r--r--src/partest/scala/tools/partest/nest/SBTRunner.scala29
-rw-r--r--src/partest/scala/tools/partest/nest/StreamCapture.scala53
-rw-r--r--src/partest/scala/tools/partest/nest/TestFile.scala80
-rw-r--r--src/partest/scala/tools/partest/package.scala177
-rw-r--r--src/partest/scala/tools/partest/utils/Properties.scala1
-rw-r--r--src/reflect/scala/reflect/api/Annotations.scala34
-rw-r--r--src/reflect/scala/reflect/api/BuildUtils.scala3
-rw-r--r--src/reflect/scala/reflect/api/Constants.scala9
-rw-r--r--src/reflect/scala/reflect/api/Exprs.scala5
-rw-r--r--src/reflect/scala/reflect/api/FlagSets.scala9
-rw-r--r--src/reflect/scala/reflect/api/ImplicitTags.scala203
-rw-r--r--src/reflect/scala/reflect/api/Importers.scala3
-rw-r--r--src/reflect/scala/reflect/api/JavaMirrors.scala3
-rw-r--r--src/reflect/scala/reflect/api/JavaUniverse.scala3
-rw-r--r--src/reflect/scala/reflect/api/Mirror.scala9
-rw-r--r--src/reflect/scala/reflect/api/Mirrors.scala7
-rw-r--r--src/reflect/scala/reflect/api/Names.scala23
-rw-r--r--src/reflect/scala/reflect/api/Position.scala3
-rw-r--r--src/reflect/scala/reflect/api/Positions.scala10
-rw-r--r--src/reflect/scala/reflect/api/Printers.scala3
-rw-r--r--src/reflect/scala/reflect/api/Scopes.scala17
-rw-r--r--src/reflect/scala/reflect/api/StandardDefinitions.scala3
-rw-r--r--src/reflect/scala/reflect/api/StandardNames.scala3
-rw-r--r--src/reflect/scala/reflect/api/Symbols.scala51
-rw-r--r--src/reflect/scala/reflect/api/TagInterop.scala3
-rw-r--r--src/reflect/scala/reflect/api/TreeCreator.scala3
-rw-r--r--src/reflect/scala/reflect/api/Trees.scala341
-rw-r--r--src/reflect/scala/reflect/api/TypeCreator.scala3
-rw-r--r--src/reflect/scala/reflect/api/TypeTags.scala38
-rw-r--r--src/reflect/scala/reflect/api/Types.scala11
-rw-r--r--src/reflect/scala/reflect/api/Universe.scala8
-rw-r--r--src/reflect/scala/reflect/api/package.scala7
-rw-r--r--src/reflect/scala/reflect/internal/AnnotationCheckers.scala3
-rw-r--r--src/reflect/scala/reflect/internal/AnnotationInfos.scala29
-rw-r--r--src/reflect/scala/reflect/internal/BaseTypeSeqs.scala14
-rw-r--r--src/reflect/scala/reflect/internal/BuildUtils.scala3
-rw-r--r--src/reflect/scala/reflect/internal/CapturedVariables.scala5
-rw-r--r--src/reflect/scala/reflect/internal/Chars.scala3
-rw-r--r--src/reflect/scala/reflect/internal/ClassfileConstants.scala4
-rw-r--r--src/reflect/scala/reflect/internal/Constants.scala27
-rw-r--r--src/reflect/scala/reflect/internal/Definitions.scala207
-rw-r--r--src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala80
-rw-r--r--src/reflect/scala/reflect/internal/FatalError.scala3
-rw-r--r--src/reflect/scala/reflect/internal/FlagSets.scala3
-rw-r--r--src/reflect/scala/reflect/internal/Flags.scala14
-rw-r--r--src/reflect/scala/reflect/internal/HasFlags.scala3
-rw-r--r--src/reflect/scala/reflect/internal/Importers.scala667
-rw-r--r--src/reflect/scala/reflect/internal/InfoTransformers.scala3
-rw-r--r--src/reflect/scala/reflect/internal/JMethodOrConstructor.scala47
-rw-r--r--src/reflect/scala/reflect/internal/JavaAccFlags.scala84
-rw-r--r--src/reflect/scala/reflect/internal/Kinds.scala179
-rw-r--r--src/reflect/scala/reflect/internal/Mirrors.scala5
-rw-r--r--src/reflect/scala/reflect/internal/MissingRequirementError.scala3
-rw-r--r--src/reflect/scala/reflect/internal/Mode.scala117
-rw-r--r--src/reflect/scala/reflect/internal/Names.scala3
-rw-r--r--src/reflect/scala/reflect/internal/Phase.scala3
-rw-r--r--src/reflect/scala/reflect/internal/Positions.scala3
-rw-r--r--src/reflect/scala/reflect/internal/Printers.scala14
-rw-r--r--src/reflect/scala/reflect/internal/PrivateWithin.scala36
-rw-r--r--src/reflect/scala/reflect/internal/Required.scala3
-rw-r--r--src/reflect/scala/reflect/internal/Scopes.scala51
-rw-r--r--src/reflect/scala/reflect/internal/StdAttachments.scala3
-rw-r--r--src/reflect/scala/reflect/internal/StdCreators.scala3
-rw-r--r--src/reflect/scala/reflect/internal/StdNames.scala14
-rw-r--r--src/reflect/scala/reflect/internal/SymbolTable.scala19
-rw-r--r--src/reflect/scala/reflect/internal/Symbols.scala174
-rw-r--r--src/reflect/scala/reflect/internal/TreeGen.scala36
-rw-r--r--src/reflect/scala/reflect/internal/TreeInfo.scala153
-rw-r--r--src/reflect/scala/reflect/internal/Trees.scala37
-rw-r--r--src/reflect/scala/reflect/internal/TypeDebugging.scala3
-rw-r--r--src/reflect/scala/reflect/internal/Types.scala223
-rw-r--r--src/reflect/scala/reflect/internal/Variance.scala3
-rw-r--r--src/reflect/scala/reflect/internal/Variances.scala3
-rw-r--r--src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala3
-rw-r--r--src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala11
-rw-r--r--src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala7
-rw-r--r--src/reflect/scala/reflect/internal/pickling/PickleFormat.scala3
-rw-r--r--src/reflect/scala/reflect/internal/pickling/UnPickler.scala6
-rw-r--r--src/reflect/scala/reflect/internal/settings/AbsSettings.scala3
-rw-r--r--src/reflect/scala/reflect/internal/settings/MutableSettings.scala3
-rw-r--r--src/reflect/scala/reflect/internal/tpe/CommonOwners.scala3
-rw-r--r--src/reflect/scala/reflect/internal/tpe/GlbLubs.scala79
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeComparers.scala342
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala7
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeMaps.scala52
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala3
-rw-r--r--src/reflect/scala/reflect/internal/transform/Erasure.scala37
-rw-r--r--src/reflect/scala/reflect/internal/transform/RefChecks.scala5
-rw-r--r--src/reflect/scala/reflect/internal/transform/Transforms.scala3
-rw-r--r--src/reflect/scala/reflect/internal/transform/UnCurry.scala14
-rw-r--r--src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala (renamed from src/compiler/scala/tools/nsc/util/AbstractFileClassLoader.scala)6
-rw-r--r--src/reflect/scala/reflect/internal/util/Collections.scala5
-rw-r--r--src/reflect/scala/reflect/internal/util/HashSet.scala4
-rw-r--r--src/reflect/scala/reflect/internal/util/Origins.scala3
-rw-r--r--src/reflect/scala/reflect/internal/util/Position.scala51
-rw-r--r--src/reflect/scala/reflect/internal/util/RangePosition.scala5
-rw-r--r--src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala (renamed from src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala)6
-rw-r--r--src/reflect/scala/reflect/internal/util/Set.scala3
-rw-r--r--src/reflect/scala/reflect/internal/util/SourceFile.scala3
-rw-r--r--src/reflect/scala/reflect/internal/util/Statistics.scala9
-rw-r--r--src/reflect/scala/reflect/internal/util/StringOps.scala3
-rw-r--r--src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala3
-rw-r--r--src/reflect/scala/reflect/internal/util/TableDef.scala3
-rw-r--r--src/reflect/scala/reflect/internal/util/ThreeValues.scala3
-rw-r--r--src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala3
-rw-r--r--src/reflect/scala/reflect/internal/util/TriState.scala26
-rw-r--r--src/reflect/scala/reflect/internal/util/WeakHashSet.scala450
-rw-r--r--src/reflect/scala/reflect/internal/util/package.scala11
-rw-r--r--src/reflect/scala/reflect/io/AbstractFile.scala9
-rw-r--r--src/reflect/scala/reflect/io/Directory.scala5
-rw-r--r--src/reflect/scala/reflect/io/File.scala9
-rw-r--r--src/reflect/scala/reflect/io/FileOperationException.scala3
-rw-r--r--src/reflect/scala/reflect/io/IOStats.scala32
-rw-r--r--src/reflect/scala/reflect/io/NoAbstractFile.scala8
-rw-r--r--src/reflect/scala/reflect/io/Path.scala36
-rw-r--r--src/reflect/scala/reflect/io/PlainFile.scala11
-rw-r--r--src/reflect/scala/reflect/io/Streamable.scala3
-rw-r--r--src/reflect/scala/reflect/io/VirtualDirectory.scala3
-rw-r--r--src/reflect/scala/reflect/io/VirtualFile.scala14
-rw-r--r--src/reflect/scala/reflect/io/ZipArchive.scala7
-rw-r--r--src/reflect/scala/reflect/macros/Aliases.scala3
-rw-r--r--src/reflect/scala/reflect/macros/Attachments.scala3
-rw-r--r--src/reflect/scala/reflect/macros/Context.scala3
-rw-r--r--src/reflect/scala/reflect/macros/Enclosures.scala15
-rw-r--r--src/reflect/scala/reflect/macros/Evals.scala5
-rw-r--r--src/reflect/scala/reflect/macros/ExprUtils.scala3
-rw-r--r--src/reflect/scala/reflect/macros/FrontEnds.scala5
-rw-r--r--src/reflect/scala/reflect/macros/Infrastructure.scala5
-rw-r--r--src/reflect/scala/reflect/macros/Macro.scala39
-rw-r--r--src/reflect/scala/reflect/macros/Names.scala3
-rw-r--r--src/reflect/scala/reflect/macros/Parsers.scala3
-rw-r--r--src/reflect/scala/reflect/macros/Reifiers.scala3
-rw-r--r--src/reflect/scala/reflect/macros/Synthetics.scala3
-rw-r--r--src/reflect/scala/reflect/macros/TreeBuilder.scala3
-rw-r--r--src/reflect/scala/reflect/macros/Typers.scala24
-rw-r--r--src/reflect/scala/reflect/macros/Universe.scala3
-rw-r--r--src/reflect/scala/reflect/macros/package.scala3
-rw-r--r--src/reflect/scala/reflect/runtime/JavaMirrors.scala223
-rw-r--r--src/reflect/scala/reflect/runtime/JavaUniverse.scala12
-rw-r--r--src/reflect/scala/reflect/runtime/ReflectSetup.scala3
-rw-r--r--src/reflect/scala/reflect/runtime/ReflectionUtils.scala18
-rw-r--r--src/reflect/scala/reflect/runtime/Settings.scala3
-rw-r--r--src/reflect/scala/reflect/runtime/SymbolLoaders.scala3
-rw-r--r--src/reflect/scala/reflect/runtime/SymbolTable.scala3
-rw-r--r--src/reflect/scala/reflect/runtime/SynchronizedOps.scala3
-rw-r--r--src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala3
-rw-r--r--src/reflect/scala/reflect/runtime/SynchronizedTypes.scala3
-rw-r--r--src/reflect/scala/reflect/runtime/TwoWayCache.scala3
-rw-r--r--src/reflect/scala/reflect/runtime/package.scala5
-rw-r--r--src/repl/scala/tools/nsc/MainGenericRunner.scala3
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ExprTyper.scala15
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ILoop.scala35
-rw-r--r--src/repl/scala/tools/nsc/interpreter/IMain.scala18
-rw-r--r--src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala6
-rw-r--r--src/repl/scala/tools/nsc/interpreter/JavapClass.scala7
-rw-r--r--src/repl/scala/tools/nsc/interpreter/LoopCommands.scala4
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Naming.scala3
-rw-r--r--src/repl/scala/tools/nsc/interpreter/SimpleReader.scala3
-rw-r--r--src/repl/scala/tools/nsc/interpreter/package.scala42
-rw-r--r--src/scalacheck/org/scalacheck/Arbitrary.scala83
-rw-r--r--src/scalacheck/org/scalacheck/Arg.scala2
-rw-r--r--src/scalacheck/org/scalacheck/Commands.scala11
-rw-r--r--src/scalacheck/org/scalacheck/ConsoleReporter.scala2
-rw-r--r--src/scalacheck/org/scalacheck/Gen.scala41
-rw-r--r--src/scalacheck/org/scalacheck/Pretty.scala4
-rw-r--r--src/scalacheck/org/scalacheck/Prop.scala81
-rw-r--r--src/scalacheck/org/scalacheck/Properties.scala30
-rw-r--r--src/scalacheck/org/scalacheck/ScalaCheckFramework.scala92
-rw-r--r--src/scalacheck/org/scalacheck/Shrink.scala2
-rw-r--r--src/scalacheck/org/scalacheck/Test.scala207
-rw-r--r--src/scalacheck/org/scalacheck/util/Buildable.scala5
-rw-r--r--src/scalacheck/org/scalacheck/util/CmdLineParser.scala4
-rw-r--r--src/scalacheck/org/scalacheck/util/FreqMap.scala2
-rw-r--r--src/scalacheck/org/scalacheck/util/StdRand.scala2
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala79
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/Settings.scala25
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala14
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala14
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala10
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/Page.scala3
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala7
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala8
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala5
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala29
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala4
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala2
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js17
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala40
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/Entity.scala4
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala3
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala7
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala165
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala26
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala13
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala34
-rw-r--r--src/scalap/scala/tools/scalap/ByteArrayReader.scala3
-rw-r--r--src/scalap/scala/tools/scalap/Classfile.scala10
-rw-r--r--src/scalap/scala/tools/scalap/CodeWriter.scala7
-rw-r--r--src/scalap/scala/tools/scalap/Main.scala3
-rw-r--r--src/scalap/scala/tools/scalap/MetaParser.scala3
-rw-r--r--src/swing/scala/swing/Action.scala7
-rw-r--r--src/swing/scala/swing/ComboBox.scala1
-rw-r--r--src/swing/scala/swing/Component.scala4
-rw-r--r--src/swing/scala/swing/GridBagPanel.scala1
-rw-r--r--src/swing/scala/swing/Orientable.scala5
-rw-r--r--src/swing/scala/swing/Oriented.scala1
-rw-r--r--src/swing/scala/swing/Slider.scala2
-rw-r--r--src/swing/scala/swing/Swing.scala1
-rw-r--r--src/swing/scala/swing/package.scala3
-rw-r--r--starr.number2
-rwxr-xr-xtest/build-partest.xml24
-rwxr-xr-xtest/files/continuations-run/basics.scala2
-rw-r--r--test/files/continuations-run/function1.scala4
-rw-r--r--test/files/continuations-run/function4.scala4
-rw-r--r--test/files/continuations-run/function5.scala4
-rw-r--r--test/files/continuations-run/function6.scala4
-rw-r--r--test/files/continuations-run/ifelse0.scala4
-rw-r--r--test/files/continuations-run/ifelse1.scala4
-rw-r--r--test/files/continuations-run/ifelse2.scala4
-rw-r--r--test/files/continuations-run/ifelse3.scala4
-rw-r--r--test/files/continuations-run/ifelse4.scala4
-rw-r--r--test/files/continuations-run/infer1.scala4
-rw-r--r--test/files/continuations-run/match0.scala4
-rw-r--r--test/files/continuations-run/match1.scala4
-rw-r--r--test/files/continuations-run/patvirt.scala2
-rw-r--r--test/files/continuations-run/t1820.scala4
-rw-r--r--test/files/continuations-run/t1821.scala4
-rw-r--r--test/files/continuations-run/t3199b.scala4
-rw-r--r--test/files/continuations-run/t3223.scala4
-rw-r--r--test/files/continuations-run/t3225.scala2
-rw-r--r--test/files/continuations-run/t5314.check6
-rw-r--r--test/files/continuations-run/t5472.check14
-rw-r--r--test/files/continuations-run/t5472.scala5
-rw-r--r--test/files/continuations-run/t5506.scala2
-rw-r--r--test/files/continuations-run/t5538.check1
-rw-r--r--test/files/continuations-run/t5538.scala6
-rw-r--r--test/files/continuations-run/trycatch0.scala6
-rw-r--r--test/files/continuations-run/trycatch1.check8
-rw-r--r--test/files/continuations-run/trycatch1.scala10
-rw-r--r--test/files/continuations-run/while0.scala4
-rw-r--r--test/files/continuations-run/while1.scala4
-rw-r--r--test/files/continuations-run/while2.scala4
-rw-r--r--test/files/filters3
-rw-r--r--test/files/jvm/actor-exceptions.scala4
-rw-r--r--test/files/jvm/actor-executor.scala5
-rw-r--r--test/files/jvm/actor-executor2.scala6
-rw-r--r--test/files/jvm/actor-executor3.scala6
-rw-r--r--test/files/jvm/actor-getstate.scala6
-rw-r--r--test/files/jvm/actor-link-getstate.scala9
-rw-r--r--test/files/jvm/actor-looping.scala3
-rw-r--r--test/files/jvm/actor-normal-exit.scala4
-rw-r--r--test/files/jvm/actor-receivewithin.scala5
-rw-r--r--test/files/jvm/actor-sync-send-timeout.scala5
-rw-r--r--test/files/jvm/actor-termination.scala3
-rw-r--r--test/files/jvm/actor-uncaught-exception.scala3
-rw-r--r--test/files/jvm/actor-uncaught-exception2.check4
-rw-r--r--test/files/jvm/actor-uncaught-exception2.scala4
-rw-r--r--test/files/jvm/annotations.scala3
-rw-r--r--test/files/jvm/daemon-actor-termination.scala4
-rw-r--r--test/files/jvm/deprecation.check3
-rw-r--r--test/files/jvm/duration-tck.scala2
-rw-r--r--test/files/jvm/future-alarm.scala4
-rw-r--r--test/files/jvm/future-awaitall-zero.scala6
-rw-r--r--test/files/jvm/future-spec/FutureTests.scala19
-rw-r--r--test/files/jvm/future-spec/main.scala10
-rw-r--r--test/files/jvm/future-termination.scala4
-rw-r--r--test/files/jvm/reactor-exceptionOnSend.scala5
-rw-r--r--test/files/jvm/reactor-producer-consumer.scala4
-rw-r--r--test/files/jvm/reactor.scala16
-rw-r--r--test/files/jvm/replyablereactor.scala5
-rw-r--r--test/files/jvm/replyablereactor2.scala5
-rw-r--r--test/files/jvm/replyablereactor3.scala5
-rw-r--r--test/files/jvm/replyablereactor4.scala5
-rw-r--r--test/files/jvm/replyreactor-react-sender.scala6
-rw-r--r--test/files/jvm/replyreactor.scala4
-rw-r--r--test/files/jvm/scala-concurrent-tck.check3
-rw-r--r--test/files/jvm/scala-concurrent-tck.scala2
-rw-r--r--test/files/jvm/scheduler-adapter.scala5
-rw-r--r--test/files/jvm/serialization.scala4
-rw-r--r--test/files/jvm/stringbuilder.scala1
-rw-r--r--test/files/jvm/t1449.scala9
-rw-r--r--test/files/jvm/t1948.scala6
-rw-r--r--test/files/jvm/t2163/t2163.scala7
-rw-r--r--test/files/jvm/t2359.scala4
-rw-r--r--test/files/jvm/t2530.scala6
-rw-r--r--test/files/jvm/t3102.scala7
-rw-r--r--test/files/jvm/t3356.scala5
-rw-r--r--test/files/jvm/t3365.scala5
-rw-r--r--test/files/jvm/t3407.scala4
-rw-r--r--test/files/jvm/t3412-channel.scala4
-rw-r--r--test/files/jvm/t3412.scala4
-rw-r--r--test/files/jvm/t3470.scala4
-rw-r--r--test/files/jvm/t3838.scala4
-rw-r--r--test/files/jvm/t560bis.scala2
-rw-r--r--test/files/jvm/t6941/Analyzed_1.flags1
-rw-r--r--test/files/jvm/t7006.check28
-rw-r--r--test/files/jvm/t7146.scala2
-rw-r--r--test/files/jvm/try-type-tests.scala3
-rw-r--r--test/files/jvm/unreachable/Foo_1.scala4
-rw-r--r--test/files/jvm/xml01.scala3
-rwxr-xr-xtest/files/jvm/xml03syntax.check1
-rw-r--r--test/files/jvm/xml03syntax.scala2
-rw-r--r--test/files/neg/abstract-explaintypes.check4
-rw-r--r--test/files/neg/choices.check2
-rw-r--r--test/files/neg/choices.flags2
-rw-r--r--test/files/neg/delayed-init-ref.check12
-rw-r--r--test/files/neg/delayed-init-ref.flags1
-rw-r--r--test/files/neg/delayed-init-ref.scala42
-rw-r--r--test/files/neg/javac-error.check10
-rw-r--r--test/files/neg/javac-error.flags1
-rw-r--r--test/files/neg/javac-error/J.java5
-rw-r--r--test/files/neg/javac-error/SUT_5.scala5
-rw-r--r--test/files/neg/macro-divergence-controlled.check4
-rw-r--r--test/files/neg/macro-divergence-controlled/Impls_Macros_1.scala23
-rw-r--r--test/files/neg/macro-divergence-controlled/Test_2.scala3
-rw-r--r--test/files/neg/macro-invalidimpl-a.check4
-rw-r--r--test/files/neg/macro-invalidimpl-a/Impls_1.scala5
-rw-r--r--test/files/neg/macro-invalidimpl-a/Macros_Test_2.scala9
-rw-r--r--test/files/neg/macro-invalidimpl-b.check4
-rw-r--r--test/files/neg/macro-invalidimpl-b/Macros_Test_2.scala9
-rw-r--r--test/files/neg/macro-invalidimpl-c.check4
-rw-r--r--test/files/neg/macro-invalidimpl-c/Impls_Macros_1.scala9
-rw-r--r--test/files/neg/macro-invalidimpl-d.check4
-rw-r--r--test/files/neg/macro-invalidimpl-d/Impls_1.scala7
-rw-r--r--test/files/neg/macro-invalidimpl-d/Macros_Test_2.scala7
-rw-r--r--test/files/neg/macro-invalidimpl-e.check13
-rw-r--r--test/files/neg/macro-invalidimpl-e/Impls_1.scala6
-rw-r--r--test/files/neg/macro-invalidimpl-e/Macros_Test_2.scala9
-rw-r--r--test/files/neg/macro-invalidimpl-f.check7
-rw-r--r--test/files/neg/macro-invalidimpl-f/Impls_1.scala11
-rw-r--r--test/files/neg/macro-invalidimpl-f/Macros_Test_2.scala9
-rw-r--r--test/files/neg/macro-invalidimpl-g.check7
-rw-r--r--test/files/neg/macro-invalidimpl-g/Impls_1.scala11
-rw-r--r--test/files/neg/macro-invalidimpl-g/Macros_Test_2.scala8
-rw-r--r--test/files/neg/macro-invalidimpl-h.check4
-rw-r--r--test/files/neg/macro-invalidimpl-h/Impls_1.scala5
-rw-r--r--test/files/neg/macro-invalidimpl-i.check4
-rw-r--r--test/files/neg/macro-invalidimpl-i/Impls_1.scala7
-rw-r--r--test/files/neg/macro-invalidimpl-i/Macros_Test_2.scala5
-rw-r--r--test/files/neg/macro-invalidimpl.check51
-rw-r--r--test/files/neg/macro-invalidimpl.flags (renamed from test/files/neg/macro-invalidimpl-a.flags)0
-rw-r--r--test/files/neg/macro-invalidimpl/Impls_1.scala40
-rw-r--r--test/files/neg/macro-invalidimpl/Macros_Test_2.scala55
-rw-r--r--test/files/neg/macro-invalidret-nontree/Impls_1.scala5
-rw-r--r--test/files/neg/macro-invalidret-nontree/Macros_Test_2.scala8
-rw-r--r--test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala6
-rw-r--r--test/files/neg/macro-invalidret-nonuniversetree/Macros_Test_2.scala8
-rw-r--r--test/files/neg/macro-invalidret.check13
-rw-r--r--test/files/neg/macro-invalidret.flags (renamed from test/files/neg/macro-invalidimpl-b.flags)0
-rw-r--r--test/files/neg/macro-invalidret/Impls_1.scala7
-rw-r--r--test/files/neg/macro-invalidret/Macros_Test_2.scala10
-rw-r--r--test/files/neg/macro-invalidshape-a.check5
-rw-r--r--test/files/neg/macro-invalidshape-a/Impls_1.scala5
-rw-r--r--test/files/neg/macro-invalidshape-a/Macros_Test_2.scala8
-rw-r--r--test/files/neg/macro-invalidshape-b.check5
-rw-r--r--test/files/neg/macro-invalidshape-b/Impls_1.scala5
-rw-r--r--test/files/neg/macro-invalidshape-b/Macros_Test_2.scala8
-rw-r--r--test/files/neg/macro-invalidshape-c.check9
-rw-r--r--test/files/neg/macro-invalidshape-c/Impls_1.scala5
-rw-r--r--test/files/neg/macro-invalidshape-c/Macros_Test_2.scala8
-rw-r--r--test/files/neg/macro-invalidshape-d.check8
-rw-r--r--test/files/neg/macro-invalidshape-d.flags1
-rw-r--r--test/files/neg/macro-invalidshape-d/Impls_1.scala5
-rw-r--r--test/files/neg/macro-invalidshape-d/Macros_Test_2.scala8
-rw-r--r--test/files/neg/macro-invalidshape.check20
-rw-r--r--test/files/neg/macro-invalidshape.flags (renamed from test/files/neg/macro-invalidimpl-c.flags)0
-rw-r--r--test/files/neg/macro-invalidshape/Impls_1.scala (renamed from test/files/neg/macro-invalidimpl-b/Impls_1.scala)0
-rw-r--r--test/files/neg/macro-invalidshape/Macros_Test_2.scala17
-rw-r--r--test/files/neg/macro-invalidsig-context-bounds/Impls_1.scala9
-rw-r--r--test/files/neg/macro-invalidsig-context-bounds/Macros_Test_1.scala8
-rw-r--r--test/files/neg/macro-invalidsig-ctx-badargc/Impls_1.scala3
-rw-r--r--test/files/neg/macro-invalidsig-ctx-badargc/Macros_Test_2.scala8
-rw-r--r--test/files/neg/macro-invalidsig-ctx-badtype/Impls_1.scala5
-rw-r--r--test/files/neg/macro-invalidsig-ctx-badtype/Macros_Test_2.scala8
-rw-r--r--test/files/neg/macro-invalidsig-ctx-badvarargs/Impls_1.scala5
-rw-r--r--test/files/neg/macro-invalidsig-ctx-badvarargs/Macros_Test_2.scala8
-rw-r--r--test/files/neg/macro-invalidsig-ctx-noctx/Impls_1.scala5
-rw-r--r--test/files/neg/macro-invalidsig-ctx-noctx/Macros_Test_2.scala8
-rw-r--r--test/files/neg/macro-invalidsig-implicit-params/Impls_Macros_1.scala19
-rw-r--r--test/files/neg/macro-invalidsig-implicit-params/Test_2.scala4
-rw-r--r--test/files/neg/macro-invalidsig-params-badargc/Impls_Macros_1.scala9
-rw-r--r--test/files/neg/macro-invalidsig-params-badargc/Test_2.scala4
-rw-r--r--test/files/neg/macro-invalidsig-params-badtype.check6
-rw-r--r--test/files/neg/macro-invalidsig-params-badtype/Impls_Macros_1.scala2
-rw-r--r--test/files/neg/macro-invalidsig-params-badtype/Test_2.scala4
-rw-r--r--test/files/neg/macro-invalidsig-params-badvarargs/Impls_Macros_1.scala9
-rw-r--r--test/files/neg/macro-invalidsig-params-badvarargs/Test_2.scala4
-rw-r--r--test/files/neg/macro-invalidsig-params-namemismatch/Impls_Macros_1.scala9
-rw-r--r--test/files/neg/macro-invalidsig-params-namemismatch/Test_2.scala4
-rw-r--r--test/files/neg/macro-invalidsig-tparams-badtype/Impls_1.scala5
-rw-r--r--test/files/neg/macro-invalidsig-tparams-badtype/Macros_Test_2.scala8
-rw-r--r--test/files/neg/macro-invalidsig-tparams-bounds-a/Impls_1.scala5
-rw-r--r--test/files/neg/macro-invalidsig-tparams-bounds-a/Macros_Test_2.scala8
-rw-r--r--test/files/neg/macro-invalidsig-tparams-bounds-b/Impls_1.scala5
-rw-r--r--test/files/neg/macro-invalidsig-tparams-bounds-b/Macros_Test_2.scala8
-rw-r--r--test/files/neg/macro-invalidsig-tparams-notparams-a/Impls_1.scala6
-rw-r--r--test/files/neg/macro-invalidsig-tparams-notparams-a/Macros_Test_2.scala8
-rw-r--r--test/files/neg/macro-invalidsig-tparams-notparams-b/Impls_1.scala11
-rw-r--r--test/files/neg/macro-invalidsig-tparams-notparams-b/Macros_Test_2.scala11
-rw-r--r--test/files/neg/macro-invalidsig-tparams-notparams-c/Impls_1.scala12
-rw-r--r--test/files/neg/macro-invalidsig-tparams-notparams-c/Macros_Test_2.scala11
-rw-r--r--test/files/neg/macro-invalidsig.check82
-rw-r--r--test/files/neg/macro-invalidsig.flags (renamed from test/files/neg/macro-invalidimpl-d.flags)0
-rw-r--r--test/files/neg/macro-invalidsig/Impls_1.scala88
-rw-r--r--test/files/neg/macro-invalidsig/Macros_Test_2.scala83
-rw-r--r--test/files/neg/macro-invalidusage-badargs.check20
-rw-r--r--test/files/neg/macro-invalidusage-badargs/Macros_Test_2.scala12
-rw-r--r--test/files/neg/macro-invalidusage-badbounds.check (renamed from test/files/neg/macro-invalidusage-badbounds-a.check)0
-rw-r--r--test/files/neg/macro-invalidusage-badbounds.flags (renamed from test/files/neg/macro-invalidimpl-e.flags)0
-rw-r--r--test/files/neg/macro-invalidusage-badbounds/Impls_1.scala (renamed from test/files/neg/macro-invalidusage-badbounds-a/Impls_1.scala)0
-rw-r--r--test/files/neg/macro-invalidusage-badbounds/Macros_Test_2.scala (renamed from test/files/neg/macro-invalidusage-badbounds-a/Macros_Test_2.scala)0
-rw-r--r--test/files/neg/macro-invalidusage-badtargs-untyped.check18
-rw-r--r--test/files/neg/macro-invalidusage-badtargs-untyped.flags (renamed from test/files/neg/macro-invalidimpl-f.flags)0
-rw-r--r--test/files/neg/macro-invalidusage-badtargs.check22
-rw-r--r--test/files/neg/macro-invalidusage-badtargs/Macros_Test_2.scala12
-rw-r--r--test/files/neg/macro-invalidusage-nontypeable.check4
-rw-r--r--test/files/neg/macro-invalidusage-nontypeable.flags (renamed from test/files/neg/macro-invalidimpl-g.flags)0
-rw-r--r--test/files/neg/macro-invalidusage-nontypeable/Impls_Macros_1.scala13
-rw-r--r--test/files/neg/macro-invalidusage-nontypeable/Test_2.scala (renamed from test/files/run/macro-def-path-dependent-b/Test_2.scala)2
-rw-r--r--test/files/neg/macro-invalidusage-presuper.check4
-rw-r--r--test/files/neg/macro-invalidusage-presuper.flags (renamed from test/files/neg/macro-invalidimpl-h.flags)0
-rw-r--r--test/files/neg/macro-invalidusage-presuper/Impls_1.scala5
-rw-r--r--test/files/neg/macro-invalidusage-presuper/Macros_Test_2.scala3
-rw-r--r--test/files/neg/macro-qmarkqmarkqmark.check13
-rw-r--r--test/files/neg/macro-qmarkqmarkqmark.scala13
-rw-r--r--test/files/neg/macro-without-xmacros-a.check6
-rw-r--r--test/files/neg/macro-without-xmacros-b.check6
-rw-r--r--test/files/neg/no-implicit-to-anyref.check8
-rw-r--r--test/files/neg/raw-types-stubs.check11
-rw-r--r--test/files/neg/raw-types-stubs/M_1.java3
-rw-r--r--test/files/neg/raw-types-stubs/Raw_2.java4
-rw-r--r--test/files/neg/raw-types-stubs/S_3.scala1
-rw-r--r--test/files/neg/stringinterpolation_macro-neg.check2
-rw-r--r--test/files/neg/t414.check2
-rw-r--r--test/files/neg/t4158.check16
-rw-r--r--test/files/neg/t4515.check4
-rw-r--r--test/files/neg/t4584.check6
-rw-r--r--test/files/neg/t4727.check8
-rw-r--r--test/files/neg/t4879.check4
-rw-r--r--test/files/neg/t5510.check6
-rw-r--r--test/files/neg/t5580b.check6
-rw-r--r--test/files/neg/t5580b.scala (renamed from test/files/pos/t5580b.scala)6
-rw-r--r--test/files/neg/t5663-badwarneq.check34
-rw-r--r--test/files/neg/t5663-badwarneq.scala18
-rw-r--r--test/files/neg/t5689.check2
-rw-r--r--test/files/neg/t5856.check2
-rw-r--r--test/files/neg/t6040.check2
-rw-r--r--test/files/neg/t6138.check7
-rw-r--r--test/files/neg/t6138.scala5
-rw-r--r--test/files/neg/t6162-inheritance.check10
-rw-r--r--test/files/neg/t6162-inheritance/defn.scala10
-rw-r--r--test/files/neg/t6162-inheritance/usage.scala10
-rw-r--r--test/files/neg/t6406-regextract.check9
-rw-r--r--test/files/neg/t6534.check7
-rw-r--r--test/files/neg/t6771b.check6
-rw-r--r--test/files/neg/t6771b.scala16
-rw-r--r--test/files/neg/t6815.check5
-rw-r--r--test/files/neg/t6815.scala17
-rw-r--r--test/files/neg/t6889.check7
-rw-r--r--test/files/neg/t6889.scala18
-rw-r--r--test/files/neg/t6952.check4
-rw-r--r--test/files/neg/t696.check10
-rw-r--r--test/files/neg/t696.scala7
-rw-r--r--test/files/neg/t7110.check6
-rw-r--r--test/files/neg/t7110.flags1
-rw-r--r--test/files/neg/t7110.scala6
-rw-r--r--test/files/neg/t7157.check73
-rw-r--r--test/files/neg/t7157/Impls_Macros_1.scala32
-rw-r--r--test/files/neg/t7157/Test_2.scala63
-rw-r--r--test/files/neg/t7289.check4
-rw-r--r--test/files/neg/t7289.scala39
-rw-r--r--test/files/neg/t7289_status_quo.check22
-rw-r--r--test/files/neg/t7289_status_quo.scala23
-rw-r--r--test/files/neg/t7324.check4
-rw-r--r--test/files/neg/t7324.scala57
-rw-r--r--test/files/neg/t7325.check19
-rw-r--r--test/files/neg/t7325.scala25
-rw-r--r--test/files/neg/t7330.check5
-rw-r--r--test/files/neg/t7330.scala5
-rw-r--r--test/files/neg/t7369.check15
-rw-r--r--test/files/neg/t7369.flags1
-rw-r--r--test/files/neg/t7369.scala43
-rw-r--r--test/files/neg/t7385.check10
-rw-r--r--test/files/neg/t7385.scala7
-rw-r--r--test/files/neg/t7388.check4
-rw-r--r--test/files/neg/t7388.scala1
-rw-r--r--test/files/neg/t7473.check7
-rw-r--r--test/files/neg/t7473.scala7
-rw-r--r--test/files/neg/t7494-after-terminal.check (renamed from test/pending/neg/plugin-after-terminal.check)0
-rw-r--r--test/files/neg/t7494-after-terminal/ThePlugin.scala (renamed from test/pending/neg/plugin-after-terminal/src/ThePlugin.scala)0
-rw-r--r--test/files/neg/t7494-after-terminal/sample_2.flags1
-rw-r--r--test/files/neg/t7494-after-terminal/sample_2.scala6
-rw-r--r--test/files/neg/t7494-after-terminal/scalac-plugin.xml (renamed from test/pending/neg/plugin-before-parser/misc/scalac-plugin.xml)2
-rw-r--r--test/files/neg/t7494-before-parser.check (renamed from test/pending/neg/plugin-before-parser.check)0
-rw-r--r--test/files/neg/t7494-before-parser/ThePlugin.scala (renamed from test/pending/neg/plugin-before-parser/src/ThePlugin.scala)0
-rw-r--r--test/files/neg/t7494-before-parser/sample_2.flags1
-rw-r--r--test/files/neg/t7494-before-parser/sample_2.scala6
-rw-r--r--test/files/neg/t7494-before-parser/scalac-plugin.xml (renamed from test/pending/neg/plugin-after-terminal/misc/scalac-plugin.xml)0
-rw-r--r--test/files/neg/t7494-cyclic-dependency.check1
-rw-r--r--test/files/neg/t7494-multi-right-after.check (renamed from test/pending/neg/plugin-multiple-rafter.check)3
-rw-r--r--test/files/neg/t7494-multi-right-after/ThePlugin.scala (renamed from test/pending/neg/plugin-multiple-rafter/src/ThePlugin.scala)0
-rw-r--r--test/files/neg/t7494-multi-right-after/sample_2.flags1
-rw-r--r--test/files/neg/t7494-multi-right-after/sample_2.scala6
-rw-r--r--test/files/neg/t7494-multi-right-after/scalac-plugin.xml (renamed from test/pending/neg/plugin-cyclic-dependency/misc/scalac-plugin.xml)2
-rw-r--r--test/files/neg/t7494-no-options.check33
-rw-r--r--test/files/neg/t7494-no-options/ploogin_1.scala31
-rw-r--r--test/files/neg/t7494-no-options/sample_2.flags1
-rw-r--r--test/files/neg/t7494-no-options/sample_2.scala6
-rw-r--r--test/files/neg/t7494-no-options/scalac-plugin.xml4
-rw-r--r--test/files/neg/t7494-right-after-before.check1
-rw-r--r--test/files/neg/t7494-right-after-before/ThePlugin.scala (renamed from test/pending/neg/plugin-rafter-before-1/src/ThePlugin.scala)0
-rw-r--r--test/files/neg/t7494-right-after-before/sample_2.flags1
-rw-r--r--test/files/neg/t7494-right-after-before/sample_2.scala6
-rw-r--r--test/files/neg/t7494-right-after-before/scalac-plugin.xml (renamed from test/pending/neg/plugin-multiple-rafter/misc/scalac-plugin.xml)2
-rw-r--r--test/files/neg/t7494-right-after-terminal.check (renamed from test/pending/neg/plugin-rightafter-terminal.check)0
-rw-r--r--test/files/neg/t7494-right-after-terminal/ThePlugin.scala (renamed from test/pending/neg/plugin-rightafter-terminal/src/ThePlugin.scala)0
-rw-r--r--test/files/neg/t7494-right-after-terminal/sample_2.flags1
-rw-r--r--test/files/neg/t7494-right-after-terminal/sample_2.scala6
-rw-r--r--test/files/neg/t7494-right-after-terminal/scalac-plugin.xml (renamed from test/pending/neg/plugin-rafter-before-1/misc/scalac-plugin.xml)2
-rw-r--r--test/files/neg/t7507.check4
-rw-r--r--test/files/neg/t7507.scala7
-rw-r--r--test/files/neg/t7509.check12
-rw-r--r--test/files/neg/t7509.scala4
-rw-r--r--test/files/neg/t7519.check7
-rw-r--r--test/files/neg/t7519.scala18
-rw-r--r--test/files/neg/tailrec-2.check2
-rw-r--r--test/files/neg/unicode-unterminated-quote.check4
-rw-r--r--test/files/neg/volatile_no_override.check5
-rw-r--r--test/files/neg/volatile_no_override.scala14
-rw-r--r--test/files/pos/lub-dealias-widen.scala34
-rw-r--r--test/files/pos/macro-qmarkqmarkqmark.check (renamed from test/files/run/macro-expand-tparams-bounds-a.check)0
-rw-r--r--test/files/pos/macro-qmarkqmarkqmark.scala7
-rwxr-xr-xtest/files/pos/spec-t6286.scala10
-rw-r--r--test/files/pos/t1648.scala4
-rw-r--r--test/files/pos/t1786.scala19
-rw-r--r--test/files/pos/t2613.scala11
-rw-r--r--test/files/pos/t3688-redux.scala8
-rw-r--r--test/files/pos/t3943/Client_2.scala7
-rw-r--r--test/files/pos/t3943/Outer_1.java14
-rw-r--r--test/files/pos/t4365/a_1.scala18
-rw-r--r--test/files/pos/t4365/b_1.scala22
-rw-r--r--test/files/pos/t5022.scala22
-rw-r--r--test/files/pos/t5459.scala (renamed from test/pending/pos/t5459.scala)0
-rw-r--r--test/files/pos/t5692c.check (renamed from test/files/run/macro-expand-tparams-bounds-b.check)0
-rw-r--r--test/files/pos/t5692c.scala4
-rw-r--r--test/files/pos/t5886.scala18
-rw-r--r--test/files/pos/t6091.flags1
-rw-r--r--test/files/pos/t6091.scala10
-rw-r--r--test/files/pos/t6162-inheritance.flags1
-rw-r--r--test/files/pos/t6162-inheritance.scala (renamed from test/files/neg/t6162-inheritance.scala)3
-rw-r--r--test/files/pos/t6386.scala5
-rw-r--r--test/files/pos/t6675.flags1
-rw-r--r--test/files/pos/t6675.scala20
-rw-r--r--test/files/pos/t6771.flags1
-rw-r--r--test/files/pos/t6771.scala9
-rw-r--r--test/files/pos/t6815.scala17
-rw-r--r--test/files/pos/t6815_import.scala16
-rw-r--r--test/files/pos/t7200b.scala50
-rw-r--r--test/files/pos/t7264/A_1.scala11
-rw-r--r--test/files/pos/t7264/B_2.scala7
-rw-r--r--test/files/pos/t7315.flags1
-rw-r--r--test/files/pos/t7315.scala4
-rw-r--r--test/files/pos/t7329.scala1
-rw-r--r--test/files/pos/t7364/BadList.java3
-rw-r--r--test/files/pos/t7364/UseIt.scala4
-rw-r--r--test/files/pos/t7364b/BadList_1.java3
-rw-r--r--test/files/pos/t7364b/UseIt_2.scala5
-rw-r--r--test/files/pos/t7369.flags1
-rw-r--r--test/files/pos/t7369.scala37
-rw-r--r--test/files/pos/t7377/Client_2.scala11
-rw-r--r--test/files/pos/t7377/Macro_1.scala7
-rw-r--r--test/files/pos/t7377b.scala13
-rw-r--r--test/files/pos/t7426.scala3
-rw-r--r--test/files/pos/t7427.flags1
-rw-r--r--test/files/pos/t7427.scala4
-rw-r--r--test/files/pos/t7461.check0
-rw-r--r--test/files/pos/t7461/Macros_1.scala13
-rw-r--r--test/files/pos/t7461/Test_2.scala3
-rw-r--r--test/files/pos/t7505.scala16
-rw-r--r--test/files/pos/t7516/A_1.scala9
-rw-r--r--test/files/pos/t7516/B_2.scala4
-rw-r--r--test/files/pos/t7517.scala22
-rw-r--r--test/files/pos/t7520.scala10
-rw-r--r--test/files/pos/t7532/A_1.java6
-rw-r--r--test/files/pos/t7532/B_2.scala5
-rw-r--r--test/files/pos/t7532b/A_1.scala7
-rw-r--r--test/files/pos/t7532b/B_2.scala8
-rw-r--r--test/files/pos/xlint1.flags1
-rw-r--r--test/files/pos/xlint1.scala13
-rw-r--r--test/files/presentation/callcc-interpreter.check134
-rwxr-xr-xtest/files/presentation/doc/doc.scala15
-rw-r--r--test/files/presentation/ide-bug-1000349.check68
-rw-r--r--test/files/presentation/ide-bug-1000475.check198
-rw-r--r--test/files/presentation/ide-bug-1000531.check244
-rw-r--r--test/files/presentation/implicit-member.check72
-rw-r--r--test/files/presentation/memory-leaks/MemoryLeaksTest.scala2
-rw-r--r--test/files/presentation/ping-pong.check152
-rw-r--r--test/files/presentation/random.check6
-rw-r--r--test/files/presentation/t1207.check53
-rw-r--r--test/files/presentation/t1207/Test.scala3
-rw-r--r--test/files/presentation/t1207/src/Completions.scala20
-rw-r--r--test/files/presentation/t5708.check82
-rw-r--r--test/files/presentation/visibility.check390
-rw-r--r--test/files/run/Course-2002-01.check3
-rw-r--r--test/files/run/Meter.check3
-rw-r--r--test/files/run/MeterCaseClass.check3
-rw-r--r--test/files/run/Predef.readLine.scala3
-rw-r--r--test/files/run/SymbolsTest.scala3
-rw-r--r--test/files/run/abstypetags_serialize.scala3
-rw-r--r--test/files/run/analyzerPlugins.check10
-rw-r--r--test/files/run/array-existential-bound.scala4
-rw-r--r--test/files/run/arrays.check6
-rw-r--r--test/files/run/bitsets.scala21
-rw-r--r--test/files/run/blame_eye_triple_eee-double.check (renamed from test/files/run/blame_eye_triple_eee.check)0
-rw-r--r--test/files/run/blame_eye_triple_eee-double.flags (renamed from test/files/run/blame_eye_triple_eee.flags)0
-rw-r--r--test/files/run/blame_eye_triple_eee-double.scala (renamed from test/files/run/blame_eye_triple_eee.scala)0
-rw-r--r--test/files/run/blame_eye_triple_eee-float.check9
-rw-r--r--test/files/run/blame_eye_triple_eee-float.flags1
-rw-r--r--test/files/run/blame_eye_triple_eee-float.scala61
-rw-r--r--test/files/run/bridges.scala2
-rw-r--r--test/files/run/bugs.scala2
-rw-r--r--test/files/run/classfile-format-51.scala8
-rw-r--r--test/files/run/classfile-format-52.check2
-rw-r--r--test/files/run/classfile-format-52.scala77
-rw-r--r--test/files/run/classmanifests_new_alias.scala4
-rw-r--r--test/files/run/classmanifests_new_core.scala3
-rw-r--r--test/files/run/collections.scala3
-rw-r--r--test/files/run/colltest1.scala6
-rw-r--r--test/files/run/comparable-comparator.scala3
-rw-r--r--test/files/run/compiler-asSeenFrom.scala5
-rw-r--r--test/files/run/concurrent-stream.scala45
-rw-r--r--test/files/run/contrib674.check3
-rw-r--r--test/files/run/contrib674.scala4
-rw-r--r--test/files/run/ctries-new/main.scala7
-rw-r--r--test/files/run/ctries-old/concmap.scala1
-rw-r--r--test/files/run/ctries-old/iterator.scala1
-rw-r--r--test/files/run/ctries-old/lnode.scala1
-rw-r--r--test/files/run/ctries-old/main.scala8
-rw-r--r--test/files/run/ctries-old/snapshot.scala1
-rw-r--r--test/files/run/delay-bad.check6
-rw-r--r--test/files/run/delay-good.check6
-rw-r--r--test/files/run/distinct.scala2
-rw-r--r--test/files/run/duration-coarsest.scala28
-rw-r--r--test/files/run/enrich-gentraversable.scala3
-rw-r--r--test/files/run/eta-expand-star2.check1
-rw-r--r--test/files/run/exceptions-2.check3
-rw-r--r--test/files/run/exceptions-2.scala12
-rw-r--r--test/files/run/exceptions-nest.scala30
-rw-r--r--test/files/run/exceptions.scala2
-rw-r--r--test/files/run/existentials-in-compiler.scala8
-rw-r--r--test/files/run/existentials.scala3
-rw-r--r--test/files/run/existentials3-new.scala3
-rw-r--r--test/files/run/existentials3-old.scala2
-rw-r--r--test/files/run/finally.scala18
-rw-r--r--test/files/run/genericValueClass.scala2
-rw-r--r--test/files/run/getClassTest-old.scala1
-rw-r--r--test/files/run/global-showdef.scala7
-rw-r--r--test/files/run/impconvtimes.scala2
-rw-r--r--test/files/run/implicits.scala2
-rw-r--r--test/files/run/indexedSeq.scala7
-rw-r--r--test/files/run/inner-obj-auto.scala130
-rw-r--r--test/files/run/interop_classtags_are_classmanifests.scala5
-rw-r--r--test/files/run/interop_manifests_are_classtags.scala3
-rw-r--r--test/files/run/interpolation.scala2
-rw-r--r--test/files/run/interpolationArgs.scala4
-rw-r--r--test/files/run/interpolationMultiline1.scala2
-rw-r--r--test/files/run/interpolationMultiline2.scala17
-rw-r--r--test/files/run/io-position.scala4
-rw-r--r--test/files/run/is-valid-num.scala3
-rw-r--r--test/files/run/issue192.scala2
-rw-r--r--test/files/run/iterator-from.scala4
-rw-r--r--test/files/run/iterator-iterate-lazy.scala2
-rw-r--r--test/files/run/iterators.scala2
-rw-r--r--test/files/run/json.scala8
-rw-r--r--test/files/run/kind-repl-command.check32
-rw-r--r--test/files/run/kind-repl-command.scala12
-rw-r--r--test/files/run/lazy-exprs.check8
-rw-r--r--test/files/run/lazy-locals.check6
-rwxr-xr-xtest/files/run/list_map.scala26
-rw-r--r--test/files/run/lists-run.scala4
-rw-r--r--test/files/run/literals.check4
-rw-r--r--test/files/run/literals.scala6
-rw-r--r--test/files/run/lub-visibility.check3
-rw-r--r--test/files/run/macro-bodyexpandstoimpl/Impls_1.scala2
-rw-r--r--test/files/run/macro-bundle.check3
-rw-r--r--test/files/run/macro-bundle.flags (renamed from test/files/neg/macro-invalidimpl-i.flags)0
-rw-r--r--test/files/run/macro-bundle/Impls_Macros_1.scala13
-rw-r--r--test/files/run/macro-bundle/Test_2.scala5
-rw-r--r--test/files/run/macro-def-infer-return-type-a.check1
-rw-r--r--test/files/run/macro-def-infer-return-type-a.flags1
-rw-r--r--test/files/run/macro-def-infer-return-type-a/Impls_1.scala5
-rw-r--r--test/files/run/macro-def-infer-return-type-a/Macros_Test_2.scala4
-rw-r--r--test/files/run/macro-def-infer-return-type-b.flags1
-rw-r--r--test/files/run/macro-def-infer-return-type-b/Impls_Macros_1.scala10
-rw-r--r--test/files/run/macro-def-infer-return-type-b/Test_2.scala8
-rw-r--r--test/files/run/macro-def-infer-return-type-c.check1
-rw-r--r--test/files/run/macro-def-infer-return-type-c.flags1
-rw-r--r--test/files/run/macro-def-infer-return-type-c/Impls_1.scala5
-rw-r--r--test/files/run/macro-def-infer-return-type-c/Macros_Test_2.scala4
-rw-r--r--test/files/run/macro-def-infer-return-type.check (renamed from test/files/run/macro-def-infer-return-type-b.check)4
-rw-r--r--test/files/run/macro-def-infer-return-type.flags (renamed from test/files/neg/macro-invalidret-nontree.flags)0
-rw-r--r--test/files/run/macro-def-infer-return-type/Impls_1.scala14
-rw-r--r--test/files/run/macro-def-infer-return-type/Macros_Test_2.scala24
-rw-r--r--test/files/run/macro-def-path-dependent-a.flags1
-rw-r--r--test/files/run/macro-def-path-dependent-b.check1
-rw-r--r--test/files/run/macro-def-path-dependent-b.flags1
-rw-r--r--test/files/run/macro-def-path-dependent-c.check1
-rw-r--r--test/files/run/macro-def-path-dependent-c.flags1
-rw-r--r--test/files/run/macro-def-path-dependent-d1.check1
-rw-r--r--test/files/run/macro-def-path-dependent-d1.flags1
-rw-r--r--test/files/run/macro-def-path-dependent-d2.check1
-rw-r--r--test/files/run/macro-def-path-dependent-d2.flags1
-rw-r--r--test/files/run/macro-def-path-dependent-d2/Test_3.scala3
-rw-r--r--test/files/run/macro-def-path-dependent.check (renamed from test/files/run/macro-declared-in-annotation.check)0
-rw-r--r--test/files/run/macro-def-path-dependent.flags (renamed from test/files/neg/macro-invalidret-nonuniversetree.flags)0
-rw-r--r--test/files/run/macro-def-path-dependent/Dummy.scala (renamed from test/files/run/macro-def-path-dependent-a/Test_2.scala)0
-rw-r--r--test/files/run/macro-def-path-dependent/Test_1.scala (renamed from test/files/run/macro-def-path-dependent-a/Impls_Macros_1.scala)2
-rw-r--r--test/files/run/macro-def-path-dependent/Test_2.scala (renamed from test/files/run/macro-def-path-dependent-b/Impls_Macros_1.scala)2
-rw-r--r--test/files/run/macro-def-path-dependent/Test_3.scala (renamed from test/files/run/macro-def-path-dependent-c/Impls_Macros_1.scala)2
-rw-r--r--test/files/run/macro-def-path-dependent/Test_4.scala (renamed from test/files/run/macro-def-path-dependent-d1/Impls_Macros_1.scala)2
-rw-r--r--test/files/run/macro-def-path-dependent/Test_5.scala (renamed from test/files/run/macro-def-path-dependent-d2/Impls_1.scala)2
-rw-r--r--test/files/run/macro-def-path-dependent/Test_6.scala (renamed from test/files/run/macro-def-path-dependent-d2/Macros_2.scala)2
-rw-r--r--test/files/run/macro-divergence-spurious.check1
-rw-r--r--test/files/run/macro-divergence-spurious/Impls_Macros_1.scala23
-rw-r--r--test/files/run/macro-divergence-spurious/Test_2.scala3
-rw-r--r--test/files/run/macro-duplicate.check3
-rw-r--r--test/files/run/macro-expand-implicit-macro-is-implicit/Macros_Test_2.scala3
-rw-r--r--test/files/run/macro-expand-implicit-macro-is-view.flags1
-rw-r--r--test/files/run/macro-expand-implicit-macro-is-view/Macros_Test_2.scala5
-rw-r--r--test/files/run/macro-expand-nullary-generic.check10
-rw-r--r--test/files/run/macro-expand-nullary-generic/Impls_1.scala12
-rw-r--r--test/files/run/macro-expand-nullary-nongeneric.check10
-rw-r--r--test/files/run/macro-expand-nullary-nongeneric/Impls_1.scala13
-rw-r--r--test/files/run/macro-expand-tparams-bounds-a.flags1
-rw-r--r--test/files/run/macro-expand-tparams-bounds-a/Macros_Test_2.scala8
-rw-r--r--test/files/run/macro-expand-tparams-bounds-b.flags1
-rw-r--r--test/files/run/macro-expand-tparams-bounds-b/Impls_1.scala7
-rw-r--r--test/files/run/macro-expand-tparams-bounds-b/Macros_Test_2.scala10
-rw-r--r--test/files/run/macro-expand-tparams-bounds.check0
-rw-r--r--test/files/run/macro-expand-tparams-bounds.flags (renamed from test/files/neg/macro-invalidshape-a.flags)0
-rw-r--r--test/files/run/macro-expand-tparams-bounds/Impls_1.scala12
-rw-r--r--test/files/run/macro-expand-tparams-bounds/Macros_Test_2.scala12
-rw-r--r--test/files/run/macro-expand-tparams-explicit.check2
-rw-r--r--test/files/run/macro-expand-tparams-implicit.check2
-rw-r--r--test/files/run/macro-expand-tparams-only-in-impl.flags1
-rw-r--r--test/files/run/macro-expand-tparams-only-in-impl/Impls_1.scala5
-rw-r--r--test/files/run/macro-expand-tparams-only-in-impl/Macros_Test_2.scala8
-rw-r--r--test/files/run/macro-expand-tparams-optional.flags1
-rw-r--r--test/files/run/macro-expand-tparams-prefix-a.check4
-rw-r--r--test/files/run/macro-expand-tparams-prefix-a.flags1
-rw-r--r--test/files/run/macro-expand-tparams-prefix-a/Impls_1.scala11
-rw-r--r--test/files/run/macro-expand-tparams-prefix-a/Macros_Test_2.scala10
-rw-r--r--test/files/run/macro-expand-tparams-prefix-b.check2
-rw-r--r--test/files/run/macro-expand-tparams-prefix-b.flags1
-rw-r--r--test/files/run/macro-expand-tparams-prefix-b/Impls_1.scala12
-rw-r--r--test/files/run/macro-expand-tparams-prefix-b/Macros_Test_2.scala10
-rw-r--r--test/files/run/macro-expand-tparams-prefix-c1.check3
-rw-r--r--test/files/run/macro-expand-tparams-prefix-c1.flags1
-rw-r--r--test/files/run/macro-expand-tparams-prefix-c1/Impls_1.scala13
-rw-r--r--test/files/run/macro-expand-tparams-prefix-c1/Macros_Test_2.scala11
-rw-r--r--test/files/run/macro-expand-tparams-prefix-c2.check3
-rw-r--r--test/files/run/macro-expand-tparams-prefix-c2.flags1
-rw-r--r--test/files/run/macro-expand-tparams-prefix-c2/Impls_Macros_1.scala19
-rw-r--r--test/files/run/macro-expand-tparams-prefix-c2/Test_2.scala5
-rw-r--r--test/files/run/macro-expand-tparams-prefix-d1.check3
-rw-r--r--test/files/run/macro-expand-tparams-prefix-d1.flags1
-rw-r--r--test/files/run/macro-expand-tparams-prefix-d1/Impls_1.scala13
-rw-r--r--test/files/run/macro-expand-tparams-prefix-d1/Macros_Test_2.scala11
-rw-r--r--test/files/run/macro-expand-tparams-prefix.check20
-rw-r--r--test/files/run/macro-expand-tparams-prefix.flags (renamed from test/files/neg/macro-invalidshape-b.flags)0
-rw-r--r--test/files/run/macro-expand-tparams-prefix/Impls_1.scala40
-rw-r--r--test/files/run/macro-expand-tparams-prefix/Macros_Test_2.scala57
-rw-r--r--test/files/run/macro-impl-relaxed.check4
-rw-r--r--test/files/run/macro-impl-relaxed/Macros_1.scala14
-rw-r--r--test/files/run/macro-impl-relaxed/Test_2.scala6
-rw-r--r--test/files/run/macro-impl-tparam-only-in-impl.flags (renamed from test/files/neg/macro-invalidshape-c.flags)0
-rw-r--r--test/files/run/macro-impl-tparam-only-in-impl/Impls_1.scala (renamed from test/files/run/macro-expand-tparams-bounds-a/Impls_1.scala)0
-rw-r--r--test/files/run/macro-impl-tparam-only-in-impl/Macros_Test_2.scala (renamed from test/files/neg/macro-invalidimpl-h/Macros_Test_2.scala)0
-rw-r--r--test/files/run/macro-impl-tparam-typetag-is-optional.check (renamed from test/files/run/macro-expand-tparams-optional.check)0
-rw-r--r--test/files/run/macro-impl-tparam-typetag-is-optional.flags (renamed from test/files/neg/macro-invalidusage-badbounds-a.flags)0
-rw-r--r--test/files/run/macro-impl-tparam-typetag-is-optional/Impls_1.scala (renamed from test/files/run/macro-expand-tparams-optional/Impls_1.scala)0
-rw-r--r--test/files/run/macro-impl-tparam-typetag-is-optional/Macros_Test_2.scala (renamed from test/files/run/macro-expand-tparams-optional/Macros_Test_2.scala)0
-rw-r--r--test/files/run/macro-invalidusage-partialapplication-with-tparams.check2
-rw-r--r--test/files/run/macro-invalidusage-partialapplication.check2
-rw-r--r--test/files/run/macro-sip19-revised/Impls_Macros_1.scala2
-rw-r--r--test/files/run/macro-sip19/Impls_Macros_1.scala2
-rw-r--r--test/files/run/macro-system-properties.check26
-rw-r--r--test/files/run/macro-system-properties.scala16
-rw-r--r--test/files/run/macro-term-declared-in-annotation.check (renamed from test/files/run/macro-def-path-dependent-a.check)0
-rw-r--r--test/files/run/macro-term-declared-in-annotation.flags (renamed from test/files/run/macro-declared-in-annotation.flags)0
-rw-r--r--test/files/run/macro-term-declared-in-annotation/Impls_1.scala (renamed from test/files/run/macro-declared-in-annotation/Impls_1.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-annotation/Macros_2.scala (renamed from test/files/run/macro-declared-in-annotation/Macros_2.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-annotation/Test_3.scala (renamed from test/files/run/macro-declared-in-annotation/Test_3.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-anonymous.check (renamed from test/files/run/macro-declared-in-anonymous.check)0
-rw-r--r--test/files/run/macro-term-declared-in-anonymous.flags (renamed from test/files/run/macro-declared-in-anonymous.flags)0
-rw-r--r--test/files/run/macro-term-declared-in-anonymous/Impls_1.scala (renamed from test/files/run/macro-declared-in-anonymous/Impls_1.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-anonymous/Macros_Test_2.scala6
-rw-r--r--test/files/run/macro-term-declared-in-block.check (renamed from test/files/run/macro-declared-in-block.check)0
-rw-r--r--test/files/run/macro-term-declared-in-block.flags (renamed from test/files/run/macro-declared-in-block.flags)0
-rw-r--r--test/files/run/macro-term-declared-in-block/Impls_1.scala (renamed from test/files/run/macro-declared-in-block/Impls_1.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-block/Macros_Test_2.scala (renamed from test/files/run/macro-declared-in-block/Macros_Test_2.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-class-class.check (renamed from test/files/run/macro-declared-in-class-class.check)0
-rw-r--r--test/files/run/macro-term-declared-in-class-class.flags (renamed from test/files/run/macro-declared-in-class-class.flags)0
-rw-r--r--test/files/run/macro-term-declared-in-class-class/Impls_1.scala (renamed from test/files/run/macro-declared-in-class-class/Impls_1.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-class-class/Macros_Test_2.scala (renamed from test/files/run/macro-declared-in-class-class/Macros_Test_2.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-class-object.check (renamed from test/files/run/macro-declared-in-class-object.check)0
-rw-r--r--test/files/run/macro-term-declared-in-class-object.flags (renamed from test/files/run/macro-declared-in-class-object.flags)0
-rw-r--r--test/files/run/macro-term-declared-in-class-object/Impls_1.scala (renamed from test/files/run/macro-declared-in-class-object/Impls_1.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-class-object/Macros_Test_2.scala (renamed from test/files/run/macro-declared-in-class-object/Macros_Test_2.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-class.check (renamed from test/files/run/macro-declared-in-class.check)0
-rw-r--r--test/files/run/macro-term-declared-in-class.flags (renamed from test/files/run/macro-declared-in-class.flags)0
-rw-r--r--test/files/run/macro-term-declared-in-class/Impls_1.scala (renamed from test/files/run/macro-declared-in-class/Impls_1.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-class/Macros_Test_2.scala (renamed from test/files/run/macro-declared-in-class/Macros_Test_2.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-default-param.check (renamed from test/files/run/macro-declared-in-default-param.check)0
-rw-r--r--test/files/run/macro-term-declared-in-default-param.flags (renamed from test/files/run/macro-declared-in-default-param.flags)0
-rw-r--r--test/files/run/macro-term-declared-in-default-param/Impls_1.scala (renamed from test/files/run/macro-declared-in-default-param/Impls_1.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-default-param/Macros_Test_2.scala (renamed from test/files/run/macro-declared-in-default-param/Macros_Test_2.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-implicit-class.check (renamed from test/files/run/macro-declared-in-implicit-class.check)0
-rw-r--r--test/files/run/macro-term-declared-in-implicit-class.flags (renamed from test/files/run/macro-declared-in-implicit-class.flags)0
-rw-r--r--test/files/run/macro-term-declared-in-implicit-class/Impls_Macros_1.scala (renamed from test/files/run/macro-declared-in-implicit-class/Impls_Macros_1.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-implicit-class/Test_2.scala (renamed from test/files/run/macro-declared-in-implicit-class/Test_2.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-method.check (renamed from test/files/run/macro-declared-in-method.check)0
-rw-r--r--test/files/run/macro-term-declared-in-method.flags (renamed from test/files/run/macro-declared-in-method.flags)0
-rw-r--r--test/files/run/macro-term-declared-in-method/Impls_1.scala (renamed from test/files/run/macro-declared-in-method/Impls_1.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-method/Macros_Test_2.scala (renamed from test/files/run/macro-declared-in-method/Macros_Test_2.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-object-class.check (renamed from test/files/run/macro-declared-in-object-class.check)0
-rw-r--r--test/files/run/macro-term-declared-in-object-class.flags (renamed from test/files/run/macro-declared-in-object-class.flags)0
-rw-r--r--test/files/run/macro-term-declared-in-object-class/Impls_1.scala (renamed from test/files/run/macro-declared-in-object-class/Impls_1.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-object-class/Macros_Test_2.scala (renamed from test/files/run/macro-declared-in-object-class/Macros_Test_2.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-object-object.check (renamed from test/files/run/macro-declared-in-object-object.check)0
-rw-r--r--test/files/run/macro-term-declared-in-object-object.flags (renamed from test/files/run/macro-declared-in-object-object.flags)0
-rw-r--r--test/files/run/macro-term-declared-in-object-object/Impls_1.scala (renamed from test/files/run/macro-declared-in-object-object/Impls_1.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-object-object/Macros_Test_2.scala (renamed from test/files/run/macro-declared-in-object-object/Macros_Test_2.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-object.check (renamed from test/files/run/macro-declared-in-object.check)0
-rw-r--r--test/files/run/macro-term-declared-in-object.flags (renamed from test/files/run/macro-declared-in-object.flags)0
-rw-r--r--test/files/run/macro-term-declared-in-object/Impls_1.scala (renamed from test/files/run/macro-declared-in-object/Impls_1.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-object/Macros_Test_2.scala (renamed from test/files/run/macro-declared-in-object/Macros_Test_2.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-package-object.check (renamed from test/files/run/macro-declared-in-package-object.check)0
-rw-r--r--test/files/run/macro-term-declared-in-package-object.flags (renamed from test/files/run/macro-declared-in-package-object.flags)0
-rw-r--r--test/files/run/macro-term-declared-in-package-object/Impls_1.scala (renamed from test/files/run/macro-declared-in-package-object/Impls_1.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-package-object/Macros_Test_2.scala (renamed from test/files/run/macro-declared-in-package-object/Macros_Test_2.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-refinement.check (renamed from test/files/run/macro-declared-in-refinement.check)0
-rw-r--r--test/files/run/macro-term-declared-in-refinement.flags (renamed from test/files/run/macro-declared-in-refinement.flags)0
-rw-r--r--test/files/run/macro-term-declared-in-refinement/Impls_1.scala (renamed from test/files/run/macro-declared-in-refinement/Impls_1.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-refinement/Macros_Test_2.scala (renamed from test/files/run/macro-declared-in-refinement/Macros_Test_2.scala)5
-rw-r--r--test/files/run/macro-term-declared-in-trait.check (renamed from test/files/run/macro-declared-in-trait.check)0
-rw-r--r--test/files/run/macro-term-declared-in-trait.flags (renamed from test/files/run/macro-declared-in-trait.flags)0
-rw-r--r--test/files/run/macro-term-declared-in-trait/Impls_1.scala (renamed from test/files/run/macro-declared-in-trait/Impls_1.scala)0
-rw-r--r--test/files/run/macro-term-declared-in-trait/Macros_Test_2.scala (renamed from test/files/run/macro-declared-in-trait/Macros_Test_2.scala)0
-rw-r--r--test/files/run/macro-toplevel-companion-b/Test_2.scala2
-rw-r--r--test/files/run/macro-toplevel-companion-c.check2
-rw-r--r--test/files/run/macro-toplevel-companion-c.scala2
-rw-r--r--test/files/run/macro-typecheck-macrosdisabled.check4
-rw-r--r--test/files/run/macro-typecheck-macrosdisabled2.check4
-rw-r--r--test/files/run/macro-undetparams-consfromsls.check6
-rw-r--r--test/files/run/macro-undetparams-macroitself.check2
-rw-r--r--test/files/run/manifests-new.scala5
-rw-r--r--test/files/run/manifests-old.scala5
-rw-r--r--test/files/run/mapConserve.scala5
-rw-r--r--test/files/run/memberpos.check11
-rw-r--r--test/files/run/memberpos.scala39
-rw-r--r--test/files/run/misc.check24
-rw-r--r--test/files/run/names-defaults.check4
-rw-r--r--test/files/run/names-defaults.scala3
-rw-r--r--test/files/run/no-pickle-skolems/Test_2.scala2
-rw-r--r--test/files/run/option-fold.scala11
-rw-r--r--test/files/run/patmat-exprs.scala4
-rw-r--r--test/files/run/patmat_unapp_abstype-new.check6
-rw-r--r--test/files/run/patmatnew.check15
-rw-r--r--test/files/run/patmatnew.scala3
-rw-r--r--test/files/run/pc-conversions.scala4
-rw-r--r--test/files/run/pf-catch.scala4
-rw-r--r--test/files/run/preinits.check6
-rw-r--r--test/files/run/primitive-sigs-2-new.scala4
-rw-r--r--test/files/run/primitive-sigs-2-old.scala2
-rw-r--r--test/files/run/private-inline.check6
-rw-r--r--test/files/run/private-inline.flags2
-rw-r--r--test/files/run/private-inline.scala2
-rw-r--r--test/files/run/programmatic-main.scala2
-rw-r--r--test/files/run/range.scala11
-rw-r--r--test/files/run/records.scala3
-rw-r--r--test/files/run/reflection-allmirrors-tostring.scala4
-rw-r--r--test/files/run/reflection-fieldmirror-getsetval.check2
-rw-r--r--test/files/run/reflection-fieldmirror-getsetval.scala12
-rw-r--r--test/files/run/reflection-implicit.scala4
-rw-r--r--test/files/run/reflection-magicsymbols-vanilla.scala2
-rw-r--r--test/files/run/reify-each-node-type.check35
-rw-r--r--test/files/run/reify-each-node-type.scala110
-rw-r--r--test/files/run/reify-repl-fail-gracefully.check2
-rw-r--r--test/files/run/reify_ann1a.check2
-rw-r--r--test/files/run/reify_ann1b.check7
-rw-r--r--test/files/run/reify_ann2a.check2
-rw-r--r--test/files/run/reify_ann3.check2
-rw-r--r--test/files/run/reify_classfileann_a.check5
-rw-r--r--test/files/run/reify_classfileann_b.check5
-rw-r--r--test/files/run/reify_extendbuiltins.scala4
-rw-r--r--test/files/run/reify_implicits-new.scala4
-rw-r--r--test/files/run/reify_implicits-old.scala4
-rw-r--r--test/files/run/reify_lazyevaluation.scala2
-rw-r--r--test/files/run/reify_lazyunit.check3
-rw-r--r--test/files/run/reify_printf.check1
-rw-r--r--test/files/run/reify_printf.scala8
-rw-r--r--test/files/run/reify_this.scala6
-rw-r--r--test/files/run/repl-backticks.scala2
-rw-r--r--test/files/run/repl-colon-type.check8
-rw-r--r--test/files/run/repl-power.check1
-rw-r--r--test/files/run/repl-term-macros.check44
-rw-r--r--test/files/run/repl-term-macros.scala20
-rw-r--r--test/files/run/resetattrs-this.scala4
-rw-r--r--test/files/run/richs.scala4
-rw-r--r--test/files/run/runtime.check6
-rw-r--r--test/files/run/runtime.scala2
-rw-r--r--test/files/run/sequenceComparisons.scala2
-rw-r--r--test/files/run/settings-parse.scala2
-rw-r--r--test/files/run/si5045.scala3
-rw-r--r--test/files/run/slices.scala3
-rw-r--r--test/files/run/spec-nlreturn.scala3
-rw-r--r--test/files/run/stream_flatmap_odds.scala2
-rw-r--r--test/files/run/stringbuilder-drop.scala4
-rw-r--r--test/files/run/stringbuilder.scala7
-rw-r--r--test/files/run/stringinterpolation_macro-run.scala3
-rw-r--r--test/files/run/structural.scala5
-rw-r--r--test/files/run/synchronized.check1
-rw-r--r--test/files/run/synchronized.flags2
-rw-r--r--test/files/run/t0325.scala2
-rw-r--r--test/files/run/t0421-old.scala2
-rw-r--r--test/files/run/t0432.scala3
-rw-r--r--test/files/run/t0528.scala2
-rw-r--r--test/files/run/t0677-old.scala3
-rw-r--r--test/files/run/t1005.scala9
-rw-r--r--test/files/run/t1110.scala6
-rw-r--r--test/files/run/t1141.scala4
-rw-r--r--test/files/run/t1195-new.scala4
-rw-r--r--test/files/run/t1195-old.scala3
-rw-r--r--test/files/run/t1368.check3
-rw-r--r--test/files/run/t1427.check3
-rw-r--r--test/files/run/t1427.scala3
-rw-r--r--test/files/run/t1500.scala2
-rw-r--r--test/files/run/t1501.scala2
-rw-r--r--test/files/run/t153.scala2
-rw-r--r--test/files/run/t1766.scala3
-rw-r--r--test/files/run/t2106.check3
-rw-r--r--test/files/run/t2106.flags2
-rw-r--r--test/files/run/t2106.scala2
-rw-r--r--test/files/run/t2251b.check6
-rw-r--r--test/files/run/t2308a.scala2
-rw-r--r--test/files/run/t2318.scala2
-rw-r--r--test/files/run/t2333.scala4
-rw-r--r--test/files/run/t2417.scala2
-rw-r--r--test/files/run/t2464/Annotated.java5
-rw-r--r--test/files/run/t2464/Connect.java20
-rw-r--r--test/files/run/t2464/Test.scala35
-rw-r--r--test/files/run/t2514.scala6
-rw-r--r--test/files/run/t2594_tcpoly.scala5
-rw-r--r--test/files/run/t2636.scala5
-rw-r--r--test/files/run/t3038d.scala2
-rw-r--r--test/files/run/t3050.scala2
-rw-r--r--test/files/run/t3175.scala3
-rw-r--r--test/files/run/t3232.scala2
-rw-r--r--test/files/run/t3242.scala3
-rw-r--r--test/files/run/t3361.scala4
-rw-r--r--test/files/run/t3425.check4
-rw-r--r--test/files/run/t3425.scala41
-rw-r--r--test/files/run/t3425b.check152
-rw-r--r--test/files/run/t3425b/Base_1.scala89
-rw-r--r--test/files/run/t3425b/Generated_2.scala886
-rw-r--r--test/files/run/t3488.check6
-rw-r--r--test/files/run/t3507-new.scala4
-rw-r--r--test/files/run/t3529.scala1
-rw-r--r--test/files/run/t3651.scala2
-rw-r--r--test/files/run/t3705.scala4
-rw-r--r--test/files/run/t3758-old.scala4
-rw-r--r--test/files/run/t3855.scala4
-rw-r--r--test/files/run/t3888.check1
-rw-r--r--test/files/run/t3888.scala5
-rw-r--r--test/files/run/t3935.scala2
-rw-r--r--test/files/run/t3964.scala3
-rw-r--r--test/files/run/t4023.scala31
-rw-r--r--test/files/run/t4047.check12
-rw-r--r--test/files/run/t4072.scala2
-rw-r--r--test/files/run/t4080.scala5
-rw-r--r--test/files/run/t4148.scala6
-rw-r--r--test/files/run/t4171.scala3
-rw-r--r--test/files/run/t4398.scala2
-rw-r--r--test/files/run/t4560.scala3
-rw-r--r--test/files/run/t4660.scala2
-rw-r--r--test/files/run/t4680.check6
-rw-r--r--test/files/run/t4729/S_2.scala1
-rw-r--r--test/files/run/t4766.scala4
-rw-r--r--test/files/run/t4777.scala4
-rw-r--r--test/files/run/t4794.scala3
-rw-r--r--test/files/run/t4929.scala1
-rw-r--r--test/files/run/t498.scala3
-rw-r--r--test/files/run/t5053.scala3
-rw-r--r--test/files/run/t5080.scala4
-rw-r--r--test/files/run/t5224.check5
-rw-r--r--test/files/run/t5277_1.scala3
-rw-r--r--test/files/run/t5284b.check3
-rw-r--r--test/files/run/t5284c.check3
-rw-r--r--test/files/run/t5313.scala2
-rw-r--r--test/files/run/t5353.scala9
-rw-r--r--test/files/run/t5356.scala2
-rw-r--r--test/files/run/t5375.check2
-rw-r--r--test/files/run/t5375.scala23
-rw-r--r--test/files/run/t5380.check9
-rw-r--r--test/files/run/t5380.scala2
-rw-r--r--test/files/run/t5428.scala2
-rw-r--r--test/files/run/t5568.flags1
-rw-r--r--test/files/run/t5603.check2
-rw-r--r--test/files/run/t5629b.scala2
-rwxr-xr-xtest/files/run/t5699.scala2
-rw-r--r--test/files/run/t576.scala2
-rw-r--r--test/files/run/t5881.scala3
-rw-r--r--test/files/run/t5912.scala3
-rw-r--r--test/files/run/t5923a.check3
-rw-r--r--test/files/run/t5923a/Macros_1.scala14
-rw-r--r--test/files/run/t5923a/Test_2.scala5
-rw-r--r--test/files/run/t5923b.check3
-rw-r--r--test/files/run/t5923b/Test.scala7
-rw-r--r--test/files/run/t5942.scala2
-rw-r--r--test/files/run/t6011c.check3
-rw-r--r--test/files/run/t6028.check6
-rw-r--r--test/files/run/t603.scala2
-rw-r--r--test/files/run/t6102.check30
-rw-r--r--test/files/run/t6113.scala4
-rw-r--r--test/files/run/t6146b.check4
-rw-r--r--test/files/run/t6246.scala3
-rw-r--r--test/files/run/t6288.check2
-rw-r--r--test/files/run/t6308.check16
-rw-r--r--test/files/run/t6308.scala41
-rw-r--r--test/files/run/t6309.check1
-rw-r--r--test/files/run/t6309.scala16
-rw-r--r--test/files/run/t6329_vanilla.scala1
-rw-r--r--test/files/run/t6440.check2
-rw-r--r--test/files/run/t6443.scala2
-rw-r--r--test/files/run/t6481.check4
-rw-r--r--test/files/run/t6481.scala13
-rw-r--r--test/files/run/t6488.check1
-rw-r--r--test/files/run/t6488.scala57
-rw-r--r--test/files/run/t6555.check2
-rw-r--r--test/files/run/t657.scala2
-rw-r--r--test/files/run/t6690.scala2
-rw-r--r--test/files/run/t6715.scala15
-rw-r--r--test/files/run/t6731.scala2
-rw-r--r--test/files/run/t6793.scala9
-rw-r--r--test/files/run/t6793b.scala11
-rw-r--r--test/files/run/t6793c.scala11
-rw-r--r--test/files/run/t6863.check12
-rw-r--r--test/files/run/t6863.scala4
-rw-r--r--test/files/run/t6900.scala36
-rw-r--r--test/files/run/t6937.check26
-rw-r--r--test/files/run/t6937.scala12
-rw-r--r--test/files/run/t6969.scala4
-rw-r--r--test/files/run/t6989/JavaClass_1.java2
-rw-r--r--test/files/run/t7047.check3
-rw-r--r--test/files/run/t7047/Impls_Macros_1.scala19
-rw-r--r--test/files/run/t7047/Test_2.scala (renamed from test/files/run/macro-def-path-dependent-c/Test_2.scala)2
-rw-r--r--test/files/run/t7088.check2
-rw-r--r--test/files/run/t7088.scala13
-rw-r--r--test/files/run/t7096.scala3
-rw-r--r--test/files/run/t7120b.scala3
-rw-r--r--test/files/run/t7151.check6
-rw-r--r--test/files/run/t7151.scala24
-rw-r--r--test/files/run/t7157.check1
-rw-r--r--test/files/run/t7157/Impls_Macros_1.scala15
-rw-r--r--test/files/run/t7157/Test_2.scala (renamed from test/files/neg/macro-invalidimpl-c/Test_2.scala)4
-rw-r--r--test/files/run/t7171.check3
-rw-r--r--test/files/run/t7171.flags1
-rw-r--r--test/files/run/t7171.scala2
-rw-r--r--test/files/run/t7198.check2
-rw-r--r--test/files/run/t7198.scala9
-rw-r--r--test/files/run/t7200.scala34
-rw-r--r--test/files/run/t7240/Test_2.scala4
-rw-r--r--test/files/run/t7271.check12
-rw-r--r--test/files/run/t7271.scala35
-rw-r--r--test/files/run/t7290.check6
-rw-r--r--test/files/run/t7291.check2
-rw-r--r--test/files/run/t7291.scala22
-rw-r--r--test/files/run/t7300.check2
-rw-r--r--test/files/run/t7300.scala11
-rw-r--r--test/files/run/t7319.check45
-rw-r--r--test/files/run/t7319.scala14
-rw-r--r--test/files/run/t7325.check19
-rw-r--r--test/files/run/t7325.scala25
-rw-r--r--test/files/run/t7337.check1
-rw-r--r--test/files/run/t7337.scala19
-rw-r--r--test/files/run/t7341.check0
-rwxr-xr-xtest/files/run/t7341.flags1
-rwxr-xr-xtest/files/run/t7341.scala15
-rw-r--r--test/files/run/t7359.check1
-rw-r--r--test/files/run/t7359/Cyclic_1.java3
-rw-r--r--test/files/run/t7359/Test_2.scala6
-rw-r--r--test/files/run/t7375a.check4
-rw-r--r--test/files/run/t7375a.scala16
-rw-r--r--test/files/run/t7375b.check4
-rw-r--r--test/files/run/t7375b/Macros_1.scala18
-rw-r--r--test/files/run/t7375b/Test_2.scala (renamed from test/files/run/macro-def-path-dependent-d1/Test_2.scala)2
-rw-r--r--test/files/run/t7398.scala28
-rw-r--r--test/files/run/t7436.scala9
-rw-r--r--test/files/run/t7482a.check14
-rw-r--r--test/files/run/t7482a.scala8
-rw-r--r--test/files/run/t7498.scala20
-rw-r--r--test/files/run/t7507.scala31
-rw-r--r--test/files/run/t7556.check2
-rw-r--r--test/files/run/t7556/Test_2.scala11
-rw-r--r--test/files/run/t7556/mega-class_1.scala3002
-rw-r--r--test/files/run/t7558.scala9
-rw-r--r--test/files/run/t874.scala2
-rw-r--r--test/files/run/tailcalls.check56
-rw-r--r--test/files/run/tailcalls.scala2
-rw-r--r--test/files/run/tcpoly_monads.scala3
-rw-r--r--test/files/run/tcpoly_overriding.scala3
-rw-r--r--test/files/run/tcpoly_parseridioms.check20
-rw-r--r--test/files/run/tcpoly_parseridioms.scala3
-rw-r--r--test/files/run/toolbox_console_reporter.scala13
-rw-r--r--test/files/run/tpeCache-tyconCache.check19
-rw-r--r--test/files/run/tpeCache-tyconCache.scala10
-rw-r--r--test/files/run/transform.scala2
-rw-r--r--test/files/run/try-2.check3
-rw-r--r--test/files/run/try-2.scala12
-rw-r--r--test/files/run/try-catch-unify.scala2
-rw-r--r--test/files/run/try.check3
-rw-r--r--test/files/run/try.scala8
-rw-r--r--test/files/run/tuple-zipped.scala3
-rw-r--r--test/files/run/tuples.scala1
-rw-r--r--test/files/run/type-currying.scala3
-rw-r--r--test/files/run/typetags_without_scala_reflect_typetag_lookup.check2
-rw-r--r--test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check2
-rw-r--r--test/files/run/unapply.check3
-rw-r--r--test/files/run/unittest_io.scala2
-rw-r--r--test/files/run/unreachable.scala19
-rw-r--r--test/files/run/valueclasses-classmanifest-basic.scala3
-rw-r--r--test/files/run/valueclasses-classmanifest-existential.scala3
-rw-r--r--test/files/run/valueclasses-classmanifest-generic.scala3
-rw-r--r--test/files/run/vector1.scala2
-rw-r--r--test/files/run/view-iterator-stream.scala3
-rw-r--r--test/files/run/virtpatmat_alts.check6
-rw-r--r--test/files/run/virtpatmat_alts.flags1
-rw-r--r--test/files/run/virtpatmat_alts.scala5
-rw-r--r--test/files/run/virtpatmat_nested_lists.check5
-rw-r--r--test/files/run/virtpatmat_nested_lists.flags1
-rw-r--r--test/files/run/virtpatmat_nested_lists.scala3
-rw-r--r--test/files/run/virtpatmat_opt_sharing.check3
-rw-r--r--test/files/run/virtpatmat_opt_sharing.flags1
-rw-r--r--test/files/run/virtpatmat_opt_sharing.scala5
-rw-r--r--test/files/run/virtpatmat_staging.scala7
-rw-r--r--test/files/run/virtpatmat_stringinterp.scala5
-rw-r--r--test/files/run/virtpatmat_try.scala8
-rw-r--r--test/files/run/virtpatmat_typed.check3
-rw-r--r--test/files/run/xml-loop-bug.scala14
-rw-r--r--test/files/scalacheck/HashTrieSplit.scala47
-rw-r--r--test/files/scalacheck/avl.scala4
-rw-r--r--test/files/scalacheck/parallel-collections/pc.scala14
-rw-r--r--test/files/scalap/abstractClass.check (renamed from test/files/scalap/abstractClass/result.test)0
-rw-r--r--test/files/scalap/abstractClass.scala (renamed from test/files/scalap/abstractClass/A.scala)0
-rw-r--r--test/files/scalap/abstractMethod.check (renamed from test/files/scalap/abstractMethod/result.test)0
-rw-r--r--test/files/scalap/abstractMethod.scala (renamed from test/files/scalap/abstractMethod/A.scala)0
-rw-r--r--test/files/scalap/caseClass.check (renamed from test/files/scalap/caseClass/result.test)0
-rw-r--r--test/files/scalap/caseClass.scala (renamed from test/files/scalap/caseClass/A.scala)0
-rw-r--r--test/files/scalap/caseObject.check (renamed from test/files/scalap/caseObject/result.test)0
-rw-r--r--test/files/scalap/caseObject.scala (renamed from test/files/scalap/caseObject/A.scala)0
-rw-r--r--test/files/scalap/cbnParam.check (renamed from test/files/scalap/cbnParam/result.test)0
-rw-r--r--test/files/scalap/cbnParam.scala (renamed from test/files/scalap/cbnParam/A.scala)0
-rw-r--r--test/files/scalap/classPrivate.check (renamed from test/files/scalap/classPrivate/result.test)0
-rw-r--r--test/files/scalap/classPrivate.scala (renamed from test/files/scalap/classPrivate/A.scala)0
-rw-r--r--test/files/scalap/classWithExistential.check (renamed from test/files/scalap/classWithExistential/result.test)0
-rw-r--r--test/files/scalap/classWithExistential.scala (renamed from test/files/scalap/classWithExistential/A.scala)0
-rw-r--r--test/files/scalap/classWithSelfAnnotation.check (renamed from test/files/scalap/classWithSelfAnnotation/result.test)0
-rw-r--r--test/files/scalap/classWithSelfAnnotation.scala (renamed from test/files/scalap/classWithSelfAnnotation/A.scala)0
-rw-r--r--test/files/scalap/covariantParam.check (renamed from test/files/scalap/covariantParam/result.test)0
-rw-r--r--test/files/scalap/covariantParam.scala (renamed from test/files/scalap/covariantParam/A.scala)0
-rw-r--r--test/files/scalap/defaultParameter.check (renamed from test/files/scalap/defaultParameter/result.test)0
-rw-r--r--test/files/scalap/defaultParameter.scala (renamed from test/files/scalap/defaultParameter/A.scala)0
-rw-r--r--test/files/scalap/implicitParam.check (renamed from test/files/scalap/implicitParam/result.test)0
-rw-r--r--test/files/scalap/implicitParam.scala (renamed from test/files/scalap/implicitParam/A.scala)0
-rw-r--r--test/files/scalap/packageObject.check (renamed from test/files/scalap/packageObject/result.test)0
-rw-r--r--test/files/scalap/packageObject.scala (renamed from test/files/scalap/packageObject/A.scala)0
-rw-r--r--test/files/scalap/paramClauses.check (renamed from test/files/scalap/paramClauses/result.test)0
-rw-r--r--test/files/scalap/paramClauses.scala (renamed from test/files/scalap/paramClauses/A.scala)0
-rw-r--r--test/files/scalap/paramNames.check (renamed from test/files/scalap/paramNames/result.test)0
-rw-r--r--test/files/scalap/paramNames.scala (renamed from test/files/scalap/paramNames/A.scala)0
-rw-r--r--test/files/scalap/sequenceParam.check (renamed from test/files/scalap/sequenceParam/result.test)0
-rw-r--r--test/files/scalap/sequenceParam.scala (renamed from test/files/scalap/sequenceParam/A.scala)0
-rw-r--r--test/files/scalap/simpleClass.check (renamed from test/files/scalap/simpleClass/result.test)0
-rw-r--r--test/files/scalap/simpleClass.scala (renamed from test/files/scalap/simpleClass/A.scala)0
-rw-r--r--test/files/scalap/traitObject.check (renamed from test/files/scalap/traitObject/result.test)0
-rw-r--r--test/files/scalap/traitObject.scala (renamed from test/files/scalap/traitObject/A.scala)0
-rw-r--r--test/files/scalap/typeAnnotations.check (renamed from test/files/scalap/typeAnnotations/result.test)0
-rw-r--r--test/files/scalap/typeAnnotations.scala (renamed from test/files/scalap/typeAnnotations/A.scala)0
-rw-r--r--test/files/scalap/valAndVar.check (renamed from test/files/scalap/valAndVar/result.test)0
-rw-r--r--test/files/scalap/valAndVar.scala (renamed from test/files/scalap/valAndVar/A.scala)0
-rw-r--r--test/files/scalap/wildcardType.check (renamed from test/files/scalap/wildcardType/result.test)0
-rw-r--r--test/files/scalap/wildcardType.scala (renamed from test/files/scalap/wildcardType/A.scala)0
-rw-r--r--test/files/specialized/spec-matrix-old.scala2
-rw-r--r--test/files/specialized/spec-super.check5
-rw-r--r--test/files/specialized/spec-t3896.scala2
-rw-r--r--test/files/specialized/tb3651.check5
-rw-r--r--test/files/specialized/tc3651.check5
-rw-r--r--test/files/specialized/td3651.check8
-rw-r--r--test/junit/scala/reflect/internal/util/WeakHashSetTest.scala171
-rw-r--r--test/junit/scala/tools/nsc/SampleTest.scala17
-rwxr-xr-xtest/partest42
-rw-r--r--test/pending/neg/plugin-after-terminal.flags2
-rw-r--r--test/pending/neg/plugin-after-terminal/lib/plugins.jar.desired.sha11
-rwxr-xr-xtest/pending/neg/plugin-after-terminal/misc/build.sh14
-rw-r--r--test/pending/neg/plugin-after-terminal/testsource.scala4
-rw-r--r--test/pending/neg/plugin-before-parser.flags2
-rw-r--r--test/pending/neg/plugin-before-parser/lib/plugins.jar.desired.sha11
-rwxr-xr-xtest/pending/neg/plugin-before-parser/misc/build.sh14
-rw-r--r--test/pending/neg/plugin-before-parser/testsource.scala4
-rw-r--r--test/pending/neg/plugin-cyclic-dependency.check2
-rw-r--r--test/pending/neg/plugin-cyclic-dependency.flags2
-rw-r--r--test/pending/neg/plugin-cyclic-dependency/lib/plugins.jar.desired.sha11
-rwxr-xr-xtest/pending/neg/plugin-cyclic-dependency/misc/build.sh14
-rw-r--r--test/pending/neg/plugin-cyclic-dependency/testsource.scala4
-rw-r--r--test/pending/neg/plugin-multiple-rafter.flags2
-rw-r--r--test/pending/neg/plugin-multiple-rafter/lib/plugins.jar.desired.sha11
-rwxr-xr-xtest/pending/neg/plugin-multiple-rafter/misc/build.sh14
-rw-r--r--test/pending/neg/plugin-multiple-rafter/testsource.scala4
-rw-r--r--test/pending/neg/plugin-rafter-before-1.check2
-rw-r--r--test/pending/neg/plugin-rafter-before-1.flags2
-rw-r--r--test/pending/neg/plugin-rafter-before-1/lib/plugins.jar.desired.sha11
-rwxr-xr-xtest/pending/neg/plugin-rafter-before-1/misc/build.sh14
-rw-r--r--test/pending/neg/plugin-rafter-before-1/testsource.scala4
-rw-r--r--test/pending/neg/plugin-rightafter-terminal.flags2
-rw-r--r--test/pending/neg/plugin-rightafter-terminal/lib/plugins.jar.desired.sha11
-rwxr-xr-xtest/pending/neg/plugin-rightafter-terminal/misc/build.sh14
-rw-r--r--test/pending/neg/plugin-rightafter-terminal/misc/scalac-plugin.xml5
-rw-r--r--test/pending/neg/plugin-rightafter-terminal/testsource.scala4
-rw-r--r--test/pending/neg/t7441.check6
-rw-r--r--test/pending/neg/t7441.scala7
-rw-r--r--test/pending/neg/t7494-cyclic-dependency/ThePlugin.scala (renamed from test/pending/neg/plugin-cyclic-dependency/src/ThePlugin.scala)0
-rw-r--r--test/pending/neg/t7494-cyclic-dependency/sample_2.flags1
-rw-r--r--test/pending/neg/t7494-cyclic-dependency/sample_2.scala6
-rw-r--r--test/pending/neg/t7494-cyclic-dependency/scalac-plugin.xml5
-rw-r--r--test/pending/pos/t1786.scala20
-rw-r--r--test/pending/pos/t7234.scala (renamed from test/files/pos/t7234.scala)0
-rw-r--r--test/pending/pos/t7234b.scala (renamed from test/files/pos/t7234b.scala)0
-rw-r--r--test/pending/pos/t7486.scala8
-rw-r--r--test/pending/run/macro-term-declared-in-anonymous-explicit-import.check0
-rw-r--r--test/pending/run/macro-term-declared-in-anonymous-explicit-import/Impls_1.scala11
-rw-r--r--test/pending/run/macro-term-declared-in-anonymous-explicit-import/Macros_Test_2.scala (renamed from test/files/run/macro-declared-in-anonymous/Macros_Test_2.scala)2
-rw-r--r--test/pending/run/t6387.check (renamed from test/files/run/t6387.check)0
-rw-r--r--test/pending/run/t6387.scala (renamed from test/files/run/t6387.scala)0
-rwxr-xr-xtest/scaladoc/run/SI-191-deprecated.scala72
-rw-r--r--test/scaladoc/run/SI-6715.check1
-rw-r--r--test/scaladoc/run/SI-6715.scala15
-rwxr-xr-xtest/scaladoc/run/SI-7367.check (renamed from test/scaladoc/run/SI-191-deprecated.check)0
-rwxr-xr-xtest/scaladoc/run/SI-7367.scala25
-rw-r--r--test/scaladoc/run/t5527.check44
-rw-r--r--test/scaladoc/run/t5527.scala48
-rw-r--r--test/scaladoc/scalacheck/CommentFactoryTest.scala6
-rw-r--r--test/scaladoc/scalacheck/HtmlFactoryTest.scala17
-rw-r--r--test/scaladoc/scalacheck/IndexScriptTest.scala14
-rw-r--r--test/scaladoc/scalacheck/IndexTest.scala12
1997 files changed, 25399 insertions, 15214 deletions
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 551100ae85..2451a52682 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -6,7 +6,7 @@ These guidelines are meant to be a living document that should be changed and ad
This is the process for committing code to the Scala project. There are of course exceptions to these rules, for example minor changes to comments and documentation, fixing a broken build etc.
-1. Make sure you have signed the [Scala CLA](http://www.scala-lang.org/sites/default/files/contributor_agreement.pdf), if not, sign it.
+1. Make sure you have signed the [Scala CLA](http://typesafe.com/contribute/cla/scala), if not, sign it.
2. Before starting to work on a feature or a fix, it's good practice to ensure that:
1. There is a ticket for your work in the project's issue tracker. If not, create it first (perhaps given a thumbs up from the scala-internals mailing list first).
2. The ticket has been discussed and prioritized by the team.
diff --git a/README.rst b/README.rst
index c871adb908..004d7b63d0 100644
--- a/README.rst
+++ b/README.rst
@@ -189,15 +189,10 @@ In detail:
- Scala live git source tree:
http://github.com/scala/scala
-- Contact form:
- http://www.scala-lang.org/node/188
-
-
-If you are interested in contributing code, we ask you to complete and submit
-to us the Scala Contributor License Agreement, which allows us to ensure that
-all code submitted to the project is unencumbered by copyrights or patents.
-The form is available at:
-http://www.scala-lang.org/sites/default/files/contributor_agreement.pdf
+If you are interested in contributing code, we ask you to sign the
+[Scala Contributor License Agreement](http://typesafe.com/contribute/cla/scala),
+which allows us to ensure that all code submitted to the project is
+unencumbered by copyrights or patents.
Before submitting a pull-request, please make sure you have followed the guidelines
outlined in our `Pull Request Policy <https://github.com/scala/scala/wiki/Pull-Request-Policy>`_.
diff --git a/build.xml b/build.xml
index 3fe890c2fd..2af335d6ab 100644..100755
--- a/build.xml
+++ b/build.xml
@@ -1,10 +1,20 @@
<?xml version="1.0" encoding="UTF-8"?>
<project name="sabbus" default="build" xmlns:artifact="urn:maven-artifact-ant">
+ <include file="test/build-partest.xml" as="partest"/>
+
<description>
SuperSabbus for Scala core, builds the scala library and compiler. It can also package it as a simple distribution, tests it for stable bootstrapping and against the Scala test suite.
</description>
+<!-- HINTS
+
+ - for faster builds, have a build.properties in the same directory as build.xml that says:
+ locker.skip=1
+ starr.use.released=1
+
+-->
+
<!-- USAGE FROM JENKINS SCRIPTS IS (CURRENTLY) AS FOLLOWS:
ant $antArgs $scalacArgs $targets
@@ -16,7 +26,20 @@ scalacArgs examples:
"-Dscalac.args=\"-Yrangepos\" -Dpartest.scalac_opts=\"-Yrangepos\""
targets exercised:
- build-opt nightly test.suite test.continuations.suite test.scaladoc locker.done
+ locker.done build-opt nightly test.suite test.continuations.suite test.scaladoc
+-->
+
+<!-- To use Zinc with the ant build:
+ - install zinc and symlink the installed zinc script to ${basedir}/tools/zinc (${basedir} is where build.xml and the rest of your checkout resides)
+ - make sure to set ZINC_OPTS to match ANT_OPTS!
+-->
+
+<!--
+TODO:
+ - detect zinc anywhere on PATH
+ - automatically set ZINC_OPTS
+ - skip locker (and test.stability) by default to speed up PR validation, still do full build & testing during nightly
+ - (rework the build to pack locker and build using that when using zinc)
-->
@@ -89,9 +112,13 @@ targets exercised:
<!-- Loads custom properties definitions -->
<property file="${basedir}/build.properties"/>
+
<!-- Generating version number -->
<property file="${basedir}/build.number"/>
+ <!-- read starr.version -->
+ <property file="${basedir}/starr.number"/>
+
<!-- Sets location of pre-compiled libraries -->
<property name="library.starr.jar" value="${lib.dir}/scala-library.jar"/>
<property name="reflect.starr.jar" value="${lib.dir}/scala-reflect.jar"/>
@@ -110,6 +137,7 @@ targets exercised:
<property name="build-quick.dir" value="${build.dir}/quick"/>
<property name="build-pack.dir" value="${build.dir}/pack"/>
<property name="build-osgi.dir" value="${build.dir}/osgi"/>
+ <property name="build-junit.dir" value="${build.dir}/junit"/>
<property name="build-strap.dir" value="${build.dir}/strap"/>
<property name="build-docs.dir" value="${build.dir}/scaladoc"/>
<property name="build-sbt.dir" value="${build.dir}/sbt-interface"/>
@@ -117,10 +145,13 @@ targets exercised:
<property name="test.osgi.src" value="${partest.dir}/osgi/src"/>
<property name="test.osgi.classes" value="${build-osgi.dir}/classes"/>
+ <property name="test.junit.src" value="${partest.dir}/junit"/>
+ <property name="test.junit.classes" value="${build-junit.dir}/classes"/>
+
<property name="dists.dir" value="${basedir}/dists"/>
<property name="copyright.string" value="Copyright 2002-2013, LAMP/EPFL"/>
- <property name="partest.version.number" value="0.9.2"/>
+ <property name="partest.version.number" value="0.9.3"/>
<!-- These are NOT the flags used to run SuperSabbus, but the ones written
into the script runners created with scala.tools.ant.ScalaTool -->
@@ -181,6 +212,12 @@ targets exercised:
<dependency groupId="biz.aQute" artifactId="bnd" version="1.50.0"/>
</artifact:dependencies>
+ <!-- JUnit -->
+ <property name="junit.version" value="4.10"/>
+ <artifact:dependencies pathId="junit.classpath" filesetId="junit.fileset">
+ <dependency groupId="junit" artifactId="junit" version="${junit.version}"/>
+ </artifact:dependencies>
+
<!-- Pax runner -->
<property name="pax.exam.version" value="2.5.0"/>
<artifact:dependencies pathId="pax.exam.classpath" filesetId="pax.exam.fileset">
@@ -191,26 +228,36 @@ targets exercised:
<dependency groupId="org.ops4j.pax.swissbox" artifactId="pax-swissbox-framework" version="1.5.1"/>
<dependency groupId="ch.qos.logback" artifactId="logback-core" version="0.9.20"/>
<dependency groupId="ch.qos.logback" artifactId="logback-classic" version="0.9.20"/>
- <dependency groupId="junit" artifactId="junit" version="4.10"/>
+ <dependency groupId="junit" artifactId="junit" version="${junit.version}"/>
<dependency groupId="org.apache.felix" artifactId="org.apache.felix.framework" version="3.2.2"/>
</artifact:dependencies>
+
<artifact:dependencies pathId="partest.extras.classpath" filesetId="partest.extras.fileset" versionsId="partest.extras.versions">
<dependency groupId="com.googlecode.java-diff-utils" artifactId="diffutils" version="1.3.0"/>
+ <dependency groupId="org.scala-tools.testing" artifactId="test-interface" version="0.5" />
</artifact:dependencies>
<!-- BND support -->
<typedef resource="aQute/bnd/ant/taskdef.properties" classpathref="extra.tasks.classpath" />
- <!-- Download STARR via maven if `starr.version` is specified.
+ <!-- Download STARR via maven if `starr.use.released` is set,
+ and `starr.version` is specified (see the starr.number properties file).
Want to slow down STARR changes, using only released versions. -->
- <if><isset property="starr.version"/><then>
+ <if><isset property="starr.use.released"/><then>
+ <echo message="Using Scala ${starr.version} for STARR."/>
<artifact:dependencies pathId="starr.core.path">
<dependency groupId="org.scala-lang" artifactId="scala-library" version="${starr.version}"/>
<dependency groupId="org.scala-lang" artifactId="scala-reflect" version="${starr.version}"/>
<dependency groupId="org.scala-lang" artifactId="scala-compiler" version="${starr.version}"/>
- </artifact:dependencies>
- </then></if>
+ </artifact:dependencies></then>
+ <else>
+ <path id="starr.core.path">
+ <pathelement location="${library.starr.jar}"/>
+ <pathelement location="${reflect.starr.jar}"/>
+ <pathelement location="${compiler.starr.jar}"/>
+ </path></else>
+ </if>
<property name="maven-deps-done" value="yep!"/>
</then></if>
@@ -245,6 +292,10 @@ targets exercised:
<property name="maven.version.suffix" value="${version.suffix}"/>
<property name="osgi.version.suffix" value="${version.suffix}"/></else></if>
+ <!-- if a maven version suffix was set (or inferred), assume we're building a release -->
+ <if><isset property="maven.version.suffix"/><then>
+ <property name="build.release" value="1"/></then></if>
+
<!-- not building a release and no version.suffix specified -->
<property name="maven.version.suffix" value="-SNAPSHOT"/>
@@ -404,24 +455,10 @@ targets exercised:
<path refid="aux.libs"/>
</path>
- <!-- Download STARR via maven if `starr.version` is specified.
- Want to slow down STARR changes, using only released versions. -->
- <if><isset property="starr.version"/><then>
- <property name="strr" refid="starr.compiler.path"/>
- <echo message="Using Scala ${starr.version} for STARR."/>
- <!-- <echo message="STARR classpath: ${strr}"/> -->
- </then><else>
- <path id="starr.core.path">
- <pathelement location="${library.starr.jar}"/>
- <pathelement location="${reflect.starr.jar}"/>
- <pathelement location="${compiler.starr.jar}"/>
- </path>
- </else></if>
-
- <!-- Skip locker with -Dlocker.skip=YESSIR. Uses STARR instead. -->
+ <!-- To skip locker, use -Dlocker.skip=1 -->
<if><isset property="locker.skip"/><then>
- <echo message="Skipping locker! Using STARR instead."/>
- <path id="locker.compiler.path"><path refid="starr.compiler.path"/></path>
+ <echo message="Using STARR to build the quick stage (skipping locker)."/>
+ <path id="locker.compiler.path" refid="starr.compiler.path"/>
<property name="locker.locked" value="locker skipped"/></then>
<else>
<path id="locker.compiler.path"><path refid="locker.compiler.build.path"/></path></else></if>
@@ -445,6 +482,11 @@ targets exercised:
<path refid="aux.libs"/>
</path>
+ <path id="locker.actors.build.path">
+ <path refid="locker.library.build.path"/>
+ <pathelement location="${build-locker.dir}/classes/actors"/>
+ </path>
+
<path id="locker.reflect.build.path">
<path refid="locker.library.build.path"/>
<pathelement location="${build-locker.dir}/classes/reflect"/>
@@ -463,7 +505,10 @@ targets exercised:
<path refid="aux.libs"/>
</path>
- <path id="quick.actors.build.path"><path refid="quick.library.build.path"/></path>
+ <path id="quick.actors.build.path">
+ <path refid="quick.library.build.path"/>
+ <pathelement location="${build-quick.dir}/classes/actors"/>
+ </path>
<path id="quick.reflect.build.path">
<path refid="quick.library.build.path"/>
@@ -484,6 +529,7 @@ targets exercised:
<path id="quick.swing.build.path">
<path refid="quick.library.build.path"/>
+ <path refid="quick.actors.build.path"/>
<pathelement location="${build-quick.dir}/classes/swing"/>
</path>
@@ -494,7 +540,9 @@ targets exercised:
<path id="quick.scalacheck.build.path">
<pathelement location="${build-quick.dir}/classes/library"/>
+ <pathelement location="${build-quick.dir}/classes/actors"/>
<pathelement location="${build-quick.dir}/classes/scalacheck"/>
+ <path refid="partest.extras.classpath"/>
</path>
<path id="quick.scalap.build.path">
@@ -506,7 +554,7 @@ targets exercised:
<path id="quick.partest.build.path">
<path refid="quick.scalap.build.path"/>
<path refid="partest.extras.classpath"/>
- <pathelement location="${build-quick.dir}/classes/repl"/>
+ <pathelement location="${build-quick.dir}/classes/repl"/>
<pathelement location="${scalacheck.jar}"/>
</path>
@@ -524,6 +572,7 @@ targets exercised:
<path id="quick.bin.tool.path">
<path refid="quick.repl.build.path"/>
+ <path refid="quick.actors.build.path"/>
<pathelement location="${build-quick.dir}/classes/scalap"/>
<pathelement location="${build-quick.dir}/classes/continuations-library"/>
</path>
@@ -552,18 +601,13 @@ targets exercised:
</path>
<path id="pack.library.files">
- <fileset dir="${build-quick.dir}/classes/library">
- <!-- <exclude name="scala/swing/**"/> -->
- <exclude name="scala/actors/**"/>
- </fileset>
+ <fileset dir="${build-quick.dir}/classes/library"/>
<fileset dir="${build-quick.dir}/classes/continuations-library"/>
<fileset dir="${forkjoin-classes}"/>
</path>
<path id="pack.actors.files">
- <fileset dir="${build-quick.dir}/classes/library">
- <include name="scala/actors/**"/>
- </fileset>
+ <fileset dir="${build-quick.dir}/classes/actors"/>
</path>
<path id="pack.compiler.files">
@@ -599,7 +643,6 @@ targets exercised:
<path refid="forkjoin.classpath"/>
<path refid="aux.libs"/>
</path>
- <path id="strap.actors.build.path"><path refid="strap.library.build.path"/></path>
<path id="strap.reflect.build.path">
<path refid="strap.library.build.path"/>
@@ -631,10 +674,20 @@ targets exercised:
<path refid="partest.extras.classpath"/>
</path>
+ <!-- obsolete? -->
+ <!-- TODO - segregate swing tests (there can't be many) -->
+ <!--
<path id="partest.build.path">
<path refid="pack.compiler.path"/>
<fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- <pathelement location="${pack.dir}/lib/scala-swing.jar"/> <!-- TODO - segregate swing tests (there can't be many) -->
+ <pathelement location="${pack.dir}/lib/scala-swing.jar"/>
+ </path>
+ -->
+
+ <path id="test.junit.compiler.build.path">
+ <pathelement location="${test.junit.classes}"/>
+ <path refid="quick.compiler.build.path"/>
+ <path refid="junit.classpath"/>
</path>
<path id="test.osgi.compiler.build.path">
@@ -728,6 +781,7 @@ targets exercised:
<stopwatch name="@{project}.timer"/>
<mkdir dir="${@{project}-classes}"/>
<javac
+ debug="true"
srcdir="${src.dir}/@{project}"
destdir="${@{project}-classes}"
classpath="${@{project}-classes}"
@@ -743,8 +797,8 @@ targets exercised:
<touch file="${build-libs.dir}/@{project}.complete" verbose="no"/>
</then></if>
</sequential>
- </macrodef>
-
+ </macrodef>
+
<target name="asm.done" depends="init"> <simple-javac project="asm" jar="no"/> </target>
<target name="forkjoin.done" depends="init"> <simple-javac project="forkjoin" args="-XDignore.symbol.file"/></target>
@@ -760,6 +814,7 @@ targets exercised:
<sequential>
<javac
+ debug="true"
srcdir="${src.dir}/@{project}"
destdir="${build-@{stage}.dir}/classes/@{destproject}"
includes="**/*.java"
@@ -771,6 +826,49 @@ targets exercised:
</sequential>
</macrodef>
+ <!-- Zinc assumes a one-to-one correspondence of output folder to set of source files.
+ When compiling different sets of source files in multiple compilations to the same output directory,
+ Zinc thinks source files that appeared in an earlier compilation but are absent in the current one,
+ were deleted and thus deletes the corresponding output files.
+
+ Note that zinc also requires each arg to scalac to be prefixed by -S.
+ -->
+ <macrodef name="zinc">
+ <attribute name="compilerpathref" />
+ <attribute name="destdir" />
+ <attribute name="srcdir" />
+ <attribute name="srcpath" default="NOT SET"/> <!-- needed to compile the library, "NOT SET" is just a convention to denote an optional attribute -->
+ <attribute name="buildpathref" />
+ <attribute name="params" default="" />
+ <attribute name="java-excludes" default=""/>
+
+ <sequential>
+ <local name="sources"/>
+ <pathconvert pathsep=" " property="sources">
+ <fileset dir="@{srcdir}">
+ <include name="**/*.java"/>
+ <include name="**/*.scala"/>
+ <exclude name="@{java-excludes}"/>
+ </fileset>
+ </pathconvert>
+ <local name="args"/>
+ <local name="sargs"/>
+ <if><not><equals arg1="@{srcpath}" arg2="NOT SET"/></not><then>
+ <property name="args" value="@{params} -sourcepath @{srcpath}"/>
+ </then></if>
+ <property name="args" value="@{params}"/> <!-- default -->
+
+ <!-- HACK: prefix scalac args by -S -->
+ <script language="javascript">
+ project.setProperty("sargs", project.getProperty("args").trim().replaceAll(" ", " -S"));
+ </script>
+
+ <exec osfamily="unix" executable="tools/zinc" failifexecutionfails="true" failonerror="true">
+ <arg line="-nailed -compile-order JavaThenScala -scala-path ${ant.refid:@{compilerpathref}} -d @{destdir} -classpath ${toString:@{buildpathref}} ${sargs} ${sources}"/>
+ </exec>
+ </sequential>
+ </macrodef>
+
<macrodef name="staged-scalac" >
<attribute name="with"/> <!-- will use path `@{with}.compiler.path` to locate scalac -->
<attribute name="stage"/> <!-- current stage (locker, quick, strap) -->
@@ -779,33 +877,46 @@ targets exercised:
<attribute name="args" default=""/> <!-- additional args -->
<attribute name="destproject" default="@{project}"/> <!-- overrides the output directory; used when building multiple projects into the same directory-->
<attribute name="srcdir" default="@{project}"/>
- <element name="args" implicit="true" optional="true"/>
+ <attribute name="java-excludes" default=""/>
<sequential>
+ <!-- TODO: detect zinc anywhere on PATH
+ use zinc for the quick stage if it's available;
+ would use it for locker but something is iffy in sbt: get a class cast error on global phase -->
+ <if><and> <available file="tools/zinc"/>
+ <equals arg1="@{stage}" arg2="quick"/>
+ <not><equals arg1="@{project}" arg2="plugins"/></not> <!-- doesn't work in zinc because it requires the quick compiler, which isn't jarred up-->
+ </and><then>
+ <zinc taskname="Z.@{stage}.@{project}"
+ compilerpathref="@{with}.compiler.path"
+ destdir="${build-@{stage}.dir}/classes/@{destproject}"
+ srcdir="${src.dir}/@{srcdir}"
+ srcpath="@{srcpath}"
+ buildpathref="@{stage}.@{project}.build.path"
+ params="${scalac.args.@{stage}} @{args}"
+ java-excludes="@{java-excludes}"/></then>
+ <else>
<if><equals arg1="@{srcpath}" arg2="NOT SET"/><then>
<scalacfork taskname="@{stage}.@{project}"
- destdir="${build-@{stage}.dir}/classes/@{destproject}"
+ jvmargs="${scalacfork.jvmargs}"
compilerpathref="@{with}.compiler.path"
- params="${scalac.args.@{stage}} @{args}"
+ destdir="${build-@{stage}.dir}/classes/@{destproject}"
srcdir="${src.dir}/@{srcdir}"
- jvmargs="${scalacfork.jvmargs}">
+ params="${scalac.args.@{stage}} @{args}">
<include name="**/*.scala"/>
- <compilationpath refid="@{stage}.@{project}.build.path"/>
- <args/>
- </scalacfork></then>
+ <compilationpath refid="@{stage}.@{project}.build.path"/></scalacfork></then>
<else>
<scalacfork taskname="@{stage}.@{project}"
- destdir="${build-@{stage}.dir}/classes/@{destproject}"
+ jvmargs="${scalacfork.jvmargs}"
compilerpathref="@{with}.compiler.path"
- srcpath="@{srcpath}"
- params="${scalac.args.@{stage}} @{args}"
+ destdir="${build-@{stage}.dir}/classes/@{destproject}"
srcdir="${src.dir}/@{srcdir}"
- jvmargs="${scalacfork.jvmargs}">
+ srcpath="@{srcpath}"
+ params="${scalac.args.@{stage}} @{args}">
<include name="**/*.scala"/>
- <compilationpath refid="@{stage}.@{project}.build.path"/>
- <args/>
- </scalacfork></else>
+ <compilationpath refid="@{stage}.@{project}.build.path"/></scalacfork></else>
</if>
+ </else></if>
</sequential>
</macrodef>
@@ -833,20 +944,17 @@ targets exercised:
<attribute name="srcpath" default="NOT SET"/> <!-- needed to compile the library -->
<attribute name="args" default=""/> <!-- additional args -->
<attribute name="includes" default="comp.includes"/>
+ <attribute name="java-excludes" default=""/>
<attribute name="version" default=""/> <!-- non-empty for partest and scaladoc: use @{version}.version.number in property file-->
- <element name="pre" optional="true"/>
- <element name="post" optional="true"/>
-
<sequential>
<staged-uptodate stage="@{stage}" project="@{project}">
<check><srcfiles dir="${src.dir}/@{project}"/></check>
<do>
<stopwatch name="@{stage}.@{project}.timer"/>
<mkdir dir="${build-@{stage}.dir}/classes/@{project}"/>
- <pre/>
- <staged-scalac with="@{with}" stage="@{stage}" project="@{project}" srcpath="@{srcpath}" args="@{args}"/>
- <post/>
+ <staged-javac stage="@{stage}" project="@{project}" excludes="@{java-excludes}"/> <!-- always compile with javac for simplicity and regularity; it's cheap -->
+ <staged-scalac with="@{with}" stage="@{stage}" project="@{project}" srcpath="@{srcpath}" args="@{args}" java-excludes="@{java-excludes}"/>
<if><equals arg1="@{version}" arg2=""/><then>
<propertyfile file = "${build-@{stage}.dir}/classes/@{project}/@{project}.properties">
<entry key = "version.number" value="${version.number}"/>
@@ -999,16 +1107,20 @@ targets exercised:
<!-- ===========================================================================
LOCAL REFERENCE BUILD (LOCKER)
============================================================================ -->
- <target name="locker.start" depends="asm.done, forkjoin.done">
+ <target name="locker.start" depends="asm.done, forkjoin.done">
<condition property="locker.locked"><available file="${build-locker.dir}/locker.locked"/></condition></target>
- <target name="locker.lib" depends="locker.start" unless="locker.locked">
- <staged-build with="starr" stage="locker" project="library" srcpath="${src.dir}/library" includes="lib.includes">
- <pre><staged-javac stage="locker" project="library" args="-XDignore.symbol.file"/></pre></staged-build></target>
- <!-- TODO: args="-XDignore.symbol.file" necessary?? -->
+ <target name="locker.lib" depends="locker.start" unless="locker.locked">
+ <staged-build with="starr" stage="locker" project="library" srcpath="${src.dir}/library" includes="lib.includes"/></target>
+
+ <target name="locker.actors" depends="locker.lib" unless="locker.locked">
+ <staged-build with="starr" stage="locker" project="actors"/> </target>
- <target name="locker.reflect" depends="locker.lib" unless="locker.locked"> <staged-build with="starr" stage="locker" project="reflect"/></target>
- <target name="locker.comp" depends="locker.reflect" unless="locker.locked"> <staged-build with="starr" stage="locker" project="compiler"/></target>
+ <target name="locker.reflect" depends="locker.lib" unless="locker.locked">
+ <staged-build with="starr" stage="locker" project="reflect"/></target>
+
+ <target name="locker.comp" depends="locker.reflect" unless="locker.locked">
+ <staged-build with="starr" stage="locker" project="compiler"/></target>
<target name="locker.done" depends="locker.comp">
<mkdir dir="${build-locker.dir}"/>
@@ -1020,62 +1132,43 @@ targets exercised:
<!-- ===========================================================================
QUICK BUILD (QUICK)
============================================================================ -->
- <target name="quick.start" depends="locker.done"/>
+ <target name="quick.start" depends="locker.done"/>
- <target name="quick.lib" depends="quick.start">
- <staged-build with="locker" stage="quick" project="library" srcpath="${src.dir}/library" includes="lib.rootdoc.includes">
- <pre>
- <staged-javac stage="quick" project="library" args="-XDignore.symbol.file"/> <!-- TODO: args="-XDignore.symbol.file" necessary?? -->
- <staged-javac stage="quick" project="actors" destproject="library"/>
- </pre>
- <post>
- <staged-scalac with="locker" stage="quick" project="actors" destproject="library"/>
- </post>
- </staged-build>
- </target>
+ <target name="quick.lib" depends="quick.start">
+ <staged-build with="locker" stage="quick" project="library" srcpath="${src.dir}/library" includes="lib.rootdoc.includes"/></target>
- <target name="quick.reflect" depends="quick.lib"> <staged-build with="locker" stage="quick" project="reflect"/> </target>
+ <target name="quick.actors" depends="quick.lib">
+ <staged-build with="locker" stage="quick" project="actors"/> </target>
- <target name="quick.comp" depends="quick.reflect">
- <staged-build with="locker" stage="quick" project="compiler"/>
- </target>
+ <target name="quick.reflect" depends="quick.lib">
+ <staged-build with="locker" stage="quick" project="reflect"/> </target>
- <target name="quick.repl" depends="quick.comp">
- <staged-build with="locker" stage="quick" project="repl">
- <pre>
- <staged-javac stage="quick" project="repl"/>
- </pre>
- </staged-build>
- </target>
+ <target name="quick.comp" depends="quick.reflect">
+ <staged-build with="locker" stage="quick" project="compiler"/> </target>
- <target name="quick.scalacheck" depends="quick.lib">
- <staged-build with="locker" stage="quick" project="scalacheck" args="-nowarn"/>
- </target>
+ <target name="quick.repl" depends="quick.comp">
+ <staged-build with="locker" stage="quick" project="repl"/> </target>
- <target name="quick.scalap" depends="quick.repl">
- <staged-build with="locker" stage="quick" project="scalap"/>
- </target>
+ <target name="quick.scalacheck" depends="quick.actors, quick.lib">
+ <staged-build with="locker" stage="quick" project="scalacheck" args="-nowarn"/> </target>
- <target name="quick.partest" depends="quick.scalap, quick.repl, asm.done">
- <staged-build with="locker" stage="quick" project="partest" version="partest">
- <pre><staged-javac stage="quick" project="partest"/></pre>
- </staged-build>
- </target>
+ <target name="quick.scalap" depends="quick.repl">
+ <staged-build with="locker" stage="quick" project="scalap"/> </target>
+
+ <target name="quick.partest" depends="quick.scalap, quick.repl, asm.done">
+ <staged-build with="locker" stage="quick" project="partest" version="partest"/> </target>
<target name="quick.scaladoc" depends="quick.comp, quick.partest">
- <staged-build with="locker" stage="quick" project="scaladoc" version="scaladoc"/>
- </target>
+ <staged-build with="locker" stage="quick" project="scaladoc" version="scaladoc"/> </target>
<target name="quick.interactive" depends="quick.comp, quick.scaladoc">
- <staged-build with="locker" stage="quick" project="interactive"/>
- </target>
+ <staged-build with="locker" stage="quick" project="interactive"/> </target>
- <target name="quick.swing" depends="quick.lib" if="has.java6">
- <staged-build with="locker" stage="quick" project="swing"/>
- </target>
+ <target name="quick.swing" depends="quick.actors, quick.lib" if="has.java6">
+ <staged-build with="locker" stage="quick" project="swing"/> </target>
- <target name="quick.plugins" depends="quick.comp">
- <staged-uptodate stage="quick" project="plugins">
+ <target name="quick.plugins" depends="quick.comp">
+ <staged-uptodate stage="quick" project="plugins">
<check><srcfiles dir="${src.dir}/continuations"/></check>
<do>
<stopwatch name="quick.plugins.timer"/>
@@ -1092,24 +1185,26 @@ targets exercised:
<!-- might split off library part into its own ant target -->
<mkdir dir="${build-quick.dir}/classes/continuations-library"/>
- <!-- have to compile with quick compiler here! -->
+ <!-- TODO: must build with quick to avoid
+ [quick.plugins] error: java.lang.NoClassDefFoundError: scala/tools/nsc/transform/patmat/PatternMatching
+ [quick.plugins] at scala.tools.selectivecps.SelectiveCPSTransform.newTransformer(SelectiveCPSTransform.scala:29)
+
+ WHY OH WHY!? scala/tools/nsc/transform/patmat/PatternMatching should be on locker.compiler.path
+ -->
<staged-scalac with="quick" stage="quick" project="plugins"
srcdir="continuations/library" destproject="continuations-library"
- args="-Xplugin-require:continuations -P:continuations:enable">
- <compilerarg value="-Xpluginsdir"/>
- <compilerarg file="${build-quick.dir}/misc/scala-devel/plugins"/>
- </staged-scalac>
+ args="-Xplugin-require:continuations -P:continuations:enable -Xpluginsdir ${build-quick.dir}/misc/scala-devel/plugins"/>
<stopwatch name="quick.plugins.timer" action="total"/>
</do>
</staged-uptodate>
</target>
- <target name="quick.bin" depends="init">
+ <target name="quick.bin" depends="quick.lib, quick.reflect, quick.comp, quick.repl, quick.scalacheck, quick.scalap, quick.interactive, quick.swing, quick.plugins, quick.partest, quick.scaladoc">
<staged-bin stage="quick" classpathref="quick.bin.tool.path"/>
</target>
- <target name="quick.done" depends="quick.lib, quick.reflect, quick.comp, quick.repl, quick.scalacheck, quick.scalap, quick.interactive, quick.swing, quick.plugins, quick.partest, quick.scaladoc, quick.bin"/>
+ <target name="quick.done" depends="quick.bin"/>
<target name="quick-opt" description="Optimized version of quick.done."> <optimized name="quick.done"/></target>
@@ -1140,9 +1235,6 @@ targets exercised:
</manifest>
</pre>
<jar-opts>
- <fileset dir="${build-quick.dir}/classes/scaladoc"/>
- <fileset dir="${build-quick.dir}/classes/interactive"/>
- <fileset dir="${build-quick.dir}/classes/repl"/>
<service type="javax.script.ScriptEngineFactory" provider="scala.tools.nsc.interpreter.IMain$Factory"/>
</jar-opts>
</staged-pack>
@@ -1180,21 +1272,12 @@ targets exercised:
<!-- ===========================================================================
BOOTSTRAPPING BUILD (STRAP)
============================================================================ -->
- <target name="strap.lib" depends="quick.done">
- <staged-build with="quick" stage="strap" project="library" srcpath="${src.dir}/library" includes="lib.rootdoc.includes">
- <pre>
- <staged-javac stage="strap" project="library" args="-XDignore.symbol.file"/> <!-- TODO: args="-XDignore.symbol.file" necessary?? -->
- <staged-javac stage="strap" project="actors" destproject="library"/>
- </pre>
- <post>
- <staged-scalac with="quick" stage="strap" project="actors" destproject="library"/>
- </post>
- </staged-build>
+ <target name="strap.done" depends="pack.done">
+ <staged-build with="pack" stage="strap" project="library" srcpath="${src.dir}/library" includes="lib.rootdoc.includes"/>
+ <staged-build with="pack" stage="strap" project="reflect"/>
+ <staged-build with="pack" stage="strap" project="compiler"/>
</target>
- <target name="strap.reflect" depends="strap.lib"> <staged-build with="pack" stage="strap" project="reflect"/> </target>
- <target name="strap.comp" depends="strap.reflect"> <staged-build with="pack" stage="strap" project="compiler"/> </target>
- <target name="strap.done" depends="strap.comp"/>
<target name="strap-opt" description="Optimized version of strap.done."> <optimized name="strap.done"/></target>
@@ -1289,7 +1372,7 @@ targets exercised:
TEST SUITE
============================================================================ -->
<!-- bootstrapping stability: compare {quick,strap}/(lib|reflect|comp) -->
- <target name="test.stability" depends="quick.lib, quick.reflect, quick.comp, strap.lib, strap.reflect, strap.comp">
+ <target name="test.stability" depends="strap.done">
<exec osfamily="unix" vmlauncher="false" executable="${basedir}/tools/stability-test.sh" failonerror="true" />
<!-- I think doing it this way means it will auto-pass on windows... that's the idea. If not, something like this. -->
<!-- <exec osfamily="windows" executable="foo" failonerror="false" failifexecutionfails="false" /> -->
@@ -1371,80 +1454,78 @@ targets exercised:
<stopwatch name="quick.sbt-interface.timer" action="total"/>
</target>
- <property name="partest.srcdir" value="files" /> <!-- TODO: make targets for `pending` and other subdirs -->
+ <target name="test.junit.init" depends="quick.done">
+ <uptodate property="test.junit.available" targetfile="${build-junit.dir}/test-compile.complete">
+ <srcfiles dir="${test.junit.src}">
+ <include name="**/*.scala"/>
+ </srcfiles>
+ </uptodate>
+ </target>
+
+ <target name="test.junit.comp" depends="test.junit.init, quick.done" unless="test.junit.available">
+ <stopwatch name="test.junit.compiler.timer"/>
+ <mkdir dir="${test.junit.classes}"/>
+ <scalacfork
+ destdir="${test.junit.classes}"
+ compilerpathref="quick.compiler.path"
+ params="${scalac.args.quick}"
+ srcdir="${test.junit.src}"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath refid="test.junit.compiler.build.path"/>
+ </scalacfork>
+ <touch file="${build-junit.dir}/test-compile.complete" verbose="no"/>
+ <stopwatch name="test.junit.compiler.timer" action="total"/>
+ </target>
- <target name="test.run" depends="pack.done">
- <partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
- timeout="1200000"
- srcdir="${partest.srcdir}"
- scalacopts="${scalac.args.optimise}">
-
- <compilationpath refid="partest.build.path"/>
- <runtests dir="${partest.dir}/${partest.srcdir}/run" includes="*.scala"/>
- <jvmtests dir="${partest.dir}/${partest.srcdir}/jvm" includes="*.scala"/>
- </partest>
+ <target name="test.junit" depends="test.junit.comp">
+ <stopwatch name="test.junit.timer"/>
+ <mkdir dir="${test.junit.classes}"/>
+ <junit fork="yes" haltonfailure="yes" showoutput="yes" printsummary="on">
+ <classpath refid="test.junit.compiler.build.path"/>
+ <batchtest fork="yes" todir="${build-junit.dir}">
+ <fileset dir="${test.junit.classes}">
+ <include name="**/*Test.class"/>
+ </fileset>
+ </batchtest>
+ <formatter type="plain"/>
+ </junit>
+ <stopwatch name="test.junit.timer" action="total"/>
</target>
+ <!-- See test/build-partest.xml for the macro(s) being used here. -->
+
<target name="test.suite" depends="pack.done">
- <partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
- timeout="2400000"
- srcdir="${partest.srcdir}"
- scalacopts="${scalac.args.optimise}">
- <compilationpath refid="partest.build.path"/>
- <postests dir="${partest.dir}/${partest.srcdir}/pos" includes="*.scala"/>
- <negtests dir="${partest.dir}/${partest.srcdir}/neg" includes="*.scala"/>
- <runtests dir="${partest.dir}/${partest.srcdir}/run" includes="*.scala"/>
- <jvmtests dir="${partest.dir}/${partest.srcdir}/jvm" includes="*.scala"/>
- <residenttests dir="${partest.dir}/${partest.srcdir}/res" includes="*.res"/>
- <scalaptests dir="${partest.dir}/${partest.srcdir}/scalap" includes="**/*.scala"/>
- <scalachecktests dir="${partest.dir}/${partest.srcdir}/scalacheck">
- <include name="*.scala"/>
- </scalachecktests>
- <specializedtests dir="${partest.dir}/${partest.srcdir}/specialized">
- <include name="*.scala"/>
- </specializedtests>
- <instrumentedtests dir="${partest.dir}/${partest.srcdir}/instrumented">
- <include name="*.scala"/>
- </instrumentedtests>
- </partest>
+ <testSuite/>
+ </target>
+
+ <target name="test.suite.color" depends="pack.done">
+ <testSuite colors="8"/>
+ </target>
+
+ <target name="test.run" depends="pack.done">
+ <testSuite kinds="run jvm"/>
</target>
<target name="test.continuations.suite" depends="pack.done">
- <partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
- timeout="2400000"
- srcdir="${partest.srcdir}"
- scalacopts="${scalac.args.optimise} -Xplugin-require:continuations -P:continuations:enable">
- <compilerarg value="-Xpluginsdir"/>
- <compilerarg file="${build-quick.dir}/misc/scala-devel/plugins"/>
- <compilationpath refid="partest.build.path"/>
- <negtests dir="${partest.dir}/${partest.srcdir}/continuations-neg" includes="*.scala"/>
- <runtests dir="${partest.dir}/${partest.srcdir}/continuations-run" includes="*.scala"/>
- </partest>
+ <testSuite kinds="continuations-neg continuations-run"
+ scalacOpts="${scalac.args.optimise} -Xpluginsdir ${build-quick.dir}/misc/scala-devel/plugins -Xplugin-require:continuations -P:continuations:enable"
+ />
</target>
<target name="test.scaladoc" depends="pack.done">
- <partest erroronfailed="yes" scalacopts="${scalac.args.optimise}" showlog="yes">
- <compilationpath refid="partest.build.path"/>
- <runtests dir="${partest.dir}/scaladoc/run" includes="*.scala" />
- <scalachecktests dir="${partest.dir}/scaladoc/scalacheck" includes="*.scala" />
- </partest>
+ <testSuite kinds="run scalacheck" srcdir="scaladoc"/>
</target>
<target name="test.interactive" depends="pack.done">
- <partest erroronfailed="yes" scalacopts="${scalac.args.optimise}" showlog="yes">
- <compilationpath refid="partest.build.path"/>
- <presentationtests dir="${partest.dir}/${partest.srcdir}/presentation">
- <include name="*/*.scala"/>
- </presentationtests>
- </partest>
+ <testSuite kinds="presentation"/>
</target>
<!-- for use in PR validation, where stability is rarely broken, so we're going to use starr for locker,
and skip test.stability (which requires locker == quick) -->
- <target name="test.core" depends="test.osgi, test.sbt, test.bc, test.interactive, test.continuations.suite, test.scaladoc, test.suite"/>
+ <target name="test.core" depends="test.osgi, test.sbt, test.bc, test.junit, test.interactive, test.continuations.suite, test.scaladoc, test.suite"/>
<target name="test.done" depends="test.core, test.stability"/>
-
<!-- ===========================================================================
BINARY COMPATIBILITY TESTING
============================================================================ -->
@@ -1757,10 +1838,16 @@ targets exercised:
<fileset dir="${src.dir}/library"/>
<fileset dir="${src.dir}/continuations/library"/>
</jar>
- <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-reflect-src.jar" basedir="${src.dir}/reflect"/>
- <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-swing-src.jar" basedir="${src.dir}/swing"/>
- <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-compiler-src.jar" basedir="${src.dir}/compiler"/>
<jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-actors-src.jar" basedir="${src.dir}/actors"/>
+ <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-compiler-src.jar">
+ <fileset dir="${src.dir}/compiler"/>
+ <fileset dir="${src.dir}/repl"/>
+ <fileset dir="${src.dir}/scaladoc"/>
+ <fileset dir="${src.dir}/interactive"/>
+ <fileset dir="${src.dir}/continuations/plugin"/>
+ </jar>
+ <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-swing-src.jar" basedir="${src.dir}/swing"/>
+ <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-reflect-src.jar" basedir="${src.dir}/reflect"/>
<jar whenmanifestonly="fail" destfile="${dist.dir}/src/scalap-src.jar" basedir="${src.dir}/scalap"/>
<jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-partest-src.jar" basedir="${src.dir}/partest"/>
</target>
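
[Editor's illustration — not part of the commit.] The build.xml changes above introduce a test.junit target that compiles the sources under test/junit against the quick compiler and then runs every compiled class matching **/*Test.class through ant's <junit> task. As a rough sketch only (the SampleTest.scala actually added by this commit is not reproduced here, and the class and assertion below are invented for illustration), a test picked up by that target would have this shape:

    // Hypothetical example -- any class under test/junit whose name ends in
    // "Test" is compiled by test.junit.comp and executed by test.junit.
    package scala.tools.nsc

    import org.junit.Test
    import org.junit.Assert._

    class BuildSketchTest {
      @Test
      def listConcatenation(): Unit = {
        // JUnit 4 assertion; the new target pulls junit ${junit.version} via maven.
        assertEquals(List(1, 2, 3), List(1) ++ List(2, 3))
      }
    }

Because the batchtest fileset only includes classes ending in Test.class, helper classes without that suffix under test/junit are compiled but not executed.
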
diff --git a/gitignore.SAMPLE b/gitignore.SAMPLE
index 483ad4caca..7114225a4a 100644
--- a/gitignore.SAMPLE
+++ b/gitignore.SAMPLE
@@ -5,13 +5,6 @@
*.jar
*~
-#sbt
-/project/target/
-/project/project/target
-
-/target/
-/src/jline/target/
-
# target directories for ant build
/build/
/dists/
diff --git a/lib/forkjoin.jar.desired.sha1 b/lib/forkjoin.jar.desired.sha1
index 8b24962e2c..8bb86f397d 100644
--- a/lib/forkjoin.jar.desired.sha1
+++ b/lib/forkjoin.jar.desired.sha1
@@ -1 +1 @@
-f93a2525b5616d3a4bee7848fabbb2856b56f653 *forkjoin.jar
+ddd7d5398733c4fbbb8355c049e258d47af636cf ?forkjoin.jar
diff --git a/lib/jline.jar.desired.sha1 b/lib/jline.jar.desired.sha1
index b0426130ac..1eb994cf1b 100644
--- a/lib/jline.jar.desired.sha1
+++ b/lib/jline.jar.desired.sha1
@@ -1 +1 @@
-a5261e70728c1847639e2b47d953441d0b217bcb *jline.jar
+e87ad04fdffb5cd9b7aa9293596d9fdde086eccd ?jline.jar
diff --git a/lib/scala-compiler-src.jar.desired.sha1 b/lib/scala-compiler-src.jar.desired.sha1
index 7dae1a905e..debbce2d7e 100644
--- a/lib/scala-compiler-src.jar.desired.sha1
+++ b/lib/scala-compiler-src.jar.desired.sha1
@@ -1 +1 @@
-f9f41fb909df6a0178906c9fd02e5d0efa15c9ed ?scala-compiler-src.jar
+19d04510ac6f25d088da82527d8435b68c00153d ?scala-compiler-src.jar
diff --git a/lib/scala-compiler.jar.desired.sha1 b/lib/scala-compiler.jar.desired.sha1
index bb22879679..4ec9610bae 100644
--- a/lib/scala-compiler.jar.desired.sha1
+++ b/lib/scala-compiler.jar.desired.sha1
@@ -1 +1 @@
-c4cd524dc29d298a5034637f6b31122dccb300d6 ?scala-compiler.jar
+3585351c6a62186097be55fff88bee88a985f5c0 ?scala-compiler.jar
diff --git a/lib/scala-library-src.jar.desired.sha1 b/lib/scala-library-src.jar.desired.sha1
index 641324256e..6e97551fc7 100644
--- a/lib/scala-library-src.jar.desired.sha1
+++ b/lib/scala-library-src.jar.desired.sha1
@@ -1 +1 @@
-2cba5a13ef44bf93133be26cc89ba3a640a5c28f ?scala-library-src.jar
+e606934dc00ced6bfac715bbdba427f9c2c18bc7 ?scala-library-src.jar
diff --git a/lib/scala-library.jar.desired.sha1 b/lib/scala-library.jar.desired.sha1
index 1b4eeac625..36aedb2ad7 100644
--- a/lib/scala-library.jar.desired.sha1
+++ b/lib/scala-library.jar.desired.sha1
@@ -1 +1 @@
-d7c6e69eba3dba2f75a0f44e56480cd3dbab8931 ?scala-library.jar
+36456c52b0395fc1e6e367291e45bd503fa019c5 ?scala-library.jar
diff --git a/lib/scala-reflect-src.jar.desired.sha1 b/lib/scala-reflect-src.jar.desired.sha1
index f9d9618380..ebd6dcbf5a 100644
--- a/lib/scala-reflect-src.jar.desired.sha1
+++ b/lib/scala-reflect-src.jar.desired.sha1
@@ -1 +1 @@
-dbc00bd4b09012aa52e802926dee3f8a02a767ff ?scala-reflect-src.jar
+51787a41cae5b0ec6910c5a1a6af392e17550856 ?scala-reflect-src.jar
diff --git a/lib/scala-reflect.jar.desired.sha1 b/lib/scala-reflect.jar.desired.sha1
index 8663ef7c85..4378fec9d6 100644
--- a/lib/scala-reflect.jar.desired.sha1
+++ b/lib/scala-reflect.jar.desired.sha1
@@ -1 +1 @@
-c0eed5dee0a3204239c9b35134cad8b3ad140121 ?scala-reflect.jar
+d4a4c0aab882412461fbd9d39cf47da5a619855e ?scala-reflect.jar
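
[Editor's illustration — not part of the commit.] The *.desired.sha1 updates above record new checksums for the pre-compiled bootstrap jars in lib/; each file holds one line of the form "<sha1> ?<jar name>" (older entries used "*" as the separator, as the removed lines show). As a hypothetical sketch, not the build's actual resolution code, verifying a downloaded jar against its desired checksum amounts to:

    // Hypothetical sketch: compare lib/<name>.jar against lib/<name>.jar.desired.sha1.
    // Only the first whitespace-separated token of the desired file (the sha1) is used.
    import java.io.{File, FileInputStream}
    import java.security.MessageDigest
    import scala.io.Source

    object Sha1Check {
      def sha1Hex(file: File): String = {
        val md = MessageDigest.getInstance("SHA-1")
        val in = new FileInputStream(file)
        try {
          val buf = new Array[Byte](8192)
          var n = in.read(buf)
          while (n != -1) { md.update(buf, 0, n); n = in.read(buf) }
        } finally in.close()
        md.digest().map(b => "%02x".format(b)).mkString
      }

      def matchesDesired(jar: File, desired: File): Boolean = {
        val expected = Source.fromFile(desired).mkString.trim.split("\\s+").head
        sha1Hex(jar) == expected
      }

      def main(args: Array[String]): Unit = {
        val name = "forkjoin.jar" // e.g. one of the jars whose checksum changed above
        val ok = matchesDesired(new File("lib/" + name), new File("lib/" + name + ".desired.sha1"))
        println(s"$name sha1 matches desired: $ok")
      }
    }
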
diff --git a/project/Build.scala b/project/Build.scala
deleted file mode 100644
index efa8a7a038..0000000000
--- a/project/Build.scala
+++ /dev/null
@@ -1,334 +0,0 @@
-import sbt._
-import Keys._
-import partest._
-import ScalaBuildKeys._
-import Release._
-
-
-object ScalaBuild extends Build with Layers with Packaging with Testing {
-
- // Build wide settings:
- override lazy val settings = super.settings ++ Versions.settings ++ Seq(
- autoScalaLibrary := false,
- resolvers += Resolver.url(
- "Typesafe nightlies",
- url("https://typesafe.artifactoryonline.com/typesafe/ivy-snapshots/")
- )(Resolver.ivyStylePatterns),
- resolvers ++= Seq(
- "junit interface repo" at "https://repository.jboss.org/nexus/content/repositories/scala-tools-releases",
- ScalaToolsSnapshots
- ),
- organization := "org.scala-lang",
- version <<= Versions.mavenVersion,
- pomExtra := epflPomExtra
- )
-
- // Collections of projects to run 'compile' on.
- lazy val compiledProjects = Seq(quickLib, quickComp, continuationsLibrary, actors, swing, forkjoin)
- // Collection of projects to 'package' and 'publish' together.
- lazy val packagedBinaryProjects = Seq(scalaLibrary, scalaCompiler, swing, actors, continuationsPlugin, jline, scalap)
- lazy val partestRunProjects = Seq(testsuite, continuationsTestsuite)
-
- private def epflPomExtra = (
- <xml:group>
- <inceptionYear>2002</inceptionYear>
- <licenses>
- <license>
- <name>BSD-like</name>
- <url>http://www.scala-lang.org/downloads/license.html</url>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- </scm>
- <issueManagement>
- <system>jira</system>
- <url>http://issues.scala-lang.org</url>
- </issueManagement>
- </xml:group>
- )
-
- // Settings used to make sure publishing goes smoothly.
- def publishSettings: Seq[Setting[_]] = Seq(
- ivyScala ~= ((is: Option[IvyScala]) => is.map(_.copy(checkExplicit = false))),
- pomIncludeRepository := (_ => false),
- publishMavenStyle := true,
- makePomConfiguration <<= makePomConfiguration apply (_.copy(configurations = Some(Seq(Compile, Default)))),
- pomExtra := epflPomExtra
- )
-
- // Settings for root project. These are aggregate tasks against the rest of the build.
- def projectSettings: Seq[Setting[_]] = publishSettings ++ Seq(
- doc in Compile <<= (doc in documentation in Compile).identity,
- // These next two aggregate commands on several projects and return results that are to be ignored by remaining tasks.
- compile in Compile <<= compiledProjects.map(p => compile in p in Compile).join.map(_.head),
- // TODO - just clean target? i.e. target map IO.deleteRecursively
- clean <<= (compiledProjects ++ partestRunProjects).map(p => clean in p).dependOn,
- packageBin in Compile <<= packagedBinaryProjects.map(p => packageBin in p in Compile).join.map(_.head),
- // TODO - Make sure scalaLibrary has packageDoc + packageSrc from documentation attached...
- publish <<= packagedBinaryProjects.map(p => publish in p).join.map(_.head),
- publishLocal <<= packagedBinaryProjects.map(p => publishLocal in p).join.map(_.head),
- packageDoc in Compile <<= (packageDoc in documentation in Compile).identity,
- packageSrc in Compile <<= (packageSrc in documentation in Compile).identity,
- test in Test <<= (runPartest in testsuite, runPartest in continuationsTestsuite, checkSame in testsuite) map { (a,b,c) => () },
- lockerLock <<= (lockFile in lockerLib, lockFile in lockerComp, compile in Compile in lockerLib, compile in Compile in lockerComp) map { (lib, comp, _, _) =>
- Seq(lib,comp).foreach(f => IO.touch(f))
- },
- lockerUnlock <<= (lockFile in lockerLib, lockFile in lockerComp) map { (lib, comp) =>
- Seq(lib,comp).foreach(IO.delete)
- },
- genBinQuick <<= (genBinQuick in scaladist).identity,
- makeDist <<= (makeDist in scaladist).identity,
- makeExplodedDist <<= (makeExplodedDist in scaladist).identity,
- // Note: We override unmanagedSources so that ~ compile will look at all these sources, then run our aggregated compile...
- unmanagedSourceDirectories in Compile <<= baseDirectory apply (_ / "src") apply { dir =>
- Seq("library/scala","actors","compiler","swing","continuations/library","forkjoin") map (dir / _)
- },
- // TODO - Make exported products == makeDist so we can use this when creating a *real* distribution.
- commands += Release.pushStarr
- )
- // Note: The root project is determined by the lowest-alphabetical project whose baseDirectory is file("."). We use aaa_ to 'win'.
- lazy val aaa_root = Project("scala", file(".")) settings(projectSettings: _*) settings(ShaResolve.settings: _*)
-
- // External dependencies used for various projects
- lazy val externalDeps: Setting[_] = libraryDependencies <<= (sbtVersion)(v =>
- Seq(
- "org.apache.ant" % "ant" % "1.8.2",
- "org.scala-sbt" % "compiler-interface" % v % "provided"
- )
- )
-
- def fixArtifactSrc(dir: File, name: String) = name match {
- case x if x startsWith "scala-" => dir / "src" / (name drop 6)
- case x => dir / "src" / name
- }
-
- // These are setting overrides for most artifacts in the Scala build file.
- def settingOverrides: Seq[Setting[_]] = publishSettings ++ Seq(
- crossPaths := false,
- autoScalaLibrary := false,
- // Work around a bug where scala-library (and forkjoin) is put on classpath for analysis.
- classpathOptions := ClasspathOptions.manual,
- publishArtifact in packageDoc := false,
- publishArtifact in packageSrc := false,
- target <<= (baseDirectory, name) apply (_ / "target" / _),
- (classDirectory in Compile) <<= target(_ / "classes"),
- javacOptions ++= Seq("-target", "1.5", "-source", "1.5"),
- scalaSource in Compile <<= (baseDirectory, name) apply fixArtifactSrc,
- javaSource in Compile <<= (baseDirectory, name) apply fixArtifactSrc,
- unmanagedJars in Compile := Seq(),
- // Most libs in the compiler use this order to build.
- compileOrder in Compile := CompileOrder.JavaThenScala,
- lockFile <<= target(_ / "compile.lock"),
- skip in Compile <<= lockFile map (_.exists),
- lock <<= lockFile map (f => IO.touch(f)),
- unlock <<= lockFile map IO.delete
- )
-
- // --------------------------------------------------------------
- // Libraries used by Scalac that change infrequently
- // (or hopefully so).
- // --------------------------------------------------------------
-
- // Jline nested project. Compile this sucker once and be done.
- lazy val jline = Project("jline", file("src/jline"))
- // Our wrapped version of asm.
- lazy val asm = Project("asm", file(".")) settings(settingOverrides : _*)
- // Forkjoin backport
- lazy val forkjoin = Project("forkjoin", file(".")) settings(settingOverrides : _*)
-
- // --------------------------------------------------------------
- // The magic kingdom.
- // Layered compilation of Scala.
- // Stable Reference -> Locker ('Lockable' dev version) -> Quick -> Strapp (binary sameness testing)
- // --------------------------------------------------------------
-
- // Need a report on this...
- // TODO - Resolve STARR from a repo..
- lazy val STARR = scalaInstance <<= (appConfiguration, ShaResolve.pullBinaryLibs in ThisBuild) map { (app, _) =>
- val launcher = app.provider.scalaProvider.launcher
- val library = file("lib/scala-library.jar")
- val compiler = file("lib/scala-compiler.jar")
- val libJars = (file("lib") * "*.jar").get filterNot Set(library, compiler)
- ScalaInstance("starr", library, compiler, launcher, libJars: _*)
- }
-
- // Locker is a lockable Scala compiler that can be built from 'current' source to support rapid development.
- lazy val (lockerLib, lockerReflect, lockerComp) = makeLayer("locker", STARR, autoLock = true)
- lazy val locker = Project("locker", file(".")) aggregate(lockerLib, lockerReflect, lockerComp)
-
- // Quick is the general purpose project layer for the Scala compiler.
- lazy val (quickLib, quickReflect, quickComp) = makeLayer("quick", makeScalaReference("locker", lockerLib, lockerReflect, lockerComp))
- lazy val quick = Project("quick", file(".")) aggregate(quickLib, quickReflect, quickComp)
-
- // Reference to quick scala instance.
- lazy val quickScalaInstance = makeScalaReference("quick", quickLib, quickReflect, quickComp)
- def quickScalaLibraryDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickLib in Compile).identity
- def quickScalaReflectDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickReflect in Compile).identity
- def quickScalaCompilerDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickComp in Compile).identity
-
- // Strapp is used to test binary 'sameness' between things built with locker and things built with quick.
- lazy val (strappLib, strappReflect, strappComp) = makeLayer("strapp", quickScalaInstance)
-
- // --------------------------------------------------------------
- // Projects dependent on layered compilation (quick)
- // --------------------------------------------------------------
- def addCheaterDependency(projectName: String): Setting[_] =
- pomPostProcess <<= (version, organization, pomPostProcess) apply { (v,o,k) =>
- val dependency: scala.xml.Node =
- <dependency>
- <groupId>{o}</groupId>
- <artifactId>{projectName}</artifactId>
- <version>{v}</version>
- </dependency>
- def fixDependencies(node: scala.xml.Node): scala.xml.Node = node match {
- case <dependencies>{nested@_*}</dependencies> => <dependencies>{dependency}{nested}</dependencies>
- case x => x
- }
- // This is a hack to get around issues where \ and \\ don't work if any of the children are `scala.xml.Group`.
- def hasDependencies(root: scala.xml.Node): Boolean =
- (root.child collectFirst {
- case n: scala.xml.Elem if n.label == "dependencies" => n
- } isEmpty)
- // TODO - Keep namespace on project...
- k andThen {
- case n @ <project>{ nested@_*}</project> if hasDependencies(n) =>
- <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0">{nested}<dependencies>{dependency}</dependencies></project>
- case <project>{ nested@_*}</project> =>
- <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0">{ nested map fixDependencies }</project>
- }
- }
-
- // TODO - in sabbus, these all use locker to build... I think this way is better, but let's farm this idea around.
- lazy val dependentProjectSettings = settingOverrides ++ Seq(quickScalaInstance, quickScalaLibraryDependency, addCheaterDependency("scala-library"))
- lazy val actors = Project("scala-actors", file(".")) settings(dependentProjectSettings:_*) dependsOn(forkjoin % "provided")
- lazy val swing = Project("scala-swing", file(".")) settings(dependentProjectSettings:_*) dependsOn(actors % "provided")
- // This project will generate man pages (in man1 and html) for scala.
- lazy val manmakerSettings: Seq[Setting[_]] = dependentProjectSettings :+ externalDeps
- lazy val manmaker = Project("manual", file(".")) settings(manmakerSettings:_*)
-
- // Things that compile against the compiler.
- lazy val compilerDependentProjectSettings = dependentProjectSettings ++ Seq(quickScalaReflectDependency, quickScalaCompilerDependency, addCheaterDependency("scala-compiler"))
-
- lazy val scalacheck = Project("scalacheck", file(".")) settings(compilerDependentProjectSettings:_*) dependsOn(actors % "provided")
- lazy val partestSettings = compilerDependentProjectSettings :+ externalDeps
- lazy val partest = Project("partest", file(".")) settings(partestSettings:_*) dependsOn(actors,forkjoin,scalap,asm)
- lazy val scalapSettings = compilerDependentProjectSettings ++ Seq(
- name := "scalap",
- exportJars := true
- )
- lazy val scalap = Project("scalap", file(".")) settings(scalapSettings:_*)
-
- // --------------------------------------------------------------
- // Continuations plugin + library
- // --------------------------------------------------------------
- lazy val continuationsPluginSettings = compilerDependentProjectSettings ++ Seq(
- scalaSource in Compile <<= baseDirectory(_ / "src/continuations/plugin/"),
- resourceDirectory in Compile <<= baseDirectory(_ / "src/continuations/plugin/"),
- exportJars := true,
- name := "continuations" // Note: This artifact is directly exported.
-
- )
- lazy val continuationsPlugin = Project("continuations-plugin", file(".")) settings(continuationsPluginSettings:_*)
- lazy val continuationsLibrarySettings = dependentProjectSettings ++ Seq(
- scalaSource in Compile <<= baseDirectory(_ / "src/continuations/library/"),
- scalacOptions in Compile <++= (exportedProducts in Compile in continuationsPlugin) map {
- case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath)
- }
- )
- lazy val continuationsLibrary = Project("continuations-library", file(".")) settings(continuationsLibrarySettings:_*)
-
- // TODO - OSGi Manifest
-
- // --------------------------------------------------------------
- // Real Library Artifact
- // --------------------------------------------------------------
- val allSubpathsCopy = (dir: File) => (dir.*** --- dir) x (relativeTo(dir)|flat)
- def productTaskToMapping(products : Seq[File]) = products flatMap { p => allSubpathsCopy(p) }
- lazy val packageScalaLibBinTask = Seq(quickLib, continuationsLibrary, forkjoin).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
- lazy val scalaLibArtifactSettings: Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaLibBinTask)) ++ Seq(
- name := "scala-library",
- crossPaths := false,
- exportJars := true,
- autoScalaLibrary := false,
- unmanagedJars in Compile := Seq(),
- packageDoc in Compile <<= (packageDoc in documentation in Compile).identity,
- packageSrc in Compile <<= (packageSrc in documentation in Compile).identity,
- fullClasspath in Runtime <<= (exportedProducts in Compile).identity,
- quickScalaInstance,
- target <<= (baseDirectory, name) apply (_ / "target" / _)
- )
- lazy val scalaLibrary = Project("scala-library", file(".")) settings(publishSettings:_*) settings(scalaLibArtifactSettings:_*)
-
- // --------------------------------------------------------------
- // Real Reflect Artifact
- // --------------------------------------------------------------
-
- lazy val packageScalaReflect = Seq(quickReflect).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
- lazy val scalaReflectArtifactSettings : Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaReflect)) ++ Seq(
- name := "scala-reflect",
- crossPaths := false,
- exportJars := true,
- autoScalaLibrary := false,
- unmanagedJars in Compile := Seq(),
- fullClasspath in Runtime <<= (exportedProducts in Compile).identity,
- quickScalaInstance,
- target <<= (baseDirectory, name) apply (_ / "target" / _)
- )
- lazy val scalaReflect = Project("scala-reflect", file(".")) settings(publishSettings:_*) settings(scalaReflectArtifactSettings:_*) dependsOn(scalaLibrary)
-
-
- // --------------------------------------------------------------
- // Real Compiler Artifact
- // --------------------------------------------------------------
- lazy val packageScalaBinTask = Seq(quickComp, asm).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
- lazy val scalaBinArtifactSettings : Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaBinTask)) ++ Seq(
- name := "scala-compiler",
- crossPaths := false,
- exportJars := true,
- autoScalaLibrary := false,
- unmanagedJars in Compile := Seq(),
- fullClasspath in Runtime <<= (exportedProducts in Compile).identity,
- quickScalaInstance,
- target <<= (baseDirectory, name) apply (_ / "target" / _)
- )
- lazy val scalaCompiler = Project("scala-compiler", file(".")) settings(publishSettings:_*) settings(scalaBinArtifactSettings:_*) dependsOn(scalaReflect)
- lazy val fullQuickScalaReference = makeScalaReference("pack", scalaLibrary, scalaReflect, scalaCompiler)
-
-
- // --------------------------------------------------------------
- // Generating Documentation.
- // --------------------------------------------------------------
-
- // TODO - Migrate this into the dist project.
- // Scaladocs
- lazy val documentationSettings: Seq[Setting[_]] = dependentProjectSettings ++ Seq(
- // TODO - Make these work for realz.
- defaultExcludes in unmanagedSources in Compile := ((".*" - ".") || HiddenFileFilter ||
- "reflect/Print.scala" ||
- "reflect/Symbol.scala" ||
- "reflect/Tree.scala" ||
- "reflect/Type.scala" ||
- "runtime/*$.scala" ||
- "runtime/ScalaRuntime.scala" ||
- "runtime/StringAdd.scala" ||
- "scala/swing/test/*"),
- sourceFilter in Compile := ("*.scala"),
- unmanagedSourceDirectories in Compile <<= baseDirectory apply { dir =>
- Seq(dir / "src" / "library" / "scala", dir / "src" / "actors", dir / "src" / "swing", dir / "src" / "continuations" / "library")
- },
- compile := inc.Analysis.Empty,
- // scaladocOptions in Compile <++= (baseDirectory) map (bd =>
- // Seq("-sourcepath", (bd / "src" / "library").getAbsolutePath,
- // "-doc-no-compile", (bd / "src" / "library-aux").getAbsolutePath,
- // "-doc-source-url", """https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk/src/€{FILE_PATH}.scala#L1""",
- // "-doc-root-content", (bd / "compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt").getAbsolutePath
- // )),
- classpathOptions in Compile := ClasspathOptions.manual
- )
- lazy val documentation = (
- Project("documentation", file("."))
- settings (documentationSettings: _*)
- dependsOn(quickLib, quickComp, actors, forkjoin, swing, continuationsLibrary)
- )
-}
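The `addCheaterDependency` helper above post-processes the generated POM: projects whose classpaths are wired by hand inside the build still advertise a dependency on scala-library (or on scala-compiler, for the compiler-dependent ones) to Maven consumers. A minimal standalone sketch of that XML rewrite outside sbt, with an illustrative sample POM and version:

import scala.xml._

object CheaterDependencyDemo {
  // Insert a <dependency> entry into a POM, creating the <dependencies> section when it is missing.
  def addDependency(pom: Node, org: String, name: String, version: String): Node = {
    val dependency: Node =
      <dependency>
        <groupId>{org}</groupId>
        <artifactId>{name}</artifactId>
        <version>{version}</version>
      </dependency>
    pom match {
      case <project>{children @ _*}</project> if !children.exists(_.label == "dependencies") =>
        <project>{children}<dependencies>{dependency}</dependencies></project>
      case <project>{children @ _*}</project> =>
        <project>{children map {
          case <dependencies>{existing @ _*}</dependencies> => <dependencies>{dependency}{existing}</dependencies>
          case other => other
        }}</project>
    }
  }

  def main(args: Array[String]): Unit =
    println(addDependency(<project><modelVersion>4.0.0</modelVersion></project>,
                          "org.scala-lang", "scala-library", "2.10.1"))
}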
diff --git a/project/Layers.scala b/project/Layers.scala
deleted file mode 100644
index 6c939d0ff7..0000000000
--- a/project/Layers.scala
+++ /dev/null
@@ -1,116 +0,0 @@
-import sbt._
-import Keys._
-import com.jsuereth.git.GitKeys.gitRunner
-import ScalaBuildKeys.lock
-
-/** This trait stores all the helper methods to generate layers in Scala's layered build. */
-trait Layers extends Build {
- // TODO - Clean this up or use a self-type.
-
- /** Default SBT overrides needed for layered compilation. */
- def settingOverrides: Seq[Setting[_]]
- /** Reference to the jline project */
- def jline: Project
- /** Reference to forkjoin library */
- def forkjoin: Project
- /** Reference to the ASM wrapped project. */
- def asm: Project
- /** A setting that adds some external dependencies. */
- def externalDeps: Setting[_]
- /** The root project. */
- def aaa_root: Project
-
- /** Creates a reference Scala version that can be used to build other projects. This takes the raw
- * library, reflect and compiler projects, as well as a string representing the layer name (used when compiling the compiler-interface).
- */
- def makeScalaReference(layer: String, library: Project, reflect: Project, compiler: Project) =
- scalaInstance <<= (appConfiguration in library,
- version in library,
- (exportedProducts in library in Compile),
- (exportedProducts in reflect in Compile),
- (exportedProducts in compiler in Compile),
- (fullClasspath in jline in Runtime),
- (exportedProducts in asm in Runtime)) map {
- (app, version: String, lib: Classpath, reflect: Classpath, comp: Classpath, jline: Classpath, asm: Classpath) =>
- val launcher = app.provider.scalaProvider.launcher
- (lib,comp) match {
- case (Seq(libraryJar), Seq(compilerJar)) =>
- ScalaInstance(
- version + "-" + layer + "-",
- libraryJar.data,
- compilerJar.data,
- launcher,
- ((jline.files ++ asm.files ++ reflect.files):_*))
- case _ => error("Cannot build a ScalaReference with more than one classpath element")
- }
- }
-
- /** Creates a "layer" of Scala compilation. That is, this will build the next version of Scala from a previous version.
- * Returns the library, reflect and compiler projects for the new layer.
- * Note: The library and compiler are not *complete* in the sense that they are missing things like "actors".
- */
- def makeLayer(layer: String, referenceScala: Setting[Task[ScalaInstance]], autoLock: Boolean = false) : (Project, Project, Project) = {
- val autoLockSettings: Seq[Setting[_]] =
- if(autoLock) Seq(compile in Compile <<= (compile in Compile, lock) apply { (c, l) =>
- c flatMapR { cResult =>
- val result = Result.tryValue(cResult)
- l mapR { tx => result }
- }
- })
- else Seq.empty
-
-
- val library = Project(layer + "-library", file(".")) settings(settingOverrides: _*) settings(autoLockSettings:_*) settings(
- version := layer,
- // TODO - use depends on.
- unmanagedClasspath in Compile <<= (exportedProducts in forkjoin in Compile).identity,
- managedClasspath in Compile := Seq(),
- scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "library"),
- resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "library"),
- defaultExcludes in unmanagedResources := ("*.scala" | "*.java" | "*.disabled"),
- // TODO - Allow other scalac option settings.
- scalacOptions in Compile <++= (scalaSource in Compile) map (src => Seq("-sourcepath", src.getAbsolutePath)),
- resourceGenerators in Compile <+= (resourceManaged, Versions.scalaVersions, skip in Compile, streams) map Versions.generateVersionPropertiesFile("library.properties"),
- referenceScala
- )
-
- // Define the reflection
- val reflect = Project(layer + "-reflect", file(".")) settings(settingOverrides:_*) settings(autoLockSettings:_*) settings(
- version := layer,
- scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "reflect"),
- resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "reflect"),
- defaultExcludes := ("tests"),
- defaultExcludes in unmanagedResources := "*.scala",
- resourceGenerators in Compile <+= (resourceManaged, Versions.scalaVersions, skip in Compile, streams) map Versions.generateVersionPropertiesFile("reflect.properties"),
- // TODO - Use depends on *and* SBT's magic dependency mechanisms...
- unmanagedClasspath in Compile <<= Seq(forkjoin, library).map(exportedProducts in Compile in _).join.map(_.flatten),
- externalDeps,
- referenceScala
- )
-
- // Define the compiler
- val compiler = Project(layer + "-compiler", file(".")) settings(settingOverrides:_*) settings(autoLockSettings:_*) settings(
- version := layer,
- scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "compiler"),
- resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "compiler"),
- defaultExcludes := ("tests"),
- defaultExcludes in unmanagedResources := "*.scala",
- resourceGenerators in Compile <+= (resourceManaged, Versions.scalaVersions, skip in Compile, streams) map Versions.generateVersionPropertiesFile("compiler.properties"),
- // Note: we might be able to use the default task, but for some reason ant was filtering files out. Not sure what's up,
- // so we'll stick with this explicit list for now.
- unmanagedResources in Compile <<= (baseDirectory) map {
- (bd) =>
- val dirs = Seq(bd / "src" / "compiler")
- dirs.descendentsExcept( ("*.xml" | "*.html" | "*.gif" | "*.png" | "*.js" | "*.css" | "*.tmpl" | "*.swf" | "*.properties" | "*.txt"),"*.scala").get
- },
- // TODO - Use depends on *and* SBT's magic dependency mechanisms...
- unmanagedClasspath in Compile <<= Seq(forkjoin, library, reflect, jline, asm).map(exportedProducts in Compile in _).join.map(_.flatten),
- externalDeps,
- referenceScala
- )
-
- // Return the generated projects.
- (library, reflect, compiler)
- }
-
-}
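For orientation, the bootstrap that `makeLayer` drives can be summarised without any sbt machinery: each layer is compiled with the compiler produced by the layer before it, and the last layer exists only to check that the compiler has reached a fixed point. A toy model of that chain (the code is purely illustrative; the descriptions paraphrase the build above):

object LayerChainDemo {
  // Each layer is built with the compiler produced by the previous layer.
  final case class Layer(name: String, builtWith: String, purpose: String)

  val chain = Seq(
    Layer("locker", "STARR (stable jars pulled by SHA into lib/)", "lockable development compiler"),
    Layer("quick",  "locker",                                      "general-purpose build of library, reflect and compiler"),
    Layer("strapp", "quick",                                       "compared against quick for binary 'sameness'")
  )

  def main(args: Array[String]): Unit =
    chain foreach (l => println(f"${l.name}%-6s  built with ${l.builtWith}%-45s -> ${l.purpose}"))
}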
diff --git a/project/Packaging.scala b/project/Packaging.scala
deleted file mode 100644
index b0060283ac..0000000000
--- a/project/Packaging.scala
+++ /dev/null
@@ -1,129 +0,0 @@
-import sbt._
-import Keys._
-import ScalaBuildKeys._
-
-/** All the settings related to *packaging* the built scala software. */
-trait Packaging { self: ScalaBuild.type =>
-
- // --------------------------------------------------------------
- // Packaging a distro
- // --------------------------------------------------------------
- lazy val scalaDistSettings: Seq[Setting[_]] = Seq(
- crossPaths := false,
- target <<= (baseDirectory, name) apply (_ / "target" / _),
- scalaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
- autoScalaLibrary := false,
- unmanagedJars in Compile := Seq(),
- genBinRunner <<= (fullClasspath in quickComp in Runtime) map (new ScalaToolRunner(_)),
- binDir <<= target(_/"bin"),
- genBin <<= genBinTask(genBinRunner, binDir, fullClasspath in Runtime, false),
- binDir in genBinQuick <<= baseDirectory apply (_ / "target" / "bin"),
- // Configure the classpath this way to avoid having .jar files and previous layers on the classpath.
- fullClasspath in Runtime in genBinQuick <<= Seq(quickComp,quickLib,scalap,actors,swing,jline,forkjoin).map(classDirectory in Compile in _).join.map(Attributed.blankSeq),
- fullClasspath in Runtime in genBinQuick <++= (fullClasspath in Compile in jline),
- genBinQuick <<= genBinTask(genBinRunner, binDir in genBinQuick, fullClasspath in Runtime in genBinQuick, true),
- runManmakerMan <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitManPage", "man1", ".1"),
- runManmakerHtml <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitHtml", "doc", ".html"),
- // TODO - We could *really* clean this up in many ways. Let's look into making a Seq of "direct jars" (scalaLibrary, scalaCompiler, jline, scalap)
- // a seq of "plugin jars" (continuationsPlugin) and "binaries" (genBin) and "documentation" mappings (genBin) that this can aggregate.
- // really need to figure out a better way to pull jline + jansi.
- makeDistMappings <<= (genBin,
- runManmakerMan,
- runManmakerHtml,
- packageBin in scalaLibrary in Compile,
- packageBin in scalaCompiler in Compile,
- packageBin in jline in Compile,
- packageBin in continuationsPlugin in Compile,
- managedClasspath in jline in Compile,
- packageBin in scalap in Compile) map {
- (binaries, man, html, lib, comp, jline, continuations, jlineDeps, scalap) =>
- val jlineDepMap: Seq[(File, String)] = jlineDeps.map(_.data).flatMap(_ x Path.flat) map { case(a,b) => a -> ("lib/"+b) }
- binaries ++ man ++ html ++ jlineDepMap ++ Seq(
- lib -> "lib/scala-library.jar",
- comp -> "lib/scala-compiler.jar",
- jline -> "lib/jline.jar",
- continuations -> "misc/scala-devel/plugins/continuations.jar",
- scalap -> "lib/scalap.jar"
- )
- },
- // Add in some more dependencies
- makeDistMappings <+= (packageBin in swing in Compile) map (s => s -> "lib/scala-swing.jar"),
- makeDistMappings <+= (packageBin in scalaReflect in Compile) map (s => s -> "lib/scala-reflect.jar"),
- makeDist <<= (makeDistMappings, baseDirectory, streams) map { (maps, dir, s) =>
- s.log.debug("Map = " + maps.mkString("\n"))
- val file = dir / "target" / "scala-dist.zip"
- IO.zip(maps, file)
- s.log.info("Created " + file.getAbsolutePath)
- file
- },
- makeExplodedDist <<= (makeDistMappings, target, streams) map { (maps, dir, s) =>
- def sameFile(f: File, f2: File) = f.getCanonicalPath == f2.getCanonicalPath
- IO.createDirectory(dir)
- IO.copy(for {
- (file, name) <- maps
- val file2 = dir / name
- if !sameFile(file,file2)
- } yield (file, file2))
- // Hack to make binaries be executable. TODO - Fix for JDK 5 and below...
- maps map (_._2) filter (_ startsWith "bin/") foreach (dir / _ setExecutable true)
- dir
- }
- )
- lazy val scaladist = (
- Project("dist", file("."))
- settings (scalaDistSettings: _*)
- )
-
-
-// Helpers to make a distribution
-
- /** Generates runner scripts for distribution. */
- def genBinTask(
- runner: ScopedTask[ScalaToolRunner],
- outputDir: ScopedSetting[File],
- classpath: ScopedTask[Classpath],
- useClasspath: Boolean
- ): Project.Initialize[sbt.Task[Seq[(File,String)]]] = {
- (runner, outputDir, classpath, streams) map { (runner, outDir, cp, s) =>
- IO.createDirectory(outDir)
- val classToFilename = Seq(
- "scala.tools.nsc.MainGenericRunner" -> "scala",
- "scala.tools.nsc.Main" -> "scalac",
- "scala.tools.nsc.ScalaDoc" -> "scaladoc",
- "scala.tools.nsc.CompileClient" -> "fsc",
- "scala.tools.scalap.Main" -> "scalap"
- )
- if (useClasspath) {
- val classpath = Build.data(cp).map(_.getCanonicalPath).distinct.mkString(",")
- s.log.debug("Setting classpath = " + classpath)
- runner setClasspath classpath
- }
- def genBinFiles(cls: String, dest: File) = {
- runner.setClass(cls)
- runner.setFile(dest)
- runner.execute()
- // TODO - Mark generated files as executable (755 or a+x) that is *not* JDK6 specific...
- dest.setExecutable(true)
- }
- def makeBinMappings(cls: String, binName: String): Seq[(File,String)] = {
- val file = outDir / binName
- val winBinName = binName + ".bat"
- genBinFiles(cls, file)
- Seq( file -> ("bin/"+binName), outDir / winBinName -> ("bin/"+winBinName) )
- }
- classToFilename.flatMap((makeBinMappings _).tupled)
- }
- }
- /** Creates man pages for distribution. */
- def runManmakerTask(classpath: ScopedTask[Classpath], scalaRun: ScopedTask[ScalaRun], mainClass: String, dir: String, ext: String): Project.Initialize[Task[Seq[(File,String)]]] =
- (classpath, scalaRun, streams, target) map { (cp, runner, s, target) =>
- val binaries = Seq("fsc", "scala", "scalac", "scaladoc", "scalap")
- binaries map { bin =>
- val file = target / "man" / dir / (bin + ext)
- val classname = "scala.man1." + bin
- IO.createDirectory(file.getParentFile)
- toError(runner.run(mainClass, Build.data(cp), Seq(classname, file.getAbsolutePath), s.log))
- file -> ("man/" + dir + "/" + bin + ext)
- }
- }
-}
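Everything in the packaging code above is expressed as `Seq[(File, String)]` mappings, i.e. a file on disk paired with the path it should occupy inside the distribution, which `IO.zip` then writes out in one pass. A self-contained sketch of the same idea using only the JDK (the input mapping below is illustrative):

import java.io.{File, FileInputStream, FileOutputStream}
import java.util.zip.{ZipEntry, ZipOutputStream}

object DistZipDemo {
  // Write each (file -> path-inside-zip) mapping into a single archive, as makeDist does via IO.zip.
  def zip(mappings: Seq[(File, String)], out: File): Unit = {
    val zos = new ZipOutputStream(new FileOutputStream(out))
    try mappings foreach { case (file, path) =>
      zos.putNextEntry(new ZipEntry(path))
      val in = new FileInputStream(file)
      try {
        val buf = new Array[Byte](8192)
        Iterator.continually(in.read(buf)).takeWhile(_ > 0).foreach(n => zos.write(buf, 0, n))
      } finally in.close()
      zos.closeEntry()
    } finally zos.close()
  }

  def main(args: Array[String]): Unit =
    zip(Seq(new File("lib/scala-library.jar") -> "lib/scala-library.jar"), new File("scala-dist.zip"))
}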
diff --git a/project/Partest.scala b/project/Partest.scala
deleted file mode 100644
index 2ea41ba80b..0000000000
--- a/project/Partest.scala
+++ /dev/null
@@ -1,140 +0,0 @@
-import sbt._
-
-import Build._
-import Keys._
-import Project.Initialize
-import complete._
-import scala.collection.{ mutable, immutable }
-
- /** This object defines the partest tasks and settings used to run Scala's test suite from sbt. */
-object partest {
-
- /** The key for the run-partest task that exists in Scala's test suite. */
- lazy val runPartest = TaskKey[Unit]("run-partest", "Runs the partest test suite against quick.")
- lazy val runPartestSingle = InputKey[Unit]("run-partest-single", "Runs a single partest test against quick.")
- lazy val runPartestFailed = TaskKey[Unit]("run-partest-failed", "Runs failed partest tests.")
- lazy val runPartestGrep = InputKey[Unit]("run-partest-grep", "Runs partest tests matching a pattern against quick.")
- lazy val partestRunner = TaskKey[PartestRunner]("partest-runner", "Creates a runner that can run partest suites")
- lazy val partestTests = TaskKey[Map[String, Seq[File]]]("partest-tests", "Creates a map of test-type to a sequence of the test files/directories to test.")
- lazy val partestDirs = SettingKey[Map[String,File]]("partest-dirs", "The map of partest test type to directory associated with that test type")
-
- lazy val partestTaskSettings: Seq[Setting[_]] = Seq(
- javaOptions in partestRunner := Seq("-Xmx512M -Xms256M"),
- partestDirs <<= baseDirectory apply { bd =>
- partestTestTypes map (kind => kind -> (bd / "test" / "files" / kind)) toMap
- },
- partestRunner <<= partestRunnerTask(fullClasspath in Runtime, javaOptions in partestRunner),
- partestTests <<= partestTestsTask(partestDirs),
- runPartest <<= runPartestTask(partestRunner, partestTests, scalacOptions in Test),
- runPartestSingle <<= runSingleTestTask(partestRunner, partestDirs, scalacOptions in Test),
- runPartestFailed <<= runPartestTask(partestRunner, partestTests, scalacOptions in Test, Seq("--failed"))
- )
-
- // What's fun here is that we want "*.scala" files *and* directories in the base directory...
- def partestResources(base: File, testType: String): PathFinder = testType match {
- case "res" => base ** "*.res"
- // TODO - Only allow directories that have "*.scala" children...
- case _ => base * "*" filter { f => !f.getName.endsWith(".obj") && (f.isDirectory || f.getName.endsWith(".scala")) }
- }
- lazy val partestTestTypes = Seq("run", "jvm", "pos", "neg", "res", "shootout", "scalap", "specialized", "presentation", "scalacheck")
-
- // TODO - Figure out how to specify only a subset of resources...
- def partestTestsTask(testDirs: ScopedSetting[Map[String,File]]): Project.Initialize[Task[Map[String, Seq[File]]]] =
- testDirs map (m => m map { case (kind, dir) => kind -> partestResources(dir, kind).get })
-
- // TODO - Split partest task into Configurations and build a Task for each Configuration.
- // *then* mix all of them together for run-testsuite or something clever like this.
- def runPartestTask(runner: ScopedTask[PartestRunner], testRuns: ScopedTask[Map[String,Seq[File]]], scalacOptions: ScopedTask[Seq[String]], extraArgs: Seq[String] = Seq()): Initialize[Task[Unit]] = {
- (runner, testRuns, scalacOptions, streams) map {
- (runner, runs, scalaOpts, s) => runPartestImpl(runner, runs, scalaOpts, s, extraArgs)
- }
- }
- private def runPartestImpl(runner: PartestRunner, runs: Map[String, Seq[File]], scalacOptions: Seq[String], s: TaskStreams, extras: Seq[String] = Seq()): Unit = {
- val testArgs = runs.toSeq collect { case (kind, files) if files.nonEmpty => Seq("-" + kind, files mkString ",") } flatten
- val extraArgs = scalacOptions flatMap (opt => Seq("-scalacoption", opt))
-
- import collection.JavaConverters._
- val results = runner run Array(testArgs ++ extraArgs ++ extras: _*) asScala
- // TODO - save results
- val failures = results collect {
- case (path, "FAIL") => path + " [FAILED]"
- case (path, "TIMEOUT") => path + " [TIMEOUT]"
- }
-
- if (failures.isEmpty)
- s.log.info(""+results.size+" tests passed.")
- else {
- failures foreach (s.log error _)
- error("Test Failures! ("+failures.size+" of "+results.size+")")
- }
- }
-
- def convertTestsForAutoComplete(tests: Map[String, Seq[File]]): (Set[String], Set[String]) =
- (tests.keys.toSet, tests.values flatMap (_ map cleanFileName) toSet)
-
- /** Takes a test file, as sent to Partest, and cleans it up for auto-complete */
- def cleanFileName(file: File): String = {
- // TODO - Something intelligent here
- val TestPattern = ".*/test/(.*)".r
- file.getCanonicalPath match {
- case TestPattern(n) => n
- case _ => file.getName
- }
- }
-
- // TODO - Allow a filter for the second part of this...
- def runSingleTestParser(testDirs: Map[String, File]): State => Parser[(String, String)] = {
- import DefaultParsers._
- state => {
- Space ~> token(NotSpace examples testDirs.keys.toSet) flatMap { kind =>
- val files: Set[String] = testDirs get kind match {
- case Some(dir) =>
- partestResources(dir, kind).get flatMap (_ relativeTo dir) map (_ getName) toSet
- case _ =>
- Set()
- }
- Space ~> token(NotSpace examples files) map (kind -> _)
- }
- }
- }
-
- def runSingleTestTask(runner: ScopedTask[PartestRunner], testDirs: ScopedSetting[Map[String, File]], scalacOptions: ScopedTask[Seq[String]]) : Initialize[InputTask[Unit]] = {
- import sbinary.DefaultProtocol._
-
- InputTask(testDirs apply runSingleTestParser) { result =>
- (runner, result, testDirs, scalacOptions, streams) map {
- case (r, (kind, filter), dirs, o, s) =>
- // TODO - Use partest resources somehow to filter the filter correctly....
- val files: Seq[File] =
- if (filter == "*") partestResources(dirs(kind), kind).get
- else (dirs(kind) * filter).get
-
- runPartestImpl(r, Map(kind -> files), o, s)
- }
- }
- }
-
- def partestRunnerTask(classpath: ScopedTask[Classpath], javacOptions: TaskKey[Seq[String]]): Project.Initialize[Task[PartestRunner]] =
- (classpath, javacOptions) map ((cp, opts) => new PartestRunner(Build.data(cp), opts mkString " "))
-}
-
-class PartestRunner(classpath: Seq[File], javaOpts: String) {
- // Classloader that does *not* have this as parent, for differing Scala version.
- lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.toURI.toURL).toArray, null)
- lazy val (mainClass, mainMethod) = try {
- val c = classLoader.loadClass("scala.tools.partest.nest.SBTRunner")
- val m = c.getMethod("mainReflect", classOf[Array[String]])
- (c,m)
- }
- lazy val classPathArgs = Seq("-cp", classpath.map(_.getAbsoluteFile).mkString(java.io.File.pathSeparator))
- def run(args: Array[String]): java.util.Map[String,String] = try {
- // TODO - undo this setting after running. Also globals are bad.
- System.setProperty("partest.java_opts", javaOpts)
- val allArgs = (classPathArgs ++ args).toArray
- mainMethod.invoke(null, allArgs).asInstanceOf[java.util.Map[String,String]]
- } catch {
- case e =>
- //error("Could not run Partest: " + e)
- throw e
- }
-}
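`runPartestImpl` above flattens the map of test kinds into the single argument array handed to `SBTRunner.mainReflect` over reflection: one `-<kind> file1,file2` pair per non-empty kind, followed by a `-scalacoption` pair per compiler flag. A small sketch of just that argument assembly (the file names are invented):

object PartestArgsDemo {
  def partestArgs(runs: Map[String, Seq[String]], scalacOptions: Seq[String]): Seq[String] = {
    // One "-<kind> file1,file2" pair per test kind that actually has tests...
    val testArgs = runs.toSeq.collect { case (kind, files) if files.nonEmpty => Seq("-" + kind, files mkString ",") }.flatten
    // ...then every scalac option, each prefixed with -scalacoption.
    val optionArgs = scalacOptions flatMap (opt => Seq("-scalacoption", opt))
    testArgs ++ optionArgs
  }

  def main(args: Array[String]): Unit =
    // Prints: -pos test/files/pos/t1.scala -scalacoption -deprecation
    println(partestArgs(Map("pos" -> Seq("test/files/pos/t1.scala"), "neg" -> Seq()), Seq("-deprecation")).mkString(" "))
}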
diff --git a/project/Release.scala b/project/Release.scala
deleted file mode 100644
index feab8bdc8c..0000000000
--- a/project/Release.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-import sbt._
-import Keys._
-
-object Release {
-
- // TODO - Just make the STARR artifacts and dump the sha1 files.
-
- val starrLibs = Seq("scala-library.jar", "scala-reflect.jar", "scala-compiler.jar", "jline.jar")
-
- val pushStarr = Command.command("new-starr") { (state: State) =>
- /*val extracted = Project.extract(state)
- import extracted._
- // First run tests
- val (s1, result) = runTask(test in Test, state)
- // If successful, package artifacts
- val (s2, distDir) = runTask(makeExplodedDist, s1)
- // Then copy new libs in place
- val bd = extracted get baseDirectory
- for {
- jarName <- starrLibs
- jar = distDir / "lib" / jarName
- if jar.exists
- } IO.copyFile(jar, bd / "lib" / jarName)
- // Invalidate SHA1 files.
- ShaResolve.removeInvalidShaFiles(bd)
- // Now run tests *again*?
- s2*/
- state
- }
-}
diff --git a/project/RemoteDependencies.scala b/project/RemoteDependencies.scala
deleted file mode 100644
index 705b9dc402..0000000000
--- a/project/RemoteDependencies.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-import sbt._
-import Keys._
-import ScalaBuildKeys._
-
-
-object RemoteDependencies {
- def buildSettings(externalProjects: Set[URI], localScala: Setting[_]): Seq[Setting[_]] = Seq(
- commands += Command.command("fix-uri-projects") { (state: State) =>
- if(state.get(buildFixed) getOrElse false) state
- else {
- // TODO -fix up scalacheck's dependencies!
- val extracted = Project.extract(state)
- import extracted._
- val scalaVersionString = extracted get version
-
- def fix(s: Setting[_]): Setting[_] = s match {
- case ScopedExternalSetting(p, scalaInstance.key, setting) if externalProjects(p) => localScala mapKey Project.mapScope(_ => s.key.scope)
- // TODO - Fix Actors dependency...
- //case ScopedExternalSetting(p, libraryDependencies.key, setting) if externalProjects(p) => fixProjectDeps(s)
- case s => s
- }
- val transformed = session.mergeSettings map ( s => fix(s) )
- val scopes = transformed collect { case ScopedExternalSetting(p, _, s) if externalProjects(p) => s.key.scope } toSet
- // Create some fixers so we don't download scala or rely on it.
- // Also add dependencies that disappear in 2.10 for now...
- val fixers = for { scope <- scopes
- setting <- Seq(autoScalaLibrary := false,
- crossPaths := false,
- scalaVersion := scalaVersionString)
- } yield setting mapKey Project.mapScope(_ => scope)
- val newStructure = Load.reapply(transformed ++ fixers, structure)
- Project.setProject(session, newStructure, state).put(buildFixed, true)
- }
- },
- onLoad in Global <<= (onLoad in Global) apply (_ andThen { (state: State) =>
- "fix-uri-projects" :: state
- })
- )
-}
-
-
-
-/** Matcher to make updated remote project references easier. */
-object ScopedExternalSetting {
- def unapply[T](s: Setting[_]): Option[(URI, AttributeKey[_], Setting[_])] =
- s.key.scope.project match {
- case Select(p @ ProjectRef(uri, _)) => Some((uri, s.key.key, s))
- case _ => None
- }
-}
-
-
-
diff --git a/project/Sametest.scala b/project/Sametest.scala
deleted file mode 100644
index 6f12eb24b3..0000000000
--- a/project/Sametest.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-import sbt._
-
-import Build._
-import Keys._
-
-// This code is adapted from scala.tools.ant.Same by Gilles Dubochet.
-object SameTest {
-
- def checkSameBinaryProjects(lhs: Project, rhs: Project): Project.Initialize[Task[Unit]] =
- (classDirectory in Compile in lhs, classDirectory in Compile in rhs,
- compile in Compile in lhs, compile in Compile in rhs, streams) map { (lhs,rhs, _, _, s) =>
- // Now we generate a complete set of relative files and then
- def relativeClasses(dir: File) = (dir ** "*.class").get.flatMap(IO.relativize(dir,_).toList)
- // This code adapted from SameTask in the compiler.
- def hasDifferentFiles(filePairs: Seq[(File,File)]): Boolean = {
- filePairs exists { case (a,b) =>
- if (!a.canRead || !b.canRead) {
- s.log.error("Either ["+a+"] or ["+b+"] is missing.")
- true
- } else {
- s.log.debug("Checking for binary differences in ["+a+"] against ["+b+"].")
- val diff = !checkSingleFilePair(a,b)
- if(diff) s.log.error("["+a+"] differs from ["+b+"]")
- diff
- }
- }
- }
- val allClassMappings = (relativeClasses(lhs) ++ relativeClasses(rhs)).distinct
- val comparisons = allClassMappings.map(f => new File(lhs, f) -> new File(rhs, f))
- val result = hasDifferentFiles(comparisons)
- if (result) error("Binary artifacts differ.")
- }
-
- val bufferSize = 1024
-
- // Tests whether two files are binary equivalents of each other.
- def checkSingleFilePair(originFile: File, destFile: File): Boolean = {
- Using.fileInputStream(originFile) { originStream =>
- Using.fileInputStream(destFile) { destStream =>
- val originBuffer = new Array[Byte](bufferSize)
- val destBuffer = new Array[Byte](bufferSize)
- var equalNow = true
- var originRemaining = originStream.read(originBuffer)
- var destRemaining = destStream.read(destBuffer)
- while (originRemaining > 0 && equalNow) {
- if (originRemaining == destRemaining) {
- for (idx <- 0 until originRemaining) {
- equalNow = equalNow && (originBuffer(idx) == destBuffer(idx))
- }
- } else {
- equalNow = false
- }
- originRemaining = originStream.read(originBuffer)
- destRemaining = destStream.read(destBuffer)
- }
- if (destRemaining > 0) equalNow = false
- equalNow
- }
- }
- }
-
-
-}
diff --git a/project/ScalaBuildKeys.scala b/project/ScalaBuildKeys.scala
deleted file mode 100644
index 9e495de19f..0000000000
--- a/project/ScalaBuildKeys.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-import sbt._
-import Keys._
-
-object ScalaBuildKeys {
- val lockerLock = TaskKey[Unit]("locker-lock", "Locks the locker layer of the compiler build such that it won't rebuild on changed source files.")
- val lockerUnlock = TaskKey[Unit]("locker-unlock", "Unlocks the locker layer of the compiler so that it will be recompiled on changed source files.")
- val lockFile = SettingKey[File]("lock-file", "Location of the lock file compiling this project.")
- val lock = TaskKey[Unit]("lock", "Locks this project so it won't be recompiled.")
- val unlock = TaskKey[Unit]("unlock", "Unlocks this project so it will be recompiled.")
- val makeDist = TaskKey[File]("make-dist", "Creates a mini-distribution (scala home directory) for this build in a zip file.")
- val makeExplodedDist = TaskKey[File]("make-exploded-dist", "Creates a mini-distribution (scala home directory) for this build in a directory.")
- val makeDistMappings = TaskKey[Seq[(File, String)]]("make-dist-mappings", "Creates distribution mappings for creating zips, jars, directories, etc.")
- val buildFixed = AttributeKey[Boolean]("build-uri-fixed")
- val genBinRunner = TaskKey[ScalaToolRunner]("gen-bin-runner", "Creates a utility to generate script files for Scala.")
- val genBin = TaskKey[Seq[(File,String)]]("gen-bin", "Creates script files for Scala distribution.")
- val binDir = SettingKey[File]("binaries-directory", "Directory where binary scripts will be located.")
- val genBinQuick = TaskKey[Seq[(File,String)]]("gen-quick-bin", "Creates script files for testing against current Scala build classfiles (not local dist).")
- val runManmakerMan = TaskKey[Seq[(File,String)]]("make-man", "Runs the man maker project to generate man pages")
- val runManmakerHtml = TaskKey[Seq[(File,String)]]("make-html", "Runs the man maker project to generate html pages")
- val checkSame = TaskKey[Unit]("check-same-binaries", "checks whether or not the class files generated by scala are the same.")
- val checkSameLibrary = TaskKey[Unit]("check-same-lib-binaries", "checks whether or not the library class files generated by scala are the same.")
- val checkSameCompiler = TaskKey[Unit]("check-same-comp-binaries", "checks whether or not the compiler class files generated by scala are the same.")
-}
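These keys follow sbt's hyphenated naming, so in a checkout of this build they show up at the interactive prompt as ordinary tasks. An illustrative session against the root project (output elided):

  > pull-binary-libs        (fetches STARR and test jars listed in the .desired.sha1 files)
  > locker-lock             (freezes the locker layer so source edits do not trigger a rebuild)
  > make-dist               (assembles target/scala-dist.zip)
  > make-exploded-dist      (lays the same distribution out as a directory)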
diff --git a/project/ScalaToolRunner.scala b/project/ScalaToolRunner.scala
deleted file mode 100644
index d7338a54b3..0000000000
--- a/project/ScalaToolRunner.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-import sbt._
-import Keys._
-
-/** Reflection helper that runs ScalaTool.
- * TODO - When SBT is on 2.10.x try to use Dynamic + Reflection. COULD BE FUN.
- */
-class ScalaToolRunner(classpath: Classpath) {
- // TODO - Don't use the ant task directly...
- lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.data.toURI.toURL).toArray, null)
- lazy val mainClass = classLoader.loadClass("scala.tools.ant.ScalaTool")
- lazy val executeMethod = mainClass.getMethod("execute")
- lazy val setFileMethod = mainClass.getMethod("setFile", classOf[java.io.File])
- lazy val setClassMethod = mainClass.getMethod("setClass", classOf[String])
- lazy val setClasspathMethod = mainClass.getMethod("setClassPath", classOf[String])
- lazy val instance = mainClass.newInstance()
-
- def setClass(cls: String): Unit = setClassMethod.invoke(instance, cls)
- def setFile(file: File): Unit = setFileMethod.invoke(instance, file)
- def setClasspath(cp: String): Unit = setClasspathMethod.invoke(instance, cp)
- def execute(): Unit = executeMethod.invoke(instance)
-}
diff --git a/project/ShaResolve.scala b/project/ShaResolve.scala
deleted file mode 100644
index e5b25a29cf..0000000000
--- a/project/ShaResolve.scala
+++ /dev/null
@@ -1,148 +0,0 @@
-import sbt._
-
-import Build._
-import Keys._
-import Project.Initialize
-import scala.collection.{ mutable, immutable }
-import scala.collection.parallel.CompositeThrowable
-import java.security.MessageDigest
-
-case class Credentials(user: String, pw: String)
-
-/** Helpers to resolve SHA artifacts from typesafe repo. */
-object ShaResolve {
- import dispatch.{Http,url}
- val remote_urlbase="http://typesafe.artifactoryonline.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap"
-
- val pullBinaryLibs = TaskKey[Unit]("pull-binary-libs", "Pulls binary libs by the SHA key.")
- val pushBinaryLibs = TaskKey[Unit]("push-binary-libs", "Pushes binary libs whose SHA has changed.")
- val binaryLibCache = SettingKey[File]("binary-lib-cache", "Location of the cache of binary libs for this scala build.")
-
- def settings: Seq[Setting[_]] = Seq(
- binaryLibCache in ThisBuild := file(System.getProperty("user.home")) / ".sbt" / "cache" / "scala",
- pullBinaryLibs in ThisBuild <<= (baseDirectory, binaryLibCache, streams) map resolveLibs,
- pushBinaryLibs in ThisBuild <<= (baseDirectory, streams) map getCredentialsAndPushFiles
- )
-
- def resolveLibs(dir: File, cacheDir: File, s: TaskStreams): Unit = loggingParallelExceptions(s) {
- val files = (dir / "test" / "files" ** "*.desired.sha1") +++ (dir / "lib" ** "*.desired.sha1")
- for {
- (file, name) <- (files x relativeTo(dir)).par
- uri = name.dropRight(13).replace('\\', '/')
- jar = dir / uri
- if !jar.exists || !isValidSha(file)
- sha = getShaFromShafile(file)
- } pullFile(jar, sha + "/" + uri, cacheDir, sha, s)
- }
-
- /** This method removes all SHA1 files that don't match their corresponding JAR. */
- def removeInvalidShaFiles(dir: File): Unit = {
- val files = (dir / "test" / "files" ** "*.desired.sha1") +++ (dir / "lib" ** "*.desired.sha1")
- for {
- (file, name) <- (files x relativeTo(dir)).par
- uri = name.dropRight(13).replace('\\', '/')
- jar = dir / uri
- if !jar.exists || !isValidSha(file)
- } IO.delete(jar)
- }
- def getCredentials: Credentials = System.out.synchronized {
- val user = (SimpleReader.readLine("Please enter your STARR username> ") getOrElse error("No username provided."))
- val password = (SimpleReader.readLine("Please enter your STARR password> ", Some('*')) getOrElse error("No password provided."))
- Credentials(user, password)
- }
-
- def getCredentialsAndPushFiles(dir: File, s: TaskStreams): Unit =
- pushFiles(dir, getCredentials, s)
-
- def pushFiles(dir: File, cred: Credentials, s: TaskStreams): Unit = loggingParallelExceptions(s) {
- val files = (dir / "test" / "files" ** "*.jar") +++ (dir / "lib" ** "*.jar")
- for {
- (jar, name) <- (files x relativeTo(dir)).par
- shafile = dir / (name + ".desired.sha1")
- if !shafile.exists || !isValidSha(shafile)
- } pushFile(jar, name, cred, s)
- }
-
- @inline final def loggingParallelExceptions[U](s: TaskStreams)(f: => U): U = try f catch {
- case t: CompositeThrowable =>
- s.log.error("Error during parallel execution, GET READY FOR STACK TRACES!!")
- t.throwables foreach (t2 => s.log.trace(t2))
- throw t
- }
-
- // TODO - Finish this publishing aspect.
-
- def getShaFromShafile(file: File): String = parseShaFile(file)._2
-
- // This should calculate the SHA sum of a file the same as the linux process.
- def calculateSha(file: File): String = {
- val digest = MessageDigest.getInstance("SHA1")
- val in = new java.io.FileInputStream(file);
- val buffer = new Array[Byte](8192)
- try {
- def read(): Unit = in.read(buffer) match {
- case x if x <= 0 => ()
- case size => digest.update(buffer, 0, size); read()
- }
- read()
- } finally in.close()
- val sha = convertToHex(digest.digest())
- sha
- }
-
- def convertToHex(data: Array[Byte]): String = {
- def byteToHex(b: Int) =
- if ((0 <= b) && (b <= 9)) ('0' + b).toChar
- else ('a' + (b-10)).toChar
- val buf = new StringBuffer
- for (i <- 0 until data.length) {
- buf append byteToHex((data(i) >>> 4) & 0x0F)
- buf append byteToHex(data(i) & 0x0F)
- }
- buf.toString
- }
- // Parses a sha file into a file and a sha.
- def parseShaFile(file: File): (File, String) =
- IO.read(file).split("\\s") match {
- case Array(sha, filename) if filename.startsWith("?") => (new File(file.getParentFile, filename.drop(1)), sha)
- case Array(sha, filename) if filename.startsWith("*") => (new File(file.getParentFile, filename.drop(1)), sha)
- case Array(sha, filename) => (new File(file.getParentFile, filename), sha)
- case _ => error(file.getAbsolutePath + " is an invalid sha file")
- }
-
-
- def isValidSha(file: File): Boolean =
- try {
- val (jar, sha) = parseShaFile(file)
- jar.exists && calculateSha(jar) == sha
- } catch {
- case t: Exception => false
- }
-
-
- def pullFile(file: File, uri: String, cacheDir: File, sha: String, s: TaskStreams): Unit = {
- val cachedFile = cacheDir / uri
- if (!cachedFile.exists || calculateSha(cachedFile) != sha) {
- // Ensure the directory for the cache exists.
- cachedFile.getParentFile.mkdirs()
- val url = remote_urlbase + "/" + uri
- val fous = new java.io.FileOutputStream(cachedFile)
- s.log.info("Pulling [" + cachedFile + "] to cache")
- try Http(dispatch.url(url) >>> fous) finally fous.close()
- }
- s.log.info("Pulling [" + file + "] from local cache")
- IO.copyFile(cachedFile, file)
- }
-
- // Pushes a file and writes the new .desired.sha1 for git.
- def pushFile(file: File, uri: String, cred: Credentials, s: TaskStreams): Unit = {
- val sha = calculateSha(file)
- val url = remote_urlbase + "/" + sha + "/" + uri
- val sender = dispatch.url(url).PUT.as(cred.user,cred.pw) <<< (file, "application/java-archive")
- // TODO - output to logger.
- Http(sender >>> System.out)
- val shafile = file.getParentFile / (file.getName + ".desired.sha1")
- IO.touch(shafile)
- IO.write(shafile, sha + " ?" + file.getName)
- }
-}
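The `*.desired.sha1` files this machinery reads (the ones under lib/ in this change) each hold a single `sha1sum`-style line; `parseShaFile` tolerates the `?` and `*` markers and `pushFile` writes the `?` form. A standalone sketch of reading and verifying one such entry (the SHA below is a placeholder, not a real checksum):

import java.io.{File, FileInputStream}
import java.security.MessageDigest

object DesiredShaDemo {
  // A .desired.sha1 file holds one line of the form "<sha1> ?<file name>".
  def parseLine(line: String): (String, String) = line.trim.split("\\s+") match {
    case Array(sha, name) => (name.dropWhile(c => c == '?' || c == '*'), sha)
    case _                => sys.error("not a valid .desired.sha1 line: " + line)
  }

  // SHA-1 of a file as lower-case hex, the value the build compares against the recorded one.
  def sha1(file: File): String = {
    val digest = MessageDigest.getInstance("SHA1")
    val in = new FileInputStream(file)
    try {
      val buf = new Array[Byte](8192)
      Iterator.continually(in.read(buf)).takeWhile(_ > 0).foreach(n => digest.update(buf, 0, n))
    } finally in.close()
    digest.digest().map("%02x" format _).mkString
  }

  def main(args: Array[String]): Unit = {
    val (name, expected) = parseLine("0123456789abcdef0123456789abcdef01234567 ?jline.jar")
    println(name + " matches its recorded SHA: " + (sha1(new File("lib/" + name)) == expected))
  }
}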
diff --git a/project/Testing.scala b/project/Testing.scala
deleted file mode 100644
index 5b4135a31a..0000000000
--- a/project/Testing.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-import sbt._
-import Keys._
-import partest._
-import SameTest._
-import ScalaBuildKeys._
-
-/** All settings/projects relating to testing. */
-trait Testing { self: ScalaBuild.type =>
-
- lazy val testsuiteSettings: Seq[Setting[_]] = compilerDependentProjectSettings ++ partestTaskSettings ++ VerifyClassLoad.settings ++ Seq(
- unmanagedBase <<= baseDirectory / "test/files/lib",
- fullClasspath in VerifyClassLoad.checkClassLoad <<= (fullClasspath in scalaLibrary in Runtime).identity,
- autoScalaLibrary := false,
- checkSameLibrary <<= checkSameBinaryProjects(quickLib, strappLib),
- checkSameCompiler <<= checkSameBinaryProjects(quickComp, strappComp),
- checkSame <<= (checkSameLibrary, checkSameCompiler) map ((a,b) => ()),
- autoScalaLibrary := false
- )
- lazy val continuationsTestsuiteSettings: Seq[Setting[_]] = testsuiteSettings ++ Seq(
- scalacOptions in Test <++= (exportedProducts in Compile in continuationsPlugin) map {
- case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath)
- },
- partestDirs <<= baseDirectory apply { bd =>
- def mkFile(name: String) = bd / "test" / "files" / name
- def mkTestType(name: String) = name.drop("continuations-".length).toString
- Seq("continuations-neg", "continuations-run") map (t => mkTestType(t) -> mkFile(t)) toMap
- }
- )
- val testsuite = (
- Project("testsuite", file("."))
- settings (testsuiteSettings:_*)
- dependsOn (scalaLibrary, scalaCompiler, partest, scalacheck)
- )
- val continuationsTestsuite = (
- Project("continuations-testsuite", file("."))
- settings (continuationsTestsuiteSettings:_*)
- dependsOn (partest, scalaLibrary, scalaCompiler)
- )
-
-}
-
diff --git a/project/VerifyClassLoad.scala b/project/VerifyClassLoad.scala
deleted file mode 100644
index c8eebb1159..0000000000
--- a/project/VerifyClassLoad.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-import sbt._
-
-import Build._
-import Keys._
-
-// This is helper code to validate that generated class files will succeed in bytecode verification at class-load time.
-object VerifyClassLoad {
- lazy val checkClassLoad: TaskKey[Unit] = TaskKey("check-class-load", "checks whether or not the class files generated by scala are deemed acceptable by classloaders.")
- lazy val checkClassRunner: TaskKey[ClassVerifyRunner] = TaskKey("check-class-runner", "A wrapper around reflective calls to the VerifyClass class.")
-
-
- def settings: Seq[Setting[_]] = Seq(
- checkClassRunner <<= (fullClasspath in Runtime) map (cp => new ClassVerifyRunner(data(cp))),
- fullClasspath in checkClassLoad := Seq(),
- checkClassLoad <<= (checkClassRunner, fullClasspath in checkClassLoad, streams) map { (runner, dirs, s) =>
- import collection.JavaConverters._
- val results = runner.run(data(dirs).map(_.getAbsolutePath).toArray).asScala
-
- s.log.info("Processed " + results.size + " classes.")
- val errors = results.filter(_._2 != null)
- for( (name, result) <- results; if result != null) {
- s.log.error(name + " had error: " + result)
- }
- if(errors.size > 0) error("Classload validation errors encountered")
- ()
- }
- )
-
- // TODO - Use
- class ClassVerifyRunner(classpath: Seq[File]) {
- // Classloader with no parent, so the build's own (possibly different) Scala version does not leak into verification.
- lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.toURI.toURL).toArray, null)
- lazy val (mainClass, mainMethod) = try {
- val c = classLoader.loadClass("scala.tools.util.VerifyClass")
- val m = c.getMethod("run", classOf[Array[String]])
- (c,m)
- }
- def run(args: Array[String]): java.util.Map[String,String] = try {
- mainMethod.invoke(null, args).asInstanceOf[java.util.Map[String,String]]
- } catch {
- case e =>
- //error("Could not run Partest: " + e)
- throw e
- }
- }
-}
diff --git a/project/Versions.scala b/project/Versions.scala
deleted file mode 100644
index 57e274c15c..0000000000
--- a/project/Versions.scala
+++ /dev/null
@@ -1,142 +0,0 @@
-import sbt._
-import Keys._
-import java.util.Properties
-import scala.util.control.Exception.catching
-import java.lang.{NumberFormatException => NFE}
-import java.io.FileInputStream
-import com.jsuereth.git.GitRunner
-import com.jsuereth.git.GitKeys.gitRunner
-
-case class VersionInfo(canonical: String,
- maven: String,
- osgi: String)
-
- /** This file is responsible for setting up Scala versioning schemes and updating all the necessary bits. */
-object Versions {
- val buildNumberFile = SettingKey[File]("scala-build-number-file")
- // TODO - Make this a setting?
- val buildNumberProps = SettingKey[BaseBuildNumber]("scala-build-number-props")
- val buildRelease = SettingKey[Boolean]("scala-build-release", "This is set to true if we're building a release.")
- val mavenSuffix = SettingKey[String]("scala-maven-suffix", "This is set to whatever maven suffix is required.")
-
- val gitSha = TaskKey[String]("scala-git-sha", "The sha of the current git commit.")
- val gitDate = TaskKey[String]("scala-git-date", "The date of the current git commit.")
-
- val mavenVersion = SettingKey[String]("scala-maven-version", "The maven version number.")
- val osgiVersion = TaskKey[String]("scala-osgi-version", "The OSGi version number.")
- val canonicalVersion = TaskKey[String]("scala-canonical-version", "The canonical version number.")
-
- val scalaVersions = TaskKey[VersionInfo]("scala-version-info", "The scala versions used for this build.")
-
-
-
- def settings: Seq[Setting[_]] = Seq(
- buildNumberFile <<= baseDirectory apply (_ / "build.number"),
- buildNumberProps <<= buildNumberFile apply loadBuildNumberProps,
- buildRelease := Option(System.getProperty("build.release")) map (!_.isEmpty) getOrElse false,
- mavenSuffix <<= buildRelease apply pickMavenSuffix,
- mavenVersion <<= (buildNumberProps, mavenSuffix) apply makeMavenVersion,
- gitSha <<= (gitRunner, baseDirectory, streams) map getGitSha,
- gitDate <<= (gitRunner, baseDirectory, streams) map getGitDate,
- osgiVersion <<= (buildNumberProps, gitDate, gitSha) map makeOsgiVersion,
- canonicalVersion <<= (buildRelease, mavenVersion, buildNumberProps, gitDate, gitSha) map makeCanonicalVersion,
- scalaVersions <<= (canonicalVersion, mavenVersion, osgiVersion) map VersionInfo.apply
- )
-
-
- /** Generates the version properties file for a project if it is missing, or if the recorded
- * version differs from the current one and compilation is not being skipped. */
- def generateVersionPropertiesFile(name: String)(dir: File, versions: VersionInfo, skip: Boolean, s: TaskStreams): Seq[File] = {
- // TODO - We can probably clean this up by moving caching bits elsewhere perhaps....
- val target = dir / name
- // TODO - Regenerate on triggers, like recompilation or something...
- def hasSameVersion: Boolean = {
- val props = new java.util.Properties
- val in = new java.io.FileInputStream(target)
- try props.load(in) finally in.close()
- versions.canonical == (props getProperty "version.number")
- }
- if (!target.exists || !(skip || hasSameVersion)) {
- makeVersionPropertiesFile(target, versions)
- }
- target :: Nil
- }
-
- // This creates the *.properties file used to determine the current version of scala at runtime. TODO - move these somewhere utility like.
- def makeVersionPropertiesFile(f: File, versions: VersionInfo): Unit =
- IO.write(f, "version.number = "+versions.canonical+"\n"+
- "osgi.number = "+versions.osgi+"\n"+
- "maven.number = "+versions.maven+"\n"+
- "copyright.string = Copyright 2002-2013, LAMP/EPFL")
-
- def makeCanonicalVersion(isRelease: Boolean, mvnVersion: String, base: BaseBuildNumber, gitDate: String, gitSha: String): String =
- if(isRelease) mvnVersion
- else {
- val suffix = if(base.bnum > 0) "-%d".format(base.bnum) else ""
- "%s.%s.%s%s-%s-%s" format (base.major, base.minor, base.patch, suffix, gitDate, gitSha)
- }
-
- def makeMavenVersion(base: BaseBuildNumber, suffix: String): String = {
- val firstSuffix = if(base.bnum > 0) "-%d".format(base.bnum) else ""
- "%d.%d.%d%s%s" format (base.major, base.minor, base.patch, firstSuffix, suffix)
- }
-
- def makeOsgiVersion(base: BaseBuildNumber, gitDate: String, gitSha: String): String = {
- val suffix = if(base.bnum > 0) "-%d".format(base.bnum) else ""
- "%s.%s.%s.v%s%s-%s" format (base.major, base.minor, base.patch, gitDate, suffix, gitSha)
- }
-
- /** Determines what the maven suffix should be for this build. */
- def pickMavenSuffix(isRelease: Boolean): String = {
- def default = if(isRelease) "" else "-SNAPSHOT"
- Option(System.getProperty("maven.version.suffix")) getOrElse default
- }
-
- /** Loads the build.number properties file into SBT. */
- def loadBuildNumberProps(file: File): BaseBuildNumber = {
- val fin = new FileInputStream(file)
- try {
- val props = new Properties()
- props.load(fin)
- def getProp(name: String): Int =
- (for {
- v <- Option(props.getProperty(name))
- v2 <- catching(classOf[NFE]) opt v.toInt
- } yield v2) getOrElse sys.error("Could not convert %s to integer!" format (name))
-
- BaseBuildNumber(
- major=getProp("version.major"),
- minor=getProp("version.minor"),
- patch=getProp("version.patch"),
- bnum =getProp("version.bnum")
- )
- } finally fin.close()
- }
-
-
- def getGitDate(git: GitRunner, baseDirectory: File, s: TaskStreams): String = {
- val lines = getGitLines("log","-1","--format=\"%ci\"")(git,baseDirectory, s)
- val line = if(lines.isEmpty) sys.error("Could not retrieve git commit date!") else lines.head
- // Lines *always* start with " for some reason...
- line drop 1 split "\\s+" match {
- case Array(date, time, _*) => "%s-%s" format (date.replaceAll("\\-", ""), time.replaceAll(":",""))
- case _ => sys.error("Could not parse git date: " + line)
- }
- }
-
- def getGitSha(git: GitRunner, baseDirectory: File, s: TaskStreams): String = {
- val lines = getGitLines("log","-1","--format=\"%H\"", "HEAD")(git,baseDirectory, s)
- val line = if(lines.isEmpty) sys.error("Could not retreive git commit sha!") else lines.head
- val noquote = if(line startsWith "\"") line drop 1 else line
- val nog = if(noquote startsWith "g") noquote drop 1 else noquote
- nog take 10
- }
-
- def getGitLines(args: String*)(git: GitRunner, baseDirectory: File, s: TaskStreams): Seq[String] =
- git(args: _*)(baseDirectory, s.log) split "[\r\n]+"
-}
-
-
-case class BaseBuildNumber(major: Int, minor: Int, patch: Int, bnum: Int) {
- override def toString = "BaseBuildNumber(%d.%d.%d-%d)" format (major, minor, patch, bnum)
-}
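
For reference, the deleted helpers above composed three flavours of version strings from build.number plus git metadata. A minimal, self-contained sketch of that formatting logic, with made-up sample inputs (the real values came from build.number and `git log`):

    case class BaseBuildNumber(major: Int, minor: Int, patch: Int, bnum: Int)

    object VersionFormatting extends App {
      val base    = BaseBuildNumber(2, 11, 0, 0)  // hypothetical contents of build.number
      val gitDate = "20130601-123456"             // yyyymmdd-hhmmss, as produced by getGitDate
      val gitSha  = "0123456789"                  // first 10 chars of the HEAD sha, as in getGitSha

      val suffix    = if (base.bnum > 0) "-%d".format(base.bnum) else ""
      val canonical = "%s.%s.%s%s-%s-%s".format(base.major, base.minor, base.patch, suffix, gitDate, gitSha)
      val maven     = "%d.%d.%d%s%s".format(base.major, base.minor, base.patch, suffix, "-SNAPSHOT")
      val osgi      = "%s.%s.%s.v%s%s-%s".format(base.major, base.minor, base.patch, gitDate, suffix, gitSha)

      println(canonical) // 2.11.0-20130601-123456-0123456789
      println(maven)     // 2.11.0-SNAPSHOT
      println(osgi)      // 2.11.0.v20130601-123456-0123456789
    }
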
diff --git a/project/plugins.sbt b/project/plugins.sbt
deleted file mode 100644
index fdf37e31a6..0000000000
--- a/project/plugins.sbt
+++ /dev/null
@@ -1,9 +0,0 @@
-resolvers += Resolver.url("Typesafe nightlies", url("https://typesafe.artifactoryonline.com/typesafe/ivy-snapshots/"))(Resolver.ivyStylePatterns)
-
-resolvers += Resolver.url("scalasbt", new URL("http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases"))(Resolver.ivyStylePatterns)
-
-resolvers += "jgit-repo" at "http://download.eclipse.org/jgit/maven"
-
-libraryDependencies += "net.databinder" % "dispatch-http_2.9.1" % "0.8.6"
-
-
diff --git a/project/project/Build.scala b/project/project/Build.scala
deleted file mode 100644
index d3a08b62ba..0000000000
--- a/project/project/Build.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-import sbt._
-object PluginDef extends Build {
- override def projects = Seq(root)
- lazy val root = Project("plugins", file(".")) dependsOn(proguard, git)
- lazy val proguard = uri("git://github.com/jsuereth/xsbt-proguard-plugin.git#sbt-0.12")
- lazy val git = uri("git://github.com/sbt/sbt-git.git#scala-build")
-}
diff --git a/src/actors/scala/actors/remote/TcpService.scala b/src/actors/scala/actors/remote/TcpService.scala
index 8163ae9fc6..ad78ff784c 100644
--- a/src/actors/scala/actors/remote/TcpService.scala
+++ b/src/actors/scala/actors/remote/TcpService.scala
@@ -14,7 +14,7 @@ package remote
import java.io.{DataInputStream, DataOutputStream, IOException}
import java.lang.{Thread, SecurityException}
-import java.net.{InetAddress, ServerSocket, Socket, UnknownHostException}
+import java.net.{InetAddress, InetSocketAddress, ServerSocket, Socket, SocketTimeoutException, UnknownHostException}
import scala.collection.mutable
import scala.util.Random
@@ -60,6 +60,23 @@ object TcpService {
portnum
}
+ private val connectTimeoutMillis = {
+ val propName = "scala.actors.tcpSocket.connectTimeoutMillis"
+ val defaultTimeoutMillis = 0
+ sys.props get propName flatMap {
+ timeout =>
+ try {
+ val to = timeout.toInt
+ Debug.info("Using socket timeout $to")
+ Some(to)
+ } catch {
+ case e: NumberFormatException =>
+ Debug.warning(s"""Could not parse $propName = "$timeout" as an Int""")
+ None
+ }
+ } getOrElse defaultTimeoutMillis
+ }
+
var BufSize: Int = 65536
}
@@ -178,7 +195,15 @@ class TcpService(port: Int, cl: ClassLoader) extends Thread with Service {
}
def connect(n: Node): TcpServiceWorker = synchronized {
- val socket = new Socket(n.address, n.port)
+ val socket = new Socket()
+ val start = System.nanoTime
+ try {
+ socket.connect(new InetSocketAddress(n.address, n.port), TcpService.connectTimeoutMillis)
+ } catch {
+ case e: SocketTimeoutException =>
+ Debug.warning(f"Timed out connecting to $n after ${(System.nanoTime - start) / math.pow(10, 9)}%.3f seconds")
+ throw e
+ }
val worker = new TcpServiceWorker(this, socket)
worker.sendNode(n)
worker.start()
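
The new connect timeout is read once from a system property; the default of 0 keeps the old blocking behaviour. A hedged sketch of how a client could opt in before the remote actor service starts (the property name comes from the hunk above; the host and port are illustrative):

    object ConnectTimeoutDemo extends App {
      // must be set before TcpService first reads the property
      sys.props("scala.actors.tcpSocket.connectTimeoutMillis") = "5000"

      // roughly what TcpService.connect now does under the hood
      import java.net.{InetSocketAddress, Socket}
      val socket = new Socket()
      try socket.connect(new InetSocketAddress("example.org", 9010), 5000)
      catch { case e: java.io.IOException => println(s"connect failed or timed out: $e") }
      finally socket.close()
    }
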
diff --git a/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala b/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
index ac123cfe26..75a98db6c8 100644
--- a/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
+++ b/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
@@ -63,7 +63,7 @@ class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean
while (true) {
this.synchronized {
try {
- wait(CHECK_FREQ)
+ wait(CHECK_FREQ.toLong)
} catch {
case _: InterruptedException =>
}
diff --git a/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala b/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
index 2c4b7677b0..342579db6c 100644
--- a/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
+++ b/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
@@ -103,7 +103,7 @@ class ResizableThreadPoolScheduler(protected val terminate: Boolean,
while (true) {
this.synchronized {
try {
- wait(CHECK_FREQ)
+ wait(CHECK_FREQ.toLong)
} catch {
case _: InterruptedException =>
}
diff --git a/src/actors/scala/actors/scheduler/TerminationService.scala b/src/actors/scala/actors/scheduler/TerminationService.scala
index 280c8f4131..ed1805ee1e 100644
--- a/src/actors/scala/actors/scheduler/TerminationService.scala
+++ b/src/actors/scala/actors/scheduler/TerminationService.scala
@@ -39,7 +39,7 @@ private[scheduler] trait TerminationService extends TerminationMonitor {
while (true) {
this.synchronized {
try {
- wait(CHECK_FREQ)
+ wait(CHECK_FREQ.toLong)
} catch {
case _: InterruptedException =>
}
diff --git a/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala b/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
index ee8776f397..bfd4e7ac40 100644
--- a/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
+++ b/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
@@ -42,10 +42,7 @@ private[actors] object ThreadPoolConfig {
(propIsSetTo("actors.enableForkJoin", "true") || {
Debug.info(this+": java.version = "+javaVersion)
Debug.info(this+": java.vm.vendor = "+javaVmVendor)
-
- // on IBM J9 1.6 do not use ForkJoinPool
- // XXX this all needs to go into Properties.
- isJavaAtLeast("1.6") && ((javaVmVendor contains "Oracle") || (javaVmVendor contains "Sun") || (javaVmVendor contains "Apple"))
+ isJavaAtLeast("1.6")
})
catch {
case _: SecurityException => false
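
With the vendor check gone, the fork/join scheduler becomes the default on any JVM running Java 1.6 or newer unless it is explicitly disabled. A simplified sketch of the property-driven switch (property name as in the hunk above; run with `-Dactors.enableForkJoin=false` to opt out):

    object ForkJoinSwitch extends App {
      def propIsSetTo(name: String, value: String) =
        sys.props.get(name).exists(_ == value)

      val useForkJoin =
        try !propIsSetTo("actors.enableForkJoin", "false") &&
            scala.util.Properties.isJavaAtLeast("1.6")
        catch { case _: SecurityException => false }

      println(s"fork/join scheduler enabled: $useForkJoin")
    }
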
diff --git a/src/asm/scala/tools/asm/tree/InsnList.java b/src/asm/scala/tools/asm/tree/InsnList.java
index 55d83c2e8b..b1e2d97c6f 100644
--- a/src/asm/scala/tools/asm/tree/InsnList.java
+++ b/src/asm/scala/tools/asm/tree/InsnList.java
@@ -244,6 +244,14 @@ public class InsnList {
* {@link InsnList}</i>.
*/
public void add(final AbstractInsnNode insn) {
+ if(insn.prev != null || insn.next != null) {
+ // Adding an instruction that still refers to others (in the same or another InsnList) leads to hard-to-debug bugs.
+ // Initially everything may look ok (e.g. iteration follows `next`, thus a stale `prev` isn't noticed).
+ // However, a stale link brings the doubly-linked list into disarray, e.g. upon removing an element,
+ // which results in the `next` of a stale `prev` being updated, among other failure scenarios.
+ // Better fail early.
+ throw new RuntimeException("Instruction " + insn + " already belongs to some InsnList.");
+ }
++size;
if (last == null) {
first = insn;
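
The guard turns silent corruption of the doubly-linked list into an immediate failure. A small sketch of the situation it now catches, assuming the standard ASM tree constructors (the second `add` is the call that throws):

    import scala.tools.asm.Opcodes
    import scala.tools.asm.tree.{InsnList, InsnNode}

    object InsnListGuardDemo extends App {
      val listA = new InsnList
      val listB = new InsnList
      val nop1  = new InsnNode(Opcodes.NOP)
      val nop2  = new InsnNode(Opcodes.NOP)

      listA.add(nop1)
      listA.add(nop2)     // links nop1.next to nop2, so nop1 now belongs to listA
      try listB.add(nop1) // previously corrupted both lists silently, now fails fast
      catch { case e: RuntimeException => println(e.getMessage) }
    }
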
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
index bd946bf0f3..e70173319e 100644
--- a/src/build/maven/maven-deploy.xml
+++ b/src/build/maven/maven-deploy.xml
@@ -164,11 +164,7 @@
<attribute name="repository" />
<attribute name="version" />
<sequential>
- <deploy-remote name="scala-library" version="@{version}" repository="@{repository}">
- <extra-attachments>
- <artifact:attach type="jar" file="scala-library/scala-library-docs.jar" classifier="javadoc" />
- </extra-attachments>
- </deploy-remote>
+ <deploy-remote name="scala-library" version="@{version}" repository="@{repository}"/>
<deploy-remote name="jline" version="@{version}" repository="@{repository}"/>
<deploy-remote name="scala-reflect" version="@{version}" repository="@{repository}"/>
<deploy-remote name="scala-compiler" version="@{version}" repository="@{repository}" />
diff --git a/src/build/maven/scala-actors-pom.xml b/src/build/maven/scala-actors-pom.xml
index e8c6649721..3d37ef8174 100644
--- a/src/build/maven/scala-actors-pom.xml
+++ b/src/build/maven/scala-actors-pom.xml
@@ -30,6 +30,9 @@
<system>JIRA</system>
<url>https://issues.scala-lang.org/</url>
</issueManagement>
+ <properties>
+ <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
+ </properties>
<dependencies>
<dependency>
<groupId>org.scala-lang</groupId>
diff --git a/src/build/maven/scala-library-pom.xml b/src/build/maven/scala-library-pom.xml
index d1192b2dd8..fc9964ae92 100644
--- a/src/build/maven/scala-library-pom.xml
+++ b/src/build/maven/scala-library-pom.xml
@@ -30,6 +30,9 @@
<system>JIRA</system>
<url>https://issues.scala-lang.org/</url>
</issueManagement>
+ <properties>
+ <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
+ </properties>
<dependencies>
<!--<dependency>
<groupId>com.typesafe</groupId>
diff --git a/src/build/maven/scala-reflect-pom.xml b/src/build/maven/scala-reflect-pom.xml
index 7a1613f42c..56d2ffc57c 100644
--- a/src/build/maven/scala-reflect-pom.xml
+++ b/src/build/maven/scala-reflect-pom.xml
@@ -30,7 +30,9 @@
<system>JIRA</system>
<url>https://issues.scala-lang.org/</url>
</issueManagement>
-
+ <properties>
+ <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
+ </properties>
<dependencies>
<dependency>
<groupId>org.scala-lang</groupId>
diff --git a/src/build/maven/scala-swing-pom.xml b/src/build/maven/scala-swing-pom.xml
index 3df5db5b21..5099fe11dc 100644
--- a/src/build/maven/scala-swing-pom.xml
+++ b/src/build/maven/scala-swing-pom.xml
@@ -30,6 +30,9 @@
<system>JIRA</system>
<url>https://issues.scala-lang.org/</url>
</issueManagement>
+ <properties>
+ <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
+ </properties>
<dependencies>
<dependency>
<groupId>org.scala-lang</groupId>
diff --git a/src/build/pack.xml b/src/build/pack.xml
index 381d3f1931..20c4034107 100644
--- a/src/build/pack.xml
+++ b/src/build/pack.xml
@@ -112,8 +112,10 @@ MAIN DISTRIBUTION PACKAGING
</target>
<target name="pack-archives.latest.unix" depends="pack-archives.src" unless="os.win">
+ <!-- be sure to use a relative symlink to make the distribution portable,
+ `resource` is relative to the directory of `link` -->
<symlink link="${dists.dir}/archives/scala-latest-sources.tgz"
- resource="${dists.dir}/archives/scala-${version.number}-sources.tgz"
+ resource="scala-${version.number}-sources.tgz"
overwrite="yes"/>
</target>
diff --git a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
new file mode 100644
index 0000000000..32c6da8007
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
@@ -0,0 +1,33 @@
+package scala.reflect.macros
+package compiler
+
+import scala.tools.nsc.Global
+import scala.reflect.macros.contexts.Context
+
+abstract class DefaultMacroCompiler extends Resolvers
+ with Validators
+ with Errors {
+ val global: Global
+ import global._
+
+ val typer: global.analyzer.Typer
+ private implicit val context0 = typer.context
+ val context = typer.context
+
+ val macroDdef: DefDef
+ lazy val macroDef = macroDdef.symbol
+
+ private case class MacroImplResolutionException(pos: Position, msg: String) extends Exception
+ def abort(pos: Position, msg: String) = throw MacroImplResolutionException(pos, msg)
+
+ def resolveMacroImpl: Tree = {
+ try {
+ validateMacroImplRef()
+ macroImplRef
+ } catch {
+ case MacroImplResolutionException(pos, msg) =>
+ context.error(pos, msg)
+ EmptyTree
+ }
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/compiler/Errors.scala b/src/compiler/scala/reflect/macros/compiler/Errors.scala
new file mode 100644
index 0000000000..dd3142127e
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/compiler/Errors.scala
@@ -0,0 +1,113 @@
+package scala.reflect.macros
+package compiler
+
+import scala.compat.Platform.EOL
+import scala.reflect.macros.util.Traces
+
+trait Errors extends Traces {
+ self: DefaultMacroCompiler =>
+
+ import global._
+ import analyzer._
+ import definitions._
+ import typer.TyperErrorGen._
+ import typer.infer.InferErrorGen._
+ def globalSettings = global.settings
+
+ // sanity check errors
+
+ private def implRefError(message: String) = abort(macroDdef.pos, message)
+
+ def MacroImplReferenceWrongShapeError() = implRefError(
+ "macro implementation reference has wrong shape. required:\n"+
+ "macro [<static object>].<method name>[[<type args>]] or\n" +
+ "macro [<macro bundle>].<method name>[[<type args>]]")
+
+ def MacroImplNotPublicError() = implRefError("macro implementation must be public")
+
+ def MacroImplOverloadedError() = implRefError("macro implementation cannot be overloaded")
+
+ def MacroImplWrongNumberOfTypeArgumentsError() = implRefError(TypedApplyWrongNumberOfTpeParametersErrorMessage(macroImplRef))
+
+ // compatibility errors
+
+ // helpers
+
+ private def lengthMsg(flavor: String, violation: String, extra: Symbol) = {
+ val noun = if (flavor == "value") "parameter" else "type parameter"
+ val message = noun + " lists have different length, " + violation + " extra " + noun
+ val suffix = if (extra ne NoSymbol) " " + extra.defString else ""
+ message + suffix
+ }
+
+ private def abbreviateCoreAliases(s: String): String = List("WeakTypeTag", "Expr").foldLeft(s)((res, x) => res.replace("c.universe." + x, "c." + x))
+
+ private def showMeth(pss: List[List[Symbol]], restpe: Type, abbreviate: Boolean) = {
+ var argsPart = (pss map (ps => ps map (_.defString) mkString ("(", ", ", ")"))).mkString
+ if (abbreviate) argsPart = abbreviateCoreAliases(argsPart)
+ var retPart = restpe.toString
+ if (abbreviate || macroDdef.tpt.tpe == null) retPart = abbreviateCoreAliases(retPart)
+ argsPart + ": " + retPart
+ }
+
+ // not exactly an error generator, but very related
+ // and I dearly wanted to push it away from Macros.scala
+ private def checkConforms(slot: String, rtpe: Type, atpe: Type) = {
+ val verbose = macroDebugVerbose || settings.explaintypes.value
+
+ def check(rtpe: Type, atpe: Type): Boolean = {
+ def success() = { if (verbose) println(rtpe + " <: " + atpe + "?" + EOL + "true"); true }
+ (rtpe, atpe) match {
+ case _ if rtpe eq atpe => success()
+ case (TypeRef(_, RepeatedParamClass, rtpe :: Nil), TypeRef(_, RepeatedParamClass, atpe :: Nil)) => check(rtpe, atpe)
+ case (ExprClassOf(_), TreeType()) => success()
+ case (TreeType(), ExprClassOf(_)) => success()
+ case _ => rtpe <:< atpe
+ }
+ }
+
+ val ok =
+ if (verbose) withTypesExplained(check(rtpe, atpe))
+ else check(rtpe, atpe)
+ if (!ok) {
+ if (!macroDebugVerbose)
+ explainTypes(rtpe, atpe)
+ compatibilityError("type mismatch for %s: %s does not conform to %s".format(slot, abbreviateCoreAliases(rtpe.toString), abbreviateCoreAliases(atpe.toString)))
+ }
+ }
+
+ private def compatibilityError(message: String) =
+ implRefError(
+ "macro implementation has wrong shape:"+
+ "\n required: " + showMeth(rparamss, rret, abbreviate = true) +
+ "\n found : " + showMeth(aparamss, aret, abbreviate = false) +
+ "\n" + message)
+
+ def MacroImplNonTagImplicitParameters(params: List[Symbol]) = compatibilityError("macro implementations cannot have implicit parameters other than WeakTypeTag evidences")
+
+ def MacroImplParamssMismatchError() = compatibilityError("number of parameter sections differ")
+
+ def MacroImplExtraParamsError(aparams: List[Symbol], rparams: List[Symbol]) = compatibilityError(lengthMsg("value", "found", aparams(rparams.length)))
+
+ def MacroImplMissingParamsError(aparams: List[Symbol], rparams: List[Symbol]) = compatibilityError(abbreviateCoreAliases(lengthMsg("value", "required", rparams(aparams.length))))
+
+ def checkMacroImplParamTypeMismatch(atpe: Type, rparam: Symbol) = checkConforms("parameter " + rparam.name, rparam.tpe, atpe)
+
+ def checkMacroImplResultTypeMismatch(atpe: Type, rret: Type) = checkConforms("return type", atpe, rret)
+
+ def MacroImplParamNameMismatchError(aparam: Symbol, rparam: Symbol) = compatibilityError("parameter names differ: " + rparam.name + " != " + aparam.name)
+
+ def MacroImplVarargMismatchError(aparam: Symbol, rparam: Symbol) = {
+ def fail(paramName: Name) = compatibilityError("types incompatible for parameter " + paramName + ": corresponding is not a vararg parameter")
+ if (isRepeated(rparam) && !isRepeated(aparam)) fail(rparam.name)
+ if (!isRepeated(rparam) && isRepeated(aparam)) fail(aparam.name)
+ }
+
+ def MacroImplTargMismatchError(atargs: List[Type], atparams: List[Symbol]) =
+ compatibilityError(NotWithinBoundsErrorMessage("", atargs, atparams, macroDebugVerbose || settings.explaintypes.value))
+
+ def MacroImplTparamInstantiationError(atparams: List[Symbol], ex: NoInstance) =
+ compatibilityError(
+ "type parameters "+(atparams map (_.defString) mkString ", ")+" cannot be instantiated\n"+
+ ex.getMessage)
+} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/compiler/Resolvers.scala b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala
new file mode 100644
index 0000000000..3025eb52a1
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala
@@ -0,0 +1,91 @@
+package scala.reflect.macros
+package compiler
+
+import scala.reflect.internal.Flags._
+import scala.reflect.macros.TypecheckException
+
+trait Resolvers {
+ self: DefaultMacroCompiler =>
+
+ import global._
+ import analyzer._
+ import definitions._
+ import treeInfo._
+ import gen._
+
+ /** Determines the type of context implied by the macro def.
+ */
+ val ctxTpe = MacroContextClass.tpe
+
+ /** Resolves a macro impl reference provided in the right-hand side of the given macro definition.
+ *
+ * Acceptable shapes of the right-hand side:
+ * 1) [<static object>].<method name>[[<type args>]] // vanilla macro def
+ * 2) [<macro bundle>].<method name>[[<type args>]] // shiny new macro bundle
+ *
+ * Produces a tree, which represents a reference to a macro implementation if everything goes well,
+ * otherwise reports found errors and returns EmptyTree. The resulting tree should have the following format:
+ *
+ * qualifier.method[targs]
+ *
+ * Qualifier here might be omitted (local macro defs), be a static object (vanilla macro defs)
+ * or be a dummy instance of a macro bundle (e.g. new MyMacro(???).expand).
+ */
+ lazy val macroImplRef: Tree = {
+ val (maybeBundleRef, methName, targs) = macroDdef.rhs match {
+ case Applied(methRef @ Select(bundleRef @ RefTree(qual, bundleName), methName), targs, Nil) =>
+ (RefTree(qual, bundleName.toTypeName), methName, targs)
+ case Applied(Ident(methName), targs, Nil) =>
+ (Ident(context.owner.enclClass), methName, targs)
+ case _ =>
+ (EmptyTree, TermName(""), Nil)
+ }
+
+ val untypedImplRef = typer.silent(_.typedType(maybeBundleRef)) match {
+ case SilentResultValue(result) if isMacroBundleType(result.tpe) =>
+ val bundleClass = result.tpe.typeSymbol
+ if (!bundleClass.owner.isPackageClass) abort(macroDef.pos, "macro bundles can only be defined as top-level classes or traits")
+
+ // synthesize the invoker, i.e. given a top-level `trait Foo extends Macro { def expand = ... } `
+ // create a top-level definition `class FooInvoker(val c: Context) extends Foo` in MACRO_INVOKER_PACKAGE
+ val invokerPid = gen.mkUnattributedRef(nme.MACRO_INVOKER_PACKAGE)
+ val invokerName = TypeName(bundleClass.fullName.split('.').map(_.capitalize).mkString("") + nme.MACRO_INVOKER_SUFFIX)
+ val invokerFullName = TypeName(s"$invokerPid.$invokerName")
+ val existingInvoker = rootMirror.getClassIfDefined(invokerFullName)
+ if (!currentRun.compiles(existingInvoker)) {
+ def mkContextValDef(flags: Long) = ValDef(Modifiers(flags), nme.c, TypeTree(ctxTpe), EmptyTree)
+ val contextField = mkContextValDef(PARAMACCESSOR)
+ val contextParam = mkContextValDef(PARAM | PARAMACCESSOR)
+ val invokerCtor = DefDef(Modifiers(), nme.CONSTRUCTOR, Nil, List(List(contextParam)), TypeTree(), Block(List(pendingSuperCall), Literal(Constant(()))))
+ val invoker = atPos(bundleClass.pos)(ClassDef(NoMods, invokerName, Nil, Template(List(Ident(bundleClass)), emptyValDef, List(contextField, invokerCtor))))
+ currentRun.compileLate(PackageDef(invokerPid, List(invoker)))
+ }
+
+ // synthesize the macro impl reference, which is going to look like:
+ // `new Foo$invoker(???).expand` plus the optional type arguments
+ val instanceOfInvoker = New(Select(invokerPid, invokerName), List(List(Select(scalaDot(nme.Predef), nme.???))))
+ gen.mkTypeApply(Select(instanceOfInvoker, methName), targs)
+ case _ =>
+ macroDdef.rhs
+ }
+
+ val typedImplRef = typer.silent(_.typed(markMacroImplRef(untypedImplRef)), reportAmbiguousErrors = false)
+ typedImplRef match {
+ case SilentResultValue(success) => success
+ case SilentTypeError(err) => abort(err.errPos, err.errMsg)
+ }
+ }
+
+ // FIXME: cannot write this concisely because of SI-7507
+ // lazy val (isImplBundle, macroImplOwner, macroImpl, macroImplTargs) =
+ private lazy val dissectedMacroImplRef =
+ macroImplRef match {
+ case MacroImplReference(isBundle, owner, meth, targs) => (isBundle, owner, meth, targs)
+ case _ => MacroImplReferenceWrongShapeError()
+ }
+ lazy val isImplBundle = dissectedMacroImplRef._1
+ lazy val isImplMethod = !isImplBundle
+ lazy val macroImplOwner = dissectedMacroImplRef._2
+ lazy val macroImpl = dissectedMacroImplRef._3
+ lazy val targs = dissectedMacroImplRef._4
+}
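
For reference, the two right-hand-side shapes accepted by the resolver look like this in user code. The vanilla form is spelled out; the bundle form follows the `trait Foo extends Macro { def expand = ... }` shape mentioned in the comments above and is only sketched, since the bundle API is still in flux here:

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object HelloImpls {
      // macro impl living in a static object
      def hello(c: Context)(): c.Expr[Unit] = c.universe.reify(println("hello"))
    }

    object HelloMacros {
      // shape 1: [<static object>].<method name>
      def hello(): Unit = macro HelloImpls.hello

      // shape 2 (sketch): [<macro bundle>].<method name>, e.g.
      //   trait HelloBundle extends Macro { def expand = c.universe.reify(println("hello")) }
      //   def hello2(): Unit = macro HelloBundle.expand
    }
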
diff --git a/src/compiler/scala/reflect/macros/compiler/Validators.scala b/src/compiler/scala/reflect/macros/compiler/Validators.scala
new file mode 100644
index 0000000000..60cfc94a23
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/compiler/Validators.scala
@@ -0,0 +1,193 @@
+package scala.reflect.macros
+package compiler
+
+import java.util.UUID.randomUUID
+import scala.reflect.internal.Flags._
+import scala.reflect.macros.TypecheckException
+
+trait Validators {
+ self: DefaultMacroCompiler =>
+
+ import global._
+ import analyzer._
+ import definitions._
+ import treeInfo._
+ import typer.infer._
+
+ def validateMacroImplRef() = {
+ sanityCheck()
+ if (macroImpl != Predef_???) checkMacroDefMacroImplCorrespondence()
+ }
+
+ private def sanityCheck() = {
+ if (!macroImpl.isMethod) MacroImplReferenceWrongShapeError()
+ if (!macroImpl.isPublic) MacroImplNotPublicError()
+ if (macroImpl.isOverloaded) MacroImplOverloadedError()
+ if (macroImpl.typeParams.length != targs.length) MacroImplWrongNumberOfTypeArgumentsError()
+ val declaredInStaticObject = isImplMethod && (macroImplOwner.isStaticOwner || macroImplOwner.moduleClass.isStaticOwner)
+ val declaredInTopLevelClass = isImplBundle && macroImplOwner.owner.isPackageClass
+ if (!declaredInStaticObject && !declaredInTopLevelClass) MacroImplReferenceWrongShapeError()
+ }
+
+ private def checkMacroDefMacroImplCorrespondence() = {
+ val atvars = atparams map freshVar
+ def atpeToRtpe(atpe: Type) = atpe.substSym(aparamss.flatten, rparamss.flatten).instantiateTypeParams(atparams, atvars)
+
+ // we only check strict correspondence between value parameters
+ // type parameters of macro defs and macro impls don't have to coincide with each other
+ val implicitParams = aparamss.flatten filter (_.isImplicit)
+ if (implicitParams.nonEmpty) MacroImplNonTagImplicitParameters(implicitParams)
+ if (aparamss.length != rparamss.length) MacroImplParamssMismatchError()
+ map2(aparamss, rparamss)((aparams, rparams) => {
+ if (aparams.length < rparams.length) MacroImplMissingParamsError(aparams, rparams)
+ if (rparams.length < aparams.length) MacroImplExtraParamsError(aparams, rparams)
+ })
+
+ try {
+ // cannot fuse this map2 and the map2 above because if aparamss.flatten != rparamss.flatten
+ // then `atpeToRtpe` is going to fail with an unsound substitution
+ map2(aparamss.flatten, rparamss.flatten)((aparam, rparam) => {
+ if (aparam.name != rparam.name && !rparam.isSynthetic) MacroImplParamNameMismatchError(aparam, rparam)
+ if (isRepeated(aparam) ^ isRepeated(rparam)) MacroImplVarargMismatchError(aparam, rparam)
+ val aparamtpe = aparam.tpe.dealias match {
+ case RefinedType(List(tpe), Scope(sym)) if tpe =:= ctxTpe && sym.allOverriddenSymbols.contains(MacroContextPrefixType) => tpe
+ case tpe => tpe
+ }
+ checkMacroImplParamTypeMismatch(atpeToRtpe(aparamtpe), rparam)
+ })
+
+ checkMacroImplResultTypeMismatch(atpeToRtpe(aret), rret)
+
+ val maxLubDepth = lubDepth(aparamss.flatten map (_.tpe)) max lubDepth(rparamss.flatten map (_.tpe))
+ val atargs = solvedTypes(atvars, atparams, atparams map varianceInType(aret), upper = false, depth = maxLubDepth)
+ val boundsOk = typer.silent(_.infer.checkBounds(macroDdef, NoPrefix, NoSymbol, atparams, atargs, ""))
+ boundsOk match {
+ case SilentResultValue(true) => // do nothing, success
+ case SilentResultValue(false) | SilentTypeError(_) => MacroImplTargMismatchError(atargs, atparams)
+ }
+ } catch {
+ case ex: NoInstance => MacroImplTparamInstantiationError(atparams, ex)
+ }
+ }
+
+ // aXXX (e.g. aparamss) => characteristics of the actual macro impl signature extracted from the macro impl ("a" stands for "actual")
+ // rXXX (e.g. rparamss) => characteristics of the reference macro impl signature synthesized from the macro def ("r" stands for "reference")
+ // FIXME: cannot write this concisely because of SI-7507
+ //lazy val MacroImplSig(atparams, aparamss, aret) = macroImplSig
+ //lazy val MacroImplSig(_, rparamss, rret) = referenceMacroImplSig
+ lazy val atparams = macroImplSig.tparams
+ lazy val aparamss = macroImplSig.paramss
+ lazy val aret = macroImplSig.ret
+ lazy val rparamss = referenceMacroImplSig.paramss
+ lazy val rret = referenceMacroImplSig.ret
+
+ // Technically this can be just an alias to MethodType, but promoting it to a first-class entity
+ // provides better encapsulation and convenient syntax for pattern matching.
+ private case class MacroImplSig(tparams: List[Symbol], paramss: List[List[Symbol]], ret: Type)
+
+ /** An actual macro implementation signature extracted from a macro implementation method.
+ *
+ * For the following macro impl:
+ * def fooBar[T: c.WeakTypeTag]
+ * (c: scala.reflect.macros.Context)
+ * (xs: c.Expr[List[T]])
+ * : c.Expr[T] = ...
+ *
+ * This function will return:
+ * (c: scala.reflect.macros.Context)(xs: c.Expr[List[T]])c.Expr[T]
+ *
+ * Note that type tag evidence parameters are not included in the result.
+ * Type tag context bounds for macro impl tparams are optional.
+ * Therefore compatibility checks ignore such parameters, and we don't need to bother about them here.
+ *
+ * This method cannot be reduced to just macroImpl.info, because macro implementations might
+ * come in different shapes. If the implementation is an apply method of a Macro-compatible object,
+ * then it won't have (c: Context) in its parameters, but will rather refer to Macro.c.
+ *
+ * @param macroImpl The macro implementation symbol
+ */
+ private lazy val macroImplSig: MacroImplSig = {
+ val tparams = macroImpl.typeParams
+ val paramss = transformTypeTagEvidenceParams(macroImplRef, (param, tparam) => NoSymbol)
+ val ret = macroImpl.info.finalResultType
+ MacroImplSig(tparams, paramss, ret)
+ }
+
+ /** A reference macro implementation signature extracted from a given macro definition.
+ *
+ * For the following macro def:
+ * def foo[T](xs: List[T]): T = macro fooBar
+ *
+ * This function will return:
+ * (c: scala.reflect.macros.Context)(xs: c.Expr[List[T]])c.Expr[T]
+ *
+ * Note that type tag evidence parameters are not included in the result.
+ * Type tag context bounds for macro impl tparams are optional.
+ * Therefore compatibility checks ignore such parameters, and we don't need to bother about them here.
+ *
+ * Also note that we need a DefDef, not the corresponding MethodSymbol, because that symbol would be of no use for us.
+ * Macro signatures are verified when typechecking macro defs, which means that at that moment inspecting macroDef.info
+ * means asking for cyclic reference errors.
+ *
+ * We need the macro implementation symbol as well, because the return type of the macro definition might be omitted,
+ * and in that case we'd need to infer it from the return type of the macro implementation. Luckily for us, we can
+ * use that symbol without a risk of running into cycles.
+ *
+ * @param typer Typechecker of `macroDdef`
+ * @param macroDdef The macro definition tree
+ * @param macroImpl The macro implementation symbol
+ */
+ private lazy val referenceMacroImplSig: MacroImplSig = {
+ // had to move method's body to an object because of the recursive dependencies between sigma and param
+ object SigGenerator {
+ val cache = scala.collection.mutable.Map[Symbol, Symbol]()
+ val ctxPrefix =
+ if (isImplMethod) singleType(NoPrefix, makeParam(nme.macroContext, macroDdef.pos, ctxTpe, SYNTHETIC))
+ else singleType(ThisType(macroImpl.owner), macroImpl.owner.tpe.member(nme.c))
+ var paramss =
+ if (isImplMethod) List(ctxPrefix.termSymbol) :: mmap(macroDdef.vparamss)(param)
+ else mmap(macroDdef.vparamss)(param)
+ val macroDefRet =
+ if (!macroDdef.tpt.isEmpty) typer.typedType(macroDdef.tpt).tpe
+ else computeMacroDefTypeFromMacroImplRef(macroDdef, macroImplRef)
+ val implReturnType = sigma(increaseMetalevel(ctxPrefix, macroDefRet))
+
+ object SigmaTypeMap extends TypeMap {
+ def mapPrefix(pre: Type) = pre match {
+ case ThisType(sym) if sym == macroDef.owner =>
+ singleType(singleType(ctxPrefix, MacroContextPrefix), ExprValue)
+ case SingleType(NoPrefix, sym) =>
+ mfind(macroDdef.vparamss)(_.symbol == sym).fold(pre)(p => singleType(singleType(NoPrefix, param(p)), ExprValue))
+ case _ =>
+ mapOver(pre)
+ }
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(pre, sym, args) =>
+ val pre1 = mapPrefix(pre)
+ val args1 = mapOverArgs(args, sym.typeParams)
+ if ((pre eq pre1) && (args eq args1)) tp
+ else typeRef(pre1, sym, args1)
+ case _ =>
+ mapOver(tp)
+ }
+ }
+ def sigma(tpe: Type): Type = SigmaTypeMap(tpe)
+
+ def makeParam(name: Name, pos: Position, tpe: Type, flags: Long) =
+ macroDef.newValueParameter(name.toTermName, pos, flags) setInfo tpe
+ def param(tree: Tree): Symbol = (
+ cache.getOrElseUpdate(tree.symbol, {
+ val sym = tree.symbol
+ assert(sym.isTerm, s"sym = $sym, tree = $tree")
+ makeParam(sym.name, sym.pos, sigma(increaseMetalevel(ctxPrefix, sym.tpe)), sym.flags)
+ })
+ )
+ }
+
+ import SigGenerator._
+ macroLogVerbose(s"generating macroImplSigs for: $macroDdef")
+ val result = MacroImplSig(macroDdef.tparams map (_.symbol), paramss, implReturnType)
+ macroLogVerbose(s"result is: $result")
+ result
+ }
+}
diff --git a/src/compiler/scala/reflect/macros/runtime/Aliases.scala b/src/compiler/scala/reflect/macros/contexts/Aliases.scala
index ff870e728e..cc64d97d85 100644
--- a/src/compiler/scala/reflect/macros/runtime/Aliases.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Aliases.scala
@@ -1,5 +1,5 @@
package scala.reflect.macros
-package runtime
+package contexts
trait Aliases {
self: Context =>
@@ -28,4 +28,8 @@ trait Aliases {
override def typeTag[T](implicit ttag: TypeTag[T]) = ttag
override def weakTypeOf[T](implicit attag: WeakTypeTag[T]): Type = attag.tpe
override def typeOf[T](implicit ttag: TypeTag[T]): Type = ttag.tpe
+
+ implicit class RichOpenImplicit(oi: universe.analyzer.OpenImplicit) {
+ def toImplicitCandidate = ImplicitCandidate(oi.info.pre, oi.info.sym, oi.pt, oi.tree)
+ }
} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/contexts/Context.scala b/src/compiler/scala/reflect/macros/contexts/Context.scala
new file mode 100644
index 0000000000..bd1d7d5248
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/Context.scala
@@ -0,0 +1,29 @@
+package scala.reflect.macros
+package contexts
+
+import scala.tools.nsc.Global
+
+abstract class Context extends scala.reflect.macros.Context
+ with Aliases
+ with Enclosures
+ with Names
+ with Reifiers
+ with FrontEnds
+ with Infrastructure
+ with Typers
+ with Parsers
+ with Evals
+ with ExprUtils
+ with Synthetics
+ with Traces {
+
+ val universe: Global
+
+ val mirror: universe.Mirror = universe.rootMirror
+
+ val callsiteTyper: universe.analyzer.Typer
+
+ val prefix: Expr[PrefixType]
+
+ val expandee: Tree
+}
diff --git a/src/compiler/scala/reflect/macros/contexts/Enclosures.scala b/src/compiler/scala/reflect/macros/contexts/Enclosures.scala
new file mode 100644
index 0000000000..bb88c8d5e1
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/Enclosures.scala
@@ -0,0 +1,36 @@
+package scala.reflect.macros
+package contexts
+
+import scala.reflect.{ClassTag, classTag}
+
+trait Enclosures {
+ self: Context =>
+
+ import universe._
+
+ type MacroRole = analyzer.MacroRole
+ def APPLY_ROLE = analyzer.APPLY_ROLE
+ def macroRole: MacroRole
+
+ private lazy val site = callsiteTyper.context
+ private lazy val enclTrees = site.enclosingContextChain map (_.tree)
+ private lazy val enclPoses = enclosingMacros map (_.macroApplication.pos) filterNot (_ eq NoPosition)
+
+ private def lenientEnclosure[T <: Tree : ClassTag]: Tree = enclTrees collectFirst { case x: T => x } getOrElse EmptyTree
+ private def strictEnclosure[T <: Tree : ClassTag]: T = enclTrees collectFirst { case x: T => x } getOrElse (throw new EnclosureException(classTag[T].runtimeClass, enclTrees))
+
+ // vals are eager to simplify debugging
+ // after all we wouldn't save that much time by making them lazy
+ val macroApplication: Tree = expandee
+ def enclosingPackage: PackageDef = strictEnclosure[PackageDef]
+ val enclosingClass: Tree = lenientEnclosure[ImplDef]
+ def enclosingImpl: ImplDef = strictEnclosure[ImplDef]
+ def enclosingTemplate: Template = strictEnclosure[Template]
+ val enclosingImplicits: List[ImplicitCandidate] = site.openImplicits.map(_.toImplicitCandidate)
+ val enclosingMacros: List[Context] = this :: universe.analyzer.openMacros // include self
+ val enclosingMethod: Tree = lenientEnclosure[DefDef]
+ def enclosingDef: DefDef = strictEnclosure[DefDef]
+ val enclosingPosition: Position = if (enclPoses.isEmpty) NoPosition else enclPoses.head.pos
+ val enclosingUnit: CompilationUnit = universe.currentRun.currentUnit
+ val enclosingRun: Run = universe.currentRun
+}
diff --git a/src/compiler/scala/reflect/macros/runtime/Evals.scala b/src/compiler/scala/reflect/macros/contexts/Evals.scala
index 1f7b5f2ff1..84928ddf86 100644
--- a/src/compiler/scala/reflect/macros/runtime/Evals.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Evals.scala
@@ -1,5 +1,5 @@
package scala.reflect.macros
-package runtime
+package contexts
import scala.reflect.runtime.{universe => ru}
import scala.tools.reflect.ToolBox
@@ -7,7 +7,7 @@ import scala.tools.reflect.ToolBox
trait Evals {
self: Context =>
- private lazy val evalMirror = ru.runtimeMirror(universe.analyzer.macroClassloader)
+ private lazy val evalMirror = ru.runtimeMirror(universe.analyzer.defaultMacroClassloader)
private lazy val evalToolBox = evalMirror.mkToolBox()
private lazy val evalImporter = ru.mkImporter(universe).asInstanceOf[ru.Importer { val from: universe.type }]
diff --git a/src/compiler/scala/reflect/macros/runtime/ExprUtils.scala b/src/compiler/scala/reflect/macros/contexts/ExprUtils.scala
index a719beed97..4846325d1e 100644
--- a/src/compiler/scala/reflect/macros/runtime/ExprUtils.scala
+++ b/src/compiler/scala/reflect/macros/contexts/ExprUtils.scala
@@ -1,5 +1,5 @@
package scala.reflect.macros
-package runtime
+package contexts
trait ExprUtils {
self: Context =>
diff --git a/src/compiler/scala/reflect/macros/runtime/FrontEnds.scala b/src/compiler/scala/reflect/macros/contexts/FrontEnds.scala
index a6a198e1b4..fda05de09c 100644
--- a/src/compiler/scala/reflect/macros/runtime/FrontEnds.scala
+++ b/src/compiler/scala/reflect/macros/contexts/FrontEnds.scala
@@ -1,5 +1,7 @@
package scala.reflect.macros
-package runtime
+package contexts
+
+import scala.reflect.macros.runtime.AbortMacroException
trait FrontEnds {
self: Context =>
diff --git a/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala b/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala
index 7781693822..df7aa4d2be 100644
--- a/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala
@@ -1,5 +1,5 @@
package scala.reflect.macros
-package runtime
+package contexts
trait Infrastructure {
self: Context =>
diff --git a/src/compiler/scala/reflect/macros/runtime/Names.scala b/src/compiler/scala/reflect/macros/contexts/Names.scala
index 635e8bcd45..e535754a98 100644
--- a/src/compiler/scala/reflect/macros/runtime/Names.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Names.scala
@@ -1,5 +1,5 @@
package scala.reflect.macros
-package runtime
+package contexts
trait Names {
self: Context =>
diff --git a/src/compiler/scala/reflect/macros/runtime/Parsers.scala b/src/compiler/scala/reflect/macros/contexts/Parsers.scala
index 566bcde73d..3dab02beba 100644
--- a/src/compiler/scala/reflect/macros/runtime/Parsers.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Parsers.scala
@@ -1,5 +1,5 @@
package scala.reflect.macros
-package runtime
+package contexts
import scala.language.existentials
import scala.tools.reflect.ToolBox
diff --git a/src/compiler/scala/reflect/macros/runtime/Reifiers.scala b/src/compiler/scala/reflect/macros/contexts/Reifiers.scala
index 7ec3457c6a..ecef1c7289 100644
--- a/src/compiler/scala/reflect/macros/runtime/Reifiers.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Reifiers.scala
@@ -4,7 +4,7 @@
*/
package scala.reflect.macros
-package runtime
+package contexts
trait Reifiers {
self: Context =>
diff --git a/src/compiler/scala/reflect/macros/runtime/Synthetics.scala b/src/compiler/scala/reflect/macros/contexts/Synthetics.scala
index 73f3ab8d20..ada16a8113 100644
--- a/src/compiler/scala/reflect/macros/runtime/Synthetics.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Synthetics.scala
@@ -3,9 +3,8 @@
*/
package scala.reflect.macros
-package runtime
+package contexts
-import java.util.UUID._
import scala.reflect.internal.Flags._
import scala.reflect.internal.util.BatchSourceFile
import scala.reflect.io.VirtualFile
@@ -42,11 +41,6 @@ trait Synthetics {
else EmptyTree
}
- // TODO: provide a way to specify a pretty name for debugging purposes
- private def randomFileName() = (
- "macroSynthetic-" + randomUUID().toString.replace("-", "") + ".scala"
- )
-
def introduceTopLevel[T: PackageSpec](packagePrototype: T, definition: universe.ImplDef): RefTree =
introduceTopLevel(packagePrototype, List(definition)).head
@@ -55,18 +49,7 @@ trait Synthetics {
private def introduceTopLevel[T: PackageSpec](packagePrototype: T, definitions: List[universe.ImplDef]): List[RefTree] = {
val code @ PackageDef(pid, _) = implicitly[PackageSpec[T]].mkPackageDef(packagePrototype, definitions)
- val syntheticFileName = randomFileName()
- // compatibility with SBT
- // on the one hand, we need to specify some jfile here, otherwise sbt crashes with an NPE (SI-6870)
- // on the other hand, we can't specify the obvious enclosingUnit, because then sbt somehow fails to run tests using type macros
- // okay, now let's specify a guaranteedly non-existent file in an existing directory (so that we don't run into permission problems)
- val relatedJfile = enclosingUnit.source.file.file
- val fakeJfile = if (relatedJfile != null) new java.io.File(relatedJfile.getParent, syntheticFileName) else null
- val virtualFile = new VirtualFile(syntheticFileName) { override def file = fakeJfile }
- val sourceFile = new BatchSourceFile(virtualFile, code.toString)
- val unit = new CompilationUnit(sourceFile)
- unit.body = code
- universe.currentRun.compileLate(unit)
+ universe.currentRun.compileLate(code)
definitions map (definition => Select(pid, definition.name))
}
diff --git a/src/compiler/scala/reflect/macros/runtime/Traces.scala b/src/compiler/scala/reflect/macros/contexts/Traces.scala
index 0238e9f84e..df47f6ba81 100644
--- a/src/compiler/scala/reflect/macros/runtime/Traces.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Traces.scala
@@ -1,5 +1,5 @@
package scala.reflect.macros
-package runtime
+package contexts
trait Traces extends util.Traces {
self: Context =>
diff --git a/src/compiler/scala/reflect/macros/runtime/Typers.scala b/src/compiler/scala/reflect/macros/contexts/Typers.scala
index 7e268247dd..4a1122b913 100644
--- a/src/compiler/scala/reflect/macros/runtime/Typers.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Typers.scala
@@ -1,5 +1,5 @@
package scala.reflect.macros
-package runtime
+package contexts
import scala.reflect.internal.Mode
@@ -8,10 +8,10 @@ trait Typers {
def openMacros: List[Context] = this :: universe.analyzer.openMacros
- def openImplicits: List[(Type, Tree)] = callsiteTyper.context.openImplicits
+ def openImplicits: List[ImplicitCandidate] = callsiteTyper.context.openImplicits.map(_.toImplicitCandidate)
/**
- * @see [[scala.tools.reflect.Toolbox.typeCheck]]
+ * @see [[scala.tools.reflect.ToolBox.typeCheck]]
*/
def typeCheck(tree: Tree, pt: Type = universe.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree = {
macroLogVerbose("typechecking %s with expected type %s, implicit views = %s, macros = %s".format(tree, pt, !withImplicitViewsDisabled, !withMacrosDisabled))
@@ -24,7 +24,7 @@ trait Typers {
// typechecking uses silent anyways (e.g. in typedSelect), so you'll only waste your time
// I'd advise fixing the root cause: finding why the context is not set to report errors
// (also see reflect.runtime.ToolBoxes.typeCheckExpr for a workaround that might work for you)
- wrapper(callsiteTyper.silent(_.typed(tree, Mode.EXPRmode, pt)) match {
+ wrapper(callsiteTyper.silent(_.typed(tree, pt), reportAmbiguousErrors = false) match {
case universe.analyzer.SilentResultValue(result) =>
macroLogVerbose(result)
result
@@ -37,28 +37,13 @@ trait Typers {
def inferImplicitValue(pt: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree = {
macroLogVerbose("inferring implicit value of type %s, macros = %s".format(pt, !withMacrosDisabled))
- inferImplicit(universe.EmptyTree, pt, isView = false, silent = silent, withMacrosDisabled = withMacrosDisabled, pos = pos)
+ universe.analyzer.inferImplicit(universe.EmptyTree, pt, false, callsiteTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw TypecheckException(pos, msg))
}
def inferImplicitView(tree: Tree, from: Type, to: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree = {
macroLogVerbose("inferring implicit view from %s to %s for %s, macros = %s".format(from, to, tree, !withMacrosDisabled))
val viewTpe = universe.appliedType(universe.definitions.FunctionClass(1).toTypeConstructor, List(from, to))
- inferImplicit(tree, viewTpe, isView = true, silent = silent, withMacrosDisabled = withMacrosDisabled, pos = pos)
- }
-
- private def inferImplicit(tree: Tree, pt: Type, isView: Boolean, silent: Boolean, withMacrosDisabled: Boolean, pos: Position): Tree = {
- import universe.analyzer.SearchResult
- val context = callsiteTyper.context
- val wrapper1 = if (!withMacrosDisabled) (context.withMacrosEnabled[SearchResult] _) else (context.withMacrosDisabled[SearchResult] _)
- def wrapper (inference: => SearchResult) = wrapper1(inference)
- wrapper(universe.analyzer.inferImplicit(tree, pt, reportAmbiguous = true, isView = isView, context = context, saveAmbiguousDivergent = !silent, pos = pos)) match {
- case failure if failure.tree.isEmpty =>
- macroLogVerbose("implicit search has failed. to find out the reason, turn on -Xlog-implicits")
- if (context.hasErrors) throw new TypecheckException(context.errBuffer.head.errPos, context.errBuffer.head.errMsg)
- universe.EmptyTree
- case success =>
- success.tree
- }
+ universe.analyzer.inferImplicit(tree, viewTpe, true, callsiteTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw TypecheckException(pos, msg))
}
def resetAllAttrs(tree: Tree): Tree = universe.resetAllAttrs(tree)
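
Both helpers now delegate directly to `universe.analyzer.inferImplicit` and surface failures as `TypecheckException`. A hedged sketch of the typical way a macro implementation drives `inferImplicitValue` (the materializer itself is illustrative):

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object Orderings {
      def orderingFor[T]: Ordering[T] = macro orderingForImpl[T]

      def orderingForImpl[T: c.WeakTypeTag](c: Context): c.Expr[Ordering[T]] = {
        import c.universe._
        // resolve the implicit at the macro call site; throws TypecheckException if none is found
        val tree = c.inferImplicitValue(weakTypeOf[Ordering[T]], silent = false)
        c.Expr[Ordering[T]](tree)
      }
    }
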
diff --git a/src/compiler/scala/reflect/macros/runtime/Context.scala b/src/compiler/scala/reflect/macros/runtime/Context.scala
deleted file mode 100644
index 76c684f6d7..0000000000
--- a/src/compiler/scala/reflect/macros/runtime/Context.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-import scala.tools.nsc.Global
-
-abstract class Context extends scala.reflect.macros.Context
- with Aliases
- with Enclosures
- with Names
- with Reifiers
- with FrontEnds
- with Infrastructure
- with Typers
- with Parsers
- with Evals
- with ExprUtils
- with Synthetics
- with Traces {
-
- val universe: Global
-
- val mirror: universe.Mirror = universe.rootMirror
-
- val callsiteTyper: universe.analyzer.Typer
-
- val prefix: Expr[PrefixType]
-
- val expandee: Tree
-}
diff --git a/src/compiler/scala/reflect/macros/runtime/Enclosures.scala b/src/compiler/scala/reflect/macros/runtime/Enclosures.scala
deleted file mode 100644
index 8fe0b09700..0000000000
--- a/src/compiler/scala/reflect/macros/runtime/Enclosures.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-import scala.reflect.{ClassTag, classTag}
-
-trait Enclosures {
- self: Context =>
-
- import universe._
-
- type MacroRole = analyzer.MacroRole
- def APPLY_ROLE = analyzer.APPLY_ROLE
- def macroRole: MacroRole
-
- private lazy val site = callsiteTyper.context
- private lazy val enclTrees = site.enclosingContextChain map (_.tree)
- private lazy val enclPoses = enclosingMacros map (_.macroApplication.pos) filterNot (_ eq NoPosition)
-
- private def lenientEnclosure[T <: Tree : ClassTag]: Tree = enclTrees collectFirst { case x: T => x } getOrElse EmptyTree
- private def strictEnclosure[T <: Tree : ClassTag]: T = enclTrees collectFirst { case x: T => x } getOrElse (throw new EnclosureException(classTag[T].runtimeClass, enclTrees))
-
- // vals are eager to simplify debugging
- // after all we wouldn't save that much time by making them lazy
- val macroApplication: Tree = expandee
- def enclosingPackage: PackageDef = strictEnclosure[PackageDef]
- val enclosingClass: Tree = lenientEnclosure[ImplDef]
- def enclosingImpl: ImplDef = strictEnclosure[ImplDef]
- def enclosingTemplate: Template = strictEnclosure[Template]
- val enclosingImplicits: List[(Type, Tree)] = site.openImplicits
- val enclosingMacros: List[Context] = this :: universe.analyzer.openMacros // include self
- val enclosingMethod: Tree = lenientEnclosure[DefDef]
- def enclosingDef: DefDef = strictEnclosure[DefDef]
- val enclosingPosition: Position = if (enclPoses.isEmpty) NoPosition else enclPoses.head.pos
- val enclosingUnit: CompilationUnit = universe.currentRun.currentUnit
- val enclosingRun: Run = universe.currentRun
-}
diff --git a/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala
new file mode 100644
index 0000000000..3ef11fad9d
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala
@@ -0,0 +1,31 @@
+package scala.reflect.macros
+package runtime
+
+import scala.reflect.runtime.ReflectionUtils
+import scala.reflect.macros.{Context => ApiContext}
+
+trait JavaReflectionRuntimes {
+ self: scala.tools.nsc.typechecker.Analyzer =>
+
+ trait JavaReflectionResolvers {
+ self: MacroRuntimeResolver =>
+
+ import global._
+
+ def resolveJavaReflectionRuntime(classLoader: ClassLoader): MacroRuntime = {
+ val implClass = Class.forName(className, true, classLoader)
+ val implMeths = implClass.getDeclaredMethods.find(_.getName == methName)
+ // relies on the fact that macro impls cannot be overloaded
+ // so every methName can resolve to at most one method
+ val implMeth = implMeths getOrElse { throw new NoSuchMethodException(s"$className.$methName") }
+ macroLogVerbose(s"successfully loaded macro impl as ($implClass, $implMeth)")
+ args => {
+ val implObj =
+ if (isBundle) implClass.getConstructor(classOf[ApiContext]).newInstance(args.c)
+ else ReflectionUtils.staticSingletonInstance(implClass)
+ val implArgs = if (isBundle) args.others else args.c +: args.others
+ implMeth.invoke(implObj, implArgs.asInstanceOf[Seq[AnyRef]]: _*)
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
new file mode 100644
index 0000000000..0f89163803
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
@@ -0,0 +1,74 @@
+package scala.reflect.macros
+package runtime
+
+import scala.collection.mutable.{Map => MutableMap}
+import scala.reflect.internal.Flags._
+import scala.reflect.runtime.ReflectionUtils
+import scala.tools.nsc.util.ScalaClassLoader
+import scala.tools.nsc.util.AbstractFileClassLoader
+
+trait MacroRuntimes extends JavaReflectionRuntimes with ScalaReflectionRuntimes {
+ self: scala.tools.nsc.typechecker.Analyzer =>
+
+ import global._
+ import definitions._
+
+ /** Produces a function that can be used to invoke macro implementation for a given macro definition:
+ * 1) Looks up macro implementation symbol in this universe.
+ * 2) Loads its enclosing class from the macro classloader.
+ * 3) Loads the companion of that enclosing class from the macro classloader.
+ * 4) Resolves macro implementation within the loaded companion.
+ *
+ * @return Requested runtime if macro implementation can be loaded successfully from either of the mirrors,
+ * `null` otherwise.
+ */
+ private val macroRuntimesCache = perRunCaches.newWeakMap[Symbol, MacroRuntime]
+ def macroRuntime(macroDef: Symbol): MacroRuntime = {
+ macroLogVerbose(s"looking for macro implementation: $macroDef")
+ if (fastTrack contains macroDef) {
+ macroLogVerbose("macro expansion is serviced by a fast track")
+ fastTrack(macroDef)
+ } else {
+ macroRuntimesCache.getOrElseUpdate(macroDef, new MacroRuntimeResolver(macroDef).resolveRuntime())
+ }
+ }
+
+ /** Macro classloader that is used to resolve and run macro implementations.
+ * Loads classes from -cp (aka the library classpath).
+ * Is also capable of detecting REPL and reusing its classloader.
+ *
+ * When -Xmacro-jit is enabled, we sometimes fall back to on-the-fly compilation of macro implementations,
+ * which compiles implementations into a virtual directory (very much like REPL does) and then conjures
+ * a classloader mapped to that virtual directory.
+ */
+ lazy val defaultMacroClassloader: ClassLoader = findMacroClassLoader()
+
+ /** Abstracts away resolution of macro runtimes.
+ */
+ type MacroRuntime = MacroArgs => Any
+ class MacroRuntimeResolver(val macroDef: Symbol) extends JavaReflectionResolvers
+ with ScalaReflectionResolvers {
+ val binding = loadMacroImplBinding(macroDef)
+ val isBundle = binding.isBundle
+ val className = binding.className
+ val methName = binding.methName
+
+ def resolveRuntime(): MacroRuntime = {
+ if (className == Predef_???.owner.javaClassName && methName == Predef_???.name.encoded) {
+ args => throw new AbortMacroException(args.c.enclosingPosition, "macro implementation is missing")
+ } else {
+ try {
+ macroLogVerbose(s"resolving macro implementation as $className.$methName (isBundle = $isBundle)")
+ macroLogVerbose(s"classloader is: ${ReflectionUtils.show(defaultMacroClassloader)}")
+ // resolveScalaReflectionRuntime(defaultMacroClassloader)
+ resolveJavaReflectionRuntime(defaultMacroClassloader)
+ } catch {
+ case ex: Exception =>
+ macroLogVerbose(s"macro runtime failed to load: ${ex.toString}")
+ macroDef setFlag IS_ERROR
+ null
+ }
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/runtime/ScalaReflectionRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/ScalaReflectionRuntimes.scala
new file mode 100644
index 0000000000..1999e525ff
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/ScalaReflectionRuntimes.scala
@@ -0,0 +1,33 @@
+package scala.reflect.macros
+package runtime
+
+import scala.reflect.runtime.{universe => ru}
+
+trait ScalaReflectionRuntimes {
+ self: scala.tools.nsc.typechecker.Analyzer =>
+
+ trait ScalaReflectionResolvers {
+ self: MacroRuntimeResolver =>
+
+ import global._
+
+ def resolveScalaReflectionRuntime(classLoader: ClassLoader): MacroRuntime = {
+ val macroMirror: ru.JavaMirror = ru.runtimeMirror(classLoader)
+ val implContainerSym = macroMirror.classSymbol(Class.forName(className, true, classLoader))
+ val implMethSym = implContainerSym.typeSignature.member(ru.TermName(methName)).asMethod
+ macroLogVerbose(s"successfully loaded macro impl as ($implContainerSym, $implMethSym)")
+ args => {
+ val implContainer =
+ if (isBundle) {
+ val implCtorSym = implContainerSym.typeSignature.member(ru.nme.CONSTRUCTOR).asMethod
+ macroMirror.reflectClass(implContainerSym).reflectConstructor(implCtorSym)(args.c)
+ } else {
+ macroMirror.reflectModule(implContainerSym.module.asModule).instance
+ }
+ val implMeth = macroMirror.reflect(implContainer).reflectMethod(implMethSym)
+ val implArgs = if (isBundle) args.others else args.c +: args.others
+ implMeth(implArgs: _*)
+ }
+ }
+ }
+}
diff --git a/src/compiler/scala/reflect/macros/runtime/package.scala b/src/compiler/scala/reflect/macros/runtime/package.scala
new file mode 100644
index 0000000000..9ef8200760
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/package.scala
@@ -0,0 +1,5 @@
+package scala.reflect.macros
+
+package object runtime {
+ type Context = scala.reflect.macros.contexts.Context
+} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/util/Helpers.scala b/src/compiler/scala/reflect/macros/util/Helpers.scala
new file mode 100644
index 0000000000..9b7680717e
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/util/Helpers.scala
@@ -0,0 +1,71 @@
+package scala.reflect.macros
+package util
+
+import scala.tools.nsc.typechecker.Analyzer
+
+trait Helpers {
+ self: Analyzer =>
+
+ import global._
+ import definitions._
+
+ /** Transforms the parameter lists of a macro impl.
+ * The `transform` function is invoked only for WeakTypeTag evidence parameters.
+ *
+ * The transformer takes two arguments: a value parameter from the parameter list
+ * and a type parameter that is witnessed by the value parameter.
+ *
+ * If the transformer returns NoSymbol, the value parameter is not included in the result.
+ * If the transformer returns something else, that something else is included in the result instead of the value parameter.
+ *
+ * Despite being highly esoteric, this function significantly simplifies signature analysis.
+ * For example, it can be used to strip macroImpl.paramss of evidence parameters (necessary when checking def <-> impl correspondence)
+ * or to streamline creation of the list of macro arguments.
+ */
+ def transformTypeTagEvidenceParams(macroImplRef: Tree, transform: (Symbol, Symbol) => Symbol): List[List[Symbol]] = {
+ val treeInfo.MacroImplReference(isBundle, owner, macroImpl, _) = macroImplRef
+ val paramss = macroImpl.paramss
+ if (paramss.isEmpty || paramss.last.isEmpty) return paramss // no implicit parameters in the signature => nothing to do
+ val rc =
+ if (isBundle) macroImpl.owner.tpe.member(nme.c)
+ else {
+ def cparam = paramss.head.head
+ if (paramss.head.isEmpty || !(cparam.tpe <:< MacroContextClass.tpe)) return paramss // no context parameter in the signature => nothing to do
+ cparam
+ }
+ def transformTag(param: Symbol): Symbol = param.tpe.dealias match {
+ case TypeRef(SingleType(SingleType(_, ac), universe), WeakTypeTagClass, targ :: Nil)
+ if ac == rc && universe == MacroContextUniverse =>
+ transform(param, targ.typeSymbol)
+ case _ =>
+ param
+ }
+ val transformed = paramss.last map transformTag filter (_ ne NoSymbol)
+ if (transformed.isEmpty) paramss.init else paramss.init :+ transformed
+ }
+
+ private def dealiasAndRewrap(tp: Type)(fn: Type => Type): Type = {
+ if (isRepeatedParamType(tp)) scalaRepeatedType(fn(tp.typeArgs.head.dealias))
+ else fn(tp.dealias)
+ }
+
+ /** Increases metalevel of the type, i.e. transforms:
+ * * T to c.Expr[T]
+ *
+ * @see Metalevels.scala for more information and examples about metalevels
+ */
+ def increaseMetalevel(pre: Type, tp: Type): Type = transparentShallowTransform(RepeatedParamClass, tp) {
+ case tp => typeRef(pre, MacroContextExprClass, List(tp))
+ }
+
+ /** Decreases metalevel of the type, i.e. transforms:
+ * * c.Expr[T] to T
+ * * Anything else to Any
+ *
+ * @see Metalevels.scala for more information and examples about metalevels
+ */
+ def decreaseMetalevel(tp: Type): Type = transparentShallowTransform(RepeatedParamClass, tp) {
+ case ExprClassOf(runtimeType) => runtimeType
+ case _ => AnyTpe // so that macro impls with rhs = ??? don't screw up our inference
+ }
+} \ No newline at end of file
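
Concretely, increasing the metalevel is what relates a macro def signature to its macro impl signature: each value parameter and the result type gain a `c.Expr[_]` wrapper. A minimal sketch of a corresponding def/impl pair (names are illustrative):

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object Metalevel {
      // metalevel 0: the signature users see
      def twice(x: Int): Int = macro twiceImpl

      // metalevel 1: Int has been lifted to c.Expr[Int] on both sides
      def twiceImpl(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
        import c.universe._
        reify(x.splice + x.splice)
      }
    }
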
diff --git a/src/compiler/scala/reflect/macros/util/Traces.scala b/src/compiler/scala/reflect/macros/util/Traces.scala
index d16916b753..2dffc68745 100644
--- a/src/compiler/scala/reflect/macros/util/Traces.scala
+++ b/src/compiler/scala/reflect/macros/util/Traces.scala
@@ -6,8 +6,6 @@ trait Traces {
val macroDebugLite = globalSettings.YmacrodebugLite.value
val macroDebugVerbose = globalSettings.YmacrodebugVerbose.value
- val macroTraceLite = scala.tools.nsc.util.trace when (macroDebugLite || macroDebugVerbose)
- val macroTraceVerbose = scala.tools.nsc.util.trace when macroDebugVerbose
@inline final def macroLogLite(msg: => Any) { if (macroDebugLite || macroDebugVerbose) println(msg) }
@inline final def macroLogVerbose(msg: => Any) { if (macroDebugVerbose) println(msg) }
}
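The surviving macroLogLite/macroLogVerbose helpers take their message by name, so the (possibly expensive) string is only built when the corresponding -Ymacro-debug flag is set. A stand-alone sketch of that idiom (the flag and method names here are illustrative):

    object LazyLogDemo {
      var debugEnabled = false

      // `msg` is by-name: nothing is evaluated unless the flag is on.
      @inline final def debugLog(msg: => Any): Unit = if (debugEnabled) println(msg)

      def main(args: Array[String]): Unit = {
        debugLog { println("building expensive message"); "never shown" } // side effect skipped
        debugEnabled = true
        debugLog("shown once debugging is enabled")
      }
    }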
diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala
index 11857e2172..74949fce6d 100644
--- a/src/compiler/scala/reflect/reify/Reifier.scala
+++ b/src/compiler/scala/reflect/reify/Reifier.scala
@@ -8,8 +8,9 @@ import scala.reflect.reify.utils.Utils
/** Given a tree or a type, generate a tree that when executed at runtime produces the original tree or type.
* See more info in the comments to `reify` in scala.reflect.api.Universe.
*
- * @author Martin Odersky
- * @version 2.10
+ * @author Martin Odersky
+ * @version 2.10
+ * @since 2.10
*/
abstract class Reifier extends States
with Phases
@@ -31,20 +32,20 @@ abstract class Reifier extends States
this.asInstanceOf[Reifier { val global: Reifier.this.global.type }]
override def hasReifier = true
- /**
- * For `reifee` and other reification parameters, generate a tree of the form
- *
+ /** For `reifee` and other reification parameters, generate a tree of the form
+ * {{{
* {
- * val $u: universe.type = <[ universe ]>
- * val $m: $u.Mirror = <[ mirror ]>
- * $u.Expr[T](rtree) // if data is a Tree
- * $u.TypeTag[T](rtree) // if data is a Type
+ * val \$u: universe.type = <[ universe ]>
+ * val \$m: \$u.Mirror = <[ mirror ]>
+ * \$u.Expr[T](rtree) // if data is a Tree
+ * \$u.TypeTag[T](rtree) // if data is a Type
* }
+ * }}}
*
* where
*
- * - `universe` is the tree that represents the universe the result will be bound to
- * - `mirror` is the tree that represents the mirror the result will be initially bound to
+ * - `universe` is the tree that represents the universe the result will be bound to.
+ * - `mirror` is the tree that represents the mirror the result will be initially bound to.
* - `rtree` is code that generates `reifee` at runtime.
* - `T` is the type that corresponds to `data`.
*
diff --git a/src/compiler/scala/reflect/reify/Taggers.scala b/src/compiler/scala/reflect/reify/Taggers.scala
index 9659134e5b..0bffe55403 100644
--- a/src/compiler/scala/reflect/reify/Taggers.scala
+++ b/src/compiler/scala/reflect/reify/Taggers.scala
@@ -1,7 +1,7 @@
package scala.reflect.reify
import scala.reflect.macros.{ReificationException, UnexpectedReificationException, TypecheckException}
-import scala.reflect.macros.runtime.Context
+import scala.reflect.macros.contexts.Context
abstract class Taggers {
val c: Context
diff --git a/src/compiler/scala/reflect/reify/package.scala b/src/compiler/scala/reflect/reify/package.scala
index 78f85c2634..d3cae3d123 100644
--- a/src/compiler/scala/reflect/reify/package.scala
+++ b/src/compiler/scala/reflect/reify/package.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
import scala.reflect.macros.ReificationException
import scala.tools.nsc.Global
@@ -45,11 +46,14 @@ package object reify {
def reifyType(global: Global)(typer: global.analyzer.Typer,universe: global.Tree, mirror: global.Tree, tpe: global.Type, concrete: Boolean = false): global.Tree =
mkReifier(global)(typer, universe, mirror, tpe, concrete = concrete).reification.asInstanceOf[global.Tree]
- def reifyRuntimeClass(global: Global)(typer0: global.analyzer.Typer, tpe: global.Type, concrete: Boolean = true): global.Tree = {
+ def reifyRuntimeClass(global: Global)(typer0: global.analyzer.Typer, tpe0: global.Type, concrete: Boolean = true): global.Tree = {
import global._
import definitions._
import analyzer.enclosingMacroPosition
+ // SI-7375
+ val tpe = tpe0.dealiasWiden
+
if (tpe.isSpliceable) {
val classTagInScope = typer0.resolveClassTag(enclosingMacroPosition, tpe, allowMaterialization = false)
if (!classTagInScope.isEmpty) return Select(classTagInScope, nme.runtimeClass)
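The dealiasWiden added above (SI-7375) matters because reflection Types preserve aliases and singleton types, so the spliceability check would otherwise inspect the alias rather than the underlying type. A small illustration with the public runtime-reflection API (the MyInt alias is made up for the example):

    import scala.reflect.runtime.universe._

    object AliasDemo extends App {
      type MyInt = Int
      println(typeOf[MyInt])                  // prints the alias, e.g. AliasDemo.MyInt
      println(typeOf[MyInt] =:= typeOf[Int])  // true: type equality looks through the alias
    }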
diff --git a/src/compiler/scala/reflect/reify/phases/Metalevels.scala b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
index 18ea908cdf..c69263399f 100644
--- a/src/compiler/scala/reflect/reify/phases/Metalevels.scala
+++ b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
@@ -11,7 +11,7 @@ trait Metalevels {
/**
* Makes sense of cross-stage bindings.
*
- * ================
+ * ----------------
*
* Analysis of cross-stage bindings becomes convenient if we introduce the notion of metalevels.
* Metalevel of a tree is a number that gets incremented every time you reify something and gets decremented when you splice something.
@@ -33,7 +33,7 @@ trait Metalevels {
*
* 1) symbol.metalevel < curr_metalevel. In this case reifier will generate a free variable
* that captures both the name of the symbol (to be compiled successfully) and its value (to be run successfully).
- * For example, x in Example 1 will be reified as follows: Ident(newFreeVar("x", IntClass.tpe, x))
+ * For example, x in Example 1 will be reified as follows: Ident(newFreeVar("x", IntTpe, x))
*
* 2) symbol.metalevel > curr_metalevel. This leads to a metalevel breach that violates intuitive perception of splicing.
* As defined in macro spec, splicing takes a tree and inserts it into another tree - as simple as that.
@@ -53,7 +53,7 @@ trait Metalevels {
* 1) Runtime eval that services dynamic splices requires scala-compiler.jar, which might not be on library classpath
* 2) Runtime eval incurs a severe performance penalty, so it's better to be explicit about it
*
- * ================
+ * ----------------
*
* As we can see, the only problem is the fact that lhs'es of `splice` can be code blocks that can capture variables from the outside.
* Code inside the lhs of a `splice` is not reified, while the code from the enclosing reify is.
@@ -72,7 +72,7 @@ trait Metalevels {
* Since the result of the inner reify is wrapped in a splice, it won't be reified
* together with the other parts of the outer reify, but will be inserted into that result verbatim.
*
- * The inner reify produces an Expr[Int] that wraps Ident(freeVar("x", IntClass.tpe, x)).
+ * The inner reify produces an Expr[Int] that wraps Ident(freeVar("x", IntTpe, x)).
* However the freevar the reification points to will vanish when the compiler processes the outer reify.
* That's why we need to replace that freevar with a regular symbol that will point to reified x.
*
diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala
index bc2dbeed3e..5f53f558b4 100644
--- a/src/compiler/scala/reflect/reify/phases/Reshape.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala
@@ -167,7 +167,7 @@ trait Reshape {
// if this assumption fails, please, don't be quick to add postprocessing here (like I did before)
// but rather try to fix this in Typer, so that it produces quality originals (like it's done for typedAnnotated)
if (reifyDebug) println("TypeTree, essential: %s (%s)".format(tt.tpe, tt.tpe.kind))
- if (reifyDebug) println("verdict: rolled back to original %s".format(tt.original))
+ if (reifyDebug) println("verdict: rolled back to original %s".format(tt.original.toString.replaceAll("\\s+", " ")))
transform(tt.original)
} else {
// type is deemed to be non-essential
diff --git a/src/compiler/scala/reflect/reify/utils/Extractors.scala b/src/compiler/scala/reflect/reify/utils/Extractors.scala
index 7338df1f72..d5f27dc119 100644
--- a/src/compiler/scala/reflect/reify/utils/Extractors.scala
+++ b/src/compiler/scala/reflect/reify/utils/Extractors.scala
@@ -11,7 +11,7 @@ trait Extractors {
// Example of a reified tree for `reify(List(1, 2))`:
// (also contains an example of a reified type as a third argument to the constructor of Expr)
// {
- // val $u: reflect.runtime.universe.type = scala.reflect.runtime.`package`.universe;
+ // val $u: scala.reflect.runtime.universe.type = scala.reflect.runtime.`package`.universe;
// val $m: $u.Mirror = $u.runtimeMirror(Test.this.getClass().getClassLoader());
// $u.Expr[List[Int]]($m, {
// final class $treecreator1 extends scala.reflect.api.TreeCreator {
diff --git a/src/compiler/scala/tools/ant/Same.scala b/src/compiler/scala/tools/ant/Same.scala
index 6362d28580..6036b238b6 100644
--- a/src/compiler/scala/tools/ant/Same.scala
+++ b/src/compiler/scala/tools/ant/Same.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.tools.ant
+package scala
+package tools.ant
import java.io.{File, FileInputStream}
diff --git a/src/compiler/scala/tools/ant/sabbus/Break.scala b/src/compiler/scala/tools/ant/sabbus/Break.scala
index 0b6701b6e9..b170ceaed8 100644
--- a/src/compiler/scala/tools/ant/sabbus/Break.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Break.scala
@@ -7,7 +7,8 @@
\* */
-package scala.tools.ant.sabbus
+package scala
+package tools.ant.sabbus
import org.apache.tools.ant.Task
diff --git a/src/compiler/scala/tools/ant/sabbus/Make.scala b/src/compiler/scala/tools/ant/sabbus/Make.scala
index 5274594f3d..027a828f03 100644
--- a/src/compiler/scala/tools/ant/sabbus/Make.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Make.scala
@@ -7,7 +7,8 @@
\* */
-package scala.tools.ant.sabbus
+package scala
+package tools.ant.sabbus
import java.io.File
import org.apache.tools.ant.Task
diff --git a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
index 363c31f6c4..595b45ae51 100644
--- a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
+++ b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.tools.ant
+package scala
+package tools.ant
package sabbus
import java.io.{ File, FileWriter }
diff --git a/src/compiler/scala/tools/ant/sabbus/Use.scala b/src/compiler/scala/tools/ant/sabbus/Use.scala
index 5f50bb7908..a8736f228b 100644
--- a/src/compiler/scala/tools/ant/sabbus/Use.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Use.scala
@@ -7,7 +7,8 @@
\* */
-package scala.tools.ant
+package scala
+package tools.ant
package sabbus
import java.io.File
diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
index 411a27b9c7..bd6cf561b9 100644
--- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
@@ -14,8 +14,9 @@ set _LINE_TOOLCP=
:another_param
-if "%1%"=="-toolcp" (
- set _LINE_TOOLCP=%2%
+rem Use "%~1" to handle spaces in paths. See http://ss64.com/nt/syntax-args.html
+if "%~1"=="-toolcp" (
+ set _LINE_TOOLCP=%~2
shift
shift
goto another_param
diff --git a/src/compiler/scala/tools/cmd/Demo.scala b/src/compiler/scala/tools/cmd/Demo.scala
index af818845bb..fc90140f8f 100644
--- a/src/compiler/scala/tools/cmd/Demo.scala
+++ b/src/compiler/scala/tools/cmd/Demo.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.tools
+package scala
+package tools
package cmd
/** A sample command specification for illustrative purposes.
diff --git a/src/compiler/scala/tools/cmd/Interpolation.scala b/src/compiler/scala/tools/cmd/Interpolation.scala
index abffd6bb2e..d1c798b621 100644
--- a/src/compiler/scala/tools/cmd/Interpolation.scala
+++ b/src/compiler/scala/tools/cmd/Interpolation.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.tools
+package scala
+package tools
package cmd
/** Interpolation logic for generated files. The idea is to be
diff --git a/src/compiler/scala/tools/cmd/gen/AnyVals.scala b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
index 35d4eaf1b6..7e01afac2b 100644
--- a/src/compiler/scala/tools/cmd/gen/AnyVals.scala
+++ b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
@@ -22,8 +22,8 @@ trait AnyValReps {
}
def coercionCommentExtra = ""
def coercionComment = """
- /** Language mandated coercions from @name@ to "wider" types.%s
- */""".format(coercionCommentExtra)
+/** Language mandated coercions from @name@ to "wider" types.%s
+ */""".format(coercionCommentExtra)
def implicitCoercions: List[String] = {
val coercions = this match {
@@ -35,7 +35,7 @@ trait AnyValReps {
case _ => Nil
}
if (coercions.isEmpty) Nil
- else coercionComment :: coercions
+ else coercionComment.lines.toList ++ coercions
}
def isCardinal: Boolean = isIntegerType(this)
@@ -183,7 +183,7 @@ trait AnyValReps {
}
def objectLines = {
val comp = if (isCardinal) cardinalCompanion else floatingCompanion
- (comp + allCompanions + "\n" + nonUnitCompanions).trim.lines.toList ++ implicitCoercions map interpolate
+ interpolate(comp + allCompanions + "\n" + nonUnitCompanions).trim.lines.toList ++ (implicitCoercions map interpolate)
}
/** Makes a set of binary operations based on the given set of ops, args, and resultFn.
@@ -209,11 +209,14 @@ trait AnyValReps {
)
def lcname = name.toLowerCase
+ def boxedSimpleName = this match {
+ case C => "Character"
+ case I => "Integer"
+ case _ => name
+ }
def boxedName = this match {
case U => "scala.runtime.BoxedUnit"
- case C => "java.lang.Character"
- case I => "java.lang.Integer"
- case _ => "java.lang." + name
+ case _ => "java.lang." + boxedSimpleName
}
def zeroRep = this match {
case L => "0L"
@@ -228,7 +231,13 @@ trait AnyValReps {
def indentN(s: String) = s.lines map indent mkString "\n"
def boxUnboxImpls = Map(
+ "@boxRunTimeDoc@" -> """
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxTo%s`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *""".format(boxedSimpleName),
"@boxImpl@" -> "%s.valueOf(x)".format(boxedName),
+ "@unboxRunTimeDoc@" -> """
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxTo%s`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *""".format(name),
"@unboxImpl@" -> "x.asInstanceOf[%s].%sValue()".format(boxedName, lcname),
"@unboxDoc@" -> "the %s resulting from calling %sValue() on `x`".format(name, lcname)
)
@@ -299,7 +308,7 @@ import scala.language.implicitConversions
def allCompanions = """
/** Transform a value type into a boxed reference type.
- *
+ *@boxRunTimeDoc@
* @param x the @name@ to be boxed
* @return a @boxed@ offering `x` as its underlying value.
*/
@@ -308,7 +317,7 @@ def box(x: @name@): @boxed@ = @boxImpl@
/** Transform a boxed type into a value type. Note that this
* method is not typesafe: it accepts any Object, but will throw
* an exception if the argument is not a @boxed@.
- *
+ *@unboxRunTimeDoc@
* @param x the @boxed@ to be unboxed.
* @throws ClassCastException if the argument is not a @boxed@
* @return @unboxDoc@
@@ -471,7 +480,9 @@ override def getClass(): Class[Boolean] = null
def objectLines = interpolate(allCompanions).lines.toList
override def boxUnboxImpls = Map(
+ "@boxRunTimeDoc@" -> "",
"@boxImpl@" -> "scala.runtime.BoxedUnit.UNIT",
+ "@unboxRunTimeDoc@" -> "",
"@unboxImpl@" -> "()",
"@unboxDoc@" -> "the Unit value ()"
)
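For context, placeholders such as @name@, @boxed@ and the new @boxRunTimeDoc@ above are filled in by plain textual substitution over the companion templates before the generated sources are written out. A self-contained sketch of that technique (the helper name and the map contents are illustrative only):

    object PlaceholderDemo {
      // Substitute each @key@ occurrence with its value, in the spirit of the
      // AnyVals generator's documentation and implementation templates.
      def interpolate(template: String, env: Map[String, String]): String =
        env.foldLeft(template) { case (acc, (key, value)) => acc.replace(key, value) }

      def main(args: Array[String]): Unit = {
        val env = Map(
          "@name@"    -> "Int",
          "@boxed@"   -> "java.lang.Integer",
          "@boxImpl@" -> "java.lang.Integer.valueOf(x)"
        )
        println(interpolate("def box(x: @name@): @boxed@ = @boxImpl@", env))
        // prints: def box(x: Int): java.lang.Integer = java.lang.Integer.valueOf(x)
      }
    }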
diff --git a/src/compiler/scala/tools/cmd/package.scala b/src/compiler/scala/tools/cmd/package.scala
index d605ecae8f..c62a977950 100644
--- a/src/compiler/scala/tools/cmd/package.scala
+++ b/src/compiler/scala/tools/cmd/package.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.tools
+package scala
+package tools
package object cmd {
def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
diff --git a/src/compiler/scala/tools/nsc/CompileClient.scala b/src/compiler/scala/tools/nsc/CompileClient.scala
index 64f6758e73..3017d8c9cc 100644
--- a/src/compiler/scala/tools/nsc/CompileClient.scala
+++ b/src/compiler/scala/tools/nsc/CompileClient.scala
@@ -3,11 +3,12 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
import settings.FscSettings
import scala.tools.util.CompileOutputCommon
-import sys.SystemProperties.preferIPv4Stack
+import scala.sys.SystemProperties.preferIPv4Stack
/** The client part of the fsc offline compiler. Instead of compiling
* things itself, it send requests to a CompileServer.
diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala
index 8087a31b45..88d5c31b5d 100644
--- a/src/compiler/scala/tools/nsc/CompileSocket.scala
+++ b/src/compiler/scala/tools/nsc/CompileSocket.scala
@@ -113,7 +113,7 @@ class CompileSocket extends CompileOutputCommon {
*/
def getPort(vmArgs: String): Int = {
val maxPolls = 300
- val sleepTime = 25
+ val sleepTime = 25L
var attempts = 0
var port = pollPort()
@@ -152,9 +152,9 @@ class CompileSocket extends CompileOutputCommon {
* cannot be established.
*/
def getOrCreateSocket(vmArgs: String, create: Boolean = true): Option[Socket] = {
- val maxMillis = 10 * 1000 // try for 10 seconds
- val retryDelay = 50
- val maxAttempts = maxMillis / retryDelay
+ val maxMillis = 10L * 1000 // try for 10 seconds
+ val retryDelay = 50L
+ val maxAttempts = (maxMillis / retryDelay).toInt
def getsock(attempts: Int): Option[Socket] = attempts match {
case 0 => warn("Unable to establish connection to compilation daemon") ; None
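The arithmetic above (10 seconds total at 50 ms per try, hence 200 attempts) is the usual bounded retry-with-delay pattern, made explicit so the division no longer mixes Int and Long. A stand-alone sketch of the same pattern (all names are illustrative):

    import scala.annotation.tailrec

    object RetryDemo {
      // Try `op` up to `maxAttempts` times, sleeping `delayMillis` between attempts.
      @tailrec
      def retry[T](maxAttempts: Int, delayMillis: Long)(op: => Option[T]): Option[T] =
        if (maxAttempts <= 0) None
        else op match {
          case some @ Some(_) => some
          case None           => Thread.sleep(delayMillis); retry(maxAttempts - 1, delayMillis)(op)
        }

      def main(args: Array[String]): Unit = {
        val maxMillis  = 10L * 1000
        val retryDelay = 50L
        println(retry((maxMillis / retryDelay).toInt, retryDelay)(Some("connected")))
      }
    }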
diff --git a/src/compiler/scala/tools/nsc/Driver.scala b/src/compiler/scala/tools/nsc/Driver.scala
index fbfed6110f..3ac27a42e8 100644
--- a/src/compiler/scala/tools/nsc/Driver.scala
+++ b/src/compiler/scala/tools/nsc/Driver.scala
@@ -1,4 +1,5 @@
-package scala.tools.nsc
+package scala
+package tools.nsc
import scala.tools.nsc.reporters.ConsoleReporter
import Properties.{ versionString, copyrightString, residentPromptString }
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 8b78ddd59d..eafe03d5cd 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -3,10 +3,13 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools
+package nsc
import java.io.{ File, FileOutputStream, PrintWriter, IOException, FileNotFoundException }
import java.nio.charset.{ Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException }
+import java.util.UUID._
import scala.compat.Platform.currentTime
import scala.collection.{ mutable, immutable }
import io.{ SourceReader, AbstractFile, Path }
@@ -14,6 +17,7 @@ import reporters.{ Reporter, ConsoleReporter }
import util.{ ClassPath, MergedClassPath, StatisticsInfo, returning, stackTraceString, stackTraceHeadString }
import scala.reflect.internal.util.{ OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
+import scala.reflect.io.VirtualFile
import symtab.{ Flags, SymbolTable, SymbolLoaders, SymbolTrackers }
import symtab.classfile.Pickler
import plugins.Plugins
@@ -218,7 +222,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// not deprecated yet, but a method called "error" imported into
// nearly every trait really must go. For now using globalError.
def error(msg: String) = globalError(msg)
- def inform(msg: String) = reporter.echo(msg)
+ override def inform(msg: String) = reporter.echo(msg)
override def globalError(msg: String) = reporter.error(NoPosition, msg)
override def warning(msg: String) =
if (settings.fatalWarnings) globalError(msg)
@@ -1056,14 +1060,12 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
@inline final def enteringTyper[T](op: => T): T = enteringPhase(currentRun.typerPhase)(op)
@inline final def enteringUncurry[T](op: => T): T = enteringPhase(currentRun.uncurryPhase)(op)
- // Owners up to and including the first package class.
+ // Owners which aren't package classes.
private def ownerChainString(sym: Symbol): String = (
if (sym == null) ""
- else sym.ownerChain.span(!_.isPackageClass) match {
- case (xs, pkg :: _) => (xs :+ pkg) mkString " -> "
- case _ => sym.ownerChain mkString " -> " // unlikely
- }
+ else sym.ownerChain takeWhile (!_.isPackageClass) mkString " -> "
)
+
private def formatExplain(pairs: (String, Any)*): String = (
pairs.toList collect { case (k, v) if v != null => "%20s: %s".format(k, v) } mkString "\n"
)
@@ -1071,41 +1073,45 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** Don't want to introduce new errors trying to report errors,
* so swallow exceptions.
*/
- override def supplementErrorMessage(errorMessage: String): String =
+ override def supplementErrorMessage(errorMessage: String): String = {
if (currentRun.supplementedError) errorMessage
else try {
+ currentRun.supplementedError = true
val tree = analyzer.lastTreeToTyper
val sym = tree.symbol
val tpe = tree.tpe
- val enclosing = lastSeenContext.enclClassOrMethod.tree
+ val site = lastSeenContext.enclClassOrMethod.owner
+ val pos_s = if (tree.pos.isDefined) s"line ${tree.pos.line} of ${tree.pos.source.file}" else "<unknown>"
+ val context_s = try {
+ // Taking 3 before, 3 after the fingered line.
+ val start = 0 max (tree.pos.line - 3)
+ val xs = scala.reflect.io.File(tree.pos.source.file.file).lines drop start take 7
+ val strs = xs.zipWithIndex map { case (line, idx) => f"${start + idx}%6d $line" }
+ strs.mkString("== Source file context for tree position ==\n\n", "\n", "")
+ }
+ catch { case t: Exception => devWarning("" + t) ; "<Cannot read source file>" }
val info1 = formatExplain(
"while compiling" -> currentSource.path,
- "during phase" -> ( if (globalPhase eq phase) phase else "global=%s, enteringPhase=%s".format(globalPhase, phase) ),
+ "during phase" -> ( if (globalPhase eq phase) phase else "globalPhase=%s, enteringPhase=%s".format(globalPhase, phase) ),
"library version" -> scala.util.Properties.versionString,
"compiler version" -> Properties.versionString,
"reconstructed args" -> settings.recreateArgs.mkString(" ")
)
val info2 = formatExplain(
"last tree to typer" -> tree.summaryString,
+ "tree position" -> pos_s,
+ "tree tpe" -> tpe,
"symbol" -> Option(sym).fold("null")(_.debugLocationString),
- "symbol definition" -> Option(sym).fold("null")(_.defString),
- "tpe" -> tpe,
+ "symbol definition" -> Option(sym).fold("null")(s => s.defString + s" (a ${s.shortSymbolClass})"),
+ "symbol package" -> sym.enclosingPackage.fullName,
"symbol owners" -> ownerChainString(sym),
- "context owners" -> ownerChainString(lastSeenContext.owner)
- )
- val info3: List[String] = (
- ( List("== Enclosing template or block ==", nodePrinters.nodeToString(enclosing).trim) )
- ++ ( if (tpe eq null) Nil else List("== Expanded type of tree ==", typeDeconstruct.show(tpe)) )
- ++ ( if (!settings.debug) Nil else List("== Current unit body ==", nodePrinters.nodeToString(currentUnit.body)) )
- ++ ( List(errorMessage) )
+ "call site" -> (site.fullLocationString + " in " + site.enclosingPackage)
)
-
- currentRun.supplementedError = true
-
- ("\n" + info1) :: info2 :: info3 mkString "\n\n"
+ ("\n" + info1) :: info2 :: context_s :: Nil mkString "\n\n"
}
- catch { case x: Exception => errorMessage }
+ catch { case _: Exception | _: TypeError => errorMessage }
+ }
/** The id of the currently active run
*/
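The crash report above now embeds a fixed window of source around the offending tree position: the three lines before, the line itself and the three after, each prefixed with its line number. A stand-alone sketch of that formatting over an in-memory source (line numbers are 1-based, as compiler positions are; all names are illustrative):

    object ContextWindowDemo {
      // Return up to 7 numbered lines centred on `line` (1-based): 3 before, 3 after.
      def contextWindow(sourceLines: Vector[String], line: Int): String = {
        val start  = 0 max (line - 4)                 // 0-based index of the first line shown
        val window = sourceLines.slice(start, start + 7)
        window.zipWithIndex
          .map { case (text, idx) => f"${start + idx + 1}%6d $text" }
          .mkString("\n")
      }

      def main(args: Array[String]): Unit = {
        val src = (1 to 10).map(i => s"val x$i = $i").toVector
        println(contextWindow(src, line = 5))
      }
    }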
@@ -1389,9 +1395,13 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def registerPickle(sym: Symbol): Unit = ()
/** does this run compile given class, module, or case factory? */
+ // NOTE: Early initialized members are temporarily typechecked before the enclosing class, see typedPrimaryConstrBody!
+ // Here we work around that wrinkle by claiming that a top-level, early-initialized member is compiled in
+ // *every* run. This approximation works because this method is exclusively called with `this` == `currentRun`.
def compiles(sym: Symbol): Boolean =
if (sym == NoSymbol) false
else if (symSource.isDefinedAt(sym)) true
+ else if (sym.isTopLevel && sym.isEarlyInitialized) true
else if (!sym.isTopLevel) compiles(sym.enclosingTopLevelClass)
else if (sym.isModuleClass) compiles(sym.sourceModule)
else false
@@ -1489,18 +1499,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
compileUnits(sources map (new CompilationUnit(_)), firstPhase)
}
- def compileUnits(units: List[CompilationUnit], fromPhase: Phase) {
- try compileUnitsInternal(units, fromPhase)
- catch { case ex: Throwable =>
- val shown = if (settings.verbose)
- stackTraceString(ex)
- else
- stackTraceHeadString(ex) // note that error stacktraces do not print in fsc
-
- globalError(supplementErrorMessage("uncaught exception during compilation: " + shown))
- throw ex
- }
- }
+ def compileUnits(units: List[CompilationUnit], fromPhase: Phase): Unit =
+ compileUnitsInternal(units, fromPhase)
private def compileUnitsInternal(units: List[CompilationUnit], fromPhase: Phase) {
doInvalidation()
@@ -1619,6 +1619,25 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
}
+ // TODO: provide a way to specify a pretty name for debugging purposes
+ private def randomFileName() = (
+ "compileLateSynthetic-" + randomUUID().toString.replace("-", "") + ".scala"
+ )
+
+ def compileLate(code: PackageDef) {
+ // compatibility with SBT
+ // on the one hand, we need to specify some jfile here, otherwise sbt crashes with an NPE (SI-6870)
+ // on the other hand, we can't specify the obvious enclosingUnit, because then sbt somehow fails to run tests using type macros
+ // okay, now let's specify a file that is guaranteed not to exist, in an existing directory (so that we don't run into permission problems)
+ val syntheticFileName = randomFileName()
+ val fakeJfile = new java.io.File(syntheticFileName)
+ val virtualFile = new VirtualFile(syntheticFileName) { override def file = fakeJfile }
+ val sourceFile = new BatchSourceFile(virtualFile, code.toString)
+ val unit = new CompilationUnit(sourceFile)
+ unit.body = code
+ compileLate(unit)
+ }
+
/** Reset package class to state at typer (not sure what this
* is needed for?)
*/
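compileLate above manufactures a unit from an in-memory string: a VirtualFile provides the synthetic name (plus a fake java.io.File for sbt's benefit), a BatchSourceFile carries the text, and the already-built tree becomes the unit body. The same VirtualFile/BatchSourceFile pairing can be used on its own to wrap string sources; a rough sketch (these are internal reflect classes, shown purely for illustration):

    import scala.reflect.internal.util.BatchSourceFile
    import scala.reflect.io.VirtualFile

    object SyntheticSourceDemo {
      def syntheticSource(name: String, code: String): BatchSourceFile = {
        val virtualFile = new VirtualFile(name)   // never touches the file system
        new BatchSourceFile(virtualFile, code)
      }

      def main(args: Array[String]): Unit = {
        val source = syntheticSource("synthetic.scala", "object Generated { def answer = 42 }")
        println(source.path)
        println(new String(source.content))
      }
    }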
diff --git a/src/compiler/scala/tools/nsc/Main.scala b/src/compiler/scala/tools/nsc/Main.scala
index 9f6f483ad8..a66ee572a9 100644
--- a/src/compiler/scala/tools/nsc/Main.scala
+++ b/src/compiler/scala/tools/nsc/Main.scala
@@ -5,6 +5,8 @@
package scala.tools
package nsc
+import scala.language.postfixOps
+
/** The main class for NSC, a compiler for the programming
* language Scala.
*/
diff --git a/src/compiler/scala/tools/nsc/MainTokenMetric.scala b/src/compiler/scala/tools/nsc/MainTokenMetric.scala
index e2893204e0..84eb688b63 100644
--- a/src/compiler/scala/tools/nsc/MainTokenMetric.scala
+++ b/src/compiler/scala/tools/nsc/MainTokenMetric.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
import scala.tools.nsc.reporters.ConsoleReporter
diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala
index 5f8bc71449..d553d71bf5 100644
--- a/src/compiler/scala/tools/nsc/ScriptRunner.scala
+++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
import io.{ Directory, File, Path }
import java.io.IOException
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index 5ad494177c..f2e5c9b1eb 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -366,7 +366,10 @@ trait DocComments { self: Global =>
case vname =>
lookupVariable(vname, site) match {
case Some(replacement) => replaceWith(replacement)
- case None => reporter.warning(sym.pos, "Variable " + vname + " undefined in comment for " + sym + " in " + site)
+ case None =>
+ val pos = docCommentPos(sym)
+ val loc = pos withPoint (pos.start + vstart + 1)
+ reporter.warning(loc, s"Variable $vname undefined in comment for $sym in $site")
}
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/Printers.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala
index 72538bac08..89652c5a20 100644
--- a/src/compiler/scala/tools/nsc/ast/Printers.scala
+++ b/src/compiler/scala/tools/nsc/ast/Printers.scala
@@ -42,7 +42,7 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
}
}
- // overflow cases missing from TreePrinter in reflect.api
+ // overflow cases missing from TreePrinter in scala.reflect.api
override def xprintTree(treePrinter: super.TreePrinter, tree: Tree) = tree match {
case DocDef(comment, definition) =>
treePrinter.print(comment.raw)
diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
index 0077ed0c84..1e70b7091b 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package ast
import java.awt.{List => awtList, _}
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index 7460d1ab31..b17de9b9d5 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -74,7 +74,7 @@ trait TreeDSL {
if (opSym == NoSymbol) ANY_==(other)
else fn(target, opSym, other)
}
- def ANY_EQ (other: Tree) = OBJ_EQ(other AS ObjectClass.tpe)
+ def ANY_EQ (other: Tree) = OBJ_EQ(other AS ObjectTpe)
def ANY_== (other: Tree) = fn(target, Any_==, other)
def ANY_!= (other: Tree) = fn(target, Any_!=, other)
def OBJ_!= (other: Tree) = fn(target, Object_!=, other)
diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
index 6a0f4407fc..0731d78a9b 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
@@ -87,12 +87,4 @@ abstract class TreeInfo extends scala.reflect.internal.TreeInfo {
case DocDef(_, definition) => isPureDef(definition)
case _ => super.isPureDef(tree)
}
-
- /** Does list of trees start with a definition of
- * a class of module with given name (ignoring imports)
- */
- override def firstDefinesClassOrObject(trees: List[Tree], name: Name): Boolean = trees match {
- case ClassDef(_, `name`, _, _) :: Nil => true
- case _ => super.firstDefinesClassOrObject(trees, name)
- }
}
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 8391ebdafc..41d89aa3b4 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -221,7 +221,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
try unit.body = transform(unit.body)
catch {
case ex: Exception =>
- println(supplementErrorMessage("unhandled exception while transforming "+unit))
+ log(supplementErrorMessage("unhandled exception while transforming "+unit))
throw ex
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 7671912651..fc532f5d44 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -30,6 +30,9 @@ trait ParsersCommon extends ScannersCommon {
val global : Global
import global._
+ def newLiteral(const: Any) = Literal(Constant(const))
+ def literalUnit = newLiteral(())
+
/** This is now an abstract class, only to work around the optimizer:
* methods in traits are never inlined.
*/
@@ -57,7 +60,7 @@ trait ParsersCommon extends ScannersCommon {
if (in.token == LPAREN) inParens(body)
else { accept(LPAREN) ; alt }
- @inline final def inParensOrUnit[T](body: => Tree): Tree = inParensOrError(body, Literal(Constant(())))
+ @inline final def inParensOrUnit[T](body: => Tree): Tree = inParensOrError(body, literalUnit)
@inline final def inParensOrNil[T](body: => List[T]): List[T] = inParensOrError(body, Nil)
@inline final def inBraces[T](body: => T): T = {
@@ -71,7 +74,7 @@ trait ParsersCommon extends ScannersCommon {
else { accept(LBRACE) ; alt }
@inline final def inBracesOrNil[T](body: => List[T]): List[T] = inBracesOrError(body, Nil)
- @inline final def inBracesOrUnit[T](body: => Tree): Tree = inBracesOrError(body, Literal(Constant(())))
+ @inline final def inBracesOrUnit[T](body: => Tree): Tree = inBracesOrError(body, literalUnit)
@inline final def dropAnyBraces[T](body: => T): T =
if (in.token == LBRACE) inBraces(body)
else body
@@ -355,13 +358,13 @@ self =>
/* If we see anything but the above, fail. */
return None
}
- Some(makePackaging(0, emptyPkg, newStmts))
+ Some(makeEmptyPackage(0, newStmts))
}
if (mainModuleName == newTermName(ScriptRunner.defaultScriptMain))
searchForMain() foreach { return _ }
- /** Here we are building an AST representing the following source fiction,
+ /* Here we are building an AST representing the following source fiction,
* where `moduleName` is from -Xscript (defaults to "Main") and <stmts> are
* the result of parsing the script file.
*
@@ -376,15 +379,13 @@ self =>
* }
* }}}
*/
-
- def emptyPkg = atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) }
def emptyInit = DefDef(
NoMods,
nme.CONSTRUCTOR,
Nil,
ListOfNil,
TypeTree(),
- Block(List(Apply(gen.mkSuperInitCall, Nil)), Literal(Constant(())))
+ Block(List(Apply(gen.mkSuperInitCall, Nil)), literalUnit)
)
// def main
@@ -395,11 +396,11 @@ self =>
// object Main
def moduleName = newTermName(ScriptRunner scriptMain settings)
- def moduleBody = Template(List(atPos(o2p(in.offset))(scalaAnyRefConstr)), emptyValDef, List(emptyInit, mainDef))
+ def moduleBody = Template(atInPos(scalaAnyRefConstr) :: Nil, emptyValDef, List(emptyInit, mainDef))
def moduleDef = ModuleDef(NoMods, moduleName, moduleBody)
// package <empty> { ... }
- makePackaging(0, emptyPkg, List(moduleDef))
+ makeEmptyPackage(0, moduleDef :: Nil)
}
/* --------------- PLACEHOLDERS ------------------------------------------- */
@@ -529,6 +530,10 @@ self =>
else
syntaxError(in.offset, msg, skipIt)
}
+ def syntaxErrorOrIncompleteAnd[T](msg: String, skipIt: Boolean)(and: T): T = {
+ syntaxErrorOrIncomplete(msg, skipIt)
+ and
+ }
def expectedMsg(token: Int): String =
token2string(token) + " expected but " +token2string(in.token) + " found."
@@ -563,9 +568,9 @@ self =>
if (!isStatSeqEnd)
acceptStatSep()
- def errorTypeTree = TypeTree() setType ErrorType setPos o2p(in.offset)
- def errorTermTree = Literal(Constant(null)) setPos o2p(in.offset)
- def errorPatternTree = Ident(nme.WILDCARD) setPos o2p(in.offset)
+ def errorTypeTree = setInPos(TypeTree() setType ErrorType)
+ def errorTermTree = setInPos(newLiteral(null))
+ def errorPatternTree = setInPos(Ident(nme.WILDCARD))
/** Check that type parameter is not by name or repeated. */
def checkNotByNameOrVarargs(tpt: Tree) = {
@@ -607,6 +612,10 @@ self =>
}
def isDefIntro = isTemplateIntro || isDclIntro
+ def isTopLevelIntro = in.token match {
+ case PACKAGE | IMPORT | AT => true
+ case _ => isTemplateIntro || isModifier
+ }
def isNumericLit: Boolean = in.token match {
case INTLIT | LONGLIT | FLOATLIT | DOUBLELIT => true
@@ -666,6 +675,9 @@ self =>
def atPos[T <: Tree](pos: Position)(t: T): T =
global.atPos(pos)(t)
+ def atInPos[T <: Tree](t: T): T = atPos(o2p(in.offset))(t)
+ def setInPos[T <: Tree](t: T): T = t setPos o2p(in.offset)
+
/** Convert tree to formal parameter list. */
def convertToParams(tree: Tree): List[ValDef] = tree match {
case Parens(ts) => ts map convertToParam
@@ -886,7 +898,7 @@ self =>
* }}}
*/
def compoundType(): Tree = compoundTypeRest(
- if (in.token == LBRACE) atPos(o2p(in.offset))(scalaAnyRefConstr)
+ if (in.token == LBRACE) atInPos(scalaAnyRefConstr)
else annotType()
)
@@ -947,12 +959,11 @@ self =>
}
/** Assumed (provisionally) to be TermNames. */
- def ident(skipIt: Boolean): Name =
+ def ident(skipIt: Boolean): Name = (
if (isIdent) rawIdent().encode
- else {
- syntaxErrorOrIncomplete(expectedMsg(IDENTIFIER), skipIt)
- nme.ERROR
- }
+ else syntaxErrorOrIncompleteAnd(expectedMsg(IDENTIFIER), skipIt)(nme.ERROR)
+ )
+
def ident(): Name = ident(skipIt = true)
def rawIdent(): Name = try in.name finally in.nextToken()
@@ -1075,31 +1086,24 @@ self =>
* | symbol
* | null
* }}}
- * @note The returned tree does not yet have a position
*/
- def literal(isNegated: Boolean = false, inPattern: Boolean = false): Tree = {
- def finish(value: Any): Tree = {
- val t = Literal(Constant(value))
- in.nextToken()
- t
- }
+ def literal(isNegated: Boolean = false, inPattern: Boolean = false, start: Int = in.offset): Tree = atPos(start) {
+ def finish(value: Any): Tree = try newLiteral(value) finally in.nextToken()
if (in.token == SYMBOLLIT)
Apply(scalaDot(nme.Symbol), List(finish(in.strVal)))
else if (in.token == INTERPOLATIONID)
interpolatedString(inPattern = inPattern)
else finish(in.token match {
- case CHARLIT => in.charVal
- case INTLIT => in.intVal(isNegated).toInt
- case LONGLIT => in.intVal(isNegated)
- case FLOATLIT => in.floatVal(isNegated).toFloat
- case DOUBLELIT => in.floatVal(isNegated)
+ case CHARLIT => in.charVal
+ case INTLIT => in.intVal(isNegated).toInt
+ case LONGLIT => in.intVal(isNegated)
+ case FLOATLIT => in.floatVal(isNegated).toFloat
+ case DOUBLELIT => in.floatVal(isNegated)
case STRINGLIT | STRINGPART => in.strVal.intern()
- case TRUE => true
- case FALSE => false
- case NULL => null
- case _ =>
- syntaxErrorOrIncomplete("illegal literal", skipIt = true)
- null
+ case TRUE => true
+ case FALSE => false
+ case NULL => null
+ case _ => syntaxErrorOrIncompleteAnd("illegal literal", skipIt = true)(null)
})
}
@@ -1112,18 +1116,15 @@ self =>
in.nextToken()
while (in.token == STRINGPART) {
partsBuf += literal()
- exprBuf += {
+ exprBuf += (
if (inPattern) dropAnyBraces(pattern())
- else {
- if (in.token == IDENTIFIER) atPos(in.offset)(Ident(ident()))
- else if(in.token == LBRACE) expr()
- else if(in.token == THIS) { in.nextToken(); atPos(in.offset)(This(tpnme.EMPTY)) }
- else {
- syntaxErrorOrIncomplete("error in interpolated string: identifier or block expected", skipIt = true)
- EmptyTree
- }
+ else in.token match {
+ case IDENTIFIER => atPos(in.offset)(Ident(ident()))
+ case LBRACE => expr()
+ case THIS => in.nextToken(); atPos(in.offset)(This(tpnme.EMPTY))
+ case _ => syntaxErrorOrIncompleteAnd("error in interpolated string: identifier or block expected", skipIt = true)(EmptyTree)
}
- }
+ )
}
if (in.token == STRINGLIT) partsBuf += literal()
@@ -1195,7 +1196,7 @@ self =>
r
} else {
accept(LPAREN)
- Literal(Constant(true))
+ newLiteral(true)
}
}
@@ -1249,7 +1250,7 @@ self =>
newLinesOpt()
val thenp = expr()
val elsep = if (in.token == ELSE) { in.nextToken(); expr() }
- else Literal(Constant(()))
+ else literalUnit
If(cond, thenp, elsep)
}
parseIf
@@ -1280,12 +1281,12 @@ self =>
parseTry
case WHILE =>
def parseWhile = {
+ val start = in.offset
atPos(in.skipToken()) {
- val lname: Name = freshTermName(nme.WHILE_PREFIX)
val cond = condExpr()
newLinesOpt()
val body = expr()
- makeWhile(lname.toTermName, cond, body)
+ makeWhile(start, cond, body)
}
}
parseWhile
@@ -1323,7 +1324,7 @@ self =>
case RETURN =>
def parseReturn =
atPos(in.skipToken()) {
- Return(if (isExprIntro) expr() else Literal(Constant(())))
+ Return(if (isExprIntro) expr() else literalUnit)
}
parseReturn
case THROW =>
@@ -1422,8 +1423,9 @@ self =>
* }}}
*/
def postfixExpr(): Tree = {
- val base = opstack
- var top = prefixExpr()
+ val start = in.offset
+ val base = opstack
+ var top = prefixExpr()
while (isIdent) {
top = reduceStack(isExpr = true, base, top, precedence(in.name), leftAssoc = treeInfo.isLeftAssoc(in.name))
@@ -1441,9 +1443,7 @@ self =>
val topinfo = opstack.head
opstack = opstack.tail
val od = stripParens(reduceStack(isExpr = true, base, topinfo.operand, 0, leftAssoc = true))
- return atPos(od.pos.startOrPoint, topinfo.offset) {
- new PostfixSelect(od, topinfo.operator.encode)
- }
+ return makePostfixSelect(start, topinfo.offset, od, topinfo.operator)
}
}
reduceStack(isExpr = true, base, top, 0, leftAssoc = true)
@@ -1458,7 +1458,7 @@ self =>
atPos(in.offset) {
val name = nme.toUnaryName(rawIdent().toTermName)
if (name == nme.UNARY_- && isNumericLit)
- simpleExprRest(atPos(in.offset)(literal(isNegated = true)), canApply = true)
+ simpleExprRest(literal(isNegated = true), canApply = true)
else
Select(stripParens(simpleExpr()), name)
}
@@ -1483,7 +1483,7 @@ self =>
def simpleExpr(): Tree = {
var canApply = true
val t =
- if (isLiteral) atPos(in.offset)(literal())
+ if (isLiteral) literal()
else in.token match {
case XMLSTART =>
xmlLiteral()
@@ -1511,8 +1511,7 @@ self =>
val cpos = r2p(tstart, tstart, in.lastOffset max tstart)
makeNew(parents, self, stats, npos, cpos)
case _ =>
- syntaxErrorOrIncomplete("illegal start of simple expression", skipIt = true)
- errorTermTree
+ syntaxErrorOrIncompleteAnd("illegal start of simple expression", skipIt = true)(errorTermTree)
}
simpleExprRest(t, canApply = canApply)
}
@@ -1832,8 +1831,7 @@ self =>
} else {
badStart
}
- syntaxErrorOrIncomplete(msg, skip)
- errorPatternTree
+ syntaxErrorOrIncompleteAnd(msg, skip)(errorPatternTree)
}
/** {{{
@@ -1849,14 +1847,10 @@ self =>
*
* XXX: Hook for IDE
*/
- def simplePattern(): Tree = {
+ def simplePattern(): Tree = (
// simple diagnostics for this entry point
- def badStart(): Tree = {
- syntaxErrorOrIncomplete("illegal start of simple pattern", skipIt = true)
- errorPatternTree
- }
- simplePattern(badStart)
- }
+ simplePattern(() => syntaxErrorOrIncompleteAnd("illegal start of simple pattern", skipIt = true)(errorPatternTree))
+ )
def simplePattern(onError: () => Tree): Tree = {
val start = in.offset
in.token match {
@@ -1866,7 +1860,7 @@ self =>
case INTLIT | LONGLIT | FLOATLIT | DOUBLELIT =>
t match {
case Ident(nme.MINUS) =>
- return atPos(start) { literal(isNegated = true, inPattern = true) }
+ return literal(isNegated = true, inPattern = true, start = start)
case _ =>
}
case _ =>
@@ -1884,7 +1878,7 @@ self =>
atPos(start, start) { Ident(nme.WILDCARD) }
case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT |
STRINGLIT | INTERPOLATIONID | SYMBOLLIT | TRUE | FALSE | NULL =>
- atPos(start) { literal(inPattern = true) }
+ literal(inPattern = true)
case LPAREN =>
atPos(start)(makeParens(noSeq.patterns()))
case XMLSTART =>
@@ -2072,7 +2066,7 @@ self =>
if (mods.isLazy) syntaxError("lazy modifier not allowed here. Use call-by-name parameters instead", skipIt = false)
in.token match {
case v @ (VAL | VAR) =>
- mods = mods withPosition (in.token, tokenRange(in))
+ mods = mods withPosition (in.token.toLong, tokenRange(in))
if (v == VAR) mods |= Flags.MUTABLE
in.nextToken()
case _ =>
@@ -2110,7 +2104,7 @@ self =>
expr()
} else EmptyTree
atPos(start, if (name == nme.ERROR) start else nameOffset) {
- ValDef((mods | implicitmod | bynamemod) withAnnotations annots, name.toTermName, tpt, default)
+ ValDef((mods | implicitmod.toLong | bynamemod) withAnnotations annots, name.toTermName, tpt, default)
}
}
def paramClause(): List[ValDef] = {
@@ -2222,16 +2216,18 @@ self =>
* }}}
*/
def typeBounds(): TypeBoundsTree = {
- val t = TypeBoundsTree(
- bound(SUPERTYPE, tpnme.Nothing),
- bound(SUBTYPE, tpnme.Any)
- )
- t setPos wrappingPos(List(t.hi, t.lo))
+ val lo = bound(SUPERTYPE)
+ val hi = bound(SUBTYPE)
+ val t = TypeBoundsTree(lo, hi)
+ val defined = List(t.hi, t.lo) filter (_.pos.isDefined)
+
+ if (defined.nonEmpty)
+ t setPos wrappingPos(defined)
+ else
+ t setPos o2p(in.offset)
}
- def bound(tok: Int, default: TypeName): Tree =
- if (in.token == tok) { in.nextToken(); typ() }
- else atPos(o2p(in.lastOffset)) { rootScalaDot(default) }
+ def bound(tok: Int): Tree = if (in.token == tok) { in.nextToken(); typ() } else EmptyTree
/* -------- DEFS ------------------------------------------- */
@@ -2524,7 +2520,7 @@ self =>
*/
def constrExpr(vparamss: List[List[ValDef]]): Tree =
if (in.token == LBRACE) constrBlock(vparamss)
- else Block(List(selfInvocation(vparamss)), Literal(Constant(())))
+ else Block(selfInvocation(vparamss) :: Nil, literalUnit)
/** {{{
* SelfInvocation ::= this ArgumentExprs {ArgumentExprs}
@@ -2554,7 +2550,7 @@ self =>
else Nil
}
accept(RBRACE)
- Block(stats, Literal(Constant(())))
+ Block(stats, literalUnit)
}
/** {{{
@@ -2577,8 +2573,7 @@ self =>
case SUPERTYPE | SUBTYPE | SEMI | NEWLINE | NEWLINES | COMMA | RBRACE =>
TypeDef(mods | Flags.DEFERRED, name, tparams, typeBounds())
case _ =>
- syntaxErrorOrIncomplete("`=', `>:', or `<:' expected", skipIt = true)
- EmptyTree
+ syntaxErrorOrIncompleteAnd("`=', `>:', or `<:' expected", skipIt = true)(EmptyTree)
}
}
}
@@ -2611,8 +2606,7 @@ self =>
case CASEOBJECT =>
objectDef(pos, (mods | Flags.CASE) withPosition (Flags.CASE, tokenRange(in.prev /*scanner skips on 'case' to 'object', thus take prev*/)))
case _ =>
- syntaxErrorOrIncomplete("expected start of definition", skipIt = true)
- EmptyTree
+ syntaxErrorOrIncompleteAnd("expected start of definition", skipIt = true)(EmptyTree)
}
}
@@ -2674,6 +2668,40 @@ self =>
}
}
+ /** Create a tree representing a package object, converting
+ * {{{
+ * package object foo { ... }
+ * }}}
+ * to
+ * {{{
+ * package foo {
+ * object `package` { ... }
+ * }
+ * }}}
+ */
+ def packageObjectDef(start: Offset): PackageDef = {
+ val defn = objectDef(in.offset, NoMods)
+ val module = copyModuleDef(defn)(name = nme.PACKAGEkw)
+ val pid = atPos(o2p(defn.pos.startOrPoint))(Ident(defn.name))
+
+ makePackaging(start, pid, module :: Nil)
+ }
+ def packageOrPackageObject(start: Offset): Tree = (
+ if (in.token == OBJECT)
+ joinComment(packageObjectDef(start) :: Nil).head
+ else {
+ in.flushDoc
+ makePackaging(start, pkgQualId(), inBracesOrNil(topStatSeq()))
+ }
+ )
+ // TODO - eliminate this and use "def packageObjectDef" (see the call site of this
+ // method for a brief elaboration.)
+ def makePackageObject(start: Int, objDef: ModuleDef): PackageDef = objDef match {
+ case ModuleDef(mods, name, impl) =>
+ makePackaging(
+ start, atPos(o2p(objDef.pos.startOrPoint)){ Ident(name) }, List(ModuleDef(mods, nme.PACKAGEkw, impl)))
+ }
+
/** {{{
* ClassParents ::= AnnotType {`(' [Exprs] `)'} {with AnnotType}
* TraitParents ::= AnnotType {with AnnotType}
@@ -2753,14 +2781,14 @@ self =>
def anyrefParents() = {
val caseParents = if (mods.isCase) List(productConstr, serializableConstr) else Nil
parents0 ::: caseParents match {
- case Nil => List(atPos(o2p(in.offset))(scalaAnyRefConstr))
+ case Nil => atInPos(scalaAnyRefConstr) :: Nil
case ps => ps
}
}
def anyvalConstructor() = (
// Not a well-formed constructor, has to be finished later - see note
// regarding AnyVal constructor in AddInterfaces.
- DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, Literal(Constant(()))))
+ DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, literalUnit))
)
val tstart0 = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart
@@ -2809,42 +2837,10 @@ self =>
def makePackaging(start: Int, pkg: Tree, stats: List[Tree]): PackageDef = pkg match {
case x: RefTree => atPos(start, pkg.pos.point)(PackageDef(x, stats))
}
-/*
- pkg match {
- case id @ Ident(_) =>
- PackageDef(id, stats)
- case Select(qual, name) => // drop this to flatten packages
- makePackaging(start, qual, List(PackageDef(Ident(name), stats)))
- }
- }
-*/
- /** Create a tree representing a package object, converting
- * {{{
- * package object foo { ... }
- * }}}
- * to
- * {{{
- * package foo {
- * object `package` { ... }
- * }
- * }}}
- */
- def makePackageObject(start: Int, objDef: ModuleDef): PackageDef = objDef match {
- case ModuleDef(mods, name, impl) =>
- makePackaging(
- start, atPos(o2p(objDef.pos.startOrPoint)){ Ident(name) }, List(ModuleDef(mods, nme.PACKAGEkw, impl)))
- }
-
- /** {{{
- * Packaging ::= package QualId [nl] `{' TopStatSeq `}'
- * }}}
- */
- def packaging(start: Int): Tree = {
- val pkg = pkgQualId()
- val stats = inBracesOrNil(topStatSeq())
- makePackaging(start, pkg, stats)
- }
+ def makeEmptyPackage(start: Int, stats: List[Tree]): PackageDef = (
+ makePackaging(start, atPos(start, start, start)(Ident(nme.EMPTY_PACKAGE_NAME)), stats)
+ )
/** {{{
* TopStatSeq ::= TopStat {semi TopStat}
@@ -2860,22 +2856,15 @@ self =>
while (!isStatSeqEnd) {
stats ++= (in.token match {
case PACKAGE =>
- val start = in.skipToken()
- if (in.token == OBJECT)
- joinComment(List(makePackageObject(start, objectDef(in.offset, NoMods))))
- else {
- in.flushDoc
- List(packaging(start))
- }
+ packageOrPackageObject(in.skipToken()) :: Nil
case IMPORT =>
in.flushDoc
importClause()
case x if x == AT || isTemplateIntro || isModifier =>
- joinComment(List(topLevelTmplDef))
+ joinComment(topLevelTmplDef :: Nil)
case _ =>
- if (!isStatSep)
- syntaxErrorOrIncomplete("expected class or object definition", skipIt = true)
- Nil
+ if (isStatSep) Nil
+ else syntaxErrorOrIncompleteAnd("expected class or object definition", skipIt = true)(Nil)
})
acceptStatSepOpt()
}
@@ -2980,13 +2969,13 @@ self =>
def localDef(implicitMod: Int): List[Tree] = {
val annots = annotations(skipNewLines = true)
val pos = in.offset
- val mods = (localModifiers() | implicitMod) withAnnotations annots
+ val mods = (localModifiers() | implicitMod.toLong) withAnnotations annots
val defs =
if (!(mods hasFlag ~(Flags.IMPLICIT | Flags.LAZY))) defOrDcl(pos, mods)
else List(tmplDef(pos, mods))
in.token match {
- case RBRACE | CASE => defs :+ (Literal(Constant(())) setPos o2p(in.offset))
+ case RBRACE | CASE => defs :+ setInPos(literalUnit)
case _ => defs
}
}
@@ -3036,7 +3025,7 @@ self =>
* CompilationUnit ::= {package QualId semi} TopStatSeq
* }}}
*/
- def compilationUnit(): Tree = checkNoEscapingPlaceholders {
+ def compilationUnit(): PackageDef = checkNoEscapingPlaceholders {
def topstats(): List[Tree] = {
val ts = new ListBuffer[Tree]
while (in.token == SEMI) in.nextToken()
@@ -3044,6 +3033,9 @@ self =>
if (in.token == PACKAGE) {
in.nextToken()
if (in.token == OBJECT) {
+ // TODO - this next line is supposed to be
+ // ts += packageObjectDef(start)
+ // but this broke a scaladoc test (run/diagrams-filtering.scala) somehow.
ts ++= joinComment(List(makePackageObject(start, objectDef(in.offset, NoMods))))
if (in.token != EOF) {
acceptStatSep()
@@ -3072,8 +3064,8 @@ self =>
resetPackage()
topstats() match {
- case List(stat @ PackageDef(_, _)) => stat
- case stats =>
+ case (stat @ PackageDef(_, _)) :: Nil => stat
+ case stats =>
val start =
if (stats forall (_ == EmptyTree)) 0
else {
@@ -3082,7 +3074,7 @@ self =>
else 0
}
- makePackaging(start, atPos(start, start, start) { Ident(nme.EMPTY_PACKAGE_NAME) }, stats)
+ makeEmptyPackage(start, stats)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 88ff21a90d..80b6ad3cc7 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -13,6 +13,7 @@ import scala.annotation.{ switch, tailrec }
import scala.collection.{ mutable, immutable }
import mutable.{ ListBuffer, ArrayBuffer }
import scala.xml.Utility.{ isNameStart }
+import scala.language.postfixOps
/** See Parsers.scala / ParsersCommon for some explanation of ScannersCommon.
*/
@@ -466,6 +467,7 @@ trait Scanners extends ScannersCommon {
if (ch == '\"') {
nextRawChar()
if (ch == '\"') {
+ offset += 3
nextRawChar()
getStringPart(multiLine = true)
sepRegions = STRINGPART :: sepRegions // indicate string part
@@ -475,6 +477,7 @@ trait Scanners extends ScannersCommon {
strVal = ""
}
} else {
+ offset += 1
getStringPart(multiLine = false)
sepRegions = STRINGLIT :: sepRegions // indicate single line string part
}
@@ -558,7 +561,7 @@ trait Scanners extends ScannersCommon {
nextChar()
getOperatorRest()
} else {
- syntaxError("illegal character '" + ("" + '\\' + 'u' + "%04x".format(ch: Int)) + "'")
+ syntaxError("illegal character '" + ("" + '\\' + 'u' + "%04x".format(ch.toInt)) + "'")
nextChar()
}
}
@@ -695,7 +698,7 @@ trait Scanners extends ScannersCommon {
}
}
- @annotation.tailrec private def getStringPart(multiLine: Boolean): Unit = {
+ @scala.annotation.tailrec private def getStringPart(multiLine: Boolean): Unit = {
def finishStringPart() = {
setStrVal()
token = STRINGPART
@@ -888,7 +891,7 @@ trait Scanners extends ScannersCommon {
*/
def intVal(negated: Boolean): Long = {
if (token == CHARLIT && !negated) {
- charVal
+ charVal.toLong
} else {
var value: Long = 0
val divider = if (base == 10) 1 else 2
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
index 80d70e6428..3a695c6f59 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
@@ -11,30 +11,98 @@ import javac._
/** An nsc sub-component.
*/
abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParsers with Scanners with JavaParsers with JavaScanners {
+ import global._
val phaseName = "parser"
-
def newPhase(prev: Phase): StdPhase = new ParserPhase(prev)
- class ParserPhase(prev: scala.tools.nsc.Phase) extends StdPhase(prev) {
+ abstract class MemberDefTraverser extends Traverser {
+ def onMember(defn: MemberDef): Unit
+
+ private var depth: Int = 0
+ private def lower[T](body: => T): T = {
+ depth += 1
+ try body finally depth -= 1
+ }
+ def currentDepth = depth
+
+ /** Prune this tree and all trees beneath it. Can be overridden. */
+ def prune(md: MemberDef): Boolean = (
+ md.mods.isSynthetic
+ || md.mods.isParamAccessor
+ || nme.isConstructorName(md.name)
+ || (md.name containsName nme.ANON_CLASS_NAME)
+ )
+
+ override def traverse(t: Tree): Unit = t match {
+ case md: MemberDef if prune(md) =>
+ case md @ PackageDef(_, stats) => traverseTrees(stats)
+ case md: ImplDef => onMember(md) ; lower(traverseTrees(md.impl.body))
+ case md: ValOrDefDef => onMember(md) ; lower(traverse(md.rhs))
+ case _ => super.traverse(t)
+ }
+ }
+
+ class MemberPosReporter(unit: CompilationUnit) extends MemberDefTraverser {
+ private var outputFn: MemberDef => String = outputForScreen
+ val path = unit.source.file.path
+
+ // If the member spans a single line, outputs that line number; if it spans
+ // multiple lines, outputs NN,NN with the start and end lines, e.g. 15,25.
+ def outputPos(md: MemberDef): String = {
+ val pos = md.pos
+ val start = pos.focusStart.line
+ val end = pos.focusEnd.line
+
+ if (start == end) "" + start else s"$start,$end"
+ }
+ def outputForSed(md: MemberDef): String = {
+ val pos_s = "%-12s" format outputPos(md) + "p"
+ s"$pos_s $path # ${md.keyword} ${md.name}"
+ }
+ def outputForScreen(md: MemberDef): String = {
+ val pos_s = "%-20s" format " " * currentDepth + outputPos(md)
+ s"$pos_s ${md.keyword} ${md.name}"
+ }
+
+ def onMember(md: MemberDef) = println(outputFn(md))
+ // Recognizes two styles: "sed" and anything else (plain screen output).
+ def show(style: String) {
+ if (style == "sed") {
+ outputFn = outputForSed
+ traverse(unit.body)
+ }
+ else {
+ outputFn = outputForScreen
+ println(path)
+ traverse(unit.body)
+ }
+ println("")
+ }
+ }
+
+ private def initialUnitBody(unit: CompilationUnit): Tree = {
+ if (unit.isJava) new JavaUnitParser(unit).parse()
+ else if (global.reporter.incompleteHandled) newUnitParser(unit).parse()
+ else newUnitParser(unit).smartParse()
+ }
+
+ class ParserPhase(prev: Phase) extends StdPhase(prev) {
override val checkable = false
override val keepsTypeParams = false
- def apply(unit: global.CompilationUnit) {
- import global._
+ def apply(unit: CompilationUnit) {
informProgress("parsing " + unit)
- // if the body is already filled in, do nothing
+ // if the body is already filled in, don't overwrite it
// otherwise compileLate is going to overwrite bodies of synthetic source files
- if (unit.body == EmptyTree) {
- unit.body =
- if (unit.isJava) new JavaUnitParser(unit).parse()
- else if (reporter.incompleteHandled) newUnitParser(unit).parse()
- else newUnitParser(unit).smartParse()
- }
+ if (unit.body == EmptyTree)
+ unit.body = initialUnitBody(unit)
if (settings.Yrangepos && !reporter.hasErrors)
validatePositions(unit.body)
+
+ if (settings.Ymemberpos.isSetByUser)
+ new MemberPosReporter(unit) show (style = settings.Ymemberpos.value)
}
}
}
-
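The lower/currentDepth bookkeeping in MemberDefTraverser above is a small reusable pattern: increment a counter around the recursive call and restore it in a finally, so the depth stays correct even if traversal throws. A stand-alone sketch of the same idea on an ordinary recursive structure (all names are illustrative):

    object DepthTrackingDemo {
      sealed trait Node
      final case class Branch(name: String, children: List[Node]) extends Node
      final case class Leaf(name: String) extends Node

      private var depth = 0
      private def lower[T](body: => T): T = {
        depth += 1
        try body finally depth -= 1
      }

      // Print each node indented by its current nesting depth.
      def visit(node: Node): Unit = node match {
        case Leaf(name)             => println(("  " * depth) + name)
        case Branch(name, children) => println(("  " * depth) + name); lower(children foreach visit)
      }

      def main(args: Array[String]): Unit =
        visit(Branch("root", List(Leaf("a"), Branch("b", List(Leaf("c"))))))
    }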
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 5f361f32b5..ec4e932aae 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -242,14 +242,25 @@ abstract class TreeBuilder {
Assign(lhs, rhs)
}
+ /** Tree for `od op`, start is start0 if od.pos is undefined. */
+ def makePostfixSelect(start0: Int, end: Int, od: Tree, op: Name): Tree = {
+ val start = if (od.pos.isDefined) od.pos.startOrPoint else start0
+ atPos(r2p(start, end, end)) { new PostfixSelect(od, op.encode) }
+ }
+
/** A type tree corresponding to (possibly unary) intersection type */
def makeIntersectionTypeTree(tps: List[Tree]): Tree =
if (tps.tail.isEmpty) tps.head
else CompoundTypeTree(Template(tps, emptyValDef, Nil))
/** Create tree representing a while loop */
- def makeWhile(lname: TermName, cond: Tree, body: Tree): Tree = {
- val continu = atPos(o2p(body.pos pointOrElse wrappingPos(List(cond, body)).pos.endOrPoint)) { Apply(Ident(lname), Nil) }
+ def makeWhile(startPos: Int, cond: Tree, body: Tree): Tree = {
+ val lname = freshTermName(nme.WHILE_PREFIX)
+ def default = wrappingPos(List(cond, body)) match {
+ case p if p.isDefined => p.endOrPoint
+ case _ => startPos
+ }
+ val continu = atPos(o2p(body.pos pointOrElse default)) { Apply(Ident(lname), Nil) }
val rhs = If(cond, Block(List(body), continu), Literal(Constant(())))
LabelDef(lname, Nil, rhs)
}
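
For intuition, the LabelDef built by makeWhile corresponds to the usual desugaring of a while loop into a self-recursive nullary label. A hand-written Scala equivalent, with whileN standing in for the fresh name generated from nme.WHILE_PREFIX, might look like this (a sketch, not the compiler's own code):

object WhileDesugaring {
  def main(args: Array[String]): Unit = {
    var i = 0

    // What `while (i < 3) { println(i); i += 1 }` boils down to:
    // a label whose body re-invokes itself while the condition holds.
    def whileN(): Unit =
      if (i < 3) {
        println(i)
        i += 1
        whileN()
      } else ()

    whileN()
  }
}
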
diff --git a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
index 1f9862596c..b8ddb65de9 100644
--- a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend
import scala.collection.{ mutable, immutable }
@@ -441,15 +442,17 @@ abstract class ScalaPrimitives {
}
def addPrimitives(cls: Symbol, method: Name, code: Int) {
- val tpe = cls.info
- val sym = tpe.member(method)
- if (sym == NoSymbol)
- inform("Unknown primitive method " + cls + "." + method)
- for (s <- sym.alternatives)
- addPrimitive(
- s,
- if (code == ADD && s.info.paramTypes.head == definitions.StringClass.tpe) CONCAT
- else code)
+ val alts = (cls.info member method).alternatives
+ if (alts.isEmpty)
+ inform(s"Unknown primitive method $cls.$method")
+ else alts foreach (s =>
+ addPrimitive(s,
+ s.info.paramTypes match {
+ case tp :: _ if code == ADD && tp =:= StringTpe => CONCAT
+ case _ => code
+ }
+ )
+ )
}
def isCoercion(code: Int): Boolean = (code >= B2B) && (code <= D2D)
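
The rewrite above keeps the behavior of the old loop: every overload of the method gets a primitive code, except that `+` taking a String parameter is recorded as string concatenation rather than numeric addition. A minimal sketch of that decision, with illustrative constants rather than the compiler's real code table:

object PrimitiveCodes {
  // Illustrative codes only; the real values live in ScalaPrimitives.
  final val ADD    = 1
  final val CONCAT = 2

  // ADD applied to a String parameter becomes CONCAT; everything else keeps its code.
  def codeFor(code: Int, firstParamIsString: Boolean): Int =
    if (code == ADD && firstParamIsString) CONCAT else code

  def main(args: Array[String]): Unit = {
    println(codeFor(ADD, firstParamIsString = true))   // 2 (CONCAT)
    println(codeFor(ADD, firstParamIsString = false))  // 1 (ADD)
  }
}
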
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index 94fbba8066..26d10d2514 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -8,8 +8,7 @@ package backend
package icode
import scala.collection.{ mutable, immutable }
-import mutable.{ ListBuffer, ArrayBuffer }
-import scala.reflect.internal.util.{ Position, NoPosition }
+import mutable.ListBuffer
import backend.icode.analysis.ProgramPoint
import scala.language.postfixOps
@@ -17,8 +16,7 @@ trait BasicBlocks {
self: ICodes =>
import opcodes._
- import global.{ settings, debuglog, log, nme }
- import nme.isExceptionResultName
+ import global._
/** Override Array creation for efficiency (to not go through reflection). */
private implicit val instructionTag: scala.reflect.ClassTag[Instruction] = new scala.reflect.ClassTag[Instruction] {
@@ -38,7 +36,7 @@ trait BasicBlocks {
import BBFlags._
- def code = method.code
+ def code = if (method eq null) NoCode else method.code
private final class SuccessorList() {
private var successors: List[BasicBlock] = Nil
@@ -336,9 +334,9 @@ trait BasicBlocks {
* put into ignore mode so we hear about it if there's a problem.
*/
instr match {
- case JUMP(_) | RETURN(_) | THROW(_) | SCOPE_EXIT(_) => // ok
- case STORE_LOCAL(local) if isExceptionResultName(local.sym.name) => // ok
- case x => log("Ignoring instruction, possibly at our peril, at " + pos + ": " + x)
+ case JUMP(_) | RETURN(_) | THROW(_) | SCOPE_EXIT(_) => // ok
+ case STORE_LOCAL(local) if nme.isExceptionResultName(local.sym.name) => // ok
+ case x => log("Ignoring instruction, possibly at our peril, at " + pos + ": " + x)
}
}
}
@@ -416,7 +414,7 @@ trait BasicBlocks {
enterIgnoreMode()
}
}
-
+
/**
* Same as killIf but with the logic of the condition reversed
*/
@@ -477,16 +475,17 @@ trait BasicBlocks {
def directSuccessors: List[BasicBlock] =
if (isEmpty) Nil else lastInstruction match {
- case JUMP(whereto) => whereto :: Nil
- case CJUMP(succ, fail, _, _) => fail :: succ :: Nil
- case CZJUMP(succ, fail, _, _) => fail :: succ :: Nil
- case SWITCH(_, labels) => labels
- case RETURN(_) => Nil
- case THROW(_) => Nil
- case _ =>
+ case JUMP(whereto) => whereto :: Nil
+ case CJUMP(succ, fail, _, _) => fail :: succ :: Nil
+ case CZJUMP(succ, fail, _, _) => fail :: succ :: Nil
+ case SWITCH(_, labels) => labels
+ case RETURN(_) => Nil
+ case THROW(_) => Nil
+ case _ =>
if (closed)
- dumpClassesAndAbort("The last instruction is not a control flow instruction: " + lastInstruction)
- else Nil
+ devWarning(s"$lastInstruction/${lastInstruction.getClass.getName} is not a control flow instruction")
+
+ Nil
}
/** Returns the predecessors of this block. */
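
The shape of directSuccessors carries over to a self-contained sketch using a stripped-down instruction ADT instead of the real ICode opcodes (only the structure of the match is meant to match the code above):

object Successors {
  type Block = String

  sealed trait Instr
  case class JUMP(whereto: Block)            extends Instr
  case class CJUMP(succ: Block, fail: Block) extends Instr
  case class SWITCH(labels: List[Block])     extends Instr
  case object RETURN                         extends Instr
  case object THROW                          extends Instr
  case class OTHER(name: String)             extends Instr

  // Branch targets, fall-through pairs, or nothing.
  def directSuccessors(last: Instr): List[Block] = last match {
    case JUMP(whereto)     => whereto :: Nil
    case CJUMP(succ, fail) => fail :: succ :: Nil
    case SWITCH(labels)    => labels
    case RETURN | THROW    => Nil
    case OTHER(_)          => Nil // the real code warns here when the block is closed
  }

  def main(args: Array[String]): Unit = {
    println(directSuccessors(CJUMP("then", "else")))  // List(else, then)
    println(directSuccessors(RETURN))                 // List()
  }
}
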
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index 31028e64d3..7263a0d0b9 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -4,7 +4,8 @@
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend
package icode
@@ -22,12 +23,7 @@ abstract class GenICode extends SubComponent {
import global._
import icodes._
import icodes.opcodes._
- import definitions.{
- ArrayClass, ObjectClass, ThrowableClass, StringClass, StringModule, AnyRefClass,
- Object_equals, Object_isInstanceOf, Object_asInstanceOf, ScalaRunTimeModule,
- BoxedNumberClass, BoxedCharacterClass,
- getMember
- }
+ import definitions._
import scalaPrimitives.{
isArrayOp, isComparisonOp, isLogicalOp,
isUniversalEqualityOp, isReferenceEqualityOp
@@ -181,7 +177,7 @@ abstract class GenICode extends SubComponent {
}
private def genThrow(expr: Tree, ctx: Context): (Context, TypeKind) = {
- require(expr.tpe <:< ThrowableClass.tpe, expr.tpe)
+ require(expr.tpe <:< ThrowableTpe, expr.tpe)
val thrownKind = toTypeKind(expr.tpe)
val ctx1 = genLoad(expr, ctx, thrownKind)
@@ -301,7 +297,7 @@ abstract class GenICode extends SubComponent {
}
private def genSynchronized(tree: Apply, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
val Apply(fun, args) = tree
- val monitor = ctx.makeLocal(tree.pos, ObjectClass.tpe, "monitor")
+ val monitor = ctx.makeLocal(tree.pos, ObjectTpe, "monitor")
var monitorResult: Local = null
val argTpe = args.head.tpe
val hasResult = expectedType != UNIT
@@ -1467,7 +1463,7 @@ abstract class GenICode extends SubComponent {
*/
def genEqEqPrimitive(l: Tree, r: Tree, ctx: Context)(thenCtx: Context, elseCtx: Context): Boolean = {
def getTempLocal = ctx.method.lookupLocal(nme.EQEQ_LOCAL_VAR) getOrElse {
- ctx.makeLocal(l.pos, AnyRefClass.tpe, nme.EQEQ_LOCAL_VAR.toString)
+ ctx.makeLocal(l.pos, AnyRefTpe, nme.EQEQ_LOCAL_VAR.toString)
}
/* True if the equality comparison is between values that require the use of the rich equality
@@ -2038,7 +2034,7 @@ abstract class GenICode extends SubComponent {
this.addActiveHandler(exh) // .. and body aswell
val exhStartCtx = finalizerCtx.enterExceptionHandler(exh)
exhStartCtx.bb killIf outerCtx.bb.ignore
- val exception = exhStartCtx.makeLocal(finalizer.pos, ThrowableClass.tpe, "exc")
+ val exception = exhStartCtx.makeLocal(finalizer.pos, ThrowableTpe, "exc")
loadException(exhStartCtx, exh, finalizer.pos)
exhStartCtx.bb.emit(STORE_LOCAL(exception))
val exhEndCtx = genLoad(finalizer, exhStartCtx, UNIT)
@@ -2164,8 +2160,7 @@ abstract class GenICode extends SubComponent {
* by a real JUMP instruction when all labels are resolved.
*/
abstract class PseudoJUMP(label: Label) extends Instruction {
- override def toString(): String = "PJUMP " + label.symbol
-
+ override def toString = s"PJUMP(${label.symbol})"
override def consumed = 0
override def produced = 0
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
index c5fe3228a3..54be9d18f1 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
@@ -4,7 +4,8 @@
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend
package icode
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
index c1cda2c863..4389afb2b7 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend
package icode
@@ -73,7 +74,7 @@ trait Members {
)
startBlock = b.successors.head
}
-
+
blocks -= b
assert(!blocks.contains(b))
method.exh filter (_ covers b) foreach (_.covered -= b)
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
index ff118be3c4..57a768d9cb 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend
package icode
@@ -394,19 +395,19 @@ trait Opcodes { self: ICodes =>
override def category = mthdsCat
}
-
+
/**
* A place holder entry that allows us to parse class files with invoke dynamic
* instructions. Because the compiler doesn't yet really understand the
* behavior of invokeDynamic, this op acts as a poison pill. Any attempt to analyze
* this instruction will cause a failure. The only optimization that
* should ever look at non-Scala generated icode is the inliner, and it
- * has been modified to not examine any method with invokeDynamic
+ * has been modified to not examine any method with invokeDynamic
* instructions. So if this poison pill ever causes problems then
* there's been a serious misunderstanding
*/
// TODO do the real thing
- case class INVOKE_DYNAMIC(poolEntry: Char) extends Instruction {
+ case class INVOKE_DYNAMIC(poolEntry: Int) extends Instruction {
private def error = sys.error("INVOKE_DYNAMIC is not fully implemented and should not be analyzed")
override def consumed = error
override def produced = error
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
index 338a07c872..0c3f92f13f 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend.icode.analysis
import scala.collection.{ mutable, immutable }
@@ -456,7 +457,7 @@ abstract class CopyPropagation {
final def simulateCall(state: copyLattice.State, method: Symbol, static: Boolean): copyLattice.State = {
val out = new copyLattice.State(state.bindings, state.stack)
out.stack = out.stack.drop(method.info.paramTypes.length + (if (static) 0 else 1))
- if (method.info.resultType != definitions.UnitClass.tpe && !method.isConstructor)
+ if (method.info.resultType != definitions.UnitTpe && !method.isConstructor)
out.stack = Unknown :: out.stack
if (!isPureMethod(method))
invalidateRecords(out)
@@ -541,7 +542,8 @@ abstract class CopyPropagation {
m.isGetter // abstract getters are still pure, as we 'know'
final override def toString() = (
- method.blocks map { b =>
+ if (method eq null) List("<null>")
+ else method.blocks map { b =>
"\nIN(%s):\t Bindings: %s".format(b.label, in(b).bindings) +
"\nIN(%s):\t Stack: %s".format(b.label, in(b).stack)
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
index ebc2d33a62..a378998f8f 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
@@ -4,7 +4,8 @@
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend.icode.analysis
import scala.collection.{ mutable, immutable }
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
index 26b7bc50d8..fecd48ed27 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
@@ -240,7 +240,8 @@ abstract class ReachingDefinitions {
findDefs(bb, idx, m, 0)
override def toString: String = {
- method.code.blocks map { b =>
+ if (method eq null) "<null>"
+ else method.code.blocks map { b =>
" entry(%s) = %s\n".format(b, in(b)) +
" exit(%s) = %s\n".format(b, out(b))
} mkString ("ReachingDefinitions {\n", "\n", "\n}")
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index 57380db7e7..7a53293384 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend.icode.analysis
import scala.collection.{mutable, immutable}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
index 66aed14d1c..0df828393d 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
@@ -8,7 +8,6 @@ package backend.jvm
import java.io.{ DataOutputStream, FileOutputStream, IOException, OutputStream, File => JFile }
import scala.tools.nsc.io._
-import scala.tools.nsc.util.ScalaClassLoader
import java.util.jar.Attributes.Name
import scala.language.postfixOps
@@ -38,11 +37,22 @@ trait BytecodeWriters {
for (part <- pathParts.init) dir = ensureDirectory(dir) subdirectoryNamed part
ensureDirectory(dir) fileNamed pathParts.last + suffix
}
- private def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile =
+ def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile =
getFile(outputDirectory(sym), clsName, suffix)
+ def factoryNonJarBytecodeWriter(): BytecodeWriter = {
+ val emitAsmp = settings.Ygenasmp.isSetByUser
+ val doDump = settings.Ydumpclasses.isSetByUser
+ (emitAsmp, doDump) match {
+ case (false, false) => new ClassBytecodeWriter { }
+ case (false, true ) => new ClassBytecodeWriter with DumpBytecodeWriter { }
+ case (true, false) => new ClassBytecodeWriter with AsmpBytecodeWriter
+ case (true, true ) => new ClassBytecodeWriter with AsmpBytecodeWriter with DumpBytecodeWriter { }
+ }
+ }
+
trait BytecodeWriter {
- def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol): Unit
+ def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile): Unit
def close(): Unit = ()
}
@@ -53,7 +63,9 @@ trait BytecodeWriters {
)
val writer = new Jar(jfile).jarWriter(jarMainAttrs: _*)
- def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
+ def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile) {
+ assert(outfile == null,
+ "The outfile formal param is there just because ClassBytecodeWriter overrides this method and uses it.")
val path = jclassName + ".class"
val out = writer.newOutputStream(path)
@@ -65,9 +77,47 @@ trait BytecodeWriters {
override def close() = writer.close()
}
+ /*
+   * The ASM textual representation for bytecode overcomes disadvantages of javap output in three areas:
+ * (a) pickle dingbats undecipherable to the naked eye;
+ * (b) two constant pools, while having identical contents, are displayed differently due to physical layout.
+ * (c) stack maps (classfile version 50 and up) are displayed in encoded form by javap,
+ * their expansion by ASM is more readable.
+ *
+ * */
+ trait AsmpBytecodeWriter extends BytecodeWriter {
+ import scala.tools.asm
+
+ private val baseDir = Directory(settings.Ygenasmp.value).createDirectory()
+
+ private def emitAsmp(jclassBytes: Array[Byte], asmpFile: io.File) {
+ val pw = asmpFile.printWriter()
+ try {
+ val cnode = new asm.tree.ClassNode()
+ val cr = new asm.ClassReader(jclassBytes)
+ cr.accept(cnode, 0)
+ val trace = new scala.tools.asm.util.TraceClassVisitor(new java.io.PrintWriter(new java.io.StringWriter()))
+ cnode.accept(trace)
+ trace.p.print(pw)
+ }
+ finally pw.close()
+ }
+
+ abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile) {
+ super.writeClass(label, jclassName, jclassBytes, outfile)
+
+ val segments = jclassName.split("[./]")
+ val asmpFile = segments.foldLeft(baseDir: Path)(_ / _) changeExtension "asmp" toFile;
+
+ asmpFile.parent.createDirectory()
+ emitAsmp(jclassBytes, asmpFile)
+ }
+ }
+
trait ClassBytecodeWriter extends BytecodeWriter {
- def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
- val outfile = getFile(sym, jclassName, ".class")
+ def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile) {
+ assert(outfile != null,
+ "Precisely this override requires its invoker to hand out a non-null AbstractFile.")
val outstream = new DataOutputStream(outfile.bufferedOutput)
try outstream.write(jclassBytes, 0, jclassBytes.length)
@@ -79,8 +129,8 @@ trait BytecodeWriters {
trait DumpBytecodeWriter extends BytecodeWriter {
val baseDir = Directory(settings.Ydumpclasses.value).createDirectory()
- abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
- super.writeClass(label, jclassName, jclassBytes, sym)
+ abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile) {
+ super.writeClass(label, jclassName, jclassBytes, outfile)
val pathName = jclassName
val dumpFile = pathName.split("[./]").foldLeft(baseDir: Path) (_ / _) changeExtension "class" toFile;
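
The factory introduced above composes writers via stackable traits: AsmpBytecodeWriter and DumpBytecodeWriter each declare `abstract override def writeClass`, call `super.writeClass`, and then add their extra output. A toy, standalone sketch of the same pattern (writer names made up for the example):

object StackableWriters {
  trait Writer {
    def write(name: String, bytes: Array[Byte]): Unit
  }

  trait BaseWriter extends Writer {
    def write(name: String, bytes: Array[Byte]): Unit =
      println(s"wrote ${bytes.length} bytes for $name")
  }

  // Each decorator forwards to super and then performs its extra side effect,
  // just like AsmpBytecodeWriter / DumpBytecodeWriter above.
  trait AsmpLike extends Writer {
    abstract override def write(name: String, bytes: Array[Byte]): Unit = {
      super.write(name, bytes)
      println(s"also emitted $name.asmp")
    }
  }

  trait DumpLike extends Writer {
    abstract override def write(name: String, bytes: Array[Byte]): Unit = {
      super.write(name, bytes)
      println(s"also dumped $name.class")
    }
  }

  def writer(emitAsmp: Boolean, doDump: Boolean): Writer = (emitAsmp, doDump) match {
    case (false, false) => new BaseWriter {}
    case (false, true ) => new BaseWriter with DumpLike {}
    case (true,  false) => new BaseWriter with AsmpLike {}
    case (true,  true ) => new BaseWriter with AsmpLike with DumpLike {}
  }

  def main(args: Array[String]): Unit =
    writer(emitAsmp = true, doDump = true).write("Foo", Array[Byte](1, 2, 3))
}
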
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 66a58870cc..ea2cbbe3d3 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend.jvm
import scala.collection.{ mutable, immutable }
@@ -25,6 +26,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
import icodes.opcodes._
import definitions._
+ // Strangely I can't find this in the asm code
+ // 255, but reserving 1 for "this"
+ final val MaximumJvmParameters = 254
+
val phaseName = "jvm"
/** Create a new phase */
@@ -47,6 +52,22 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
override def erasedTypes = true
def apply(cls: IClass) = sys.error("no implementation")
+      // An AsmPhase starts and ends within a Run, thus the caches in question will get populated and cleared within a Run, too (SI-7422).
+ javaNameCache.clear()
+ javaNameCache ++= List(
+ NothingClass -> binarynme.RuntimeNothing,
+ RuntimeNothingClass -> binarynme.RuntimeNothing,
+ NullClass -> binarynme.RuntimeNull,
+ RuntimeNullClass -> binarynme.RuntimeNull
+ )
+
+ // unlike javaNameCache, reverseJavaName contains entries only for class symbols and their internal names.
+ reverseJavaName.clear()
+ reverseJavaName ++= List(
+ binarynme.RuntimeNothing.toString() -> RuntimeNothingClass, // RuntimeNothingClass is the bytecode-level return type of Scala methods with Nothing return-type.
+ binarynme.RuntimeNull.toString() -> RuntimeNullClass
+ )
+
// Lazy val; can't have eager vals in Phase constructors which may
// cause cycles before Global has finished initialization.
lazy val BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo")
@@ -71,16 +92,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
new DirectToJarfileWriter(f.file)
- case _ =>
- if (settings.Ydumpclasses.isDefault)
- new ClassBytecodeWriter { }
- else
- new ClassBytecodeWriter with DumpBytecodeWriter { }
- // TODO A ScalapBytecodeWriter could take asm.util.Textifier as starting point.
- // Three areas where javap ouput is less than ideal (e.g. when comparing versions of the same classfile) are:
- // (a) unreadable pickle;
- // (b) two constant pools, while having identical contents, are displayed differently due to physical layout.
- // (c) stack maps (classfile version 50 and up) are displayed in encoded form by javap, their expansion makes more sense instead.
+ case _ => factoryNonJarBytecodeWriter()
}
}
@@ -107,9 +119,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
debuglog(s"Created new bytecode generator for ${classes.size} classes.")
val bytecodeWriter = initBytecodeWriter(sortedClasses filter isJavaEntryPoint)
- val plainCodeGen = new JPlainBuilder(bytecodeWriter)
- val mirrorCodeGen = new JMirrorBuilder(bytecodeWriter)
- val beanInfoCodeGen = new JBeanInfoBuilder(bytecodeWriter)
+ val needsOutfile = bytecodeWriter.isInstanceOf[ClassBytecodeWriter]
+ val plainCodeGen = new JPlainBuilder( bytecodeWriter, needsOutfile)
+ val mirrorCodeGen = new JMirrorBuilder( bytecodeWriter, needsOutfile)
+ val beanInfoCodeGen = new JBeanInfoBuilder(bytecodeWriter, needsOutfile)
def emitFor(c: IClass) {
if (isStaticModule(c.symbol) && isTopLevelModule(c.symbol)) {
@@ -134,8 +147,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
bytecodeWriter.close()
- classes.clear()
- reverseJavaName.clear()
/* don't javaNameCache.clear() because that causes the following tests to fail:
* test/files/run/macro-repl-dontexpand.scala
@@ -159,19 +170,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
var pickledBytes = 0 // statistics
- // Don't put this in per run caches. Contains entries for classes as well as members.
- val javaNameCache = new mutable.WeakHashMap[Symbol, Name]() ++= List(
- NothingClass -> binarynme.RuntimeNothing,
- RuntimeNothingClass -> binarynme.RuntimeNothing,
- NullClass -> binarynme.RuntimeNull,
- RuntimeNullClass -> binarynme.RuntimeNull
- )
+ val javaNameCache = perRunCaches.newMap[Symbol, Name]()
// unlike javaNameCache, reverseJavaName contains entries only for class symbols and their internal names.
- val reverseJavaName = mutable.Map.empty[String, Symbol] ++= List(
- binarynme.RuntimeNothing.toString() -> RuntimeNothingClass, // RuntimeNothingClass is the bytecode-level return type of Scala methods with Nothing return-type.
- binarynme.RuntimeNull.toString() -> RuntimeNullClass
- )
+ val reverseJavaName = perRunCaches.newMap[String, Symbol]()
private def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)
private def hasPublicBitSet(flags: Int) = (flags & asm.Opcodes.ACC_PUBLIC) != 0
@@ -375,7 +377,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
private val classfileVersion: Int = settings.target.value match {
case "jvm-1.5" => asm.Opcodes.V1_5
- case "jvm-1.5-asm" => asm.Opcodes.V1_5
case "jvm-1.6" => asm.Opcodes.V1_6
case "jvm-1.7" => asm.Opcodes.V1_7
}
@@ -403,7 +404,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
/** basic functionality for class file building */
- abstract class JBuilder(bytecodeWriter: BytecodeWriter) {
+ abstract class JBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean) {
val EMPTY_STRING_ARRAY = Array.empty[String]
@@ -414,7 +415,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val INNER_CLASSES_FLAGS =
(asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
- asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT)
+ asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT | asm.Opcodes.ACC_FINAL)
// -----------------------------------------------------------------------------------------
// factory methods
@@ -461,7 +462,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def writeIfNotTooBig(label: String, jclassName: String, jclass: asm.ClassWriter, sym: Symbol) {
try {
val arr = jclass.toByteArray()
- bytecodeWriter.writeClass(label, jclassName, arr, sym)
+ val outF: scala.tools.nsc.io.AbstractFile = {
+ if(needsOutfile) getFile(sym, jclassName, ".class") else null
+ }
+ bytecodeWriter.writeClass(label, jclassName, arr, outF)
} catch {
case e: java.lang.RuntimeException if(e.getMessage() == "Class file too large!") =>
// TODO check where ASM throws the equivalent of CodeSizeTooBigException
@@ -536,7 +540,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
collectInnerClass(sym)
- val hasInternalName = (sym.isClass || (sym.isModule && !sym.isMethod))
+ val hasInternalName = sym.isClass || sym.isModuleNotMethod
val cachedJN = javaNameCache.getOrElseUpdate(sym, {
if (hasInternalName) { sym.javaBinaryName }
else { sym.javaSimpleName }
@@ -644,11 +648,12 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// sort them so inner classes succeed their enclosing class to satisfy the Eclipse Java compiler
for (innerSym <- allInners sortBy (_.name.length)) { // TODO why not sortBy (_.name.toString()) ??
- val flags = mkFlags(
+ val flagsWithFinal: Int = mkFlags(
if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0,
javaFlags(innerSym),
if(isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
+ val flags = if (innerSym.isModuleClass) flagsWithFinal & ~asm.Opcodes.ACC_FINAL else flagsWithFinal // For SI-5676, object overriding.
val jname = javaName(innerSym) // never null
val oname = outerName(innerSym) // null when method-enclosed
val iname = innerName(innerSym) // null for anonymous inner class
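
The flag arithmetic above can be illustrated on its own: access bits are OR-folded together, masked to the allowed inner-class bits, and ACC_FINAL is cleared again for module classes so that objects remain overridable (SI-5676). The constants below use the standard JVM values but are redeclared locally for the sketch:

object InnerClassFlagDemo {
  final val ACC_PUBLIC = 0x0001
  final val ACC_STATIC = 0x0008
  final val ACC_FINAL  = 0x0010

  def mkFlags(args: Int*): Int = args.foldLeft(0)(_ | _)

  def main(args: Array[String]): Unit = {
    val flagsWithFinal = mkFlags(ACC_PUBLIC, ACC_STATIC, ACC_FINAL)
    val isModuleClass  = true
    // Module classes drop ACC_FINAL, mirroring the line above.
    val flags = if (isModuleClass) flagsWithFinal & ~ACC_FINAL else flagsWithFinal
    println(f"with final: 0x$flagsWithFinal%04x, emitted: 0x$flags%04x")
  }
}
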
@@ -692,7 +697,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
/** functionality for building plain and mirror classes */
- abstract class JCommonBuilder(bytecodeWriter: BytecodeWriter) extends JBuilder(bytecodeWriter) {
+ abstract class JCommonBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean) extends JBuilder(bytecodeWriter, needsOutfile) {
def debugLevel = settings.debuginfo.indexOfChoice
@@ -1242,8 +1247,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case class BlockInteval(start: BasicBlock, end: BasicBlock)
/** builder of plain classes */
- class JPlainBuilder(bytecodeWriter: BytecodeWriter)
- extends JCommonBuilder(bytecodeWriter)
+ class JPlainBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean)
+ extends JCommonBuilder(bytecodeWriter, needsOutfile)
with JAndroidBuilder {
val MIN_SWITCH_DENSITY = 0.7
@@ -1266,9 +1271,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
private def getSuperInterfaces(c: IClass): Array[String] = {
// Additional interface parents based on annotations and other cues
- def newParentForAttr(attr: Symbol): Option[Symbol] = attr match {
- case RemoteAttr => Some(RemoteInterfaceClass)
- case _ => None
+ def newParentForAttr(ann: AnnotationInfo): Symbol = ann.symbol match {
+ case RemoteAttr => RemoteInterfaceClass
+ case _ => NoSymbol
}
/* Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents.
@@ -1291,7 +1296,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val ps = c.symbol.info.parents
val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses
- val superInterfaces = (superInterfaces0 ++ c.symbol.annotations.flatMap(ann => newParentForAttr(ann.symbol))).distinct
+ val superInterfaces = existingSymbols(superInterfaces0 ++ c.symbol.annotations.map(newParentForAttr)).distinct
if(superInterfaces.isEmpty) EMPTY_STRING_ARRAY
else mkArray(minimizeInterfaces(superInterfaces) map javaName)
@@ -1480,6 +1485,11 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (m.symbol.isStaticConstructor || definitions.isGetClass(m.symbol)) return
+ if (m.params.size > MaximumJvmParameters) {
+ getCurrentCUnit().error(m.symbol.pos, s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.")
+ return
+ }
+
debuglog("Generating method " + m.symbol.fullName)
method = m
computeLocalVarsIndex(m)
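
The new guard reflects a JVM limit: a method descriptor allows at most 255 parameter slots, and one slot is taken by the receiver of an instance method, hence 254. A standalone sketch of the same check (names and Either-based reporting are illustrative, not the compiler's API):

object ParamLimit {
  final val MaximumJvmParameters = 254 // 255 slots minus one reserved for `this`

  def checkParamCount(methodName: String, paramCount: Int): Either[String, Unit] =
    if (paramCount > MaximumJvmParameters)
      Left(s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters ($methodName has $paramCount).")
    else
      Right(())

  def main(args: Array[String]): Unit = {
    println(checkParamCount("ok", 10))
    println(checkParamCount("tooMany", 300))
  }
}
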
@@ -1616,6 +1626,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
jmethod = clinitMethod
jMethodName = CLASS_CONSTRUCTOR_NAME
jmethod.visitCode()
+ computeLocalVarsIndex(m)
genCode(m, emitVars = false, isStatic = true)
jmethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
jmethod.visitEnd()
@@ -1651,8 +1662,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case BooleanTag => jcode.boolconst(const.booleanValue)
- case ByteTag => jcode.iconst(const.byteValue)
- case ShortTag => jcode.iconst(const.shortValue)
+ case ByteTag => jcode.iconst(const.byteValue.toInt)
+ case ShortTag => jcode.iconst(const.shortValue.toInt)
case CharTag => jcode.iconst(const.charValue)
case IntTag => jcode.iconst(const.intValue)
@@ -1699,6 +1710,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
final def boolconst(b: Boolean) { iconst(if(b) 1 else 0) }
+ def iconst(cst: Char) { iconst(cst.toInt) }
def iconst(cst: Int) {
if (cst >= -1 && cst <= 5) {
jmethod.visitInsn(Opcodes.ICONST_0 + cst)
@@ -1849,7 +1861,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val isDenseEnough: Boolean = {
/* Calculate in long to guard against overflow. TODO what overflow??? */
val keyRangeD: Double = (keyMax.asInstanceOf[Long] - keyMin + 1).asInstanceOf[Double]
- val klenD: Double = keys.length
+ val klenD: Double = keys.length.toDouble
val kdensity: Double = (klenD / keyRangeD)
kdensity >= minDensity
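
This density test decides between a compact tableswitch and a lookupswitch. A standalone sketch, assuming the MIN_SWITCH_DENSITY of 0.7 declared by JPlainBuilder above:

object SwitchDensity {
  val MIN_SWITCH_DENSITY = 0.7

  // keys must be the sorted, distinct case values of the switch.
  def isDenseEnough(keys: Array[Int]): Boolean = {
    val keyMin = keys.head
    val keyMax = keys.last
    // Computed via Long/Double to guard against Int overflow on a huge key range.
    val keyRange = (keyMax.toLong - keyMin + 1).toDouble
    (keys.length.toDouble / keyRange) >= MIN_SWITCH_DENSITY
  }

  def main(args: Array[String]): Unit = {
    println(isDenseEnough(Array(1, 2, 3, 5)))        // true  (4 keys over a range of 5)
    println(isDenseEnough(Array(1, 1000, 1000000)))  // false (sparse: lookupswitch instead)
  }
}
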
@@ -2236,13 +2248,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case x :: y :: ys => nextBlock = y; genBlock(x); genBlocks(y :: ys)
}
- def isAccessibleFrom(target: Symbol, site: Symbol): Boolean = {
- target.isPublic || target.isProtected && {
- (site.enclClass isSubClass target.enclClass) ||
- (site.enclosingPackage == target.privateWithin)
- }
- } // end of genCode()'s isAccessibleFrom()
-
def genCallMethod(call: CALL_METHOD) {
val CALL_METHOD(method, style) = call
val siteSymbol = clasz.symbol
@@ -2878,7 +2883,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
/** builder of mirror classes */
- class JMirrorBuilder(bytecodeWriter: BytecodeWriter) extends JCommonBuilder(bytecodeWriter) {
+ class JMirrorBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean) extends JCommonBuilder(bytecodeWriter, needsOutfile) {
private var cunit: CompilationUnit = _
def getCurrentCUnit(): CompilationUnit = cunit
@@ -2930,7 +2935,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
/** builder of bean info classes */
- class JBeanInfoBuilder(bytecodeWriter: BytecodeWriter) extends JBuilder(bytecodeWriter) {
+ class JBeanInfoBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean) extends JBuilder(bytecodeWriter, needsOutfile) {
/**
* Generate a bean info class that describes the given class.
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
index 2ad474cf3f..2c3bf26958 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
@@ -5,34 +5,19 @@
package scala.tools.nsc
package backend.jvm
-import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.symtab._
-/** Code shared between the legagy backend [[scala.tools.nsc.backend.jvm.GenJVM]]
- * and the new backend [[scala.tools.nsc.backend.jvm.GenASM]]. There should be
- * more here, but for now I'm starting with the refactorings that are either
- * straightforward to review or necessary for maintenance.
- */
+/** Code shared between the erstwhile legacy backend (aka GenJVM)
+ * and the new backend [[scala.tools.nsc.backend.jvm.GenASM]]. There should be
+ * more here, but for now I'm starting with the refactorings that are either
+ * straightforward to review or necessary for maintenance.
+ */
trait GenJVMASM {
val global: Global
import global._
import icodes._
import definitions._
- protected def outputDirectory(sym: Symbol): AbstractFile =
- settings.outputDirs outputDirFor enteringFlatten(sym.sourceFile)
-
- protected def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = {
- var dir = base
- val pathParts = clsName.split("[./]").toList
- for (part <- pathParts.init) {
- dir = dir.subdirectoryNamed(part)
- }
- dir.fileNamed(pathParts.last + suffix)
- }
- protected def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile =
- getFile(outputDirectory(sym), clsName, suffix)
-
protected val ExcludedForwarderFlags = {
import Flags._
// Should include DEFERRED but this breaks findMember.
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
index aaf2c55dcd..bde17b28fc 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
@@ -71,8 +71,10 @@ abstract class ClosureElimination extends SubComponent {
def name = phaseName
val closser = new ClosureElim
- override def apply(c: IClass): Unit =
- closser analyzeClass c
+ override def apply(c: IClass): Unit = {
+ if (closser ne null)
+ closser analyzeClass c
+ }
}
/**
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala
index ff93206ffd..43c8527f41 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala
@@ -3,7 +3,8 @@
* @author James Iry
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend.opt
import scala.tools.nsc.backend.icode.analysis.LubException
@@ -17,7 +18,7 @@ import scala.annotation.tailrec
* null checks.
*
* With some more work it could be extended to
- * - cache stable values (final fields, modules) in locals
+ * - cache stable values (final fields, modules) in locals
* - replace the copy propagation in ClosureElilmination
* - fold constants
* - eliminate unnecessary stores and loads
@@ -118,18 +119,18 @@ abstract class ConstantOptimization extends SubComponent {
*
* // left must be 1 or 2, right must be 2 or 3 then we must have a 1, 2 or 3
* Possible(xs) merge Possible(ys) => Possible(xs union ys)
- *
+ *
* // Left says can't be 2 or 3, right says can't be 3 or 4
* // then it's not 3 (it could be 2 from the right or 4 from the left)
* Impossible(xs) merge Impossible(ys) => Impossible(xs intersect ys)
- *
+ *
* // Left says it can't be 2 or 3, right says it must be 3 or 4, then
* // it can't be 2 (left rules out 4 and right says 3 is possible)
* Impossible(xs) merge Possible(ys) => Impossible(xs -- ys)
- *
+ *
* Intuitively, Possible(empty) says that a location can't hold anything,
* it's uninitialized. However, Possible(empty) never appears in the code.
- *
+ *
* Conversely, Impossible(empty) says nothing is impossible, it could be
* anything. Impossible(empty) is given a synonym UNKNOWN and is used
* for, e.g., the result of an arbitrary method call.
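
The merge rules spelled out in the comment above fit in a few lines. This is a standalone sketch over plain Ints rather than the optimizer's Datum type; the fourth arm is the symmetric counterpart of the Impossible/Possible rule:

object ContentsLattice {
  sealed trait Contents {
    // Join of two facts about the same location, per the rules described above.
    def merge(other: Contents): Contents = (this, other) match {
      case (Possible(xs),   Possible(ys))   => Possible(xs union ys)
      case (Impossible(xs), Impossible(ys)) => Impossible(xs intersect ys)
      case (Impossible(xs), Possible(ys))   => Impossible(xs -- ys)
      case (Possible(xs),   Impossible(ys)) => Impossible(ys -- xs)
    }
  }
  case class Possible(values: Set[Int])   extends Contents  // must be one of these
  case class Impossible(values: Set[Int]) extends Contents  // cannot be any of these

  val UNKNOWN = Impossible(Set.empty)  // nothing ruled out

  def main(args: Array[String]): Unit = {
    println(Possible(Set(1, 2)) merge Possible(Set(2, 3)))      // Possible(1, 2, 3)
    println(Impossible(Set(2, 3)) merge Impossible(Set(3, 4)))  // Impossible(3)
    println(Impossible(Set(2, 3)) merge Possible(Set(3, 4)))    // Impossible(2)
  }
}
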
@@ -155,7 +156,7 @@ abstract class ConstantOptimization extends SubComponent {
def mightNotEqual(other: Contents): Boolean
}
private def SingleImpossible(x: Datum) = new Impossible(Set(x))
-
+
/**
* The location is known to have one of a set of values.
*/
@@ -299,32 +300,32 @@ abstract class ConstantOptimization extends SubComponent {
private def interpretInst(in: State, inst: Instruction): State = {
// pop the consumed number of values off the `in` state's stack, producing a new state
def dropConsumed: State = in drop inst.consumed
-
+
inst match {
case THIS(_) =>
in load THIS_LOCAL
-
+
case CONSTANT(k) =>
// treat NaN as UNKNOWN because NaN must never equal NaN
val const = if (k.isNaN) UNKNOWN
else SinglePossible(Const(k))
in push const
-
+
case LOAD_ARRAY_ITEM(_) | LOAD_FIELD(_, _) | CALL_PRIMITIVE(_) =>
dropConsumed push UNKNOWN
case LOAD_LOCAL(local) =>
// TODO if a local is known to hold a constant then we can replace this instruction with a push of that constant
in load local
-
+
case STORE_LOCAL(local) =>
in store local
-
+
case STORE_THIS(_) =>
// if a local is already known to have a constant and we're replacing with the same constant then we can
// replace this with a drop
in store THIS_LOCAL
-
+
case CALL_METHOD(_, _) =>
// TODO we could special case implementations of equals that are known, e.g. String#equals
// We could turn Possible(string constants).equals(Possible(string constants) into an eq check
@@ -332,7 +333,7 @@ abstract class ConstantOptimization extends SubComponent {
// and eliminate the null check that likely precedes this call
val initial = dropConsumed
(0 until inst.produced).foldLeft(initial) { case (know, _) => know push UNKNOWN }
-
+
case BOX(_) =>
val value = in peek 0
// we simulate boxing by, um, boxing the possible/impossible contents
@@ -345,7 +346,7 @@ abstract class ConstantOptimization extends SubComponent {
case Impossible(values) => Impossible(values map Boxed)
}
dropConsumed push newValue
-
+
case UNBOX(_) =>
val value = in peek 0
val newValue = value match {
@@ -373,42 +374,42 @@ abstract class ConstantOptimization extends SubComponent {
}
}
dropConsumed push newValue
-
+
case LOAD_MODULE(_) | NEW(_) | LOAD_EXCEPTION(_) =>
in push NOT_NULL
case CREATE_ARRAY(_, _) =>
dropConsumed push NOT_NULL
-
+
case IS_INSTANCE(_) =>
// TODO IS_INSTANCE is going to be followed by a C(Z)JUMP
// and if IS_INSTANCE/C(Z)JUMP the branch for "true" can
// know that whatever was checked was not a null
// see the TODO on CJUMP for more information about propagating null
// information
- // TODO if the top of stack is guaranteed null then we can eliminate this IS_INSTANCE check and
+ // TODO if the top of stack is guaranteed null then we can eliminate this IS_INSTANCE check and
// replace with a constant false, but how often is a knowable null checked for instanceof?
// TODO we could track type information and statically know to eliminate IS_INSTANCE
// which might be a nice win under specialization
dropConsumed push UNKNOWN // it's actually a Possible(true, false) but since the following instruction
// will be a conditional jump comparing to true or false there
// nothing to be gained by being more precise
-
+
case CHECK_CAST(_) =>
// TODO we could track type information and statically know to eliminate CHECK_CAST
// but that's probably not a huge win
in
-
+
case DUP(_) =>
val value = in peek 0
in push value
-
+
case DROP(_) | MONITOR_ENTER() | MONITOR_EXIT() | STORE_ARRAY_ITEM(_) | STORE_FIELD(_, _) =>
dropConsumed
case SCOPE_ENTER(_) | SCOPE_EXIT(_) =>
in
-
+
case JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | RETURN(_) | THROW(_) | SWITCH(_, _) =>
dumpClassesAndAbort("Unexpected block ending instruction: " + inst)
}
@@ -468,7 +469,7 @@ abstract class ConstantOptimization extends SubComponent {
val replacements = if (result.size == 1) List.fill(inst.consumed)(DROP(kind)) :+ JUMP(result.keySet.head)
else inst :: Nil
-
+
(result, replacements)
}
@@ -488,8 +489,7 @@ abstract class ConstantOptimization extends SubComponent {
case SWITCH(tags, labels) =>
val in1 = in peek 0
- val newStuff = tags zip labels filter { case (tagSet, _) => canSwitch(in1, tagSet) }
- val (reachableTags, reachableNormalLabels) = (tags zip labels filter { case (tagSet, _) => canSwitch(in1, tagSet) }).unzip
+ val reachableNormalLabels = tags zip labels collect { case (tagSet, label) if canSwitch(in1, tagSet) => label }
val reachableLabels = if (labels.lengthCompare(tags.length) > 0) {
// if we've got an extra label then it's the default
val defaultLabel = labels.last
@@ -533,7 +533,7 @@ abstract class ConstantOptimization extends SubComponent {
// number of instructions excluding the last one
val normalCount = block.size - 1
- var exceptionState = in.cleanStack
+ val exceptionState = in.cleanStack
var normalExitState = in
var idx = 0
while (idx < normalCount) {
@@ -569,7 +569,7 @@ abstract class ConstantOptimization extends SubComponent {
// worklist of basic blocks to process, initially the start block
val worklist = MSet(m.startBlock)
- // worklist of exception basic blocks. They're kept in a separate set so they can be
+ // worklist of exception basic blocks. They're kept in a separate set so they can be
// processed after normal flow basic blocks. That's because exception basic blocks
// are more likely to have multiple predecessors and queueing them for later
// increases the chances that they'll only need to be interpreted once
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index 1026e95fac..483bff6467 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -33,7 +33,7 @@ abstract class DeadCodeElimination extends SubComponent {
val dce = new DeadCode()
override def apply(c: IClass) {
- if (settings.Xdce)
+ if (settings.Xdce && (dce ne null))
dce.analyzeClass(c)
}
}
diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala
index ee3e2b04d1..2967f67e9c 100644
--- a/src/compiler/scala/tools/nsc/io/Jar.scala
+++ b/src/compiler/scala/tools/nsc/io/Jar.scala
@@ -10,7 +10,7 @@ import java.io.{ InputStream, OutputStream, IOException, FileNotFoundException,
import java.util.jar._
import scala.collection.JavaConverters._
import Attributes.Name
-import scala.language.implicitConversions
+import scala.language.{ implicitConversions, postfixOps }
// Attributes.Name instances:
//
diff --git a/src/compiler/scala/tools/nsc/io/Lexer.scala b/src/compiler/scala/tools/nsc/io/Lexer.scala
index 7c6dbe2e60..1c926aff6b 100644
--- a/src/compiler/scala/tools/nsc/io/Lexer.scala
+++ b/src/compiler/scala/tools/nsc/io/Lexer.scala
@@ -23,7 +23,7 @@ object Lexer {
/** A subclass of `Token` representing single-character delimiters
* @param char the delimiter character making up this token
*/
- case class Delim(char: Char) extends Token("'"+char.toString+"'")
+ case class Delim(char: Char) extends Token(s"'$char'")
/** A subclass of token representing integer literals */
case class IntLit(override val str: String) extends Token(str)
@@ -88,7 +88,7 @@ object Lexer {
case '\\' => buf ++= "\\\\"
case _ =>
if (' ' <= ch && ch < 128) buf += ch
- else buf ++= "\\u" += toUDigit(ch >>> 12) += toUDigit(ch >>> 8) += toUDigit(ch >>> 4) += toUDigit(ch)
+ else buf ++= "\\u" += toUDigit(ch >>> 12) += toUDigit(ch >>> 8) += toUDigit(ch >>> 4) += toUDigit(ch.toInt)
}
}
diff --git a/src/compiler/scala/tools/nsc/io/PrettyWriter.scala b/src/compiler/scala/tools/nsc/io/PrettyWriter.scala
index acd4847469..11d3703983 100644
--- a/src/compiler/scala/tools/nsc/io/PrettyWriter.scala
+++ b/src/compiler/scala/tools/nsc/io/PrettyWriter.scala
@@ -16,7 +16,7 @@ class PrettyWriter(wr: Writer) extends Writer {
str(off) match {
case '{' | '[' | '(' =>
indent += 1
- wr.write(str(off))
+ wr.write(str(off).toInt)
newLine()
wr.write(str, off + 1, len - 1)
case '}' | ']' | ')' =>
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index f1b1d1a9a7..2a799acbc7 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -251,15 +251,15 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def basicType(): Tree =
atPos(in.pos) {
in.token match {
- case BYTE => in.nextToken(); TypeTree(ByteClass.tpe)
- case SHORT => in.nextToken(); TypeTree(ShortClass.tpe)
- case CHAR => in.nextToken(); TypeTree(CharClass.tpe)
- case INT => in.nextToken(); TypeTree(IntClass.tpe)
- case LONG => in.nextToken(); TypeTree(LongClass.tpe)
- case FLOAT => in.nextToken(); TypeTree(FloatClass.tpe)
- case DOUBLE => in.nextToken(); TypeTree(DoubleClass.tpe)
- case BOOLEAN => in.nextToken(); TypeTree(BooleanClass.tpe)
- case _ => syntaxError("illegal start of type", skipIt = true); errorTypeTree
+ case BYTE => in.nextToken(); TypeTree(ByteTpe)
+ case SHORT => in.nextToken(); TypeTree(ShortTpe)
+ case CHAR => in.nextToken(); TypeTree(CharTpe)
+ case INT => in.nextToken(); TypeTree(IntTpe)
+ case LONG => in.nextToken(); TypeTree(LongTpe)
+ case FLOAT => in.nextToken(); TypeTree(FloatTpe)
+ case DOUBLE => in.nextToken(); TypeTree(DoubleTpe)
+ case BOOLEAN => in.nextToken(); TypeTree(BooleanTpe)
+ case _ => syntaxError("illegal start of type", skipIt = true); errorTypeTree
}
}
@@ -293,15 +293,8 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
if (in.token == QMARK) {
val pos = in.currentPos
in.nextToken()
- var lo: Tree = TypeTree(NothingClass.tpe)
- var hi: Tree = TypeTree(AnyClass.tpe)
- if (in.token == EXTENDS) {
- in.nextToken()
- hi = typ()
- } else if (in.token == SUPER) {
- in.nextToken()
- lo = typ()
- }
+ val hi = if (in.token == EXTENDS) { in.nextToken() ; typ() } else EmptyTree
+ val lo = if (in.token == SUPER) { in.nextToken() ; typ() } else EmptyTree
val tdef = atPos(pos) {
TypeDef(
Modifiers(Flags.JAVA | Flags.DEFERRED),
@@ -375,6 +368,9 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
case FINAL =>
flags |= Flags.FINAL
in.nextToken()
+ case DEFAULT =>
+ flags |= Flags.DEFAULTMETHOD
+ in.nextToken()
case NATIVE =>
addAnnot(NativeAttr)
in.nextToken()
@@ -408,15 +404,8 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def typeParam(): TypeDef =
atPos(in.currentPos) {
val name = identForType()
- val hi =
- if (in.token == EXTENDS) {
- in.nextToken()
- bound()
- } else {
- scalaDot(tpnme.Any)
- }
- TypeDef(Modifiers(Flags.JAVA | Flags.DEFERRED | Flags.PARAM), name, List(),
- TypeBoundsTree(scalaDot(tpnme.Nothing), hi))
+ val hi = if (in.token == EXTENDS) { in.nextToken() ; bound() } else EmptyTree
+ TypeDef(Modifiers(Flags.JAVA | Flags.DEFERRED | Flags.PARAM), name, Nil, TypeBoundsTree(EmptyTree, hi))
}
def bound(): Tree =
@@ -473,7 +462,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
var rtpt =
if (isVoid) {
in.nextToken()
- TypeTree(UnitClass.tpe) setPos in.pos
+ TypeTree(UnitTpe) setPos in.pos
} else typ()
var pos = in.currentPos
val rtptName = rtpt match {
@@ -499,8 +488,9 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val vparams = formalParams()
if (!isVoid) rtpt = optArrayBrackets(rtpt)
optThrows()
+ val bodyOk = !inInterface || (mods hasFlag Flags.DEFAULTMETHOD)
val body =
- if (!inInterface && in.token == LBRACE) {
+ if (bodyOk && in.token == LBRACE) {
methodBody()
} else {
if (parentToken == AT && in.token == DEFAULT) {
@@ -508,7 +498,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
atPos(pos) {
New(Select(scalaDot(nme.runtime), tpnme.AnnotationDefaultATTR), Nil)
}
- mods1 = mods1 withAnnotations List(annot)
+ mods1 = mods1 withAnnotations annot :: Nil
skipTo(SEMI)
accept(SEMI)
blankExpr
@@ -795,7 +785,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
blankExpr),
DefDef(
Modifiers(Flags.JAVA | Flags.STATIC), nme.valueOf, List(),
- List(List(makeParam("x", TypeTree(StringClass.tpe)))),
+ List(List(makeParam("x", TypeTree(StringTpe)))),
enumType,
blankExpr))
accept(RBRACE)
diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
index f9b1e57e66..e987b6de2f 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
@@ -724,7 +724,7 @@ trait JavaScanners extends ast.parser.ScannersCommon {
*/
def intVal(negated: Boolean): Long = {
if (token == CHARLIT && !negated) {
- if (name.length > 0) name.charAt(0) else 0
+ if (name.length > 0) name.charAt(0).toLong else 0
} else {
var value: Long = 0
val divider = if (base == 10) 1 else 2
diff --git a/src/compiler/scala/tools/nsc/package.scala b/src/compiler/scala/tools/nsc/package.scala
index ee1668a38a..761fd79358 100644
--- a/src/compiler/scala/tools/nsc/package.scala
+++ b/src/compiler/scala/tools/nsc/package.scala
@@ -10,10 +10,6 @@ package object nsc {
val Mode = scala.reflect.internal.Mode
def EXPRmode = Mode.EXPRmode
- def BYVALmode = Mode.BYVALmode
- def POLYmode = Mode.POLYmode
- def TAPPmode = Mode.TAPPmode
- def FUNmode = Mode.FUNmode
type Phase = scala.reflect.internal.Phase
val NoPhase = scala.reflect.internal.NoPhase
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
index b0113f7696..a584a4ed5d 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
@@ -93,15 +93,13 @@ object Plugin {
type AnyClass = Class[_]
/** Use a class loader to load the plugin class.
- *
- * @return `None` on failure
*/
- def load(pd: PluginDescription, loader: ClassLoader): Try[AnyClass] = {
+ def load(classname: String, loader: ClassLoader): Try[AnyClass] = {
Try[AnyClass] {
- loader loadClass pd.classname
+ loader loadClass classname
} recoverWith {
case _: Exception =>
- Failure(new RuntimeException(s"Warning: class not found: ${pd.classname}"))
+ Failure(new RuntimeException(s"Warning: class not found: ${classname}"))
}
}
@@ -137,9 +135,8 @@ object Plugin {
case _ => false
}
val (locs, pds) = ((explicit ::: exploded ::: included) filterNot ignored).unzip
-
val loader = loaderFor(locs.distinct)
- pds filter (_.isSuccess) map (_.get) map (Plugin load (_, loader))
+ (pds filter (_.isSuccess) map (_.get.classname)).distinct map (Plugin load (_, loader))
}
/** Instantiate a plugin class, given the class and
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
index a591482392..8f7794fa90 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
@@ -30,9 +30,8 @@ trait Plugins {
val dirs = (settings.pluginsDir.value split File.pathSeparator).toList map injectDefault map Path.apply
val maybes = Plugin.loadAllFrom(jars, dirs, settings.disable.value)
val (goods, errors) = maybes partition (_.isSuccess)
- errors foreach (_ recover {
- case e: Exception => inform(e.getMessage)
- })
+ // Explicit parameterization of recover to suppress -Xlint warning about inferred Any
+ errors foreach (_.recover[Any] { case e: Exception => inform(e.getMessage) })
val classes = goods map (_.get) // flatten
// Each plugin must only be instantiated once. A common pattern
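
The -Xlint issue mentioned in the comment above can be reproduced outside the compiler: recover's type parameter must be a supertype of the Try's element type, so a handler returning Unit forces it to Any. A small sketch of the warning and the explicit parameterization that silences it:

import scala.util.{ Failure, Try }

object RecoverInference {
  def main(args: Array[String]): Unit = {
    val failed: Try[Class[_]] = Failure(new RuntimeException("Warning: class not found: Example"))

    // Without the explicit type argument, U is inferred as Any (the lub of Class[_] and Unit),
    // which -Xlint flags; recover[Any] states that intent explicitly.
    val handled: Try[Any] = failed.recover[Any] { case e: Exception => println(e.getMessage) }

    println(handled)  // Success(())
  }
}
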
diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
index bda195f9d3..fdb5c72c3d 100644
--- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package reporters
import java.io.{ BufferedReader, IOException, PrintWriter }
diff --git a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
index 783e249931..c9718f711a 100644
--- a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package settings
trait AbsScalaSettings {
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index 4d086e787e..cd23ad74e4 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -251,8 +251,7 @@ class MutableSettings(val errorFn: String => Unit)
else if (allowJar && dir == null && Jar.isJarOrZip(name, examineFile = false))
new PlainFile(Path(name))
else
-// throw new FatalError(name + " does not exist or is not a directory")
- dir
+ throw new FatalError(name + " does not exist or is not a directory")
)
/** Set the single output directory. From now on, all files will
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index ee9a3aed2a..fe9165203f 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -4,7 +4,8 @@
*/
// $Id$
-package scala.tools
+package scala
+package tools
package nsc
package settings
@@ -156,14 +157,14 @@ trait ScalaSettings extends AbsScalaSettings
val Yshowsymkinds = BooleanSetting ("-Yshow-symkinds", "Print abbreviated symbol kinds next to symbol names.")
val skip = PhasesSetting ("-Yskip", "Skip")
val Ygenjavap = StringSetting ("-Ygen-javap", "dir", "Generate a parallel output directory of .javap files.", "")
+ val Ygenasmp = StringSetting ("-Ygen-asmp", "dir", "Generate a parallel output directory of .asmp files (ie ASM Textifier output).", "")
val Ydumpclasses = StringSetting ("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "")
val Ynosqueeze = BooleanSetting ("-Yno-squeeze", "Disable creation of compact code in matching.")
val Ystatistics = BooleanSetting ("-Ystatistics", "Print compiler statistics.") andThen (scala.reflect.internal.util.Statistics.enabled = _)
val stopAfter = PhasesSetting ("-Ystop-after", "Stop after") withAbbreviation ("-stop") // backward compat
val stopBefore = PhasesSetting ("-Ystop-before", "Stop before")
- val refinementMethodDispatch
- = ChoiceSetting ("-Ystruct-dispatch", "policy", "structural method dispatch policy", List("no-cache", "mono-cache", "poly-cache", "invoke-dynamic"), "poly-cache")
val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.")
+ val Ymemberpos = StringSetting ("-Yshow-member-pos", "output style", "Show start and end positions of members", "") withPostSetHook (_ => Yrangepos.value = true)
val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.")
val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup")
val Yreploutdir = StringSetting ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "")
@@ -171,7 +172,6 @@ trait ScalaSettings extends AbsScalaSettings
val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.").
withDeprecationMessage("This flag is scheduled for removal in 2.12. If you have a case where you need this flag then please report a bug.")
val Yinvalidate = StringSetting ("-Yinvalidate", "classpath-entry", "Invalidate classpath entry before run", "")
- val noSelfCheck = BooleanSetting ("-Yno-self-type-checks", "Suppress check for self-type conformance among inherited members.")
val YvirtClasses = false // too embryonic to even expose as a -Y //BooleanSetting ("-Yvirtual-classes", "Support virtual classes")
val YdisableUnreachablePrevention = BooleanSetting("-Ydisable-unreachable-prevention", "Disable the prevention of unreachable blocks in code generation.")
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
index d6a0149411..da1cc0c4cf 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
@@ -4,7 +4,8 @@
*/
// $Id$
-package scala.tools.nsc.settings
+package scala
+package tools.nsc.settings
/**
* Represents a single Scala version in a manner that
@@ -19,7 +20,7 @@ abstract class ScalaVersion extends Ordered[ScalaVersion] {
*/
case object NoScalaVersion extends ScalaVersion {
def unparse = "none"
-
+
def compare(that: ScalaVersion): Int = that match {
case NoScalaVersion => 0
case _ => 1
@@ -33,7 +34,7 @@ case object NoScalaVersion extends ScalaVersion {
* to segregate builds
*/
case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion {
- def unparse = s"${major}.${minor}.${rev}.${build.unparse}"
+ def unparse = s"${major}.${minor}.${rev}.${build.unparse}"
def compare(that: ScalaVersion): Int = that match {
case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) =>
@@ -48,7 +49,7 @@ case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBu
else build compare thatBuild
case AnyScalaVersion => 1
case NoScalaVersion => -1
- }
+ }
}
/**
@@ -56,7 +57,7 @@ case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBu
*/
case object AnyScalaVersion extends ScalaVersion {
def unparse = "any"
-
+
def compare(that: ScalaVersion): Int = that match {
case AnyScalaVersion => 0
case _ => -1
@@ -70,7 +71,7 @@ object ScalaVersion {
private val dot = "\\."
private val dash = "\\-"
private def not(s:String) = s"[^${s}]"
- private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r
+ private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r
def apply(versionString : String, errorHandler: String => Unit): ScalaVersion = {
def errorAndValue() = {
@@ -82,41 +83,41 @@ object ScalaVersion {
)
AnyScalaVersion
}
-
+
def toInt(s: String) = s match {
case null | "" => 0
case _ => s.toInt
}
-
+
def isInt(s: String) = util.Try(toInt(s)).isSuccess
-
+
def toBuild(s: String) = s match {
case null | "FINAL" => Final
case s if (s.toUpperCase.startsWith("RC") && isInt(s.substring(2))) => RC(toInt(s.substring(2)))
case s if (s.toUpperCase.startsWith("M") && isInt(s.substring(1))) => Milestone(toInt(s.substring(1)))
case _ => Development(s)
}
-
+
try versionString match {
case "none" => NoScalaVersion
case "any" => AnyScalaVersion
- case R(_, majorS, _, minorS, _, revS, _, buildS) =>
+ case R(_, majorS, _, minorS, _, revS, _, buildS) =>
SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS))
- case _ =>
+ case _ =>
errorAndValue()
} catch {
case e: NumberFormatException => errorAndValue()
}
}
-
- def apply(versionString: String): ScalaVersion =
+
+ def apply(versionString: String): ScalaVersion =
apply(versionString, msg => throw new NumberFormatException(msg))
-
+
/**
* The version of the compiler running now
*/
val current = apply(util.Properties.versionNumberString)
-
+
/**
* The 2.8.0 version.
*/
@@ -126,7 +127,7 @@ object ScalaVersion {
/**
* Represents the data after the dash in major.minor.rev-build
*/
-abstract class ScalaBuild extends Ordered[ScalaBuild] {
+abstract class ScalaBuild extends Ordered[ScalaBuild] {
/**
* Return a version of this build information that can be parsed back into the
* same ScalaBuild
@@ -138,7 +139,7 @@ abstract class ScalaBuild extends Ordered[ScalaBuild] {
*/
case class Development(id: String) extends ScalaBuild {
def unparse = s"-${id}"
-
+
def compare(that: ScalaBuild) = that match {
// sorting two development builds based on id is reasonably valid for two versions created with the same schema
// otherwise it's not correct, but since it's impossible to put a total ordering on development build versions
@@ -154,7 +155,7 @@ case class Development(id: String) extends ScalaBuild {
*/
case object Final extends ScalaBuild {
def unparse = ""
-
+
def compare(that: ScalaBuild) = that match {
case Final => 0
// a final is newer than anything other than a development build or another final
@@ -168,14 +169,14 @@ case object Final extends ScalaBuild {
*/
case class RC(n: Int) extends ScalaBuild {
def unparse = s"-RC${n}"
-
+
def compare(that: ScalaBuild) = that match {
// compare two rcs based on their RC numbers
case RC(thatN) => n - thatN
// an rc is older than anything other than a milestone or another rc
case Milestone(_) => 1
- case _ => -1
- }
+ case _ => -1
+ }
}
/**
@@ -183,12 +184,12 @@ case class RC(n: Int) extends ScalaBuild {
*/
case class Milestone(n: Int) extends ScalaBuild {
def unparse = s"-M${n}"
-
+
def compare(that: ScalaBuild) = that match {
// compare two milestones based on their milestone numbers
case Milestone(thatN) => n - thatN
// a milestone is older than anything other than another milestone
case _ => -1
-
- }
+
+ }
}
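The parser above decomposes a version string into major.minor.rev plus an optional -build suffix via
the regex R. A simplified sketch of the same decomposition (coarser regex than the one in the patch,
which also tolerates missing segments):

    val V = """(\d+)\.(\d+)\.(\d+)(?:-(.+))?""".r
    "2.11.0-RC1" match {
      case V(major, minor, rev, build) =>
        // (2, 11, 0, Some("RC1")); build is null when the dash suffix is absent
        (major.toInt, minor.toInt, rev.toInt, Option(build))
    }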
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 22482bf1b6..fd85bbb169 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -104,8 +104,15 @@ abstract class SymbolLoaders {
val clazz = enterClass(root, name, completer)
val module = enterModule(root, name, completer)
if (!clazz.isAnonymousClass) {
- assert(clazz.companionModule == module, module)
- assert(module.companionClass == clazz, clazz)
+ // Diagnostic for SI-7147
+ def msg: String = {
+      def symLocation(sym: Symbol) = if (sym == null) "null" else s"${sym.fullLocationString} (from ${sym.associatedFile})"
+ sm"""Inconsistent class/module symbol pair for `$name` loaded from ${symLocation(root)}.
+ |clazz = ${symLocation(clazz)}; clazz.companionModule = ${clazz.companionModule}
+ |module = ${symLocation(module)}; module.companionClass = ${module.companionClass}"""
+ }
+ assert(clazz.companionModule == module, msg)
+ assert(module.companionClass == clazz, msg)
}
}
@@ -237,9 +244,9 @@ abstract class SymbolLoaders {
}
class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader with FlagAssigningCompleter {
- private object classfileParser extends ClassfileParser {
+ private object classfileParser extends {
val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
- }
+ } with ClassfileParser
protected def description = "class file "+ classfile.toString
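The classfileParser change above switches to early-definition syntax so that the abstract global is
assigned before ClassfileParser's constructor runs. A minimal sketch of why that ordering matters,
with hypothetical Parser/global names:

    abstract class Parser { val global: String; val banner = "parsing for " + global }
    object Eager extends Parser { val global = "scalac" }            // banner == "parsing for null"
    object Early extends { val global = "scalac" } with Parser       // banner == "parsing for scalac"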
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index bc7c129c20..cbfe5460f6 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package symtab
package classfile
@@ -12,9 +13,11 @@ import java.lang.Integer.toHexString
import scala.collection.{ mutable, immutable }
import scala.collection.mutable.{ ListBuffer, ArrayBuffer }
import scala.annotation.switch
+import scala.reflect.internal.{ JavaAccFlags }
import scala.reflect.internal.pickling.{PickleBuffer, ByteCodecs}
import scala.tools.nsc.io.AbstractFile
+
/** This abstract class implements a class file parser.
*
* @author Martin Odersky
@@ -23,6 +26,7 @@ import scala.tools.nsc.io.AbstractFile
abstract class ClassfileParser {
val global: Global
import global._
+ import definitions._
import scala.reflect.internal.ClassfileConstants._
import Flags._
@@ -35,34 +39,50 @@ abstract class ClassfileParser {
protected var isScala: Boolean = _ // does class file describe a scala class?
protected var isScalaAnnot: Boolean = _ // does class file describe a scala class with its pickled info in an annotation?
protected var isScalaRaw: Boolean = _ // this class file is a scala class with no pickled info
- protected var busy: Option[Symbol] = None // lock to detect recursive reads
+ protected var busy: Symbol = _ // lock to detect recursive reads
protected var currentClass: Name = _ // JVM name of the current class
protected var classTParams = Map[Name,Symbol]()
protected var srcfile0 : Option[AbstractFile] = None
protected def moduleClass: Symbol = staticModule.moduleClass
+ private var sawPrivateConstructor = false
+
+ private def ownerForFlags(jflags: JavaAccFlags) = if (jflags.isStatic) moduleClass else clazz
def srcfile = srcfile0
+ private def optimized = global.settings.optimise.value
private def currentIsTopLevel = !(currentClass.decodedName containsChar '$')
+ // u1, u2, and u4 are what these data types are called in the JVM spec.
+ // They are an unsigned byte, unsigned char, and unsigned int respectively.
+ // We bitmask u1 into an Int to make sure it's 0-255 (and u1 isn't used
+ // for much beyond tags) but leave u2 alone as it's already unsigned.
+ protected final def u1(): Int = in.nextByte & 0xFF
+ protected final def u2(): Int = in.nextChar.toInt
+ protected final def u4(): Int = in.nextInt
+
+ private def readInnerClassFlags() = readClassFlags()
+ private def readClassFlags() = JavaAccFlags classFlags u2
+ private def readMethodFlags() = JavaAccFlags methodFlags u2
+ private def readFieldFlags() = JavaAccFlags fieldFlags u2
+ private def readTypeName() = readName().toTypeName
+ private def readName() = pool getName u2
+ private def readType() = pool getType u2
+
private object unpickler extends scala.reflect.internal.pickling.UnPickler {
val global: ClassfileParser.this.global.type = ClassfileParser.this.global
}
private def handleMissing(e: MissingRequirementError) = {
if (settings.debug) e.printStackTrace
- throw new IOException("Missing dependency '" + e.req + "', required by " + in.file)
+ throw new IOException(s"Missing dependency '${e.req}', required by ${in.file}")
}
private def handleError(e: Exception) = {
if (settings.debug) e.printStackTrace()
- throw new IOException("class file '%s' is broken\n(%s/%s)".format(
- in.file,
- e.getClass,
- if (e.getMessage eq null) "" else e.getMessage)
- )
+ throw new IOException(s"class file '${in.file}' is broken\n(${e.getClass}/${e.getMessage})")
}
private def mismatchError(c: Symbol) = {
- throw new IOException("class file '%s' has location not matching its contents: contains ".format(in.file) + c)
+ throw new IOException(s"class file '${in.file}' has location not matching its contents: contains $c")
}
private def parseErrorHandler[T]: PartialFunction[Throwable, T] = {
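The u1/u2/u4 comment above describes the JVM spec's unsigned reads. A standalone sketch of those
reads over a plain byte array (the real readers consume the classfile buffer via
in.nextByte/nextChar/nextInt):

    val buf = Array[Byte](0xCA.toByte, 0xFE.toByte, 0xBA.toByte, 0xBE.toByte)
    def u1(i: Int): Int = buf(i) & 0xFF                 // mask the signed byte into 0..255
    def u2(i: Int): Int = (u1(i) << 8) | u1(i + 1)      // big-endian unsigned 16-bit value
    def u4(i: Int): Int = (u2(i) << 16) | u2(i + 2)     // big-endian 32-bit value
    u1(0)                  // 202, not -54
    u2(0).toHexString      // "cafe"
    u4(0).toHexString      // "cafebabe"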
@@ -70,16 +90,15 @@ abstract class ClassfileParser {
case e: RuntimeException => handleError(e)
}
@inline private def pushBusy[T](sym: Symbol)(body: => T): T = {
- busy match {
- case Some(`sym`) => throw new IOException("unsatisfiable cyclic dependency in '%s'".format(sym))
- case Some(sym1) => throw new IOException("illegal class file dependency between '%s' and '%s'".format(sym, sym1))
- case _ => ()
- }
+ if (busy eq sym)
+ throw new IOException(s"unsatisfiable cyclic dependency in '$sym'")
+ else if ((busy ne null) && (busy ne NoSymbol))
+ throw new IOException(s"illegal class file dependency between '$sym' and '$busy'")
- busy = Some(sym)
+ busy = sym
try body
catch parseErrorHandler
- finally busy = None
+ finally busy = NoSymbol
}
@inline private def raiseLoaderLevel[T](body: => T): T = {
loaders.parentsLevel += 1
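pushBusy above drops the Option[Symbol] lock in favour of a plain field with NoSymbol as the
"not busy" sentinel, avoiding an Option allocation per classfile read. A minimal sketch of the same
guard, using a String sentinel instead of real symbols:

    object Guard {
      private val NoSym = ""                             // stands in for NoSymbol
      private var busy: String = NoSym
      def pushBusy[T](sym: String)(body: => T): T = {
        if (busy == sym) sys.error(s"unsatisfiable cyclic dependency in '$sym'")
        else if (busy != NoSym) sys.error(s"illegal class file dependency between '$sym' and '$busy'")
        busy = sym
        try body finally busy = NoSym                    // always release the lock
      }
    }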
@@ -106,68 +125,60 @@ abstract class ClassfileParser {
}
private def parseHeader() {
- val magic = in.nextInt
+ val magic = u4
if (magic != JAVA_MAGIC)
- throw new IOException("class file '" + in.file + "' "
- + "has wrong magic number 0x" + toHexString(magic)
- + ", should be 0x" + toHexString(JAVA_MAGIC))
- val minorVersion = in.nextChar.toInt
- val majorVersion = in.nextChar.toInt
- if ((majorVersion < JAVA_MAJOR_VERSION) ||
- ((majorVersion == JAVA_MAJOR_VERSION) &&
- (minorVersion < JAVA_MINOR_VERSION)))
- throw new IOException("class file '" + in.file + "' "
- + "has unknown version "
- + majorVersion + "." + minorVersion
- + ", should be at least "
- + JAVA_MAJOR_VERSION + "." + JAVA_MINOR_VERSION)
+ abort(s"class file ${in.file} has wrong magic number 0x${toHexString(magic)}")
+
+ val minor, major = u2
+ if (major < JAVA_MAJOR_VERSION || major == JAVA_MAJOR_VERSION && minor < JAVA_MINOR_VERSION)
+ abort(s"class file ${in.file} has unknown version $major.$minor, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION")
}
class ConstantPool {
- private val len = in.nextChar
- private val starts = new Array[Int](len)
- private val values = new Array[AnyRef](len)
+ private val len = u2
+ private val starts = new Array[Int](len)
+ private val values = new Array[AnyRef](len)
private val internalized = new Array[Name](len)
{ var i = 1
while (i < starts.length) {
starts(i) = in.bp
i += 1
- (in.nextByte.toInt: @switch) match {
- case CONSTANT_UTF8 | CONSTANT_UNICODE =>
- in.skip(in.nextChar)
- case CONSTANT_CLASS | CONSTANT_STRING | CONSTANT_METHODTYPE=>
- in.skip(2)
- case CONSTANT_METHODHANDLE =>
- in.skip(3)
- case CONSTANT_FIELDREF | CONSTANT_METHODREF | CONSTANT_INTFMETHODREF
- | CONSTANT_NAMEANDTYPE | CONSTANT_INTEGER | CONSTANT_FLOAT
- | CONSTANT_INVOKEDYNAMIC =>
- in.skip(4)
- case CONSTANT_LONG | CONSTANT_DOUBLE =>
- in.skip(8)
- i += 1
- case _ =>
- errorBadTag(in.bp - 1)
+ (u1: @switch) match {
+ case CONSTANT_UTF8 | CONSTANT_UNICODE => in skip u2
+ case CONSTANT_CLASS | CONSTANT_STRING | CONSTANT_METHODTYPE => in skip 2
+ case CONSTANT_METHODHANDLE => in skip 3
+ case CONSTANT_FIELDREF | CONSTANT_METHODREF | CONSTANT_INTFMETHODREF => in skip 4
+ case CONSTANT_NAMEANDTYPE | CONSTANT_INTEGER | CONSTANT_FLOAT => in skip 4
+ case CONSTANT_INVOKEDYNAMIC => in skip 4
+ case CONSTANT_LONG | CONSTANT_DOUBLE => in skip 8 ; i += 1
+ case _ => errorBadTag(in.bp - 1)
}
}
}
- /** Return the name found at given index. */
- def getName(index: Int): Name = {
- if (index <= 0 || len <= index)
- errorBadIndex(index)
+ def recordAtIndex[T <: AnyRef](value: T, idx: Int): T = {
+ values(idx) = value
+ value
+ }
- values(index) match {
+ def firstExpecting(index: Int, expected: Int): Int = {
+ val start = starts(index)
+ val first = in.buf(start).toInt
+ if (first == expected) start + 1
+ else this errorBadTag start
+ }
+
+ /** Return the name found at given index. */
+ def getName(index: Int): Name = (
+ if (index <= 0 || len <= index) errorBadIndex(index)
+ else values(index) match {
case name: Name => name
- case null =>
- val start = starts(index)
- if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
- val name = newTermName(in.buf, start + 3, in.getChar(start + 1))
- values(index) = name
- name
+ case _ =>
+ val start = firstExpecting(index, CONSTANT_UTF8)
+ recordAtIndex(newTermName(in.buf, start + 2, in.getChar(start).toInt), index)
}
- }
+ )
/** Return the name found at given index in the constant pool, with '/' replaced by '.'. */
def getExternalName(index: Int): Name = {
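getName above is rebuilt around two small helpers: firstExpecting checks the tag byte of a pool slot,
and recordAtIndex caches the decoded value so later lookups are hits. A rough sketch of that
decode-once-then-cache shape, with a hypothetical Pool over pre-split strings instead of raw
classfile bytes:

    class Pool(raw: Array[String]) {
      private val values = new Array[AnyRef](raw.length)
      private def recordAtIndex[T <: AnyRef](value: T, idx: Int): T = { values(idx) = value; value }
      def getName(index: Int): String = values(index) match {
        case name: String => name                                   // cache hit
        case _            => recordAtIndex(raw(index).trim, index)  // "decode" once, then cache
      }
    }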
@@ -182,28 +193,23 @@ abstract class ClassfileParser {
def getClassSymbol(index: Int): Symbol = {
if (index <= 0 || len <= index) errorBadIndex(index)
- var c = values(index).asInstanceOf[Symbol]
- if (c eq null) {
- val start = starts(index)
- if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start)
- val name = getExternalName(in.getChar(start + 1))
- if (nme.isModuleName(name))
- c = rootMirror.getModuleByName(name.dropModule)
- else
- c = classNameToSymbol(name)
-
- values(index) = c
+ values(index) match {
+ case sym: Symbol => sym
+ case _ =>
+ val result = getClassName(index) match {
+ case name if nme.isModuleName(name) => rootMirror getModuleByName name.dropModule
+ case name => classNameToSymbol(name)
+ }
+ recordAtIndex(result, index)
}
- c
}
/** Return the external name of the class info structure found at 'index'.
* Use 'getClassSymbol' if the class is sure to be a top-level class.
*/
def getClassName(index: Int): Name = {
- val start = starts(index)
- if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start)
- getExternalName(in.getChar(start + 1))
+ val start = firstExpecting(index, CONSTANT_CLASS)
+ getExternalName((in getChar start).toInt)
}
/** Return the symbol of the class member at `index`.
@@ -224,18 +230,16 @@ abstract class ClassfileParser {
if (first != CONSTANT_FIELDREF &&
first != CONSTANT_METHODREF &&
first != CONSTANT_INTFMETHODREF) errorBadTag(start)
- val ownerTpe = getClassOrArrayType(in.getChar(start + 1))
+ val ownerTpe = getClassOrArrayType(in.getChar(start + 1).toInt)
debuglog("getMemberSymbol(static: " + static + "): owner type: " + ownerTpe + " " + ownerTpe.typeSymbol.originalName)
- val (name0, tpe0) = getNameAndType(in.getChar(start + 3), ownerTpe)
+ val (name0, tpe0) = getNameAndType(in.getChar(start + 3).toInt, ownerTpe)
debuglog("getMemberSymbol: name and tpe: " + name0 + ": " + tpe0)
forceMangledName(tpe0.typeSymbol.name, module = false)
- val (name, tpe) = getNameAndType(in.getChar(start + 3), ownerTpe)
-// println("new tpe: " + tpe + " at phase: " + phase)
-
+ val (name, tpe) = getNameAndType(in.getChar(start + 3).toInt, ownerTpe)
if (name == nme.MODULE_INSTANCE_FIELD) {
- val index = in.getChar(start + 1)
- val name = getExternalName(in.getChar(starts(index) + 1))
+ val index = in.getChar(start + 1).toInt
+ val name = getExternalName(in.getChar(starts(index).toInt + 1).toInt)
//assert(name.endsWith("$"), "Not a module class: " + name)
f = forceMangledName(name dropRight 1, module = true)
if (f == NoSymbol)
@@ -243,14 +247,12 @@ abstract class ClassfileParser {
} else {
val origName = nme.unexpandedName(name)
val owner = if (static) ownerTpe.typeSymbol.linkedClassOfClass else ownerTpe.typeSymbol
-// println("\t" + owner.info.member(name).tpe.widen + " =:= " + tpe)
f = owner.info.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe.widen =:= tpe)
if (f == NoSymbol)
f = owner.info.findMember(newTermName(origName + nme.LOCAL_SUFFIX_STRING), 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
if (f == NoSymbol) {
          // if it's an impl class, try to find its static member inside the class
if (ownerTpe.typeSymbol.isImplClass) {
-// println("impl class, member: " + owner.tpe.member(origName) + ": " + owner.tpe.member(origName).tpe)
f = ownerTpe.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
} else {
log("Couldn't find " + name + ": " + tpe + " inside: \n" + ownerTpe)
@@ -261,11 +263,13 @@ abstract class ClassfileParser {
f setInfo tpe
log("created fake member " + f.fullName)
}
-// println("\townerTpe.decls: " + ownerTpe.decls)
-// println("Looking for: " + name + ": " + tpe + " inside: " + ownerTpe.typeSymbol + "\n\tand found: " + ownerTpe.members)
}
}
- assert(f != NoSymbol, "could not find\n " + name + ": " + tpe + "\ninside:\n " + ownerTpe.members.mkString(", "))
+ assert(f != NoSymbol,
+ s"could not find $name: $tpe in $ownerTpe" + (
+ if (settings.debug.value) ownerTpe.members.mkString(", members are:\n ", "\n ", "") else ""
+ )
+ )
values(index) = f
}
f
@@ -279,94 +283,67 @@ abstract class ClassfileParser {
*/
private def getNameAndType(index: Int, ownerTpe: Type): (Name, Type) = {
if (index <= 0 || len <= index) errorBadIndex(index)
- var p = values(index).asInstanceOf[(Name, Type)]
- if (p eq null) {
- val start = starts(index)
- if (in.buf(start).toInt != CONSTANT_NAMEANDTYPE) errorBadTag(start)
- val name = getName(in.getChar(start + 1).toInt)
- // create a dummy symbol for method types
- val dummySym = ownerTpe.typeSymbol.newMethod(name.toTermName, ownerTpe.typeSymbol.pos)
- var tpe = getType(dummySym, in.getChar(start + 3).toInt)
-
- // fix the return type, which is blindly set to the class currently parsed
- if (name == nme.CONSTRUCTOR)
- tpe match {
- case MethodType(formals, restpe) =>
- tpe = MethodType(formals, ownerTpe)
+ values(index) match {
+ case p: ((Name @unchecked, Type @unchecked)) => p
+ case _ =>
+ val start = firstExpecting(index, CONSTANT_NAMEANDTYPE)
+ val name = getName(in.getChar(start).toInt)
+ // create a dummy symbol for method types
+ val dummy = ownerTpe.typeSymbol.newMethod(name.toTermName, ownerTpe.typeSymbol.pos)
+ val tpe = getType(dummy, in.getChar(start + 2).toInt)
+ // fix the return type, which is blindly set to the class currently parsed
+ val restpe = tpe match {
+ case MethodType(formals, _) if name == nme.CONSTRUCTOR => MethodType(formals, ownerTpe)
+ case _ => tpe
}
-
- p = (name, tpe)
+ ((name, restpe))
}
- p
}
/** Return the type of a class constant entry. Since
* arrays are considered to be class types, they might
* appear as entries in 'newarray' or 'cast' opcodes.
*/
- def getClassOrArrayType(index: Int): Type = {
+ def getClassOrArrayType(index: Int): Type = (
if (index <= 0 || len <= index) errorBadIndex(index)
- val value = values(index)
- var c: Type = null
- if (value eq null) {
- val start = starts(index)
- if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start)
- val name = getExternalName(in.getChar(start + 1))
- if (name.charAt(0) == ARRAY_TAG) {
- c = sigToType(null, name)
- values(index) = c
- } else {
- val sym = classNameToSymbol(name)
- /*if (name.endsWith("$")) definitions.getModule(name.subName(0, name.length - 1))
- else if (name.endsWith("$class")) definitions.getModule(name)
- else definitions.getClass(name)*/
- values(index) = sym
- c = sym.tpe
- }
- } else c = value match {
- case tp: Type => tp
- case cls: Symbol => cls.tpe
+ else values(index) match {
+ case tp: Type => tp
+ case cls: Symbol => cls.tpe_*
+ case _ =>
+ val name = getClassName(index)
+ name charAt 0 match {
+ case ARRAY_TAG => recordAtIndex(sigToType(null, name), index)
+ case _ => recordAtIndex(classNameToSymbol(name), index).tpe_*
+ }
}
- c
- }
-
- def getType(index: Int): Type = getType(null, index)
-
- def getType(sym: Symbol, index: Int): Type =
- sigToType(sym, getExternalName(index))
+ )
- def getSuperClass(index: Int): Symbol =
- if (index == 0) definitions.AnyClass else getClassSymbol(index)
+ def getType(index: Int): Type = getType(null, index)
+ def getType(sym: Symbol, index: Int): Type = sigToType(sym, getExternalName(index))
+ def getSuperClass(index: Int): Symbol = if (index == 0) AnyClass else getClassSymbol(index)
- def getConstant(index: Int): Constant = {
+ private def createConstant(index: Int): Constant = {
+ val start = starts(index)
+ Constant((in.buf(start).toInt: @switch) match {
+ case CONSTANT_STRING => getName(in.getChar(start + 1).toInt).toString
+ case CONSTANT_INTEGER => in.getInt(start + 1)
+ case CONSTANT_FLOAT => in.getFloat(start + 1)
+ case CONSTANT_LONG => in.getLong(start + 1)
+ case CONSTANT_DOUBLE => in.getDouble(start + 1)
+ case CONSTANT_CLASS => getClassOrArrayType(index).typeSymbol.tpe_* // !!! Is this necessary or desirable?
+ case _ => errorBadTag(start)
+ })
+ }
+ def getConstant(index: Char): Constant = getConstant(index.toInt)
+ def getConstant(index: Int): Constant = (
if (index <= 0 || len <= index) errorBadIndex(index)
- var value = values(index)
- if (value eq null) {
- val start = starts(index)
- value = (in.buf(start).toInt: @switch) match {
- case CONSTANT_STRING =>
- Constant(getName(in.getChar(start + 1).toInt).toString)
- case CONSTANT_INTEGER =>
- Constant(in.getInt(start + 1))
- case CONSTANT_FLOAT =>
- Constant(in.getFloat(start + 1))
- case CONSTANT_LONG =>
- Constant(in.getLong(start + 1))
- case CONSTANT_DOUBLE =>
- Constant(in.getDouble(start + 1))
- case CONSTANT_CLASS =>
- getClassOrArrayType(index).typeSymbol
- case _ =>
- errorBadTag(start)
- }
- values(index) = value
+ else values(index) match {
+ case const: Constant => const
+ case sym: Symbol => Constant(sym.tpe_*)
+ case tpe: Type => Constant(tpe)
+ case _ => recordAtIndex(createConstant(index), index)
}
- value match {
- case ct: Constant => ct
- case cls: Symbol => Constant(cls.tpe_*)
- case arr: Type => Constant(arr)
- }
- }
+ )
private def getSubArray(bytes: Array[Byte]): Array[Byte] = {
val decodedLength = ByteCodecs.decode(bytes)
@@ -375,46 +352,41 @@ abstract class ClassfileParser {
arr
}
- def getBytes(index: Int): Array[Byte] = {
+ def getBytes(index: Int): Array[Byte] = (
if (index <= 0 || len <= index) errorBadIndex(index)
- var value = values(index).asInstanceOf[Array[Byte]]
- if (value eq null) {
- val start = starts(index)
- if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
- val len = in.getChar(start + 1)
- val bytes = new Array[Byte](len)
- System.arraycopy(in.buf, start + 3, bytes, 0, len)
- value = getSubArray(bytes)
- values(index) = value
+ else values(index) match {
+ case xs: Array[Byte] => xs
+ case _ =>
+ val start = firstExpecting(index, CONSTANT_UTF8)
+ val len = (in getChar start).toInt
+ val bytes = new Array[Byte](len)
+ System.arraycopy(in.buf, start + 2, bytes, 0, len)
+ recordAtIndex(getSubArray(bytes), index)
}
- value
- }
+ )
def getBytes(indices: List[Int]): Array[Byte] = {
- assert(!indices.isEmpty, indices)
- var value = values(indices.head).asInstanceOf[Array[Byte]]
- if (value eq null) {
- val bytesBuffer = ArrayBuffer.empty[Byte]
- for (index <- indices) {
- if (index <= 0 || ConstantPool.this.len <= index) errorBadIndex(index)
- val start = starts(index)
- if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
- val len = in.getChar(start + 1)
- bytesBuffer ++= in.buf.view(start + 3, start + 3 + len)
- }
- value = getSubArray(bytesBuffer.toArray)
- values(indices.head) = value
+ val head = indices.head
+ values(head) match {
+ case xs: Array[Byte] => xs
+ case _ =>
+ val arr: Array[Byte] = indices.toArray flatMap { index =>
+ if (index <= 0 || ConstantPool.this.len <= index) errorBadIndex(index)
+ val start = firstExpecting(index, CONSTANT_UTF8)
+ val len = (in getChar start).toInt
+ in.buf drop start + 2 take len
+ }
+ recordAtIndex(getSubArray(arr), head)
}
- value
}
/** Throws an exception signaling a bad constant index. */
private def errorBadIndex(index: Int) =
- throw new RuntimeException("bad constant pool index: " + index + " at pos: " + in.bp)
+ abort(s"bad constant pool index: $index at pos: ${in.bp}")
/** Throws an exception signaling a bad tag at given address. */
private def errorBadTag(start: Int) =
- throw new RuntimeException("bad constant pool tag " + in.buf(start) + " at byte " + start)
+ abort("bad constant pool tag ${in.buf(start)} at byte $start")
}
/** Try to force the chain of enclosing classes for the given name. Otherwise
@@ -493,36 +465,35 @@ abstract class ClassfileParser {
catch { case _: FatalError => loadClassSymbol(name) }
}
- var sawPrivateConstructor = false
-
def parseClass() {
- val jflags = in.nextChar
- val isAnnotation = hasAnnotation(jflags)
- val sflags = toScalaClassFlags(jflags)
- val nameIdx = in.nextChar
- currentClass = pool.getClassName(nameIdx)
+ val jflags = readClassFlags()
+ val sflags = jflags.toScalaFlags
+ val nameIdx = u2
+ currentClass = pool.getClassName(nameIdx)
/* Parse parents for Java classes. For Scala, return AnyRef, since the real type will be unpickled.
* Updates the read pointer of 'in'. */
def parseParents: List[Type] = {
if (isScala) {
- in.nextChar // skip superclass
- val ifaces = in.nextChar
- in.bp += ifaces * 2 // .. and iface count interfaces
- List(definitions.AnyRefClass.tpe) // dummy superclass, will be replaced by pickled information
+ u2 // skip superclass
+ val ifaces = u2
+ in.bp += ifaces * 2 // .. and iface count interfaces
+ List(AnyRefTpe) // dummy superclass, will be replaced by pickled information
}
else raiseLoaderLevel {
- val superType = if (isAnnotation) { in.nextChar; definitions.AnnotationClass.tpe }
- else pool.getSuperClass(in.nextChar).tpe_*
- val ifaceCount = in.nextChar
- var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(in.nextChar).tpe_*
- if (isAnnotation) ifaces = definitions.ClassfileAnnotationClass.tpe :: ifaces
+ val superType = if (jflags.isAnnotation) { u2; AnnotationClass.tpe }
+ else pool.getSuperClass(u2).tpe_*
+ val ifaceCount = u2
+ var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(u2).tpe_*
+ if (jflags.isAnnotation) ifaces ::= ClassfileAnnotationClass.tpe
superType :: ifaces
}
}
- val c = if (currentIsTopLevel) pool.getClassSymbol(nameIdx) else clazz
- if (currentIsTopLevel) {
+    val isTopLevel = !(currentClass containsChar '$') // Java class name; *don't* try to use Scala name decoding (SI-7532)
+
+ val c = if (isTopLevel) pool.getClassSymbol(nameIdx) else clazz
+ if (isTopLevel) {
if (c != clazz) {
if ((clazz eq NoSymbol) && (c ne NoSymbol)) clazz = c
else mismatchError(c)
@@ -546,21 +517,20 @@ abstract class ClassfileParser {
skipMembers() // methods
if (!isScala) {
clazz setFlag sflags
- importPrivateWithinFromJavaFlags(clazz, jflags)
- importPrivateWithinFromJavaFlags(staticModule, jflags)
- clazz.setInfo(classInfo)
+ propagatePackageBoundary(jflags, clazz, staticModule)
+ clazz setInfo classInfo
moduleClass setInfo staticInfo
- staticModule.setInfo(moduleClass.tpe)
- staticModule.setFlag(JAVA)
- staticModule.moduleClass.setFlag(JAVA)
+ staticModule setInfo moduleClass.tpe
+ staticModule setFlag JAVA
+ staticModule.moduleClass setFlag JAVA
// attributes now depend on having infos set already
parseAttributes(clazz, classInfo)
def queueLoad() {
in.bp = curbp
- 0 until in.nextChar foreach (_ => parseField())
+ 0 until u2 foreach (_ => parseField())
sawPrivateConstructor = false
- 0 until in.nextChar foreach (_ => parseMethod())
+ 0 until u2 foreach (_ => parseMethod())
val needsConstructor = (
!sawPrivateConstructor
&& !(instanceScope containsName nme.CONSTRUCTOR)
@@ -586,68 +556,71 @@ abstract class ClassfileParser {
def addEnclosingTParams(clazz: Symbol) {
var sym = clazz.owner
while (sym.isClass && !sym.isModuleClass) {
-// println("adding tparams of " + sym)
- for (t <- sym.tpe.typeArgs) {
-// println("\tadding " + (t.typeSymbol.name + "->" + t.typeSymbol))
+ for (t <- sym.tpe.typeArgs)
classTParams = classTParams + (t.typeSymbol.name -> t.typeSymbol)
- }
+
sym = sym.owner
}
}
def parseField() {
- val jflags = in.nextChar
- val sflags = toScalaFieldFlags(jflags)
- if ((sflags & PRIVATE) != 0L && !global.settings.optimise) {
+ val jflags = readFieldFlags()
+ val sflags = jflags.toScalaFlags
+
+ if ((sflags & PRIVATE) != 0L && !optimized) {
in.skip(4); skipAttributes()
} else {
- val name = pool.getName(in.nextChar)
- val info = pool.getType(in.nextChar)
- val sym = getOwner(jflags).newValue(name.toTermName, NoPosition, sflags)
- val isEnum = (jflags & JAVA_ACC_ENUM) != 0
+ val name = readName()
+ val info = readType()
+ val sym = ownerForFlags(jflags).newValue(name.toTermName, NoPosition, sflags)
      // Note: the info may be overwritten later with a generic signature
// parsed from SignatureATTR
sym setInfo {
- if (isEnum) ConstantType(Constant(sym))
+ if (jflags.isEnum) ConstantType(Constant(sym))
else info
}
- importPrivateWithinFromJavaFlags(sym, jflags)
+ propagatePackageBoundary(jflags, sym)
parseAttributes(sym, info)
- getScope(jflags).enter(sym)
+ getScope(jflags) enter sym
// sealed java enums
- if (isEnum) {
+ if (jflags.isEnum) {
val enumClass = sym.owner.linkedClassOfClass
- if (!enumClass.isSealed)
- enumClass setFlag (SEALED | ABSTRACT)
-
- enumClass addChild sym
+ enumClass match {
+ case NoSymbol =>
+ devWarning(s"no linked class for java enum $sym in ${sym.owner}. A referencing class file might be missing an InnerClasses entry.")
+ case linked =>
+ if (!linked.isSealed)
+ linked setFlag (SEALED | ABSTRACT)
+ linked addChild sym
+ }
}
}
}
def parseMethod() {
- val jflags = in.nextChar.toInt
- val sflags = toScalaMethodFlags(jflags)
- if (isPrivate(jflags) && !global.settings.optimise) {
- val name = pool.getName(in.nextChar)
+ val jflags = readMethodFlags()
+ val sflags = jflags.toScalaFlags
+ if (jflags.isPrivate && !optimized) {
+ val name = readName()
if (name == nme.CONSTRUCTOR)
sawPrivateConstructor = true
in.skip(2); skipAttributes()
- } else {
- if ((sflags & PRIVATE) != 0L && global.settings.optimise) {
+ }
+ else {
+ if ((sflags & PRIVATE) != 0L && optimized) {
in.skip(4); skipAttributes()
} else {
- val name = pool.getName(in.nextChar)
- val sym = getOwner(jflags).newMethod(name.toTermName, NoPosition, sflags)
- var info = pool.getType(sym, (in.nextChar))
+ val name = readName()
+ val sym = ownerForFlags(jflags).newMethod(name.toTermName, NoPosition, sflags)
+ var info = pool.getType(sym, u2)
if (name == nme.CONSTRUCTOR)
info match {
case MethodType(params, restpe) =>
// if this is a non-static inner class, remove the explicit outer parameter
val newParams = innerClasses getEntry currentClass match {
- case Some(entry) if !isScalaRaw && !isStatic(entry.jflags) =>
+ case Some(entry) if !isScalaRaw && !entry.jflags.isStatic =>
/* About `clazz.owner.isPackage` below: SI-5957
* For every nested java class A$B, there are two symbols in the scala compiler.
* 1. created by SymbolLoader, because of the existence of the A$B.class file, owner: package
@@ -664,13 +637,13 @@ abstract class ClassfileParser {
}
      // Note: the info may be overwritten later with a generic signature
// parsed from SignatureATTR
- sym.setInfo(info)
- importPrivateWithinFromJavaFlags(sym, jflags)
+ sym setInfo info
+ propagatePackageBoundary(jflags, sym)
parseAttributes(sym, info)
- if ((jflags & JAVA_ACC_VARARGS) != 0) {
- sym.setInfo(arrayToRepeated(sym.info))
- }
- getScope(jflags).enter(sym)
+ if (jflags.isVarargs)
+ sym modifyInfo arrayToRepeated
+
+ getScope(jflags) enter sym
}
}
}
@@ -690,15 +663,15 @@ abstract class ClassfileParser {
def sig2type(tparams: immutable.Map[Name,Symbol], skiptvs: Boolean): Type = {
val tag = sig.charAt(index); index += 1
tag match {
- case BYTE_TAG => definitions.ByteClass.tpe
- case CHAR_TAG => definitions.CharClass.tpe
- case DOUBLE_TAG => definitions.DoubleClass.tpe
- case FLOAT_TAG => definitions.FloatClass.tpe
- case INT_TAG => definitions.IntClass.tpe
- case LONG_TAG => definitions.LongClass.tpe
- case SHORT_TAG => definitions.ShortClass.tpe
- case VOID_TAG => definitions.UnitClass.tpe
- case BOOL_TAG => definitions.BooleanClass.tpe
+ case BYTE_TAG => ByteTpe
+ case CHAR_TAG => CharTpe
+ case DOUBLE_TAG => DoubleTpe
+ case FLOAT_TAG => FloatTpe
+ case INT_TAG => IntTpe
+ case LONG_TAG => LongTpe
+ case SHORT_TAG => ShortTpe
+ case VOID_TAG => UnitTpe
+ case BOOL_TAG => BooleanTpe
case 'L' =>
def processInner(tp: Type): Type = tp match {
case TypeRef(pre, sym, args) if (!sym.isStatic) =>
@@ -723,7 +696,7 @@ abstract class ClassfileParser {
val tp = sig2type(tparams, skiptvs)
// sig2type seems to return AnyClass regardless of the situation:
// we don't want Any as a LOWER bound.
- if (tp.typeSymbol == definitions.AnyClass) TypeBounds.empty
+ if (tp.typeSymbol == AnyClass) TypeBounds.empty
else TypeBounds.lower(tp)
case '*' => TypeBounds.empty
}
@@ -744,7 +717,7 @@ abstract class ClassfileParser {
// or we'll create a boatload of needless existentials.
else if (classSym.isMonomorphicType || classSym.unsafeTypeParams.isEmpty) tp
// raw type - existentially quantify all type parameters
- else logResult(s"raw type from $classSym")(definitions.unsafeClassExistentialType(classSym))
+ else logResult(s"raw type from $classSym")(unsafeClassExistentialType(classSym))
case tp =>
assert(sig.charAt(index) != '<', s"sig=$sig, index=$index, tp=$tp")
tp
@@ -772,11 +745,11 @@ abstract class ClassfileParser {
// NOTE that the comparison to Object only works for abstract types bounded by classes that are strict subclasses of Object
// if the bound is exactly Object, it will have been converted to Any, and the comparison will fail
// see also RestrictJavaArraysMap (when compiling java sources directly)
- if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< definitions.ObjectClass.tpe)) {
- elemtp = intersectionType(List(elemtp, definitions.ObjectClass.tpe))
+ if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< ObjectTpe)) {
+ elemtp = intersectionType(List(elemtp, ObjectTpe))
}
- definitions.arrayType(elemtp)
+ arrayType(elemtp)
case '(' =>
// we need a method symbol. given in line 486 by calling getType(methodSym, ..)
assert(sym ne null, sig)
@@ -794,7 +767,7 @@ abstract class ClassfileParser {
case 'T' =>
val n = subName(';'.==).toTypeName
index += 1
- if (skiptvs) definitions.AnyClass.tpe
+ if (skiptvs) AnyTpe
else tparams(n).typeConstructor
}
} // sig2type(tparams, skiptvs)
@@ -847,28 +820,22 @@ abstract class ClassfileParser {
GenPolyType(ownTypeParams, tpe)
} // sigToType
- class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter {
- override def complete(sym: Symbol) { throw new AssertionError("cyclic type dereferencing") }
- }
-
def parseAttributes(sym: Symbol, symtype: Type) {
def convertTo(c: Constant, pt: Type): Constant = {
- if (pt.typeSymbol == definitions.BooleanClass && c.tag == IntTag)
+ if (pt.typeSymbol == BooleanClass && c.tag == IntTag)
Constant(c.value != 0)
else
c convertTo pt
}
def parseAttribute() {
- val attrName = pool.getName(in.nextChar).toTypeName
- val attrLen = in.nextInt
+ val attrName = readTypeName()
+ val attrLen = u4
attrName match {
case tpnme.SignatureATTR =>
if (!isScala && !isScalaRaw) {
- val sig = pool.getExternalName(in.nextChar)
+ val sig = pool.getExternalName(u2)
val newType = sigToType(sym, sig)
sym.setInfo(newType)
- if (settings.debug && settings.verbose)
- println("" + sym + "; signature = " + sig + " type = " + newType)
}
else in.skip(attrLen)
case tpnme.SyntheticATTR =>
@@ -879,16 +846,16 @@ abstract class ClassfileParser {
in.skip(attrLen)
case tpnme.DeprecatedATTR =>
val arg = Literal(Constant("see corresponding Javadoc for more information."))
- sym.addAnnotation(definitions.DeprecatedAttr, arg, Literal(Constant("")))
+ sym.addAnnotation(DeprecatedAttr, arg, Literal(Constant("")))
in.skip(attrLen)
case tpnme.ConstantValueATTR =>
- val c = pool.getConstant(in.nextChar)
+ val c = pool.getConstant(u2)
val c1 = convertTo(c, symtype)
if (c1 ne null) sym.setInfo(ConstantType(c1))
- else println("failure to convert " + c + " to " + symtype); //debug
+ else debugwarn(s"failure to convert $c to $symtype")
case tpnme.ScalaSignatureATTR =>
if (!isScalaAnnot) {
- debuglog("warning: symbol " + sym.fullName + " has pickled signature in attribute")
+ debugwarn(s"symbol ${sym.fullName} has pickled signature in attribute")
unpickler.unpickle(in.buf, in.bp, clazz, staticModule, in.file.name)
}
in.skip(attrLen)
@@ -896,7 +863,7 @@ abstract class ClassfileParser {
isScalaRaw = true
// Attribute on methods of java annotation classes when that method has a default
case tpnme.AnnotationDefaultATTR =>
- sym.addAnnotation(definitions.AnnotationDefaultAttr)
+ sym.addAnnotation(AnnotationDefaultAttr)
in.skip(attrLen)
// Java annotations on classes / methods / fields with RetentionPolicy.RUNTIME
case tpnme.RuntimeAnnotationATTR =>
@@ -926,21 +893,27 @@ abstract class ClassfileParser {
parseExceptions(attrLen)
case tpnme.SourceFileATTR =>
- val srcfileLeaf = pool.getName(in.nextChar).toString.trim
+ val srcfileLeaf = readName().toString.trim
val srcpath = sym.enclosingPackage match {
case NoSymbol => srcfileLeaf
case rootMirror.EmptyPackage => srcfileLeaf
case pkg => pkg.fullName(File.separatorChar)+File.separator+srcfileLeaf
}
srcfile0 = settings.outputDirs.srcFilesFor(in.file, srcpath).find(_.exists)
+ case tpnme.CodeATTR =>
+ if (sym.owner.isInterface) {
+ sym setFlag DEFAULTMETHOD
+ log(s"$sym in ${sym.owner} is a java8+ default method.")
+ }
+ in.skip(attrLen)
case _ =>
in.skip(attrLen)
}
}
def parseAnnotArg: Option[ClassfileAnnotArg] = {
- val tag = in.nextByte.toChar
- val index = in.nextChar
+ val tag = u1
+ val index = u2
tag match {
case STRING_TAG =>
Some(LiteralAnnotArg(Constant(pool.getName(index).toString)))
@@ -951,7 +924,7 @@ abstract class ClassfileParser {
Some(LiteralAnnotArg(Constant(pool.getType(index))))
case ENUM_TAG =>
val t = pool.getType(index)
- val n = pool.getName(in.nextChar)
+ val n = readName()
val s = t.typeSymbol.companionModule.info.decls.lookup(n)
assert(s != NoSymbol, t)
Some(LiteralAnnotArg(Constant(s)))
@@ -971,20 +944,20 @@ abstract class ClassfileParser {
}
def parseScalaSigBytes: Option[ScalaSigBytes] = {
- val tag = in.nextByte.toChar
+ val tag = u1
assert(tag == STRING_TAG, tag)
- Some(ScalaSigBytes(pool getBytes in.nextChar))
+ Some(ScalaSigBytes(pool getBytes u2))
}
def parseScalaLongSigBytes: Option[ScalaSigBytes] = {
- val tag = in.nextByte.toChar
+ val tag = u1
assert(tag == ARRAY_TAG, tag)
- val stringCount = in.nextChar
+ val stringCount = u2
val entries =
for (i <- 0 until stringCount) yield {
- val stag = in.nextByte.toChar
+ val stag = u1
assert(stag == STRING_TAG, stag)
- in.nextChar.toInt
+ u2
}
Some(ScalaSigBytes(pool.getBytes(entries.toList)))
}
@@ -992,22 +965,22 @@ abstract class ClassfileParser {
/* Parse and return a single annotation. If it is malformed,
* return None.
*/
- def parseAnnotation(attrNameIndex: Char): Option[AnnotationInfo] = try {
+ def parseAnnotation(attrNameIndex: Int): Option[AnnotationInfo] = try {
val attrType = pool.getType(attrNameIndex)
- val nargs = in.nextChar
+ val nargs = u2
val nvpairs = new ListBuffer[(Name, ClassfileAnnotArg)]
var hasError = false
for (i <- 0 until nargs) {
- val name = pool.getName(in.nextChar)
+ val name = readName()
// The "bytes: String" argument of the ScalaSignature attribute is parsed specially so that it is
// available as an array of bytes (the pickled Scala signature) instead of as a string. The pickled signature
// is encoded as a string because of limitations in the Java class file format.
- if ((attrType == definitions.ScalaSignatureAnnotation.tpe) && (name == nme.bytes))
+ if ((attrType == ScalaSignatureAnnotation.tpe) && (name == nme.bytes))
parseScalaSigBytes match {
case Some(c) => nvpairs += ((name, c))
case None => hasError = true
}
- else if ((attrType == definitions.ScalaLongSignatureAnnotation.tpe) && (name == nme.bytes))
+ else if ((attrType == ScalaLongSignatureAnnotation.tpe) && (name == nme.bytes))
parseScalaLongSigBytes match {
case Some(c) => nvpairs += ((name, c))
case None => hasError = true
@@ -1020,16 +993,18 @@ abstract class ClassfileParser {
}
if (hasError) None
else Some(AnnotationInfo(attrType, List(), nvpairs.toList))
- } catch {
- case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found
- case ex: Throwable =>
+ }
+ catch {
+ case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found
+ case ex: java.lang.Error => throw ex
+ case ex: Throwable =>
// We want to be robust when annotations are unavailable, so the very least
// we can do is warn the user about the exception
// There was a reference to ticket 1135, but that is outdated: a reference to a class not on
// the classpath would *not* end up here. A class not found is signaled
// with a `FatalError` exception, handled above. Here you'd end up after a NPE (for example),
// and that should never be swallowed silently.
- warning("Caught: " + ex + " while parsing annotations in " + in.file)
+ warning(s"Caught: $ex while parsing annotations in ${in.file}")
if (settings.debug) ex.printStackTrace()
None // ignore malformed annotations
@@ -1040,10 +1015,10 @@ abstract class ClassfileParser {
* thrown by a method.
*/
def parseExceptions(len: Int) {
- val nClasses = in.nextChar
+ val nClasses = u2
for (n <- 0 until nClasses) {
// FIXME: this performs an equivalent of getExceptionTypes instead of getGenericExceptionTypes (SI-7065)
- val cls = pool.getClassSymbol(in.nextChar.toInt)
+ val cls = pool.getClassSymbol(u2)
// we call initialize due to the fact that we call Symbol.isMonomorphicType in addThrowsAnnotation
// and that method requires Symbol to be forced to give the right answers, see SI-7107 for details
cls.initialize
@@ -1054,13 +1029,13 @@ abstract class ClassfileParser {
/* Parse a sequence of annotations and attaches them to the
* current symbol sym, except for the ScalaSignature annotation that it returns, if it is available. */
def parseAnnotations(len: Int): Option[AnnotationInfo] = {
- val nAttr = in.nextChar
+ val nAttr = u2
var scalaSigAnnot: Option[AnnotationInfo] = None
for (n <- 0 until nAttr)
- parseAnnotation(in.nextChar) match {
- case Some(scalaSig) if (scalaSig.atp == definitions.ScalaSignatureAnnotation.tpe) =>
+ parseAnnotation(u2) match {
+ case Some(scalaSig) if (scalaSig.atp == ScalaSignatureAnnotation.tpe) =>
scalaSigAnnot = Some(scalaSig)
- case Some(scalaSig) if (scalaSig.atp == definitions.ScalaLongSignatureAnnotation.tpe) =>
+ case Some(scalaSig) if (scalaSig.atp == ScalaLongSignatureAnnotation.tpe) =>
scalaSigAnnot = Some(scalaSig)
case Some(annot) =>
sym.addAnnotation(annot)
@@ -1070,7 +1045,7 @@ abstract class ClassfileParser {
}
// begin parseAttributes
- for (i <- 0 until in.nextChar) parseAttribute()
+ for (i <- 0 until u2) parseAttribute()
}
/** Enter own inner classes in the right scope. It needs the scopes to be set up,
@@ -1080,11 +1055,12 @@ abstract class ClassfileParser {
def className(name: Name): Name =
name.subName(name.lastPos('.') + 1, name.length)
- def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile, jflags: Int) {
+ def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile) {
+ def jflags = entry.jflags
val completer = new global.loaders.ClassfileLoader(file)
val name = entry.originalName
- val sflags = toScalaClassFlags(jflags)
- val owner = getOwner(jflags)
+ val sflags = jflags.toScalaFlags
+ val owner = ownerForFlags(jflags)
val scope = getScope(jflags)
val innerClass = owner.newClass(name.toTypeName, NoPosition, sflags) setInfo completer
val innerModule = owner.newModule(name.toTermName, NoPosition, sflags) setInfo completer
@@ -1113,7 +1089,7 @@ abstract class ClassfileParser {
val file = global.classPath.findSourceFile(entry.externalName.toString) getOrElse {
throw new AssertionError(entry.externalName)
}
- enterClassAndModule(entry, file, entry.jflags)
+ enterClassAndModule(entry, file)
}
}
}
@@ -1126,10 +1102,10 @@ abstract class ClassfileParser {
skipSuperclasses()
skipMembers() // fields
skipMembers() // methods
- val attrs = in.nextChar
+ val attrs = u2
for (i <- 0 until attrs) {
- val attrName = pool.getName(in.nextChar).toTypeName
- val attrLen = in.nextInt
+ val attrName = readTypeName()
+ val attrLen = u4
attrName match {
case tpnme.SignatureATTR =>
in.skip(attrLen)
@@ -1143,9 +1119,10 @@ abstract class ClassfileParser {
case tpnme.ScalaATTR =>
isScalaRaw = true
case tpnme.InnerClassesATTR if !isScala =>
- val entries = in.nextChar.toInt
+ val entries = u2
for (i <- 0 until entries) {
- val innerIndex, outerIndex, nameIndex, jflags = in.nextChar.toInt
+ val innerIndex, outerIndex, nameIndex = u2
+ val jflags = readInnerClassFlags()
if (innerIndex != 0 && outerIndex != 0 && nameIndex != 0)
innerClasses add InnerClassEntry(innerIndex, outerIndex, nameIndex, jflags)
}
@@ -1157,14 +1134,13 @@ abstract class ClassfileParser {
}
/** An entry in the InnerClasses attribute of this class file. */
- case class InnerClassEntry(external: Int, outer: Int, name: Int, jflags: Int) {
+ case class InnerClassEntry(external: Int, outer: Int, name: Int, jflags: JavaAccFlags) {
def externalName = pool getClassName external
def outerName = pool getClassName outer
def originalName = pool getName name
- def isStatic = ClassfileParser.this.isStatic(jflags)
def isModule = originalName.isTermName
- def scope = if (isStatic) staticScope else instanceScope
- def enclosing = if (isStatic) enclModule else enclClass
+ def scope = if (jflags.isStatic) staticScope else instanceScope
+ def enclosing = if (jflags.isStatic) enclModule else enclClass
// The name of the outer class, without its trailing $ if it has one.
private def strippedOuter = nme stripModuleSuffix outerName
@@ -1208,7 +1184,7 @@ abstract class ClassfileParser {
else enclosing.info member name
)
enteringTyperIfPossible(getMember)
- /** There used to be an assertion that this result is not NoSymbol; changing it to an error
+ /* There used to be an assertion that this result is not NoSymbol; changing it to an error
* revealed it had been going off all the time, but has been swallowed by a catch t: Throwable
* in Repository.scala. Since it has been accomplishing nothing except misleading anyone who
* thought it wasn't triggering, I removed it entirely.
@@ -1216,6 +1192,9 @@ abstract class ClassfileParser {
}
}
+ class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter {
+ override def complete(sym: Symbol) { throw new AssertionError("cyclic type dereferencing") }
+ }
class LazyAliasType(alias: Symbol) extends LazyType with FlagAgnosticCompleter {
override def complete(sym: Symbol) {
sym setInfo createFromClonedSymbols(alias.initialize.typeParams, alias.tpe)(typeFun)
@@ -1223,16 +1202,16 @@ abstract class ClassfileParser {
}
def skipAttributes() {
- var attrCount: Int = in.nextChar
+ var attrCount: Int = u2
while (attrCount > 0) {
in skip 2
- in skip in.nextInt
+ in skip u4
attrCount -= 1
}
}
def skipMembers() {
- var memberCount: Int = in.nextChar
+ var memberCount: Int = u2
while (memberCount > 0) {
in skip 6
skipAttributes()
@@ -1242,17 +1221,10 @@ abstract class ClassfileParser {
def skipSuperclasses() {
in.skip(2) // superclass
- val ifaces = in.nextChar
+ val ifaces = u2
in.skip(2 * ifaces)
}
- protected def getOwner(flags: Int): Symbol =
- if (isStatic(flags)) moduleClass else clazz
-
- protected def getScope(flags: Int): Scope =
- if (isStatic(flags)) staticScope else instanceScope
-
- private def isPrivate(flags: Int) = (flags & JAVA_ACC_PRIVATE) != 0
- private def isStatic(flags: Int) = (flags & JAVA_ACC_STATIC) != 0
- private def hasAnnotation(flags: Int) = (flags & JAVA_ACC_ANNOTATION) != 0
+ protected def getScope(flags: JavaAccFlags): Scope =
+ if (flags.isStatic) staticScope else instanceScope
}
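Throughout this file the raw (flags & JAVA_ACC_*) != 0 tests are replaced with queries on
JavaAccFlags. A minimal sketch of that wrapper idea as a value class over the access_flags bits
(the bit values are the JVM spec constants; the class and method names here are illustrative):

    class AccFlags(val bits: Int) extends AnyVal {
      def isPrivate = (bits & 0x0002) != 0
      def isStatic  = (bits & 0x0008) != 0
      def isNative  = (bits & 0x0100) != 0
      def isEnum    = (bits & 0x4000) != 0
    }
    new AccFlags(0x0008 | 0x0002).isStatic   // true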
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index 80a810703c..01a117895f 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -3,13 +3,15 @@
* @author Iulian Dragos
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package symtab
package classfile
import scala.collection.{ mutable, immutable }
import mutable.ListBuffer
import ClassfileConstants._
+import scala.reflect.internal.JavaAccFlags
/** ICode reader from Java bytecode.
*
@@ -45,26 +47,19 @@ abstract class ICodeReader extends ClassfileParser {
(staticCode, instanceCode)
}
- /** If we're parsing a scala module, the owner of members is always
- * the module symbol.
- */
- override def getOwner(jflags: Int): Symbol =
- if (isScalaModule) this.staticModule
- else super.getOwner(jflags)
-
override def parseClass() {
this.instanceCode = new IClass(clazz)
this.staticCode = new IClass(staticModule)
- in.nextChar
- pool getClassSymbol in.nextChar
+ u2
+ pool getClassSymbol u2
parseInnerClasses()
in.skip(2) // super class
- in.skip(2 * in.nextChar) // interfaces
- val fieldCount = in.nextChar
+ in.skip(2 * u2) // interfaces
+ val fieldCount = u2
for (i <- 0 until fieldCount) parseField()
- val methodCount = in.nextChar
+ val methodCount = u2
for (i <- 0 until methodCount) parseMethod()
instanceCode.methods = instanceCode.methods.reverse
staticCode.methods = staticCode.methods.reverse
@@ -76,25 +71,31 @@ abstract class ICodeReader extends ClassfileParser {
skipAttributes()
}
- private def parseMember(field: Boolean): (Int, Symbol) = {
- val jflags = in.nextChar
- val name = pool getName in.nextChar
- val owner = getOwner(jflags)
- val dummySym = owner.newMethod(name.toTermName, owner.pos, toScalaMethodFlags(jflags))
+ private def parseMember(field: Boolean): (JavaAccFlags, Symbol) = {
+ val jflags = JavaAccFlags(u2)
+ val name = pool getName u2
+ /* If we're parsing a scala module, the owner of members is always
+ * the module symbol.
+ */
+ val owner = (
+ if (isScalaModule) staticModule
+ else if (jflags.isStatic) moduleClass
+ else clazz
+ )
+ val dummySym = owner.newMethod(name.toTermName, owner.pos, jflags.toScalaFlags)
try {
- val ch = in.nextChar
+ val ch = u2
val tpe = pool.getType(dummySym, ch)
if ("<clinit>" == name.toString)
(jflags, NoSymbol)
else {
- val owner = getOwner(jflags)
var sym = owner.info.findMember(name, 0, 0, stableOnly = false).suchThat(old => sameType(old.tpe, tpe))
if (sym == NoSymbol)
sym = owner.info.findMember(newTermName(name + nme.LOCAL_SUFFIX_STRING), 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
if (sym == NoSymbol) {
- sym = if (field) owner.newValue(name.toTermName, owner.pos, toScalaFieldFlags(jflags)) else dummySym
+ sym = if (field) owner.newValue(name.toTermName, owner.pos, jflags.toScalaFlags) else dummySym
sym setInfoAndEnter tpe
log(s"ICodeReader could not locate ${name.decode} in $owner. Created ${sym.defString}.")
}
@@ -126,9 +127,9 @@ abstract class ICodeReader extends ClassfileParser {
this.method = new IMethod(sym)
this.method.returnType = toTypeKind(sym.tpe.resultType)
getCode(jflags).addMethod(this.method)
- if ((jflags & JAVA_ACC_NATIVE) != 0)
+ if (jflags.isNative)
this.method.native = true
- val attributeCount = in.nextChar
+ val attributeCount = u2
for (i <- 0 until attributeCount) parseAttribute()
} else {
debuglog("Skipping non-existent method.")
@@ -142,8 +143,8 @@ abstract class ICodeReader extends ClassfileParser {
}
def parseAttribute() {
- val attrName = pool.getName(in.nextChar).toTypeName
- val attrLen = in.nextInt
+ val attrName = pool.getName(u2).toTypeName
+ val attrLen = u4
attrName match {
case tpnme.CodeATTR =>
parseByteCode()
@@ -187,9 +188,9 @@ abstract class ICodeReader extends ClassfileParser {
/** Parse java bytecode into ICode */
def parseByteCode() {
- maxStack = in.nextChar
- maxLocals = in.nextChar
- val codeLength = in.nextInt
+ maxStack = u2
+ maxLocals = u2
+ val codeLength = u4
val code = new LinearCode
def parseInstruction() {
@@ -200,7 +201,7 @@ abstract class ICodeReader extends ClassfileParser {
/* Parse 16 bit jump target. */
def parseJumpTarget = {
size += 2
- val offset = in.nextChar.toShort
+ val offset = u2.toShort
val target = pc + offset
assert(target >= 0 && target < codeLength, "Illegal jump target: " + target)
target
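parseJumpTarget reads the two offset bytes with u2 and then reinterprets them as a signed Short,
since JVM branch offsets are signed 16-bit values relative to pc. For example:

    val raw    = 0xFFFE              // offset bytes as an unsigned u2
    val offset = raw.toShort         // -2: same bits, signed interpretation
    val pc     = 100
    val target = pc + offset         // 98, a backward jump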
@@ -209,14 +210,13 @@ abstract class ICodeReader extends ClassfileParser {
/* Parse 32 bit jump target. */
def parseJumpTargetW: Int = {
size += 4
- val offset = in.nextInt
+ val offset = u4
val target = pc + offset
assert(target >= 0 && target < codeLength, "Illegal jump target: " + target + "pc: " + pc + " offset: " + offset)
target
}
- val instr = toUnsignedByte(in.nextByte)
- instr match {
+ u1 match {
case JVM.nop => parseInstruction()
case JVM.aconst_null => code emit CONSTANT(Constant(null))
case JVM.iconst_m1 => code emit CONSTANT(Constant(-1))
@@ -235,17 +235,17 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.dconst_0 => code emit CONSTANT(Constant(0.0))
case JVM.dconst_1 => code emit CONSTANT(Constant(1.0))
- case JVM.bipush => code.emit(CONSTANT(Constant(in.nextByte))); size += 1
- case JVM.sipush => code.emit(CONSTANT(Constant(in.nextChar))); size += 2
- case JVM.ldc => code.emit(CONSTANT(pool.getConstant(toUnsignedByte(in.nextByte)))); size += 1
- case JVM.ldc_w => code.emit(CONSTANT(pool.getConstant(in.nextChar))); size += 2
- case JVM.ldc2_w => code.emit(CONSTANT(pool.getConstant(in.nextChar))); size += 2
- case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, INT))); size += 1
- case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, LONG))); size += 1
- case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, FLOAT))); size += 1
- case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, DOUBLE))); size += 1
+ case JVM.bipush => code.emit(CONSTANT(Constant(u1))); size += 1
+ case JVM.sipush => code.emit(CONSTANT(Constant(u2))); size += 2
+ case JVM.ldc => code.emit(CONSTANT(pool.getConstant(u1))); size += 1
+ case JVM.ldc_w => code.emit(CONSTANT(pool.getConstant(u2))); size += 2
+ case JVM.ldc2_w => code.emit(CONSTANT(pool.getConstant(u2))); size += 2
+ case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(u1, INT))); size += 1
+ case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(u1, LONG))); size += 1
+ case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(u1, FLOAT))); size += 1
+ case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(u1, DOUBLE))); size += 1
case JVM.aload =>
- val local = in.nextByte.toInt; size += 1
+ val local = u1.toInt; size += 1
if (local == 0 && !method.isStatic)
code.emit(THIS(method.symbol.owner))
else
@@ -285,11 +285,11 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.caload => code.emit(LOAD_ARRAY_ITEM(CHAR))
case JVM.saload => code.emit(LOAD_ARRAY_ITEM(SHORT))
- case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, INT))); size += 1
- case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, LONG))); size += 1
- case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, FLOAT))); size += 1
- case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, DOUBLE))); size += 1
- case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, ObjectReference))); size += 1
+ case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(u1, INT))); size += 1
+ case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(u1, LONG))); size += 1
+ case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(u1, FLOAT))); size += 1
+ case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(u1, DOUBLE))); size += 1
+ case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(u1, ObjectReference))); size += 1
case JVM.istore_0 => code.emit(STORE_LOCAL(code.getLocal(0, INT)))
case JVM.istore_1 => code.emit(STORE_LOCAL(code.getLocal(1, INT)))
case JVM.istore_2 => code.emit(STORE_LOCAL(code.getLocal(2, INT)))
@@ -373,9 +373,9 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.lxor => code.emit(CALL_PRIMITIVE(Logical(XOR, LONG)))
case JVM.iinc =>
size += 2
- val local = code.getLocal(in.nextByte, INT)
+ val local = code.getLocal(u1, INT)
code.emit(LOAD_LOCAL(local))
- code.emit(CONSTANT(Constant(in.nextByte)))
+ code.emit(CONSTANT(Constant(u1)))
code.emit(CALL_PRIMITIVE(Arithmetic(ADD, INT)))
code.emit(STORE_LOCAL(local))
@@ -425,14 +425,14 @@ abstract class ICodeReader extends ClassfileParser {
size += padding
in.bp += padding
assert((pc + size % 4) != 0, pc)
-/* var byte1 = in.nextByte; size += 1;
- while (byte1 == 0) { byte1 = in.nextByte; size += 1; }
- val default = byte1 << 24 | in.nextByte << 16 | in.nextByte << 8 | in.nextByte;
+/* var byte1 = u1; size += 1;
+ while (byte1 == 0) { byte1 = u1; size += 1; }
+ val default = byte1 << 24 | u1 << 16 | u1 << 8 | u1;
size = size + 3
*/
- val default = pc + in.nextInt; size += 4
- val low = in.nextInt
- val high = in.nextInt
+ val default = pc + u4; size += 4
+ val low = u4
+ val high = u4
size += 8
assert(low <= high, "Value low not <= high for tableswitch.")
@@ -445,13 +445,13 @@ abstract class ICodeReader extends ClassfileParser {
size += padding
in.bp += padding
assert((pc + size % 4) != 0, pc)
- val default = pc + in.nextInt; size += 4
- val npairs = in.nextInt; size += 4
+ val default = pc + u4; size += 4
+ val npairs = u4; size += 4
var tags: List[List[Int]] = Nil
var targets: List[Int] = Nil
var i = 0
while (i < npairs) {
- tags = List(in.nextInt) :: tags; size += 4
+ tags = List(u4) :: tags; size += 4
targets = parseJumpTargetW :: targets; // parseJumpTargetW updates 'size' itself
i += 1
}
@@ -466,35 +466,35 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.return_ => code.emit(RETURN(UNIT))
case JVM.getstatic =>
- val field = pool.getMemberSymbol(in.nextChar, static = true); size += 2
+ val field = pool.getMemberSymbol(u2, static = true); size += 2
if (field.hasModuleFlag)
code emit LOAD_MODULE(field)
else
code emit LOAD_FIELD(field, isStatic = true)
case JVM.putstatic =>
- val field = pool.getMemberSymbol(in.nextChar, static = true); size += 2
+ val field = pool.getMemberSymbol(u2, static = true); size += 2
code.emit(STORE_FIELD(field, isStatic = true))
case JVM.getfield =>
- val field = pool.getMemberSymbol(in.nextChar, static = false); size += 2
+ val field = pool.getMemberSymbol(u2, static = false); size += 2
code.emit(LOAD_FIELD(field, isStatic = false))
case JVM.putfield =>
- val field = pool.getMemberSymbol(in.nextChar, static = false); size += 2
+ val field = pool.getMemberSymbol(u2, static = false); size += 2
code.emit(STORE_FIELD(field, isStatic = false))
case JVM.invokevirtual =>
- val m = pool.getMemberSymbol(in.nextChar, static = false); size += 2
+ val m = pool.getMemberSymbol(u2, static = false); size += 2
code.emit(CALL_METHOD(m, Dynamic))
case JVM.invokeinterface =>
- val m = pool.getMemberSymbol(in.nextChar, static = false); size += 4
+ val m = pool.getMemberSymbol(u2, static = false); size += 4
in.skip(2)
code.emit(CALL_METHOD(m, Dynamic))
case JVM.invokespecial =>
- val m = pool.getMemberSymbol(in.nextChar, static = false); size += 2
+ val m = pool.getMemberSymbol(u2, static = false); size += 2
val style = if (m.name == nme.CONSTRUCTOR || m.isPrivate) Static(onInstance = true)
else SuperCall(m.owner.name)
code.emit(CALL_METHOD(m, style))
case JVM.invokestatic =>
- val m = pool.getMemberSymbol(in.nextChar, static = true); size += 2
+ val m = pool.getMemberSymbol(u2, static = true); size += 2
if (isBox(m))
code.emit(BOX(toTypeKind(m.info.paramTypes.head)))
else if (isUnbox(m))
@@ -505,15 +505,15 @@ abstract class ICodeReader extends ClassfileParser {
// TODO, this is just a placeholder. A real implementation must parse the class constant entry
debuglog("Found JVM invokedynamic instruction, inserting placeholder ICode INVOKE_DYNAMIC.")
containsInvokeDynamic = true
- val poolEntry = in.nextChar
+ val poolEntry = in.nextChar.toInt
in.skip(2)
code.emit(INVOKE_DYNAMIC(poolEntry))
case JVM.new_ =>
- code.emit(NEW(REFERENCE(pool.getClassSymbol(in.nextChar))))
+ code.emit(NEW(REFERENCE(pool.getClassSymbol(u2))))
size += 2
case JVM.newarray =>
- val kind = in.nextByte match {
+ val kind = u1 match {
case T_BOOLEAN => BOOL
case T_CHAR => CHAR
case T_FLOAT => FLOAT
@@ -527,35 +527,35 @@ abstract class ICodeReader extends ClassfileParser {
code.emit(CREATE_ARRAY(kind, 1))
case JVM.anewarray =>
- val tpe = pool.getClassOrArrayType(in.nextChar); size += 2
+ val tpe = pool.getClassOrArrayType(u2); size += 2
code.emit(CREATE_ARRAY(toTypeKind(tpe), 1))
case JVM.arraylength => code.emit(CALL_PRIMITIVE(ArrayLength(ObjectReference))); // the kind does not matter
case JVM.athrow => code.emit(THROW(definitions.ThrowableClass))
case JVM.checkcast =>
- code.emit(CHECK_CAST(toTypeKind(pool.getClassOrArrayType(in.nextChar)))); size += 2
+ code.emit(CHECK_CAST(toTypeKind(pool.getClassOrArrayType(u2)))); size += 2
case JVM.instanceof =>
- code.emit(IS_INSTANCE(toTypeKind(pool.getClassOrArrayType(in.nextChar)))); size += 2
+ code.emit(IS_INSTANCE(toTypeKind(pool.getClassOrArrayType(u2)))); size += 2
case JVM.monitorenter => code.emit(MONITOR_ENTER())
case JVM.monitorexit => code.emit(MONITOR_EXIT())
case JVM.wide =>
size += 1
- toUnsignedByte(in.nextByte) match {
- case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, INT))); size += 2
- case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, LONG))); size += 2
- case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, FLOAT))); size += 2
- case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, DOUBLE))); size += 2
- case JVM.aload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, ObjectReference))); size += 2
- case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, INT))); size += 2
- case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, LONG))); size += 2
- case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, FLOAT))); size += 2
- case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, DOUBLE))); size += 2
- case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, ObjectReference))); size += 2
+ u1 match {
+ case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(u2, INT))); size += 2
+ case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(u2, LONG))); size += 2
+ case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(u2, FLOAT))); size += 2
+ case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(u2, DOUBLE))); size += 2
+ case JVM.aload => code.emit(LOAD_LOCAL(code.getLocal(u2, ObjectReference))); size += 2
+ case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(u2, INT))); size += 2
+ case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(u2, LONG))); size += 2
+ case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(u2, FLOAT))); size += 2
+ case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(u2, DOUBLE))); size += 2
+ case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(u2, ObjectReference))); size += 2
case JVM.ret => sys.error("Cannot handle jsr/ret")
case JVM.iinc =>
size += 4
- val local = code.getLocal(in.nextChar, INT)
- code.emit(CONSTANT(Constant(in.nextChar)))
+ val local = code.getLocal(u2, INT)
+ code.emit(CONSTANT(Constant(u2)))
code.emit(CALL_PRIMITIVE(Arithmetic(ADD, INT)))
code.emit(STORE_LOCAL(local))
case _ => sys.error("Invalid 'wide' operand")
@@ -563,8 +563,8 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.multianewarray =>
size += 3
- val tpe = toTypeKind(pool getClassOrArrayType in.nextChar)
- val dim = in.nextByte
+ val tpe = toTypeKind(pool getClassOrArrayType u2)
+ val dim = u1
// assert(dim == 1, "Cannot handle multidimensional arrays yet.")
code emit CREATE_ARRAY(tpe, dim)
@@ -590,14 +590,14 @@ abstract class ICodeReader extends ClassfileParser {
pc = 0
while (pc < codeLength) parseInstruction()
- val exceptionEntries = in.nextChar.toInt
+ val exceptionEntries = u2.toInt
code.containsEHs = (exceptionEntries != 0)
var i = 0
while (i < exceptionEntries) {
// skip start end PC
in.skip(4)
// read the handler PC
- code.jmpTargets += in.nextChar
+ code.jmpTargets += u2
// skip the exception type
in.skip(2)
i += 1
@@ -633,10 +633,8 @@ abstract class ICodeReader extends ClassfileParser {
/** Return the icode class that should include members with the given flags.
* There are two possible classes, the static part and the instance part.
*/
- def getCode(flags: Int): IClass =
- if (isScalaModule) staticCode
- else if ((flags & JAVA_ACC_STATIC) != 0) staticCode
- else instanceCode
+ def getCode(flags: JavaAccFlags): IClass =
+ if (isScalaModule || flags.isStatic) staticCode else instanceCode
class LinearCode {
val instrs: ListBuffer[(Int, Instruction)] = new ListBuffer
@@ -921,6 +919,7 @@ abstract class ICodeReader extends ClassfileParser {
}
/** Return the local at given index, with the given type. */
+ def getLocal(idx: Char, kind: TypeKind): Local = getLocal(idx.toInt, kind)
def getLocal(idx: Int, kind: TypeKind): Local = {
assert(idx < maxLocals, "Index too large for local variable.")
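The mechanical change in the ICodeReader hunks above swaps raw buffer reads (`in.nextByte`, `toUnsignedByte(in.nextByte)`, `in.nextChar`, `in.nextInt`) for the `u1`/`u2`/`u4` accessors, i.e. the unsigned 1-, 2- and 4-byte quantities of the JVM class file format. As a standalone, hedged illustration of those quantities using plain JDK I/O (not the compiler's own reader):

    import java.io.{ByteArrayInputStream, DataInputStream}

    // Seven bytes: one u1, one u2, one u4 (values 16, 42, 5).
    val in = new DataInputStream(new ByteArrayInputStream(Array[Byte](0x10, 0x00, 0x2A, 0, 0, 0, 5)))
    val b = in.readUnsignedByte()   // u1: 0x10 -> 16, never negative even for bytes >= 0x80
    val s = in.readUnsignedShort()  // u2: 0x002A -> 42
    val i = in.readInt()            // u4: 5; bytecode lengths and offsets fit in a signed Int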
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index 94880c4b2e..3265af9f5b 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -308,7 +308,6 @@ abstract class Pickler extends SubComponent {
putTree(definition)
*/
case Template(parents, self, body) =>
- writeNat(parents.length)
putTrees(parents)
putTree(self)
putTrees(body)
@@ -633,7 +632,7 @@ abstract class Pickler extends SubComponent {
case c @ Constant(_) =>
if (c.tag == BooleanTag) writeLong(if (c.booleanValue) 1 else 0)
else if (ByteTag <= c.tag && c.tag <= LongTag) writeLong(c.longValue)
- else if (c.tag == FloatTag) writeLong(floatToIntBits(c.floatValue))
+ else if (c.tag == FloatTag) writeLong(floatToIntBits(c.floatValue).toLong)
else if (c.tag == DoubleTag) writeLong(doubleToLongBits(c.doubleValue))
else if (c.tag == StringTag) writeRef(newTermName(c.stringValue))
else if (c.tag == ClazzTag) writeRef(c.typeValue)
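The Pickler tweak above only makes the Int-to-Long widening explicit when a Float constant is written; the underlying bit pattern is unchanged either way. A tiny, hedged illustration of that bit-level round trip:

    import java.lang.Float.{floatToIntBits, intBitsToFloat}

    val f    = 1.5f
    val bits = floatToIntBits(f)                   // 0x3FC00000
    val back = intBitsToFloat(bits.toLong.toInt)   // widening to Long and truncating back keeps the low 32 bits
    assert(back == f)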
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index ba75f40a3a..0dcf4d00b7 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -143,7 +143,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
decls enter (
implClass.newMethod(nme.MIXIN_CONSTRUCTOR, implClass.pos)
- setInfo MethodType(Nil, UnitClass.tpe)
+ setInfo MethodType(Nil, UnitTpe)
)
}
@@ -189,7 +189,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
case ClassInfoType(parents, decls, _) =>
assert(phase == implClassPhase, tp)
// Impl class parents: Object first, matching interface last.
- val implParents = ObjectClass.tpe +: (parents.tail map mixinToImplClass filter (_.typeSymbol != ObjectClass)) :+ iface.tpe
+ val implParents = ObjectTpe +: (parents.tail map mixinToImplClass filter (_.typeSymbol != ObjectClass)) :+ iface.tpe
ClassInfoType(implParents, implDecls(implSym, decls), implSym)
case PolyType(_, restpe) =>
implType(restpe)
@@ -209,7 +209,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
case Nil => Nil
case hd :: tl =>
assert(!hd.typeSymbol.isTrait, clazz)
- if (clazz.isTrait) erasedTypeRef(ObjectClass) :: tl
+ if (clazz.isTrait) ObjectTpe :: tl
else parents
}
val decls1 = scopeTransform(clazz)(
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index ac18e5ba4f..a37ef29355 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -32,19 +32,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
newStaticInits.clear()
symbolsStoredAsStatic.clear()
}
- private def savingStatics[T](body: => T): T = {
- val savedNewStaticMembers : mutable.Buffer[Tree] = newStaticMembers.clone()
- val savedNewStaticInits : mutable.Buffer[Tree] = newStaticInits.clone()
- val savedSymbolsStoredAsStatic : mutable.Map[String, Symbol] = symbolsStoredAsStatic.clone()
- val result = body
-
- clearStatics()
- newStaticMembers ++= savedNewStaticMembers
- newStaticInits ++= savedNewStaticInits
- symbolsStoredAsStatic ++= savedSymbolsStoredAsStatic
-
- result
- }
private def transformTemplate(tree: Tree) = {
val Template(_, _, body) = tree
clearStatics()
@@ -60,15 +47,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
private var localTyper: analyzer.Typer = null
- private object MethodDispatchType extends scala.Enumeration {
- val NO_CACHE, MONO_CACHE, POLY_CACHE = Value
- }
- import MethodDispatchType.{ NO_CACHE, MONO_CACHE, POLY_CACHE }
- private def dispatchType() = settings.refinementMethodDispatch.value match {
- case "no-cache" => NO_CACHE
- case "mono-cache" => MONO_CACHE
- case "poly-cache" => POLY_CACHE
- }
private def typedWithPos(pos: Position)(tree: Tree) =
localTyper.typedPos(pos)(tree)
@@ -100,7 +78,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
if (isFinal) FINAL else 0
)
- val varSym = currentClass.newVariable(mkTerm("" + forName), ad.pos, flags) setInfoAndEnter forType
+ val varSym = currentClass.newVariable(mkTerm("" + forName), ad.pos, flags.toLong) setInfoAndEnter forType
if (!isFinal)
varSym.addAnnotation(VolatileAttr)
@@ -126,147 +104,79 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
def fromTypesToClassArrayLiteral(paramTypes: List[Type]): Tree =
ArrayValue(TypeTree(ClassClass.tpe), paramTypes map LIT)
- /* ... */
- def reflectiveMethodCache(method: String, paramTypes: List[Type]): Symbol = dispatchType() match {
- case NO_CACHE =>
-
- /* Implementation of the cache is as follows for method "def xyz(a: A, b: B)":
-
- var reflParams$Cache: Array[Class[_]] = Array[JClass](classOf[A], classOf[B])
-
- def reflMethod$Method(forReceiver: JClass[_]): JMethod =
- forReceiver.getMethod("xyz", reflParams$Cache)
-
- */
-
- val reflParamsCacheSym: Symbol =
- addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
-
- addStaticMethodToClass((_, forReceiverSym) =>
- gen.mkMethodCall(REF(forReceiverSym), Class_getMethod, Nil, List(LIT(method), REF(reflParamsCacheSym)))
- )
-
- case MONO_CACHE =>
-
- /* Implementation of the cache is as follows for method "def xyz(a: A, b: B)"
- (but with a SoftReference wrapping reflClass$Cache, similarly in the poly Cache) :
-
- var reflParams$Cache: Array[Class[_]] = Array[JClass](classOf[A], classOf[B])
-
- var reflMethod$Cache: JMethod = null
-
- var reflClass$Cache: JClass[_] = null
-
- def reflMethod$Method(forReceiver: JClass[_]): JMethod = {
- if (reflClass$Cache != forReceiver) {
- reflMethod$Cache = forReceiver.getMethod("xyz", reflParams$Cache)
- reflClass$Cache = forReceiver
- }
- reflMethod$Cache
- }
-
- */
-
- val reflParamsCacheSym: Symbol =
- addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
-
- val reflMethodCacheSym: Symbol =
- addStaticVariableToClass(nme.reflMethodCacheName, MethodClass.tpe, NULL, false)
-
- val reflClassCacheSym: Symbol =
- addStaticVariableToClass(nme.reflClassCacheName, SoftReferenceClass.tpe, NULL, false)
+ def reflectiveMethodCache(method: String, paramTypes: List[Type]): Symbol = {
+ /* Implementation of the cache is as follows for method "def xyz(a: A, b: B)"
+ (SoftReference so that it does not interfere with classloader garbage collection,
+ see ticket #2365 for details):
- def isCacheEmpty(receiver: Symbol): Tree =
- reflClassCacheSym.IS_NULL() OR (reflClassCacheSym.GET() OBJ_NE REF(receiver))
+ var reflParams$Cache: Array[Class[_]] = Array[JClass](classOf[A], classOf[B])
- addStaticMethodToClass((_, forReceiverSym) =>
- BLOCK(
- IF (isCacheEmpty(forReceiverSym)) THEN BLOCK(
- REF(reflMethodCacheSym) === ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), REF(reflParamsCacheSym))) ,
- REF(reflClassCacheSym) === gen.mkSoftRef(REF(forReceiverSym)),
- UNIT
- ) ENDIF,
- REF(reflMethodCacheSym)
- )
- )
-
- case POLY_CACHE =>
+ var reflPoly$Cache: SoftReference[scala.runtime.MethodCache] = new SoftReference(new EmptyMethodCache())
- /* Implementation of the cache is as follows for method "def xyz(a: A, b: B)"
- (SoftReference so that it does not interfere with classloader garbage collection, see ticket
- #2365 for details):
-
- var reflParams$Cache: Array[Class[_]] = Array[JClass](classOf[A], classOf[B])
-
- var reflPoly$Cache: SoftReference[scala.runtime.MethodCache] = new SoftReference(new EmptyMethodCache())
+ def reflMethod$Method(forReceiver: JClass[_]): JMethod = {
+ var methodCache: MethodCache = reflPoly$Cache.find(forReceiver)
+ if (methodCache eq null) {
+ methodCache = new EmptyMethodCache
+ reflPoly$Cache = new SoftReference(methodCache)
+ }
+ var method: JMethod = methodCache.find(forReceiver)
+ if (method ne null)
+ return method
+ else {
+ method = ScalaRunTime.ensureAccessible(forReceiver.getMethod("xyz", reflParams$Cache))
+ reflPoly$Cache = new SoftReference(methodCache.add(forReceiver, method))
+ return method
+ }
+ }
+ */
- def reflMethod$Method(forReceiver: JClass[_]): JMethod = {
- var methodCache: MethodCache = reflPoly$Cache.find(forReceiver)
- if (methodCache eq null) {
- methodCache = new EmptyMethodCache
- reflPoly$Cache = new SoftReference(methodCache)
- }
- var method: JMethod = methodCache.find(forReceiver)
- if (method ne null)
- return method
- else {
- method = ScalaRunTime.ensureAccessible(forReceiver.getMethod("xyz", reflParams$Cache))
- reflPoly$Cache = new SoftReference(methodCache.add(forReceiver, method))
- return method
- }
- }
+ val reflParamsCacheSym: Symbol =
+ addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
- */
+ def mkNewPolyCache = gen.mkSoftRef(NEW(TypeTree(EmptyMethodCacheClass.tpe)))
+ val reflPolyCacheSym: Symbol = addStaticVariableToClass(nme.reflPolyCacheName, SoftReferenceClass.tpe, mkNewPolyCache, false)
- val reflParamsCacheSym: Symbol =
- addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
+ def getPolyCache = gen.mkCast(fn(REF(reflPolyCacheSym), nme.get), MethodCacheClass.tpe)
- def mkNewPolyCache = gen.mkSoftRef(NEW(TypeTree(EmptyMethodCacheClass.tpe)))
- val reflPolyCacheSym: Symbol = (
- addStaticVariableToClass(nme.reflPolyCacheName, SoftReferenceClass.tpe, mkNewPolyCache, false)
- )
- def getPolyCache = gen.mkCast(fn(REF(reflPolyCacheSym), nme.get), MethodCacheClass.tpe)
+ addStaticMethodToClass((reflMethodSym, forReceiverSym) => {
+ val methodCache = reflMethodSym.newVariable(mkTerm("methodCache"), ad.pos) setInfo MethodCacheClass.tpe
+ val methodSym = reflMethodSym.newVariable(mkTerm("method"), ad.pos) setInfo MethodClass.tpe
- addStaticMethodToClass((reflMethodSym, forReceiverSym) => {
- val methodCache = reflMethodSym.newVariable(mkTerm("methodCache"), ad.pos) setInfo MethodCacheClass.tpe
- val methodSym = reflMethodSym.newVariable(mkTerm("method"), ad.pos) setInfo MethodClass.tpe
+ BLOCK(
+ VAL(methodCache) === getPolyCache,
+ IF (REF(methodCache) OBJ_EQ NULL) THEN BLOCK(
+ REF(methodCache) === NEW(TypeTree(EmptyMethodCacheClass.tpe)),
+ REF(reflPolyCacheSym) === gen.mkSoftRef(REF(methodCache))
+ ) ENDIF,
+ VAL(methodSym) === (REF(methodCache) DOT methodCache_find)(REF(forReceiverSym)),
+ IF (REF(methodSym) OBJ_NE NULL) .
+ THEN (Return(REF(methodSym)))
+ ELSE {
+ def methodSymRHS = ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), REF(reflParamsCacheSym)))
+ def cacheRHS = ((REF(methodCache) DOT methodCache_add)(REF(forReceiverSym), REF(methodSym)))
BLOCK(
- VAL(methodCache) === getPolyCache,
- IF (REF(methodCache) OBJ_EQ NULL) THEN BLOCK(
- REF(methodCache) === NEW(TypeTree(EmptyMethodCacheClass.tpe)),
- REF(reflPolyCacheSym) === gen.mkSoftRef(REF(methodCache))
- ) ENDIF,
-
- VAL(methodSym) === (REF(methodCache) DOT methodCache_find)(REF(forReceiverSym)),
- IF (REF(methodSym) OBJ_NE NULL) .
- THEN (Return(REF(methodSym)))
- ELSE {
- def methodSymRHS = ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), REF(reflParamsCacheSym)))
- def cacheRHS = ((REF(methodCache) DOT methodCache_add)(REF(forReceiverSym), REF(methodSym)))
- BLOCK(
- REF(methodSym) === (REF(ensureAccessibleMethod) APPLY (methodSymRHS)),
- REF(reflPolyCacheSym) === gen.mkSoftRef(cacheRHS),
- Return(REF(methodSym))
- )
- }
+ REF(methodSym) === (REF(ensureAccessibleMethod) APPLY (methodSymRHS)),
+ REF(reflPolyCacheSym) === gen.mkSoftRef(cacheRHS),
+ Return(REF(methodSym))
)
- })
-
+ }
+ )
+ })
}
/* ### HANDLING METHODS NORMALLY COMPILED TO OPERATORS ### */
def testForName(name: Name): Tree => Tree = t => (
if (nme.CommonOpNames(name))
- gen.mkMethodCall(definitions.Boxes_isNumberOrBool, t :: Nil)
+ gen.mkMethodCall(Boxes_isNumberOrBool, t :: Nil)
else if (nme.BooleanOpNames(name))
t IS_OBJ BoxedBooleanClass.tpe
else
- gen.mkMethodCall(definitions.Boxes_isNumber, t :: Nil)
+ gen.mkMethodCall(Boxes_isNumber, t :: Nil)
)
- /** The Tree => Tree function in the return is necessary to prevent the original qual
+ /* The Tree => Tree function in the return is necessary to prevent the original qual
* from being duplicated in the resulting code. It may be a side-effecting expression,
* so all the test logic is routed through gen.evalOnce, which creates a block like
* { val x$1 = qual; if (x$1.foo || x$1.bar) f1(x$1) else f2(x$1) }
@@ -278,7 +188,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
else if (params.tail.isEmpty) nme.primitiveInfixMethodName(name)
else nme.NO_NAME
)
- definitions.getDeclIfDefined(BoxesRunTimeClass, methodName) match {
+ getDeclIfDefined(BoxesRunTimeClass, methodName) match {
case NoSymbol => None
case sym => assert(!sym.isOverloaded, sym) ; Some((sym, testForName(name)))
}
@@ -316,11 +226,11 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
// If there's any chance this signature could be met by an Array.
val isArrayMethodSignature = {
def typesMatchApply = paramTypes match {
- case List(tp) => tp <:< IntClass.tpe
+ case List(tp) => tp <:< IntTpe
case _ => false
}
def typesMatchUpdate = paramTypes match {
- case List(tp1, tp2) => (tp1 <:< IntClass.tpe) && isMaybeUnit
+ case List(tp1, tp2) => (tp1 <:< IntTpe) && isMaybeUnit
case _ => false
}
@@ -357,7 +267,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
val invokeName = MethodClass.tpe member nme.invoke_ // scala.reflect.Method.invoke(...)
def cache = REF(reflectiveMethodCache(ad.symbol.name.toString, paramTypes)) // cache Symbol
def lookup = Apply(cache, List(qual1() GETCLASS())) // get Method object from cache
- def invokeArgs = ArrayValue(TypeTree(ObjectClass.tpe), params) // args for invocation
+ def invokeArgs = ArrayValue(TypeTree(ObjectTpe), params) // args for invocation
def invocation = (lookup DOT invokeName)(qual1(), invokeArgs) // .invoke(qual1, ...)
// exception catching machinery
@@ -407,99 +317,75 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
}
}
- if (settings.refinementMethodDispatch.value == "invoke-dynamic") {
-/* val guardCallSite: Tree = {
- val cachedClass = addStaticVariableToClass("cachedClass", definitions.ClassClass.tpe, EmptyTree)
- val tmpVar = currentOwner.newVariable(ad.pos, unit.freshTermName(ad.pos, "x")).setInfo(definitions.AnyRefClass.tpe)
- atPos(ad.pos)(Block(List(
- ValDef(tmpVar, transform(qual))),
- If(Apply(Select(gen.mkAttributedRef(cachedClass), nme.EQ), List(getClass(Ident(tmpVar)))),
- Block(List(Assign(gen.mkAttributedRef(cachedClass), getClass(Ident(tmpVar)))),
- treeCopy.ApplyDynamic(ad, Ident(tmpVar), transformTrees(params))),
- EmptyTree)))
- }
- //println(guardCallSite)
-*/
- localTyper.typed(treeCopy.ApplyDynamic(ad, transform(qual), transformTrees(params)))
- }
- else {
-
- /* ### BODY OF THE TRANSFORMATION -> remember we're in case ad@ApplyDynamic(qual, params) ### */
-
- /* This creates the tree that does the reflective call (see general comment
- * on the apply-dynamic tree for its format). This tree is simply composed
- * of three successive calls, first to getClass on the callee, then to
- * getMethod on the class, then to invoke on the method.
- * - getMethod needs an array of classes for choosing one amongst many
- * overloaded versions of the method. This is provided by paramTypeClasses
- * and must be done on the static type as Scala's dispatching is static on
- * the parameters.
- * - invoke needs an array of AnyRefs that are the method's arguments. The
- * erasure phase guarantees that any parameter passed to a dynamic apply
- * is compatible (through boxing). Boxed ints et al. is what invoke expects
- * when the applied method expects ints, hence no change needed there.
- * - in the end, the result of invoke must be fixed, again to deal with arrays.
- * This is provided by fixResult. fixResult will cast the invocation's result
- * to the method's return type, which is generally ok, except when this type
- * is a value type (int et al.) in which case it must cast to the boxed version
- * because invoke only returns object and erasure made sure the result is
- * expected to be an AnyRef. */
- val t: Tree = {
- val (mparams, resType) = ad.symbol.tpe match {
- case MethodType(mparams, resType) =>
- assert(params.length == mparams.length, ((params, mparams)))
- (mparams, resType)
- case tpe @ OverloadedType(pre, alts) =>
- unit.warning(ad.pos, s"Overloaded type reached the backend! This is a bug in scalac.\n Symbol: ${ad.symbol}\n Overloads: $tpe\n Arguments: " + ad.args.map(_.tpe))
- alts filter (_.paramss.flatten.size == params.length) map (_.tpe) match {
- case mt @ MethodType(mparams, resType) :: Nil =>
- unit.warning(NoPosition, "Only one overload has the right arity, proceeding with overload " + mt)
- (mparams, resType)
- case _ =>
- unit.error(ad.pos, "Cannot resolve overload.")
- (Nil, NoType)
- }
- }
- typedPos {
- val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe
- qual = REF(sym)
-
- BLOCK(
- VAL(sym) === qual0,
- callAsReflective(mparams map (_.tpe), resType)
- )
- }
+ {
+
+ /* ### BODY OF THE TRANSFORMATION -> remember we're in case ad@ApplyDynamic(qual, params) ### */
+
+ /* This creates the tree that does the reflective call (see general comment
+ * on the apply-dynamic tree for its format). This tree is simply composed
+ * of three successive calls, first to getClass on the callee, then to
+ * getMethod on the class, then to invoke on the method.
+ * - getMethod needs an array of classes for choosing one amongst many
+ * overloaded versions of the method. This is provided by paramTypeClasses
+ * and must be done on the static type as Scala's dispatching is static on
+ * the parameters.
+ * - invoke needs an array of AnyRefs that are the method's arguments. The
+ * erasure phase guarantees that any parameter passed to a dynamic apply
+ * is compatible (through boxing). Boxed ints et al. is what invoke expects
+ * when the applied method expects ints, hence no change needed there.
+ * - in the end, the result of invoke must be fixed, again to deal with arrays.
+ * This is provided by fixResult. fixResult will cast the invocation's result
+ * to the method's return type, which is generally ok, except when this type
+ * is a value type (int et al.) in which case it must cast to the boxed version
+ * because invoke only returns object and erasure made sure the result is
+ * expected to be an AnyRef. */
+ val t: Tree = {
+ val (mparams, resType) = ad.symbol.tpe match {
+ case MethodType(mparams, resType) =>
+ assert(params.length == mparams.length, ((params, mparams)))
+ (mparams, resType)
+ case tpe @ OverloadedType(pre, alts) =>
+ unit.warning(ad.pos, s"Overloaded type reached the backend! This is a bug in scalac.\n Symbol: ${ad.symbol}\n Overloads: $tpe\n Arguments: " + ad.args.map(_.tpe))
+ alts filter (_.paramss.flatten.size == params.length) map (_.tpe) match {
+ case mt @ MethodType(mparams, resType) :: Nil =>
+ unit.warning(NoPosition, "Only one overload has the right arity, proceeding with overload " + mt)
+ (mparams, resType)
+ case _ =>
+ unit.error(ad.pos, "Cannot resolve overload.")
+ (Nil, NoType)
+ }
}
+ typedPos {
+ val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe
+ qual = REF(sym)
- /* For testing purposes, the dynamic application's condition
- * can be printed-out in great detail. Remove? */
- if (settings.debug) {
- def paramsToString(xs: Any*) = xs map (_.toString) mkString ", "
- val mstr = ad.symbol.tpe match {
- case MethodType(mparams, resType) =>
- """| with
- | - declared parameter types: '%s'
- | - passed argument types: '%s'
- | - result type: '%s'""" .
- stripMargin.format(
- paramsToString(mparams),
- paramsToString(params),
- resType.toString
- )
- case _ => ""
- }
- log(
- """Dynamically application '%s.%s(%s)' %s - resulting code: '%s'""".format(
- qual, ad.symbol.name, paramsToString(params), mstr, t
- )
+ BLOCK(
+ VAL(sym) === qual0,
+ callAsReflective(mparams map (_.tpe), resType)
)
}
+ }
- /* We return the dynamic call tree, after making sure no other
- * clean-up transformation are to be applied on it. */
- transform(t)
+ /* For testing purposes, the dynamic application's condition
+ * can be printed out in great detail. Remove? */
+ if (settings.debug) {
+ def paramsToString(xs: Any*) = xs map (_.toString) mkString ", "
+ val mstr = ad.symbol.tpe match {
+ case MethodType(mparams, resType) =>
+ sm"""| with
+ | - declared parameter types: '${paramsToString(mparams)}'
+ | - passed argument types: '${paramsToString(params)}'
+ | - result type: '${resType.toString}'"""
+ case _ => ""
+ }
+ log(s"""Dynamic application '$qual.${ad.symbol.name}(${paramsToString(params)})' $mstr - resulting code: '$t'""")
}
- /* ### END OF DYNAMIC APPLY TRANSFORM ### */
+
+ /* We return the dynamic call tree, after making sure no other
+ * clean-up transformation are to be applied on it. */
+ transform(t)
+ /* ### END OF DYNAMIC APPLY TRANSFORM ### */
+ }
}
override def transform(tree: Tree): Tree = tree match {
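The block comment in the CleanUp hunk above describes the shape of the generated reflective call: getClass on the receiver, getMethod with the statically known parameter classes (the resulting Method being served from the poly cache), then invoke with boxed arguments and a fixed-up result. A rough, hedged analogue in plain Java reflection — the helper name and signature below are illustrative, not compiler-generated symbols:

    // Hand-written sketch of what an erased structural/dynamic apply `qual.xyz(a, b)` amounts to.
    def reflectiveCall(qual: AnyRef, name: String,
                       paramTypes: Array[Class[_]], args: Array[AnyRef]): AnyRef = {
      // getMethod selects among overloads via the *static* parameter types, as the comment notes
      val m = qual.getClass.getMethod(name, paramTypes: _*)
      m.setAccessible(true)          // stand-in for ScalaRunTime.ensureAccessible
      m.invoke(qual, args: _*)       // arguments arrive boxed; the caller casts/fixes the result
    }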
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index 06367009b6..75fb043070 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -81,7 +81,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
val constrInfo: ConstrInfo = {
stats find (_.symbol.isPrimaryConstructor) match {
case Some(ddef @ DefDef(_, _, _, List(vparams), _, rhs @ Block(_, _))) =>
- ConstrInfo(ddef, vparams map (_.symbol), rhs)
+ ConstrInfo(ddef, vparams map (_.symbol), rhs)
case x =>
// AnyVal constructor is OK
assert(clazz eq AnyValClass, "no constructor in template: impl = " + impl)
@@ -164,7 +164,8 @@ abstract class Constructors extends Transform with ast.TreeDSL {
if (from.name != nme.OUTER ||
from.tpe.typeSymbol.isPrimitiveValueClass) result
else localTyper.typedPos(to.pos) {
- IF (from OBJ_EQ NULL) THEN Throw(NewFromConstructor(NPEConstructor)) ELSE result
+ // `throw null` has the same effect as `throw new NullPointerException`, see JVM spec on instruction `athrow`
+ IF (from OBJ_EQ NULL) THEN Throw(gen.mkZero(ThrowableTpe)) ELSE result
}
}
@@ -235,7 +236,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
constrStatBuf += intoConstructor(impl.symbol, stat)
}
- // ----------- avoid making fields for symbols that are not accessed --------------
+ // ----------- avoid making parameter-accessor fields for symbols accessed only within the primary constructor --------------
// A sorted set of symbols that are known to be accessed outside the primary constructor.
val accessedSyms = new TreeSet[Symbol]((x, y) => x isLess y)
@@ -243,6 +244,8 @@ abstract class Constructors extends Transform with ast.TreeDSL {
// a list of outer accessor symbols and their bodies
var outerAccessors: List[(Symbol, Tree)] = List()
+ val isDelayedInitSubclass = (clazz isSubClass DelayedInitClass)
+
// Could symbol's definition be omitted, provided it is not accessed?
// This is the case if the symbol is defined in the current class, and
// ( the symbol is an object private parameter accessor field, or
@@ -250,12 +253,12 @@ abstract class Constructors extends Transform with ast.TreeDSL {
def maybeOmittable(sym: Symbol) = sym.owner == clazz && (
sym.isParamAccessor && sym.isPrivateLocal ||
sym.isOuterAccessor && sym.owner.isEffectivelyFinal && !sym.isOverridingSymbol &&
- !(clazz isSubClass DelayedInitClass)
+ !isDelayedInitSubclass
)
// Is symbol known to be accessed outside of the primary constructor,
// or is it a symbol whose definition cannot be omitted anyway?
- def mustbeKept(sym: Symbol) = !maybeOmittable(sym) || (accessedSyms contains sym)
+ def mustbeKept(sym: Symbol) = isDelayedInitSubclass || !maybeOmittable(sym) || (accessedSyms contains sym)
// A traverser to set accessedSyms and outerAccessors
val accessTraverser = new Traverser {
@@ -379,12 +382,12 @@ abstract class Constructors extends Transform with ast.TreeDSL {
* 'specInstance$' is added in phase specialize.
*/
def guardSpecializedInitializer(stats: List[Tree]): List[Tree] = if (settings.nospecialization.value) stats else {
- // split the statements in presuper and postsuper
- // var (prefix, postfix) = stats0.span(tree => !((tree.symbol ne null) && tree.symbol.isConstructor))
- // if (postfix.nonEmpty) {
- // prefix = prefix :+ postfix.head
- //postfix = postfix.tail
- //}
+ // // split the statements in presuper and postsuper
+ // var (prefix, postfix) = stats0.span(tree => !((tree.symbol ne null) && tree.symbol.isConstructor))
+ // if (postfix.nonEmpty) {
+ // prefix = prefix :+ postfix.head
+ // postfix = postfix.tail
+ // }
if (usesSpecializedField && shouldGuard && stats.nonEmpty) {
// save them for duplication in the specialized subclass
@@ -414,143 +417,126 @@ abstract class Constructors extends Transform with ast.TreeDSL {
}
} else stats
}
-/*
- def isInitDef(stat: Tree) = stat match {
- case dd: DefDef => dd.symbol == delayedInitMethod
- case _ => false
- }
-*/
-
- /* Create a getter or a setter and enter into `clazz` scope */
- def addAccessor(sym: Symbol, name: TermName, flags: Long) = {
- val m = clazz.newMethod(name, sym.pos, flags & ~(LOCAL | PRIVATE)) setPrivateWithin clazz
- clazz.info.decls enter m
- }
- def addGetter(sym: Symbol): Symbol = {
- val getr = addAccessor(sym, sym.getterName, getterFlags(sym.flags))
- getr setInfo MethodType(List(), sym.tpe)
- defBuf += localTyper.typedPos(sym.pos)(DefDef(getr, Select(This(clazz), sym)))
- getr
- }
-
- def addSetter(sym: Symbol): Symbol = {
- sym setFlag MUTABLE
- val setr = addAccessor(sym, sym.setterName, setterFlags(sym.flags))
- setr setInfo MethodType(setr.newSyntheticValueParams(List(sym.tpe)), UnitClass.tpe)
- defBuf += localTyper.typed {
- //util.trace("adding setter def for "+setr) {
- atPos(sym.pos) {
- DefDef(setr, paramss =>
- Assign(Select(This(clazz), sym), Ident(paramss.head.head)))
- }//}
- }
- setr
- }
+ /*
+ * Translation scheme for DelayedInit
+ * ----------------------------------
+ *
+ * Before returning, transformClassTemplate() rewrites DelayedInit subclasses.
+ * The list of statements that will end up in the primary constructor can be split into:
+ *
+ * (a) up to and including the super-constructor call.
+ * These statements can occur only in the (bytecode-level) primary constructor.
+ *
+ * (b) remaining statements
+ *
+ * The purpose of DelayedInit is to leave the statements in (b) out of the primary constructor and have their execution "delayed".
+ *
+ * The rewriting to achieve "delayed initialization" involves:
+ * (c) an additional, synthetic, public method encapsulating (b)
+ * (d) an additional, synthetic closure whose argless apply() just invokes (c)
+ * (e) after executing the statements in (a),
+ * the primary constructor instantiates (d) and passes it as an argument
+ * to a `delayedInit()` invocation on the current instance.
+ * In turn, `delayedInit()` is a method defined as abstract in the `DelayedInit` trait
+ * so that it can be overridden (for an example see `scala.App`)
+ *
+ * The following helper methods prepare Trees as part of this rewriting:
+ *
+ * (f) `delayedEndpointDef()` prepares (c).
+ * A transformer, `constrStatTransformer`, is used to re-locate statements (b) from template-level
+ * to become statements in method (c). The main task here is re-formulating accesses to params
+ * of the primary constructor (to recap, (c) has zero params) in terms of param-accessor fields.
+ * In a DelayedInit subclass, each constructor parameter gets a param-accessor field because `mustbeKept()` forces it.
+ *
+ * (g) `delayedInitClosure()` prepares (d)
+ *
+ * (h) `delayedInitCall()` prepares the `delayedInit()` invocation referred to in (e)
+ *
+ * Both (c) and (d) are added to the Template returned by `transformClassTemplate()`
+ *
+ * A note of historic interest: Previously the rewriting for DelayedInit would include in the closure body
+ * all of the delayed initialization sequence, which in turn required:
+ * - reformulating "accesses-on-this" into "accesses-on-outer", and
+ * - adding public getters and setters.
+ *
+ * @param stats the statements in (b) above
+ *
+ * @return the DefDef for (c) above
+ *
+ * */
+ def delayedEndpointDef(stats: List[Tree]): DefDef = {
- def ensureAccessor(sym: Symbol)(acc: => Symbol) =
- if (sym.owner == clazz && !sym.isMethod && sym.isPrivate) { // there's an access to a naked field of the enclosing class
- val getr = acc
- getr makeNotPrivate clazz
- getr
- } else {
- if (sym.owner == clazz) sym makeNotPrivate clazz
- NoSymbol
- }
+ val methodName = currentUnit.freshTermName("delayedEndpoint$" + clazz.fullNameAsName('$').toString + "$")
+ val methodSym = clazz.newMethod(methodName, impl.pos, SYNTHETIC | FINAL)
+ methodSym setInfoAndEnter MethodType(Nil, UnitTpe)
- def ensureGetter(sym: Symbol): Symbol = ensureAccessor(sym) {
- val getr = sym.getter(clazz)
- if (getr != NoSymbol) getr else addGetter(sym)
- }
+ // changeOwner needed because the `stats` contained in the DefDef were owned by the template, not long ago.
+ val blk = Block(stats, gen.mkZero(UnitTpe)).changeOwner(impl.symbol -> methodSym)
+ val delayedDD = localTyper typed { DefDef(methodSym, Nil, blk) }
- def ensureSetter(sym: Symbol): Symbol = ensureAccessor(sym) {
- var setr = sym.setter(clazz, hasExpandedName = false)
- if (setr == NoSymbol) setr = sym.setter(clazz, hasExpandedName = true)
- if (setr == NoSymbol) setr = addSetter(sym)
- setr
+ delayedDD.asInstanceOf[DefDef]
}
- def delayedInitClosure(stats: List[Tree]) =
- localTyper.typed {
+ /* @see overview at `delayedEndpointDef()` of the translation scheme for DelayedInit */
+ def delayedInitClosure(delayedEndPointSym: MethodSymbol): ClassDef = {
+ val satelliteClass = localTyper.typed {
atPos(impl.pos) {
val closureClass = clazz.newClass(nme.delayedInitArg.toTypeName, impl.pos, SYNTHETIC | FINAL)
val closureParents = List(AbstractFunctionClass(0).tpe)
closureClass setInfoAndEnter new ClassInfoType(closureParents, newScope, closureClass)
- val outerField = (
+ val outerField: TermSymbol = (
closureClass
newValue(nme.OUTER, impl.pos, PrivateLocal | PARAMACCESSOR)
setInfoAndEnter clazz.tpe
)
- val applyMethod = (
+ val applyMethod: MethodSymbol = (
closureClass
newMethod(nme.apply, impl.pos, FINAL)
- setInfoAndEnter MethodType(Nil, ObjectClass.tpe)
+ setInfoAndEnter MethodType(Nil, ObjectTpe)
)
val outerFieldDef = ValDef(outerField)
val closureClassTyper = localTyper.atOwner(closureClass)
val applyMethodTyper = closureClassTyper.atOwner(applyMethod)
- val constrStatTransformer = new Transformer {
- override def transform(tree: Tree): Tree = tree match {
- case This(_) if tree.symbol == clazz =>
- applyMethodTyper.typed {
- atPos(tree.pos) {
- Select(This(closureClass), outerField)
- }
- }
- case _ =>
- super.transform {
- tree match {
- case Select(qual, _) =>
- val getter = ensureGetter(tree.symbol)
- if (getter != NoSymbol)
- applyMethodTyper.typed {
- atPos(tree.pos) {
- Apply(Select(qual, getter), List())
- }
- }
- else tree
- case Assign(lhs @ Select(qual, _), rhs) =>
- val setter = ensureSetter(lhs.symbol)
- if (setter != NoSymbol)
- applyMethodTyper.typed {
- atPos(tree.pos) {
- Apply(Select(qual, setter), List(rhs))
- }
- }
- else tree
- case _ =>
- tree.changeOwner(impl.symbol -> applyMethod)
- }
- }
+ def applyMethodStat =
+ applyMethodTyper.typed {
+ atPos(impl.pos) {
+ val receiver = Select(This(closureClass), outerField)
+ Apply(Select(receiver, delayedEndPointSym), Nil)
+ }
}
- }
-
- def applyMethodStats = constrStatTransformer.transformTrees(stats)
val applyMethodDef = DefDef(
sym = applyMethod,
vparamss = ListOfNil,
- rhs = Block(applyMethodStats, gen.mkAttributedRef(BoxedUnit_UNIT)))
+ rhs = Block(applyMethodStat, gen.mkAttributedRef(BoxedUnit_UNIT)))
ClassDef(
sym = closureClass,
constrMods = Modifiers(0),
vparamss = List(List(outerFieldDef)),
- body = List(applyMethodDef),
+ body = applyMethodDef :: Nil,
superPos = impl.pos)
}
}
+ satelliteClass.asInstanceOf[ClassDef]
+ }
+
+ /* @see overview at `delayedEndpointDef()` of the translation scheme for DelayedInit */
def delayedInitCall(closure: Tree) = localTyper.typedPos(impl.pos) {
gen.mkMethodCall(This(clazz), delayedInitMethod, Nil, List(New(closure.symbol.tpe, This(clazz))))
}
/* Return a pair consisting of (all statements up to and including superclass and trait constr calls, rest) */
def splitAtSuper(stats: List[Tree]) = {
- def isConstr(tree: Tree) = (tree.symbol ne null) && tree.symbol.isConstructor
+ def isConstr(tree: Tree): Boolean = tree match {
+ case Block(_, expr) => isConstr(expr) // SI-6481 account for named argument blocks
+ case _ => (tree.symbol ne null) && tree.symbol.isConstructor
+ }
val (pre, rest0) = stats span (!isConstr(_))
val (supercalls, rest) = rest0 span (isConstr(_))
(pre ::: supercalls, rest)
@@ -566,13 +552,18 @@ abstract class Constructors extends Transform with ast.TreeDSL {
* See test case files/run/bug4680.scala, the output of which is wrong in many
* particulars.
*/
- val needsDelayedInit =
- (clazz isSubClass DelayedInitClass) /*&& !(defBuf exists isInitDef)*/ && remainingConstrStats.nonEmpty
+ val needsDelayedInit = (isDelayedInitSubclass && remainingConstrStats.nonEmpty)
if (needsDelayedInit) {
- val dicl = new ConstructorTransformer(unit) transform delayedInitClosure(remainingConstrStats)
- defBuf += dicl
- remainingConstrStats = List(delayedInitCall(dicl))
+ val delayedHook: DefDef = delayedEndpointDef(remainingConstrStats)
+ defBuf += delayedHook
+ val hookCallerClass = {
+ // transform to make the closure-class' default constructor assign the outer instance to its param-accessor field.
+ val drillDown = new ConstructorTransformer(unit)
+ drillDown transform delayedInitClosure(delayedHook.symbol.asInstanceOf[MethodSymbol])
+ }
+ defBuf += hookCallerClass
+ remainingConstrStats = delayedInitCall(hookCallerClass) :: Nil
}
// Assemble final constructor
@@ -594,12 +585,14 @@ abstract class Constructors extends Transform with ast.TreeDSL {
deriveTemplate(impl)(_ => defBuf.toList filter (stat => mustbeKept(stat.symbol)))
} // transformClassTemplate
- override def transform(tree: Tree): Tree =
+ override def transform(tree: Tree): Tree = {
tree match {
case ClassDef(_,_,_,_) if !tree.symbol.isInterface && !isPrimitiveValueClass(tree.symbol) =>
deriveClassDef(tree)(transformClassTemplate)
case _ =>
super.transform(tree)
}
+ }
+
} // ConstructorTransformer
}
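To make the (a)/(b) split in the Constructors comment concrete, here is a small, self-contained illustration of the user-visible contract of `DelayedInit` that the rewriting implements; the class names are made up for the example:

    class Greeter extends DelayedInit {
      def delayedInit(body: => Unit): Unit = {
        println("before the delayed body")
        body               // the compiler has packaged the subclass's post-super statements, (b), into this thunk
      }
    }

    class Hello extends Greeter {
      println("class body runs via delayedInit, not directly in the constructor")   // a statement in (b)
    }

    // new Hello() first runs the Greeter constructor (part (a)), then calls
    // delayedInit with the closure wrapping the println above (parts (c)-(e)).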
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index ac79c60254..0f65b11e9b 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -111,7 +111,7 @@ abstract class Erasure extends AddInterfaces
if (tpe1 eq tpe) t
else ExistentialType(tparams, tpe1)
case t =>
- if (boxedClass contains t.typeSymbol) ObjectClass.tpe
+ if (boxedClass contains t.typeSymbol) ObjectTpe
else tp
}
def apply(tp: Type): Type = tp.dealiasWiden match {
@@ -123,16 +123,16 @@ abstract class Erasure extends AddInterfaces
case tp1 @ TypeRef(pre, sym, args) =>
def argApply(tp: Type) = {
val tp1 = apply(tp)
- if (tp1.typeSymbol == UnitClass) ObjectClass.tpe
+ if (tp1.typeSymbol == UnitClass) ObjectTpe
else squashBoxed(tp1)
}
if (sym == ArrayClass && args.nonEmpty)
- if (unboundedGenericArrayLevel(tp1) == 1) ObjectClass.tpe
+ if (unboundedGenericArrayLevel(tp1) == 1) ObjectTpe
else mapOver(tp1)
else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass)
- ObjectClass.tpe
+ ObjectTpe
else if (sym == UnitClass)
- BoxedUnitClass.tpe
+ BoxedUnitTpe
else if (sym == NothingClass)
RuntimeNothingClass.tpe
else if (sym == NullClass)
@@ -145,7 +145,7 @@ abstract class Erasure extends AddInterfaces
}
case tp1 @ MethodType(params, restpe) =>
val params1 = mapOver(params)
- val restpe1 = if (restpe.typeSymbol == UnitClass) UnitClass.tpe else apply(restpe)
+ val restpe1 = if (restpe.typeSymbol == UnitClass) UnitTpe else apply(restpe)
if ((params1 eq params) && (restpe1 eq restpe)) tp1
else MethodType(params1, restpe1)
case tp1 @ RefinedType(parents, decls) =>
@@ -182,7 +182,7 @@ abstract class Erasure extends AddInterfaces
// java is unthrilled about seeing interfaces inherit from classes
val ok = parents filter (p => p.typeSymbol.isTrait || p.typeSymbol.isInterface)
// traits should always list Object.
- if (ok.isEmpty || ok.head.typeSymbol != ObjectClass) ObjectClass.tpe :: ok
+ if (ok.isEmpty || ok.head.typeSymbol != ObjectClass) ObjectTpe :: ok
else ok
}
else parents
@@ -193,7 +193,7 @@ abstract class Erasure extends AddInterfaces
def boundsSig(bounds: List[Type]) = {
val (isTrait, isClass) = bounds partition (_.typeSymbol.isTrait)
val classPart = isClass match {
- case Nil => ":" // + boxedSig(ObjectClass.tpe)
+ case Nil => ":" // + boxedSig(ObjectTpe)
case x :: _ => ":" + boxedSig(x)
}
classPart :: (isTrait map boxedSig) mkString ":"
@@ -220,8 +220,8 @@ abstract class Erasure extends AddInterfaces
def argSig(tp: Type) =
if (existentiallyBound contains tp.typeSymbol) {
val bounds = tp.typeSymbol.info.bounds
- if (!(AnyRefClass.tpe <:< bounds.hi)) "+" + boxedSig(bounds.hi)
- else if (!(bounds.lo <:< NullClass.tpe)) "-" + boxedSig(bounds.lo)
+ if (!(AnyRefTpe <:< bounds.hi)) "+" + boxedSig(bounds.hi)
+ else if (!(bounds.lo <:< NullTpe)) "-" + boxedSig(bounds.lo)
else "*"
} else {
boxedSig(tp)
@@ -247,7 +247,7 @@ abstract class Erasure extends AddInterfaces
// If args isEmpty, Array is being used as a type constructor
if (sym == ArrayClass && args.nonEmpty) {
- if (unboundedGenericArrayLevel(tp) == 1) jsig(ObjectClass.tpe)
+ if (unboundedGenericArrayLevel(tp) == 1) jsig(ObjectTpe)
else ARRAY_TAG.toString+(args map (jsig(_))).mkString
}
else if (isTypeParameterInSig(sym, sym0)) {
@@ -255,16 +255,16 @@ abstract class Erasure extends AddInterfaces
"" + TVAR_TAG + sym.name + ";"
}
else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass)
- jsig(ObjectClass.tpe)
+ jsig(ObjectTpe)
else if (sym == UnitClass)
- jsig(BoxedUnitClass.tpe)
+ jsig(BoxedUnitTpe)
else if (sym == NothingClass)
jsig(RuntimeNothingClass.tpe)
else if (sym == NullClass)
jsig(RuntimeNullClass.tpe)
else if (isPrimitiveValueClass(sym)) {
- if (!primitiveOK) jsig(ObjectClass.tpe)
- else if (sym == UnitClass) jsig(BoxedUnitClass.tpe)
+ if (!primitiveOK) jsig(ObjectTpe)
+ else if (sym == UnitClass) jsig(BoxedUnitTpe)
else abbrvTag(sym).toString
}
else if (sym.isDerivedValueClass) {
@@ -342,11 +342,6 @@ abstract class Erasure extends AddInterfaces
}
}
- // Each primitive value class has its own getClass for ultra-precise class object typing.
- private lazy val primitiveGetClassMethods = Set[Symbol](Any_getClass, AnyVal_getClass) ++ (
- ScalaValueClasses map (_.tpe member nme.getClass_)
- )
-
// ## requires a little translation
private lazy val poundPoundMethods = Set[Symbol](Any_##, Object_##)
// Methods on Any/Object which we rewrite here while we still know what
@@ -568,7 +563,7 @@ abstract class Erasure extends AddInterfaces
log(s"boxing an unbox: ${tree.symbol} -> ${arg.tpe}")
arg
case _ =>
- (REF(boxMethod(x)) APPLY tree) setPos (tree.pos) setType ObjectClass.tpe
+ (REF(boxMethod(x)) APPLY tree) setPos (tree.pos) setType ObjectTpe
}
}
}
@@ -688,7 +683,7 @@ abstract class Erasure extends AddInterfaces
tree match {
case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List())
if tree.symbol == Any_asInstanceOf =>
- val qual1 = typedQualifier(qual, NOmode, ObjectClass.tpe) // need to have an expected type, see #3037
+ val qual1 = typedQualifier(qual, NOmode, ObjectTpe) // need to have an expected type, see #3037
if (isPrimitiveValueType(targ.tpe) || isErasedValueType(targ.tpe)) {
val noNullCheckNeeded = targ.tpe match {
@@ -704,7 +699,7 @@ abstract class Erasure extends AddInterfaces
val untyped =
// util.trace("new asinstanceof test") {
gen.evalOnce(qual1, context.owner, context.unit) { qual =>
- If(Apply(Select(qual(), nme.eq), List(Literal(Constant(null)) setType NullClass.tpe)),
+ If(Apply(Select(qual(), nme.eq), List(Literal(Constant(null)) setType NullTpe)),
Literal(Constant(null)) setType targ.tpe,
unbox(qual(), targ.tpe))
}
@@ -974,7 +969,6 @@ abstract class Erasure extends AddInterfaces
case Select(qual, _) => qual
case TypeApply(Select(qual, _), _) => qual
}
-
def preEraseAsInstanceOf = {
(fn: @unchecked) match {
case TypeApply(Select(qual, _), List(targ)) =>
@@ -991,7 +985,7 @@ abstract class Erasure extends AddInterfaces
def preEraseIsInstanceOf = {
fn match {
case TypeApply(sel @ Select(qual, name), List(targ)) =>
- if (qual.tpe != null && isPrimitiveValueClass(qual.tpe.typeSymbol) && targ.tpe != null && targ.tpe <:< AnyRefClass.tpe)
+ if (qual.tpe != null && isPrimitiveValueClass(qual.tpe.typeSymbol) && targ.tpe != null && targ.tpe <:< AnyRefTpe)
unit.error(sel.pos, "isInstanceOf cannot test if value types are references.")
def mkIsInstanceOf(q: () => Tree)(tp: Type): Tree =
@@ -1002,7 +996,7 @@ abstract class Erasure extends AddInterfaces
List()) setPos tree.pos
targ.tpe match {
case SingleType(_, _) | ThisType(_) | SuperType(_, _) =>
- val cmpOp = if (targ.tpe <:< AnyValClass.tpe) Any_equals else Object_eq
+ val cmpOp = if (targ.tpe <:< AnyValTpe) Any_equals else Object_eq
atPos(tree.pos) {
Apply(Select(qual, cmpOp), List(gen.mkAttributedQualifier(targ.tpe)))
}
@@ -1032,8 +1026,9 @@ abstract class Erasure extends AddInterfaces
preEraseAsInstanceOf
} else if (fn.symbol == Any_isInstanceOf) {
preEraseIsInstanceOf
- } else if (fn.symbol.owner.isRefinementClass && !fn.symbol.isOverridingSymbol) {
- // !!! Another spot where we produce overloaded types (see test run/t6301)
+ } else if (fn.symbol.isOnlyRefinementMember) {
+ // !!! Another spot where we produce overloaded types (see test pos/t6301)
+ log(s"${fn.symbol.fullLocationString} originates in refinement class - call will be implemented via reflection.")
ApplyDynamic(qualifier, args) setSymbol fn.symbol setPos tree.pos
} else if (fn.symbol.isMethodWithExtension && !fn.symbol.tpe.isErroneous) {
Apply(gen.mkAttributedRef(extensionMethods.extensionMethod(fn.symbol)), qualifier :: args)
@@ -1161,12 +1156,19 @@ abstract class Erasure extends AddInterfaces
preErase(fun)
case Select(qual, name) =>
- val owner = tree.symbol.owner
- // println("preXform: "+ (tree, tree.symbol, tree.symbol.owner, tree.symbol.owner.isRefinementClass))
+ val sym = tree.symbol
+ val owner = sym.owner
if (owner.isRefinementClass) {
- val overridden = tree.symbol.nextOverriddenSymbol
- assert(overridden != NoSymbol, tree.symbol)
- tree.symbol = overridden
+ sym.allOverriddenSymbols filterNot (_.owner.isRefinementClass) match {
+ case overridden :: _ =>
+ log(s"${sym.fullLocationString} originates in refinement class - replacing with ${overridden.fullLocationString}.")
+ tree.symbol = overridden
+ case Nil =>
+ // Ideally this should not be reached or reachable; anything which would
+ // get here should have been caught in the surrounding Apply.
+ devWarning(s"Failed to rewrite reflective apply - now don't know what to do with " + tree)
+ return treeCopy.Select(tree, gen.mkAttributedCast(qual, qual.tpe.widen), name)
+ }
}
def isAccessible(sym: Symbol) = localTyper.context.isAccessible(sym, sym.owner.thisType)
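The `isOnlyRefinementMember` branch in the Erasure hunk above covers calls whose target exists only in a refinement (structural) type: they have no statically known receiver class, so they become `ApplyDynamic` nodes and are later implemented with the cached reflective lookup from CleanUp. A small user-level illustration (the example names are made up):

    import scala.language.reflectiveCalls

    class Person { def name = "scalac" }

    // `x.name` resolves only through the refinement `{ def name: String }`,
    // so after erasure it is dispatched reflectively.
    def greet(x: { def name: String }): String = "hello, " + x.name

    // greet(new Person) == "hello, scalac"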
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 367825c251..48263a496e 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package transform
import symtab._
@@ -83,7 +84,7 @@ abstract class ExplicitOuter extends InfoTransform
else findOrElse(clazz.info.decls)(_.outerSource == clazz)(NoSymbol)
}
def newOuterAccessor(clazz: Symbol) = {
- val accFlags = SYNTHETIC | ARTIFACT | METHOD | STABLE | ( if (clazz.isTrait) DEFERRED else 0 )
+ val accFlags = SYNTHETIC | ARTIFACT | STABLE | ( if (clazz.isTrait) DEFERRED else 0 )
val sym = clazz.newMethod(nme.OUTER, clazz.pos, accFlags)
val restpe = if (clazz.isTrait) clazz.outerClass.tpe_* else clazz.outerClass.thisType
@@ -378,7 +379,7 @@ abstract class ExplicitOuter extends InfoTransform
if (outerAcc.isDeferred) EmptyTree
else This(currentClass) DOT outerField(currentClass)
- /** If we don't re-type the tree, we see self-type related crashes like #266.
+ /* If we don't re-type the tree, we see self-type related crashes like #266.
*/
localTyper typed {
(DEF(outerAcc) withPos currentClass.pos withType null) === rhs
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index 6ff8792a45..7888198531 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -32,6 +32,21 @@ abstract class LambdaLift extends InfoTransform {
}
}
+ /** scala.runtime.*Ref classes */
+ private lazy val allRefClasses: Set[Symbol] = {
+ refClass.values.toSet ++ volatileRefClass.values.toSet ++ Set(VolatileObjectRefClass, ObjectRefClass)
+ }
+
+ /** Each scala.runtime.*Ref class has a static method `create(value)` that simply instantiates the Ref to carry that value. */
+ private lazy val refCreateMethod: Map[Symbol, Symbol] = {
+ mapFrom(allRefClasses.toList)(x => getMemberMethod(x.companionModule, nme.create))
+ }
+
+ /** Quite frequently a *Ref is initialized with its zero (e.g., null, 0.toByte, etc.). Method `zero()` of the *Ref class encapsulates that pattern. */
+ private lazy val refZeroMethod: Map[Symbol, Symbol] = {
+ mapFrom(allRefClasses.toList)(x => getMemberMethod(x.companionModule, nme.zero))
+ }
+
def transformInfo(sym: Symbol, tp: Type): Type =
if (sym.isCapturedVariable) capturedVariableType(sym, tpe = lifted(tp), erasedTypes = true)
else lifted(tp)
@@ -289,7 +304,7 @@ abstract class LambdaLift extends InfoTransform {
proxies(owner) =
for (fv <- freeValues.toList) yield {
val proxyName = proxyNames.getOrElse(fv, fv.name)
- val proxy = owner.newValue(proxyName.toTermName, owner.pos, newFlags) setInfo fv.info
+ val proxy = owner.newValue(proxyName.toTermName, owner.pos, newFlags.toLong) setInfo fv.info
if (owner.isClass) owner.info.decls enter proxy
proxy
}
@@ -444,56 +459,21 @@ abstract class LambdaLift extends InfoTransform {
case ValDef(mods, name, tpt, rhs) =>
if (sym.isCapturedVariable) {
val tpt1 = TypeTree(sym.tpe) setPos tpt.pos
- /* Creating a constructor argument if one isn't present. */
- val constructorArg = rhs match {
- case EmptyTree =>
- sym.tpe.typeSymbol.primaryConstructor.info.paramTypes match {
- case List(tp) => gen.mkZero(tp)
- case _ =>
- debugwarn("Couldn't determine how to properly construct " + sym)
- rhs
- }
- case arg => arg
+
+ val refTypeSym = sym.tpe.typeSymbol
+
+ val factoryCall = typer.typedPos(rhs.pos) {
+ rhs match {
+ case EmptyTree =>
+ val zeroMSym = refZeroMethod(refTypeSym)
+ gen.mkMethodCall(zeroMSym, Nil)
+ case arg =>
+ val createMSym = refCreateMethod(refTypeSym)
+ gen.mkMethodCall(createMSym, arg :: Nil)
+ }
}
- /* Wrap expr argument in new *Ref(..) constructor. But try/catch
- * is a problem because a throw will clear the stack and post catch
- * we would expect the partially-constructed object to be on the stack
- * for the call to init. So we recursively
- * search for "leaf" result expressions where we know its safe
- * to put the new *Ref(..) constructor or, if all else fails, transform
- * an expr to { val temp=expr; new *Ref(temp) }.
- * The reason we narrowly look for try/catch in captured var definitions
- * is because other try/catch expression have already been lifted
- * see SI-6863
- */
- def refConstr(expr: Tree): Tree = typer.typedPos(expr.pos)(expr match {
- // very simple expressions can be wrapped in a new *Ref(expr) because they can't have
- // a try/catch in final expression position.
- case Ident(_) | Apply(_, _) | Literal(_) | New(_) | Select(_, _) | Throw(_) | Assign(_, _) | ValDef(_, _, _, _) | Return(_) | EmptyTree =>
- New(sym.tpe, expr)
- case Try(block, catches, finalizer) =>
- Try(refConstr(block), catches map refConstrCase, finalizer)
- case Block(stats, expr) =>
- Block(stats, refConstr(expr))
- case If(cond, trueBranch, falseBranch) =>
- If(cond, refConstr(trueBranch), refConstr(falseBranch))
- case Match(selector, cases) =>
- Match(selector, cases map refConstrCase)
- // if we can't figure out what else to do, turn expr into {val temp1 = expr; new *Ref(temp1)} to avoid
- // any possibility of try/catch in the *Ref constructor. This should be a safe tranformation as a default
- // though it potentially wastes a variable slot. In particular this case handles LabelDefs.
- case _ =>
- debuglog("assigning expr to temp: " + (expr.pos))
- val tempSym = currentOwner.newValue(unit.freshTermName("temp"), expr.pos) setInfo expr.tpe
- val tempDef = ValDef(tempSym, expr) setPos expr.pos
- val tempRef = Ident(tempSym) setPos expr.pos
- Block(tempDef, New(sym.tpe, tempRef))
- })
- def refConstrCase(cdef: CaseDef): CaseDef =
- CaseDef(cdef.pat, cdef.guard, refConstr(cdef.body))
-
- treeCopy.ValDef(tree, mods, name, tpt1, refConstr(constructorArg))
+ treeCopy.ValDef(tree, mods, name, tpt1, factoryCall)
} else tree
case Return(Block(stats, value)) =>
Block(stats, treeCopy.Return(tree, value)) setType tree.tpe setPos tree.pos
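A minimal sketch of what the new factory-based boxing amounts to at the user level, assuming a scala.runtime whose *Ref classes already expose the create/zero statics this hunk resolves; the object and variable names here are illustrative, not from the patch:

    import scala.runtime.IntRef

    object RefFactorySketch {
      def main(args: Array[String]): Unit = {
        // roughly what `var x = 1; val f = () => x += 1` lowers to once the
        // captured var is boxed through the factory (previously: new IntRef(1))
        val x = IntRef.create(1)
        val f = () => x.elem += 1
        f()
        println(x.elem) // prints 2

        // a captured var without an initializer is boxed through zero()
        // instead of a synthesized new IntRef(0)
        val y = IntRef.zero()
        println(y.elem) // prints 0
      }
    }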
diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
index 69a93b482c..15ca916ac1 100644
--- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala
+++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
@@ -277,7 +277,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
if (bmps.length > n)
bmps(n)
else {
- val sym = meth.newVariable(nme.newBitmapName(nme.BITMAP_NORMAL, n), meth.pos).setInfo(ByteClass.tpe)
+ val sym = meth.newVariable(nme.newBitmapName(nme.BITMAP_NORMAL, n), meth.pos).setInfo(ByteTpe)
enteringTyper {
sym addAnnotation VolatileAttr
}
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 35df63b246..e0b1d9ea80 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -34,7 +34,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
/** A member of a trait is implemented statically if its implementation after the
* mixin transform is in the static implementation module. To be statically
* implemented, a member must be a method that belonged to the trait's implementation class
- * before (e.g. it is not abstract). Not statically implemented are
+ * before (i.e. it is not abstract). Not statically implemented are
* - non-private modules: these are implemented directly in the mixin composition class
* (private modules, on the other hand, are implemented statically, but their
* module variable is not. all such private modules are lifted, because
@@ -213,7 +213,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val newFlags = field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED
val setter = clazz.newMethod(setterName, field.pos, newFlags)
// TODO preserve pre-erasure info?
- setter setInfo MethodType(setter.newSyntheticValueParams(List(field.info)), UnitClass.tpe)
+ setter setInfo MethodType(setter.newSyntheticValueParams(List(field.info)), UnitTpe)
if (field.needsExpandedSetterName)
setter.name = nme.expandedSetterName(setter.name, clazz)
@@ -594,11 +594,10 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
sym.owner.info //todo: needed?
sym.owner.owner.info //todo: needed?
- assert(
- sym.owner.sourceModule ne NoSymbol,
- "" + sym.fullLocationString + " in " + sym.owner.owner + " " + sym.owner.owner.info.decls
- )
- REF(sym.owner.sourceModule) DOT sym
+ if (sym.owner.sourceModule eq NoSymbol)
+ abort(s"Cannot create static reference to $sym because ${sym.safeOwner} has no source module")
+ else
+ REF(sym.owner.sourceModule) DOT sym
}
def needsInitAndHasOffset(sym: Symbol) =
@@ -807,7 +806,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
override def apply[T <: Tree](tree: T): T = if (from.isEmpty) tree else super.apply(tree)
}
- /** return a 'lazified' version of rhs. It uses double-checked locking to ensure
+ /* return a 'lazified' version of rhs. It uses double-checked locking to ensure
* initialization is performed at most once. For performance reasons the double-checked
* locking is split into two parts, the first (fast) path checks the bitmap without
* synchronizing, and if that fails it initializes the lazy val within the
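A hand-written sketch of the double-checked-locking shape described above, using a plain @volatile Boolean where the real transform uses per-class bitmap fields; class and member names are illustrative:

    class LazyCell(compute: () => String) {
      @volatile private[this] var bitmap0 = false
      private[this] var value: String = _

      def lzy: String = {
        if (!bitmap0) {        // fast path: read the volatile flag without locking
          this.synchronized {
            if (!bitmap0) {    // slow path: re-check under the monitor
              value = compute()
              bitmap0 = true
            }
          }
        }
        value
      }
    }

    object LazyCellDemo {
      def main(args: Array[String]): Unit = {
        val cell = new LazyCell(() => { println("computing"); "result" })
        println(cell.lzy) // prints "computing" then "result"
        println(cell.lzy) // prints "result" only: initialization ran at most once
      }
    }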
@@ -1037,7 +1036,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
else if (!clazz.isTrait) {
// This needs to be a def to avoid sharing trees
def accessedRef = accessedReference(sym)
- if (sym.hasAccessorFlag && (!sym.isDeferred || sym.hasFlag(lateDEFERRED))) {
+ if (isConcreteAccessor(sym)) {
// add accessor definitions
addDefDef(sym, {
if (sym.isSetter) {
@@ -1145,7 +1144,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
qual
case Apply(Select(qual, _), args) =>
- /** Changes `qual.m(args)` where m refers to an implementation
+ /* Changes `qual.m(args)` where m refers to an implementation
* class method to Q.m(S, args) where Q is the implementation module of
* `m` and S is the self parameter for the call, which
* is determined as follows:
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index ba19eb1035..f43e42c027 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -3,7 +3,8 @@
* @author Iulian Dragos
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package transform
import scala.tools.nsc.symtab.Flags
@@ -51,7 +52,9 @@ import scala.annotation.tailrec
*/
abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
import global._
+ import definitions._
import Flags._
+
/** the name of the phase: */
val phaseName: String = "specialize"
@@ -67,19 +70,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private implicit val typeOrdering: Ordering[Type] = Ordering[String] on ("" + _.typeSymbol.name)
- import definitions.{
- BooleanClass, UnitClass, ArrayClass,
- ScalaValueClasses, isPrimitiveValueClass, isPrimitiveValueType,
- SpecializedClass, UnspecializedClass, AnyRefClass, ObjectClass,
- GroupOfSpecializable, uncheckedVarianceClass, ScalaInlineClass
- }
- import rootMirror.RootClass
/** TODO - this is a lot of maps.
*/
/** For a given class and concrete type arguments, give its specialized class */
- val specializedClass: mutable.Map[(Symbol, TypeEnv), Symbol] = new mutable.LinkedHashMap
+ val specializedClass = perRunCaches.newMap[(Symbol, TypeEnv), Symbol]
/** Map a method symbol to a list of its specialized overloads in the same class. */
private val overloads = perRunCaches.newMap[Symbol, List[Overload]]() withDefaultValue Nil
@@ -135,14 +131,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- // If we replace `isBoundedGeneric` with (tp <:< AnyRefClass.tpe),
+ // If we replace `isBoundedGeneric` with (tp <:< AnyRefTpe),
// then pos/spec-List.scala fails - why? Does this kind of check fail
// for similar reasons? Does `sym.isAbstractType` make a difference?
private def isSpecializedAnyRefSubtype(tp: Type, sym: Symbol) = {
specializedOn(sym).exists(s => !isPrimitiveValueClass(s)) &&
!isPrimitiveValueClass(tp.typeSymbol) &&
isBoundedGeneric(tp)
- //(tp <:< AnyRefClass.tpe)
+ //(tp <:< AnyRefTpe)
}
object TypeEnv {
@@ -163,7 +159,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def includes(t1: TypeEnv, t2: TypeEnv) = t1 forall {
case (sym, tpe) =>
t2 get sym exists { t2tp =>
- (tpe == t2tp) || !(isPrimitiveValueType(tpe) || isPrimitiveValueType(t2tp)) // u.t.b. (t2tp <:< AnyRefClass.tpe)
+ (tpe == t2tp) || !(isPrimitiveValueType(tpe) || isPrimitiveValueType(t2tp)) // u.t.b. (t2tp <:< AnyRefTpe)
}
}
@@ -179,7 +175,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
env forall { case (tvar, tpe) =>
tvar.isSpecialized && (concreteTypes(tvar) contains tpe) && {
(sym.typeParams contains tvar) ||
- (sym.owner != RootClass && (sym.owner.typeParams contains tvar))
+ (sym.owner != rootMirror.RootClass && (sym.owner.typeParams contains tvar))
}
}
}
@@ -293,7 +289,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// when searching for a specialized class, take care to map all
// type parameters that are subtypes of AnyRef to AnyRef
val args1 = map2(args, sym.info.typeParams)((tp, orig) =>
- if (isSpecializedAnyRefSubtype(tp, orig)) AnyRefClass.tpe
+ if (isSpecializedAnyRefSubtype(tp, orig)) AnyRefTpe
else tp
)
specializedClass.get((sym, TypeEnv.fromSpecialization(sym, args1))) match {
@@ -332,8 +328,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
else {
val (base, cs, ms) = nme.splitSpecializedName(name)
newTermName(base.toString + "$"
- + "m" + ms + types1.map(t => definitions.abbrvTag(t.typeSymbol)).mkString("", "", "")
- + "c" + cs + types2.map(t => definitions.abbrvTag(t.typeSymbol)).mkString("", "", "$sp"))
+ + "m" + ms + types1.map(t => abbrvTag(t.typeSymbol)).mkString("", "", "")
+ + "c" + cs + types2.map(t => abbrvTag(t.typeSymbol)).mkString("", "", "$sp"))
}
)
@@ -359,7 +355,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
specializedOn(sym) map (s => specializesClass(s).tpe) sorted
if (isBoundedGeneric(sym.tpe) && (types contains AnyRefClass))
- reporter.warning(sym.pos, sym + " is always a subtype of " + AnyRefClass.tpe + ".")
+ reporter.warning(sym.pos, sym + " is always a subtype of " + AnyRefTpe + ".")
types
}
@@ -379,7 +375,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// zip the keys with each permutation to create a TypeEnv.
// If we don't exclude the "all AnyRef" specialization, we will
// incur duplicate members and crash during mixin.
- loop(keys map concreteTypes) filterNot (_ forall (_ <:< AnyRefClass.tpe)) map (xss => Map(keys zip xss: _*))
+ loop(keys map concreteTypes) filterNot (_ forall (_ <:< AnyRefTpe)) map (xss => Map(keys zip xss: _*))
}
/** Does the given 'sym' need to be specialized in the environment 'env'?
@@ -411,7 +407,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
loop(immutable.Set.empty, tpes)
}
def specializedTypeVars(sym: Symbol): immutable.Set[Symbol] = (
- if (definitions.neverHasTypeParameters(sym)) immutable.Set.empty
+ if (neverHasTypeParameters(sym)) immutable.Set.empty
else enteringTyper(specializedTypeVars(sym.info))
)
@@ -453,7 +449,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
sClassMap.getOrElseUpdate(tparam,
tparam.cloneSymbol(sClass, tparam.flags, tparam.name append tpnme.SPECIALIZED_SUFFIX)
- modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefClass.tpe))
+ modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefTpe))
).tpe
}
@@ -487,7 +483,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
foreach2(syms, cloned) { (orig, cln) =>
cln.removeAnnotation(SpecializedClass)
if (env.contains(orig))
- cln modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefClass.tpe))
+ cln modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefTpe))
}
cloned map (_ substInfo (syms, cloned))
}
@@ -496,7 +492,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* the specialized symbol (class (specialization) or member (normalization)), leaves everything else as-is.
*/
private def mapAnyRefsInSpecSym(env: TypeEnv, origsym: Symbol, specsym: Symbol): TypeEnv = env map {
- case (sym, tp) if tp == AnyRefClass.tpe && sym.owner == origsym => (sym, typeParamSubAnyRef(sym, specsym))
+ case (sym, AnyRefTpe) if sym.owner == origsym => (sym, typeParamSubAnyRef(sym, specsym))
case x => x
}
@@ -504,8 +500,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* the original class, leaves everything else as-is.
*/
private def mapAnyRefsInOrigCls(env: TypeEnv, origcls: Symbol): TypeEnv = env map {
- case (sym, tp) if (tp == AnyRefClass.tpe) && sym.owner == origcls => (sym, sym.tpe)
- case x => x
+ case (sym, AnyRefTpe) if sym.owner == origcls => (sym, sym.tpe)
+ case x => x
}
/** Specialize 'clazz', in the environment `outerEnv`. The outer
@@ -732,7 +728,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val specSetter = mkAccessor(specVal, specGetter.setterName)
.resetFlag(STABLE)
specSetter.setInfo(MethodType(specSetter.newSyntheticValueParams(List(specVal.info)),
- UnitClass.tpe))
+ UnitTpe))
val origSetter = overrideIn(sClass, m.setter(clazz))
info(origSetter) = Forward(specSetter)
enterMember(specSetter)
@@ -808,7 +804,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private def normalizeMember(owner: Symbol, sym: Symbol, outerEnv: TypeEnv): List[Symbol] = {
sym :: (
if (!sym.isMethod || enteringTyper(sym.typeParams.isEmpty)) Nil
- else {
+ else if (sym.hasDefault) {
+ /* Specializing default getters is useless; see also SI-7329. */
+ sym.resetFlag(SPECIALIZED)
+ Nil
+ } else {
// debuglog("normalizeMember: " + sym.fullNameAsName('.').decode)
var specializingOn = specializedParams(sym)
val unusedStvars = specializingOn filterNot specializedTypeVars(sym.info)
@@ -1008,27 +1008,25 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (overriding.isAbstractOverride) om.setFlag(ABSOVERRIDE)
typeEnv(om) = env
addConcreteSpecMethod(overriding)
- info(om) = (
- if (overriding.isDeferred) { // abstract override
- debuglog("abstract override " + overriding.fullName + " with specialized " + om.fullName)
- Forward(overriding)
- }
- else {
- // if the override is a normalized member, 'om' gets the
- // implementation from its original target, and adds the
- // environment of the normalized member (that is, any
- // specialized /method/ type parameter bindings)
- val impl = info get overriding match {
- case Some(NormalizedMember(target)) =>
- typeEnv(om) = env ++ typeEnv(overriding)
- target
- case _ =>
- overriding
- }
- info(overriding) = Forward(om setPos overriding.pos)
- SpecialOverride(impl)
+ if (overriding.isDeferred) { // abstract override
+ debuglog("abstract override " + overriding.fullName + " with specialized " + om.fullName)
+ info(om) = Forward(overriding)
+ }
+ else {
+ // if the override is a normalized member, 'om' gets the
+ // implementation from its original target, and adds the
+ // environment of the normalized member (that is, any
+ // specialized /method/ type parameter bindings)
+ info get overriding match {
+ case Some(NormalizedMember(target)) =>
+ typeEnv(om) = env ++ typeEnv(overriding)
+ info(om) = Forward(target)
+ case _ =>
+ info(om) = SpecialOverride(overriding)
}
- )
+ info(overriding) = Forward(om setPos overriding.pos)
+ }
+
newOverload(overriding, om, env)
ifDebug(exitingSpecialize(assert(
overridden.owner.info.decl(om.name) != NoSymbol,
@@ -1147,7 +1145,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
)
private def unspecializableClass(tp: Type) = (
- definitions.isRepeatedParamType(tp) // ???
+ isRepeatedParamType(tp) // ???
|| tp.typeSymbol.isJavaDefined
|| tp.typeSymbol.isPackageClass
)
@@ -1387,13 +1385,29 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def transform1(tree: Tree) = {
val symbol = tree.symbol
- /** The specialized symbol of 'tree.symbol' for tree.tpe, if there is one */
+ /* The specialized symbol of 'tree.symbol' for tree.tpe, if there is one */
def specSym(qual: Tree): Symbol = {
val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
- def isMatch(member: Symbol) = (
- doesConform(symbol, tree.tpe, qual.tpe memberType member, env)
- && TypeEnv.includes(typeEnv(member), env)
- )
+ def isMatch(member: Symbol) = {
+ val memberType = qual.tpe memberType member
+
+ val residualTreeType = tree match {
+ case TypeApply(fun, targs) if fun.symbol == symbol =>
+ // SI-6308 Handle methods with only some type parameters specialized.
+ // drop the specialized type parameters from the PolyType, and
+ // substitute in the type environment.
+ val GenPolyType(tparams, tpe) = fun.tpe
+ val (from, to) = env.toList.unzip
+ val residualTParams = tparams.filterNot(env.contains)
+ GenPolyType(residualTParams, tpe).substituteTypes(from, to)
+ case _ => tree.tpe
+ }
+
+ (
+ doesConform(symbol, residualTreeType, memberType, env)
+ && TypeEnv.includes(typeEnv(member), env)
+ )
+ }
if (env.isEmpty) NoSymbol
else qual.tpe member specializedName(symbol, env) suchThat isMatch
}
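A hypothetical user-level shape for the SI-6308 case handled above, where only some type parameters of a method are specialized and the rest remain as residual parameters on the generated overload; the names are illustrative, not taken from the commit's test:

    class Pairs {
      // A is specialized, B is not; after A is bound to Int the specialized
      // overload of `pair` still carries B, the residual type parameter that
      // residualTreeType has to account for when matching member types
      def pair[@specialized(Int) A, B](a: A, b: B): (A, B) = (a, b)
    }

    object ResidualSpecialization {
      def main(args: Array[String]): Unit =
        println(new Pairs().pair(1, "one")) // prints (1,one)
    }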
@@ -1741,10 +1755,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
if (hasSpecializedFields) {
val isSpecializedInstance = sClass :: sClass.parentSymbols exists (_ hasFlag SPECIALIZED)
- val sym = sClass.newMethod(nme.SPECIALIZED_INSTANCE, sClass.pos) setInfoAndEnter MethodType(Nil, BooleanClass.tpe)
+ val sym = sClass.newMethod(nme.SPECIALIZED_INSTANCE, sClass.pos) setInfoAndEnter MethodType(Nil, BooleanTpe)
mbrs += atPos(sym.pos) {
- DefDef(sym, Literal(Constant(isSpecializedInstance)).setType(BooleanClass.tpe)).setType(NoType)
+ DefDef(sym, Literal(Constant(isSpecializedInstance)).setType(BooleanTpe)).setType(NoType)
}
}
mbrs.toList
@@ -1752,21 +1766,17 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Create specialized class definitions */
def implSpecClasses(trees: List[Tree]): List[Tree] = {
- val buf = new mutable.ListBuffer[Tree]
- for (tree <- trees)
- tree match {
- case ClassDef(_, _, _, impl) =>
- tree.symbol.info // force specialization
- for (((sym1, env), specCls) <- specializedClass if sym1 == tree.symbol) {
- val parents = specCls.info.parents.map(TypeTree)
- buf +=
- ClassDef(specCls, atPos(impl.pos)(Template(parents, emptyValDef, List()))
- .setSymbol(specCls.newLocalDummy(sym1.pos))) setPos tree.pos
- debuglog("created synthetic class: " + specCls + " of " + sym1 + " in " + pp(env))
- }
- case _ =>
- }
- buf.toList
+ trees flatMap {
+ case tree @ ClassDef(_, _, _, impl) =>
+ tree.symbol.info // force specialization
+ for (((sym1, env), specCls) <- specializedClass if sym1 == tree.symbol) yield {
+ debuglog("created synthetic class: " + specCls + " of " + sym1 + " in " + pp(env))
+ val parents = specCls.info.parents.map(TypeTree)
+ ClassDef(specCls, atPos(impl.pos)(Template(parents, emptyValDef, List()))
+ .setSymbol(specCls.newLocalDummy(sym1.pos))) setPos tree.pos
+ }
+ case _ => Nil
+ } sortBy (_.name.decoded)
}
}
@@ -1855,6 +1865,5 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
resultTree
- }
- }
+ } }
}
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index 92ed7fc555..6f422fcc90 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -3,7 +3,8 @@
* @author Iulian Dragos
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package transform
import symtab.Flags
@@ -87,98 +88,112 @@ abstract class TailCalls extends Transform {
* </p>
*/
class TailCallElimination(unit: CompilationUnit) extends Transformer {
- private val defaultReason = "it contains a recursive call not in tail position"
+ private def defaultReason = "it contains a recursive call not in tail position"
+ private val failPositions = perRunCaches.newMap[TailContext, Position]() withDefault (_.methodPos)
+ private val failReasons = perRunCaches.newMap[TailContext, String]() withDefaultValue defaultReason
+ private def tailrecFailure(ctx: TailContext) {
+ val method = ctx.method
+ val failReason = failReasons(ctx)
+ val failPos = failPositions(ctx)
+
+ unit.error(failPos, s"could not optimize @tailrec annotated $method: $failReason")
+ }
/** Has the label been accessed? Then its symbol is in this set. */
- private val accessed = new scala.collection.mutable.HashSet[Symbol]()
+ private val accessed = perRunCaches.newSet[Symbol]()
// `accessed` was stored as boolean in the current context -- this is no longer tenable
// with jumps to labels in tailpositions now considered in tailposition,
// a downstream context may access the label, and the upstream one will be none the wiser
// this is necessary because tail-calls may occur in places where syntactically they seem impossible
// (since we now consider jumps to labels that are in tailposition, such as matchEnd(x) {x})
+ sealed trait TailContext {
+ def method: Symbol // current method
+ def tparams: List[Symbol] // type parameters
+ def methodPos: Position // default position for failure reporting
+ def tailPos: Boolean // context is in tail position
+ def label: Symbol // new label, tail call target
+ def tailLabels: Set[Symbol]
+
+ def enclosingType = method.enclClass.typeOfThis
+ def isEligible = method.isEffectivelyFinal
+ def isMandatory = method.hasAnnotation(TailrecClass)
+ def isTransformed = isEligible && accessed(label)
+
+ def newThis(pos: Position) = {
+ def msg = "Creating new `this` during tailcalls\n method: %s\n current class: %s".format(
+ method.ownerChain.mkString(" -> "),
+ currentClass.ownerChain.mkString(" -> ")
+ )
+ logResult(msg)(method.newValue(nme.THIS, pos, SYNTHETIC) setInfo currentClass.typeOfThis)
+ }
+ override def toString = s"${method.name} tparams=$tparams tailPos=$tailPos label=$label label info=${label.info}"
+ }
- class Context() {
- /** The current method */
- var method: Symbol = NoSymbol
-
- // symbols of label defs in this method that are in tail position
- var tailLabels: Set[Symbol] = Set()
-
- /** The current tail-call label */
- var label: Symbol = NoSymbol
-
- /** The expected type arguments of self-recursive calls */
- var tparams: List[Symbol] = Nil
-
- /** Tells whether we are in a (possible) tail position */
- var tailPos = false
-
- /** The reason this method could not be optimized. */
- var failReason = defaultReason
- var failPos = method.pos
+ object EmptyTailContext extends TailContext {
+ def method = NoSymbol
+ def tparams = Nil
+ def methodPos = NoPosition
+ def tailPos = false
+ def label = NoSymbol
+ def tailLabels = Set.empty[Symbol]
+ }
- def this(that: Context) = {
- this()
- this.method = that.method
- this.tparams = that.tparams
- this.tailPos = that.tailPos
- this.failPos = that.failPos
- this.label = that.label
- this.tailLabels = that.tailLabels
+ class DefDefTailContext(dd: DefDef) extends TailContext {
+ def method = dd.symbol
+ def tparams = dd.tparams map (_.symbol)
+ def methodPos = dd.pos
+ def tailPos = true
+
+ lazy val label = mkLabel()
+ lazy val tailLabels = {
+ // labels are local to a method, so only traverse the rhs of a defdef
+ val collector = new TailPosLabelsTraverser
+ collector traverse dd.rhs
+ collector.tailLabels.toSet
}
- def this(dd: DefDef) {
- this()
- this.method = dd.symbol
- this.tparams = dd.tparams map (_.symbol)
- this.tailPos = true
- this.failPos = dd.pos
-
- /* Create a new method symbol for the current method and store it in
- * the label field.
- */
- this.label = {
- val label = method.newLabel(newTermName("_" + method.name), method.pos)
- val thisParam = method.newSyntheticValueParam(currentClass.typeOfThis)
- label setInfo MethodType(thisParam :: method.tpe.params, method.tpe.finalResultType)
- }
+
+ private def mkLabel() = {
+ val label = method.newLabel(newTermName("_" + method.name), method.pos)
+ val thisParam = method.newSyntheticValueParam(currentClass.typeOfThis)
+ label setInfo MethodType(thisParam :: method.tpe.params, method.tpe.finalResultType)
if (isEligible)
label substInfo (method.tpe.typeParams, tparams)
- }
-
- def enclosingType = method.enclClass.typeOfThis
- def isEligible = method.isEffectivelyFinal
- // @tailrec annotation indicates mandatory transformation
- def isMandatory = method.hasAnnotation(TailrecClass)
- def isTransformed = isEligible && accessed(label)
- def tailrecFailure() = unit.error(failPos, "could not optimize @tailrec annotated " + method + ": " + failReason)
- def newThis(pos: Position) = logResult("Creating new `this` during tailcalls\n method: %s\n current class: %s".format(
- method.ownerChain.mkString(" -> "), currentClass.ownerChain.mkString(" -> "))) {
- method.newValue(nme.THIS, pos, SYNTHETIC) setInfo currentClass.typeOfThis
+ label
}
-
- override def toString(): String = (
- "" + method.name + " tparams: " + tparams + " tailPos: " + tailPos +
- " Label: " + label + " Label type: " + label.info
- )
+ private def isRecursiveCall(t: Tree) = {
+ val receiver = t.symbol
+
+ ( (receiver != null)
+ && receiver.isMethod
+ && (method.name == receiver.name)
+ && (method.enclClass isSubClass receiver.enclClass)
+ )
+ }
+ def containsRecursiveCall(t: Tree) = t exists isRecursiveCall
}
-
- private var ctx: Context = new Context()
- private def noTailContext() = {
- val t = new Context(ctx)
- t.tailPos = false
- t
+ class ClonedTailContext(that: TailContext, override val tailPos: Boolean) extends TailContext {
+ def method = that.method
+ def tparams = that.tparams
+ def methodPos = that.methodPos
+ def tailLabels = that.tailLabels
+ def label = that.label
}
+ private var ctx: TailContext = EmptyTailContext
+ private def noTailContext() = new ClonedTailContext(ctx, tailPos = false)
+ private def yesTailContext() = new ClonedTailContext(ctx, tailPos = true)
+
/** Rewrite this tree to contain no tail recursive calls */
- def transform(tree: Tree, nctx: Context): Tree = {
+ def transform(tree: Tree, nctx: TailContext): Tree = {
val saved = ctx
ctx = nctx
try transform(tree)
finally this.ctx = saved
}
+ def yesTailTransform(tree: Tree): Tree = transform(tree, yesTailContext())
def noTailTransform(tree: Tree): Tree = transform(tree, noTailContext())
def noTailTransforms(trees: List[Tree]) = {
val nctx = noTailContext()
@@ -192,7 +207,6 @@ abstract class TailCalls extends Transform {
case Select(qual, _) => qual
case _ => EmptyTree
}
-
def receiverIsSame = ctx.enclosingType.widen =:= receiver.tpe.widen
def receiverIsSuper = ctx.enclosingType.widen <:< receiver.tpe.widen
def isRecursiveCall = (ctx.method eq fun.symbol) && ctx.tailPos
@@ -204,25 +218,26 @@ abstract class TailCalls extends Transform {
*/
def fail(reason: String) = {
debuglog("Cannot rewrite recursive call at: " + fun.pos + " because: " + reason)
-
- ctx.failReason = reason
+ failReasons(ctx) = reason
treeCopy.Apply(tree, noTailTransform(target), transformArgs)
}
/* Position of failure is that of the tree being considered. */
def failHere(reason: String) = {
- ctx.failPos = fun.pos
+ failPositions(ctx) = fun.pos
fail(reason)
}
def rewriteTailCall(recv: Tree): Tree = {
debuglog("Rewriting tail recursive call: " + fun.pos.lineContent.trim)
-
accessed += ctx.label
- typedPos(fun.pos)(Apply(Ident(ctx.label), noTailTransform(recv) :: transformArgs))
+ typedPos(fun.pos) {
+ val args = mapWithIndex(transformArgs)((arg, i) => mkAttributedCastHack(arg, ctx.label.info.params(i + 1).tpe))
+ Apply(Ident(ctx.label), noTailTransform(recv) :: args)
+ }
}
if (!ctx.isEligible) fail("it is neither private nor final so can be overridden")
else if (!isRecursiveCall) {
- if (receiverIsSuper) failHere("it contains a recursive call targeting supertype " + receiver.tpe)
+ if (receiverIsSuper) failHere("it contains a recursive call targeting a supertype")
else failHere(defaultReason)
}
else if (!matchesTypeArgs) failHere("it is called recursively with different type arguments")
@@ -238,37 +253,23 @@ abstract class TailCalls extends Transform {
super.transform(tree)
- case dd @ DefDef(_, _, _, vparamss0, _, rhs0) if !dd.symbol.hasAccessorFlag =>
- val newCtx = new Context(dd)
- def isRecursiveCall(t: Tree) = {
- val sym = t.symbol
- (sym != null) && {
- sym.isMethod && (dd.symbol.name == sym.name) && (dd.symbol.enclClass isSubClass sym.enclClass)
- }
- }
- if (newCtx.isMandatory) {
- if (!rhs0.exists(isRecursiveCall)) {
- unit.error(tree.pos, "@tailrec annotated method contains no recursive calls")
- }
- }
+ case dd @ DefDef(_, name, _, vparamss0, _, rhs0) if !dd.symbol.hasAccessorFlag =>
+ val newCtx = new DefDefTailContext(dd)
+ if (newCtx.isMandatory && !(newCtx containsRecursiveCall rhs0))
+ unit.error(tree.pos, "@tailrec annotated method contains no recursive calls")
- // labels are local to a method, so only traverse the rhs of a defdef
- val collectTailPosLabels = new TailPosLabelsTraverser
- collectTailPosLabels traverse rhs0
- newCtx.tailLabels = collectTailPosLabels.tailLabels.toSet
-
- debuglog("Considering " + dd.name + " for tailcalls, with labels in tailpos: "+ newCtx.tailLabels)
+ debuglog(s"Considering $name for tailcalls, with labels in tailpos: ${newCtx.tailLabels}")
val newRHS = transform(rhs0, newCtx)
- deriveDefDef(tree){rhs =>
+ deriveDefDef(tree) { rhs =>
if (newCtx.isTransformed) {
/* We have rewritten the tree, but there may be nested recursive calls remaining.
* If @tailrec is given we need to fail those now.
*/
if (newCtx.isMandatory) {
for (t @ Apply(fn, _) <- newRHS ; if fn.symbol == newCtx.method) {
- newCtx.failPos = t.pos
- newCtx.tailrecFailure()
+ failPositions(newCtx) = t.pos
+ tailrecFailure(newCtx)
}
}
val newThis = newCtx.newThis(tree.pos)
@@ -276,12 +277,12 @@ abstract class TailCalls extends Transform {
typedPos(tree.pos)(Block(
List(ValDef(newThis, This(currentClass))),
- LabelDef(newCtx.label, newThis :: vpSyms, newRHS)
+ LabelDef(newCtx.label, newThis :: vpSyms, mkAttributedCastHack(newRHS, newCtx.label.tpe.resultType))
))
}
else {
- if (newCtx.isMandatory && newRHS.exists(isRecursiveCall))
- newCtx.tailrecFailure()
+ if (newCtx.isMandatory && (newCtx containsRecursiveCall newRHS))
+ tailrecFailure(newCtx)
newRHS
}
@@ -342,27 +343,25 @@ abstract class TailCalls extends Transform {
case Apply(tapply @ TypeApply(fun, targs), vargs) =>
rewriteApply(tapply, fun, targs, vargs)
- case Apply(fun, args) =>
- if (fun.symbol == Boolean_or || fun.symbol == Boolean_and)
- treeCopy.Apply(tree, fun, transformTrees(args))
- else if (fun.symbol.isLabel && args.nonEmpty && args.tail.isEmpty && ctx.tailLabels(fun.symbol)) {
- // this is to detect tailcalls in translated matches
- // it's a one-argument call to a label that is in a tailposition and that looks like label(x) {x}
- // thus, the argument to the call is in tailposition
- val saved = ctx.tailPos
- ctx.tailPos = true
- debuglog("in tailpos label: "+ args.head)
- val res = transform(args.head)
- ctx.tailPos = saved
- if (res ne args.head) {
- // we tail-called -- TODO: shield from false-positives where we rewrite but don't tail-call
- // must leave the jump to the original tailpos-label (fun)!
- // there might be *a* tailcall *in* res, but it doesn't mean res *always* tailcalls
- treeCopy.Apply(tree, fun, List(res))
- }
- else rewriteApply(fun, fun, Nil, args)
- } else rewriteApply(fun, fun, Nil, args)
+ case Apply(fun, args) if fun.symbol == Boolean_or || fun.symbol == Boolean_and =>
+ treeCopy.Apply(tree, fun, transformTrees(args))
+
+ // this is to detect tailcalls in translated matches
+ // it's a one-argument call to a label that is in a tailposition and that looks like label(x) {x}
+ // thus, the argument to the call is in tailposition
+ case Apply(fun, args @ (arg :: Nil)) if fun.symbol.isLabel && ctx.tailLabels(fun.symbol) =>
+ debuglog(s"in tailpos label: $arg")
+ val res = yesTailTransform(arg)
+ // we tail-called -- TODO: shield from false-positives where we rewrite but don't tail-call
+ // must leave the jump to the original tailpos-label (fun)!
+ // there might be *a* tailcall *in* res, but it doesn't mean res *always* tailcalls
+ if (res ne arg)
+ treeCopy.Apply(tree, fun, res :: Nil)
+ else
+ rewriteApply(fun, fun, Nil, args)
+ case Apply(fun, args) =>
+ rewriteApply(fun, fun, Nil, args)
case Alternative(_) | Star(_) | Bind(_, _) =>
sys.error("We should've never gotten inside a pattern")
case Select(qual, name) =>
@@ -373,6 +372,13 @@ abstract class TailCalls extends Transform {
super.transform(tree)
}
}
+
+ // Workaround for SI-6900. Uncurry installs an InfoTransformer and a tree Transformer.
+ // These leave us with a conflicting view of method signatures; the parameter symbols in
+ // the MethodType can be clones of the ones originally found on the parameter ValDefs, which
+ // consequently appear in the typechecked RHS of the method.
+ private def mkAttributedCastHack(tree: Tree, tpe: Type) =
+ gen.mkAttributedCast(tree, tpe)
}
// collect the LabelDefs (generated by the pattern matcher) in a DefDef that are in tail position
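A user-level reminder of what this phase accepts and rejects, under the rules recorded in failReasons/failPositions above; the method names are illustrative:

    import scala.annotation.tailrec

    object TailRecSketch {
      // rewritten into a jump to the synthesized label, i.e. a loop
      @tailrec def length(xs: List[Int], acc: Int): Int =
        if (xs.isEmpty) acc else length(xs.tail, acc + 1)

      // the rejected shape: the recursive call sits under `+`, so it is not in
      // tail position and @tailrec reports the default failure reason
      // @tailrec def badLength(xs: List[Int]): Int =
      //   if (xs.isEmpty) 0 else 1 + badLength(xs.tail)

      def main(args: Array[String]): Unit =
        println(length(List(1, 2, 3), 0)) // prints 3
    }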
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 8c1d0a76d0..64f4e579e1 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -3,7 +3,8 @@
* @author
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package transform
import symtab.Flags._
@@ -141,7 +142,7 @@ abstract class UnCurry extends InfoTransform
/** Return non-local return key for given method */
private def nonLocalReturnKey(meth: Symbol) =
nonLocalReturnKeys.getOrElseUpdate(meth,
- meth.newValue(unit.freshTermName("nonLocalReturnKey"), meth.pos, SYNTHETIC) setInfo ObjectClass.tpe
+ meth.newValue(unit.freshTermName("nonLocalReturnKey"), meth.pos, SYNTHETIC) setInfo ObjectTpe
)
/** Generate a non-local return throw with given return expression from given method.
@@ -183,7 +184,7 @@ abstract class UnCurry extends InfoTransform
THEN ((ex DOT nme.value)())
ELSE (Throw(Ident(ex)))
)
- val keyDef = ValDef(key, New(ObjectClass.tpe))
+ val keyDef = ValDef(key, New(ObjectTpe))
val tryCatch = Try(body, pat -> rhs)
for (Try(t, catches, _) <- body ; cdef <- catches ; if treeInfo catchesThrowable cdef)
@@ -290,7 +291,7 @@ abstract class UnCurry extends InfoTransform
def getClassTag(tp: Type): Tree = {
val tag = localTyper.resolveClassTag(tree.pos, tp)
// Don't want bottom types getting any further than this (SI-4024)
- if (tp.typeSymbol.isBottomClass) getClassTag(AnyClass.tpe)
+ if (tp.typeSymbol.isBottomClass) getClassTag(AnyTpe)
else if (!tag.isEmpty) tag
else if (tp.bounds.hi ne tp) getClassTag(tp.bounds.hi)
else localTyper.TyperErrorGen.MissingClassTagError(tree, tp)
@@ -470,7 +471,7 @@ abstract class UnCurry extends InfoTransform
val fn1 = withInPattern(value = false)(transform(fn))
val args1 = transformTrees(fn.symbol.name match {
case nme.unapply => args
- case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, analyzer.unapplyTypeList(fn.pos, fn.symbol, fn.tpe, args.length))
+ case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, analyzer.unapplyTypeList(fn.pos, fn.symbol, fn.tpe, args))
case _ => sys.error("internal error: UnApply node has wrong symbol")
})
treeCopy.UnApply(tree, fn1, args1)
@@ -546,7 +547,7 @@ abstract class UnCurry extends InfoTransform
def isThrowable(pat: Tree): Boolean = pat match {
case Typed(Ident(nme.WILDCARD), tpt) =>
- tpt.tpe =:= ThrowableClass.tpe
+ tpt.tpe =:= ThrowableTpe
case Bind(_, pat) =>
isThrowable(pat)
case _ =>
@@ -632,7 +633,8 @@ abstract class UnCurry extends InfoTransform
*
* This transformation erases the dependent method types by:
* - Widening the formal parameter type to existentially abstract
- * over the prior parameters (using `packSymbols`)
+ * over the prior parameters (using `packSymbols`). This transformation
+ * is performed in the `InfoTransform`er [[scala.reflect.internal.transform.UnCurry]].
* - Inserting casts in the method body to cast to the original,
* precise type.
*
@@ -660,15 +662,14 @@ abstract class UnCurry extends InfoTransform
*/
def erase(dd: DefDef): (List[List[ValDef]], Tree) = {
import dd.{ vparamss, rhs }
- val vparamSyms = vparamss flatMap (_ map (_.symbol))
-
val paramTransforms: List[ParamTransform] =
- vparamss.flatten.map { p =>
- val declaredType = p.symbol.info
- // existentially abstract over value parameters
- val packedType = typer.packSymbols(vparamSyms, declaredType)
- if (packedType =:= declaredType) Identity(p)
+ map2(vparamss.flatten, dd.symbol.info.paramss.flatten) { (p, infoParam) =>
+ val packedType = infoParam.info
+ if (packedType =:= p.symbol.info) Identity(p)
else {
+ // The Uncurry info transformer existentially abstracted over value parameters
+ // from the previous parameter lists.
+
// Change the type of the param symbol
p.symbol updateInfo packedType
@@ -680,8 +681,8 @@ abstract class UnCurry extends InfoTransform
// the method body to refer to this, rather than the parameter.
val tempVal: ValDef = {
val tempValName = unit freshTermName (p.name + "$")
- val newSym = dd.symbol.newTermSymbol(tempValName, p.pos, SYNTHETIC).setInfo(declaredType)
- atPos(p.pos)(ValDef(newSym, gen.mkAttributedCast(Ident(p.symbol), declaredType)))
+ val newSym = dd.symbol.newTermSymbol(tempValName, p.pos, SYNTHETIC).setInfo(p.symbol.info)
+ atPos(p.pos)(ValDef(newSym, gen.mkAttributedCast(Ident(p.symbol), p.symbol.info)))
}
Packed(newParam, tempVal)
}
@@ -699,13 +700,6 @@ abstract class UnCurry extends InfoTransform
Block(tempVals, rhsSubstituted)
}
- // update the type of the method after uncurry.
- dd.symbol updateInfo {
- val GenPolyType(tparams, tp) = dd.symbol.info
- logResult(s"erased dependent param types for ${dd.symbol.info}") {
- GenPolyType(tparams, MethodType(allParams map (_.symbol), tp.finalResultType))
- }
- }
(allParams :: Nil, rhs1)
}
}
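A hypothetical example of the dependent method types that `erase` rewrites; the widened signature mentioned in the comment is a conceptual rendering, not compiler output:

    class Holder { type T = Int }

    object DependentErasureSketch {
      // `b` and the result depend on the earlier parameter `a`; the uncurry info
      // transformer widens those types (existentially abstracting over `a`) and
      // the tree transform above inserts casts so the body keeps the precise type
      def get(a: Holder)(b: a.T): a.T = b

      def main(args: Array[String]): Unit =
        println(get(new Holder)(42)) // prints 42
    }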
@@ -741,7 +735,7 @@ abstract class UnCurry extends InfoTransform
// becomes def foo[T](a: Int, b: Array[Object])
// instead of def foo[T](a: Int, b: Array[T]) ===> def foo[T](a: Int, b: Object)
arrayType(
- if (arg.typeSymbol.isTypeParameterOrSkolem) ObjectClass.tpe
+ if (arg.typeSymbol.isTypeParameterOrSkolem) ObjectTpe
else arg
)
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
index 92b7700c04..069484ff65 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
@@ -4,7 +4,8 @@
* @author Adriaan Moors
*/
-package scala.tools.nsc.transform.patmat
+package scala
+package tools.nsc.transform.patmat
import scala.language.postfixOps
import scala.collection.mutable
@@ -384,18 +385,35 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
// else debug.patmat("NOT implies: "+(lower, upper))
- /* does V = C preclude V having value `other`?
- (1) V = null is an exclusive assignment,
- (2) V = A and V = B, for A and B value constants, are mutually exclusive unless A == B
- we err on the safe side, for example:
- - assume `val X = 1; val Y = 1`, then
- (2: Int) match { case X => case Y => <falsely considered reachable> }
- - V = 1 does not preclude V = Int, or V = Any, it could be said to preclude V = String, but we don't model that
-
- (3) for types we could try to do something fancy, but be conservative and just say no
+ /** Does V=A preclude V=B?
+ *
+ * (0) A or B must be in the domain to draw any conclusions.
+ *
+ * For example, knowing that the scrutinee is *not* true does not
+ * statically exclude it from being `X`, because that is an opaque
+ * Boolean.
+ *
+ * val X = true
+ * (true: Boolean) match { case true => case X <reachable> }
+ *
+ * (1) V = null excludes assignment to any other constant (modulo point #0). This includes
+ * both values and type tests (which are both modelled here as `Const`)
+ * (2) V = A and V = B, for A and B domain constants, are mutually exclusive unless A == B
+ *
+ * (3) We only reason about type tests as being excluded by null assignments; otherwise we
+ * only consider value assignments.
+ * TODO: refine this, a == 0 excludes a: String, or `a: Int` excludes `a: String`
+ * (since no value can be of both types. See also SI-7211)
+ *
+ * NOTE: V = 1 does not preclude V = Int, or V = Any; it could be said to preclude
+ * V = String, but we don't model that.
*/
- def excludes(a: Const, b: Const): Boolean =
- a != b && ((a == NullConst || b == NullConst) || (a.isValue && b.isValue))
+ def excludes(a: Const, b: Const): Boolean = {
+ val bothInDomain = domain exists (d => d(a) && d(b))
+ val eitherIsNull = a == NullConst || b == NullConst
+ val bothAreValues = a.isValue && b.isValue
+ bothInDomain && (eitherIsNull || bothAreValues) && (a != b)
+ }
// if(r) debug.patmat("excludes : "+(a, a.tp, b, b.tp))
// else debug.patmat("NOT excludes: "+(a, b))
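A user-level illustration of point (0) in the comment above: an opaque Boolean constant must keep its case reachable even after a literal case on the same value; the names are illustrative:

    object ExcludesSketch {
      val X = true

      def describe(b: Boolean): String = b match {
        case true => "literal true"
        case X    => "the constant X"   // must not be pruned: X is opaque to the analysis
        case _    => "something else"
      }

      def main(args: Array[String]): Unit =
        println(describe(false)) // prints "something else"
    }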
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
index 006c7bd85f..f527c30b8a 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
@@ -19,7 +19,7 @@ trait TreeAndTypeAnalysis extends Debugging {
// we use subtyping as a model for implication between instanceof tests
// i.e., when S <:< T we assume x.isInstanceOf[S] implies x.isInstanceOf[T]
// unfortunately this is not true in general:
- // SI-6022 expects instanceOfTpImplies(ProductClass.tpe, AnyRefClass.tpe)
+ // SI-6022 expects instanceOfTpImplies(ProductClass.tpe, AnyRefTpe)
def instanceOfTpImplies(tp: Type, tpImplied: Type) = {
val tpValue = tp.typeSymbol.isPrimitiveValueClass
@@ -28,7 +28,7 @@ trait TreeAndTypeAnalysis extends Debugging {
// this allows us to reuse subtyping as a model for implication between instanceOf tests
// the latter don't see a difference between AnyRef, Object or Any when comparing non-value types -- SI-6022
val tpImpliedNormalizedToAny =
- if (tpImplied =:= (if (tpValue) AnyValClass.tpe else AnyRefClass.tpe)) AnyClass.tpe
+ if (tpImplied =:= (if (tpValue) AnyValTpe else AnyRefTpe)) AnyTpe
else tpImplied
tp <:< tpImpliedNormalizedToAny
@@ -48,7 +48,7 @@ trait TreeAndTypeAnalysis extends Debugging {
tp.typeSymbol match {
// TODO case _ if tp.isTupleType => // recurse into component types?
case UnitClass =>
- Some(List(UnitClass.tpe))
+ Some(List(UnitTpe))
case BooleanClass =>
Some((List(ConstantType(Constant(true)), ConstantType(Constant(false)))))
// TODO case _ if tp.isTupleType => // recurse into component types
@@ -95,7 +95,9 @@ trait TreeAndTypeAnalysis extends Debugging {
// TODO: when type tags are available, we will check -- when this is implemented, can we take that into account here?
// similar to typer.infer.approximateAbstracts
object typeArgsToWildcardsExceptArray extends TypeMap {
- def apply(tp: Type): Type = tp match {
+ // SI-6771: dealias would be enough today, but we future-proof by using dealiasWiden.
+ // See neg/t6771b.scala for elaboration.
+ def apply(tp: Type): Type = tp.dealiasWiden match {
case TypeRef(pre, sym, args) if args.nonEmpty && (sym ne ArrayClass) =>
TypeRef(pre, sym, args map (_ => WildcardType))
case _ =>
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
index 889615a39f..1e4c56529c 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
@@ -128,7 +128,7 @@ trait MatchCodeGen extends Interface {
// __match.guard(`cond`, `res`).flatMap(`nextBinder` => `next`)
def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = flatMap(guard(cond, res), nextBinder, next)
// __match.guard(`guardTree`, ()).flatMap((_: P[Unit]) => `next`)
- def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitClass.tpe)), next)
+ def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitTpe)), next)
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
index 31b04d0bd6..9854e4ef62 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
@@ -143,7 +143,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
}
class ReusedCondTreeMaker(prevBinder: Symbol, val nextBinder: Symbol, cond: Tree, res: Tree, val pos: Position) extends TreeMaker { import CODE._
lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
- lazy val storedCond = freshSym(pos, BooleanClass.tpe, "rc") setFlag MUTABLE
+ lazy val storedCond = freshSym(pos, BooleanTpe, "rc") setFlag MUTABLE
lazy val treesToHoist: List[Tree] = {
nextBinder setFlag MUTABLE
List(storedCond, nextBinder) map { b => VAL(b) === codegen.mkZero(b.info) }
@@ -503,7 +503,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
}
class RegularSwitchMaker(scrutSym: Symbol, matchFailGenOverride: Option[Tree => Tree], val unchecked: Boolean) extends SwitchMaker {
- val switchableTpe = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe)
+ val switchableTpe = Set(ByteTpe, ShortTpe, IntTpe, CharTpe)
val alternativesSupported = true
val canJump = true
@@ -543,7 +543,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
else {
// match on scrutSym -- converted to an int if necessary -- not on scrut directly (to avoid duplicating scrut)
val scrutToInt: Tree =
- if (scrutSym.tpe =:= IntClass.tpe) REF(scrutSym)
+ if (scrutSym.tpe =:= IntTpe) REF(scrutSym)
else (REF(scrutSym) DOT (nme.toInt))
Some(BLOCK(
VAL(scrutSym) === scrut,
@@ -571,16 +571,16 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
}
def isDefault(x: CaseDef): Boolean = x match {
- case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
- case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
+ case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) if (tpt.tpe =:= ThrowableTpe) => true
+ case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) if (tpt.tpe =:= ThrowableTpe) => true
case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
case _ => false
}
- lazy val defaultSym: Symbol = freshSym(NoPosition, ThrowableClass.tpe)
+ lazy val defaultSym: Symbol = freshSym(NoPosition, ThrowableTpe)
def defaultBody: Tree = Throw(CODE.REF(defaultSym))
def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
- (CASE (Bind(scrutSym, Typed(Ident(nme.WILDCARD), TypeTree(ThrowableClass.tpe)))) IF guard) ==> body
+ (CASE (Bind(scrutSym, Typed(Ident(nme.WILDCARD), TypeTree(ThrowableTpe)))) IF guard) ==> body
}}
}
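A sketch of the kind of match the RegularSwitchMaker above can turn into a JVM switch, now that the switchable types are expressed as ByteTpe/ShortTpe/IntTpe/CharTpe; the example is illustrative:

    object SwitchSketch {
      // a Char scrutinee qualifies; it is converted with toInt before switching
      def classify(c: Char): String = c match {
        case 'a' => "vowel a"
        case 'b' => "consonant b"
        case _   => "something else"
      }

      def main(args: Array[String]): Unit =
        println(classify('b')) // prints "consonant b"
    }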
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
index c8c3b741f1..fcee142932 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
@@ -14,14 +14,8 @@ import scala.reflect.internal.util.Statistics
*/
trait MatchTranslation { self: PatternMatching =>
import PatternMatchingStats._
- import global.{phase, currentRun, Symbol,
- Apply, Bind, CaseDef, ClassInfoType, Ident, Literal, Match,
- Alternative, Constant, EmptyTree, Select, Star, This, Throw, Typed, UnApply,
- Type, MethodType, WildcardType, PolyType, ErrorType, NoType, TypeRef, typeRef,
- Name, NoSymbol, Position, Tree, atPos, glb, rootMirror, treeInfo, nme, Transformer,
- elimAnonymousClass, asCompactDebugString, hasLength, devWarning}
- import global.definitions.{ThrowableClass, SeqClass, ScalaPackageClass, BooleanClass, UnitClass, RepeatedParamClass,
- repeatedToSeq, isRepeatedParamType, getProductArgs}
+ import global._
+ import definitions._
import global.analyzer.{ErrorUtils, formalTypes}
trait MatchTranslator extends TreeMakers {
@@ -169,7 +163,7 @@ trait MatchTranslation { self: PatternMatching =>
val bindersAndCases = caseDefs map { caseDef =>
// generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there)
// if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this)
- val caseScrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
+ val caseScrutSym = freshSym(pos, pureType(ThrowableTpe))
(caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution))
}
@@ -179,10 +173,10 @@ trait MatchTranslation { self: PatternMatching =>
}
val catches = if (swatches.nonEmpty) swatches else {
- val scrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
+ val scrutSym = freshSym(pos, pureType(ThrowableTpe))
val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, pt)(caseDef), EmptySubstitution))}
- val exSym = freshSym(pos, pureType(ThrowableClass.tpe), "ex")
+ val exSym = freshSym(pos, pureType(ThrowableTpe), "ex")
List(
atPos(pos) {
@@ -194,7 +188,7 @@ trait MatchTranslation { self: PatternMatching =>
})
}
- typer.typedCases(catches, ThrowableClass.tpe, WildcardType)
+ typer.typedCases(catches, ThrowableTpe, WildcardType)
}
@@ -595,7 +589,7 @@ trait MatchTranslation { self: PatternMatching =>
def treeMaker(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
// the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
- val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
+ val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitTpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted, ignoredSubPatBinders)
}
@@ -623,7 +617,7 @@ trait MatchTranslation { self: PatternMatching =>
// what's the extractor's result type in the monad?
// turn an extractor's result type into something `monadTypeToSubPatTypesAndRefs` understands
protected lazy val resultInMonad: Type = if(!hasLength(tpe.paramTypes, 1)) ErrorType else {
- if (resultType.typeSymbol == BooleanClass) UnitClass.tpe
+ if (resultType.typeSymbol == BooleanClass) UnitTpe
else matchMonadResult(resultType)
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
index 4dff445fe8..bce0a077fb 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
@@ -19,7 +19,7 @@ import scala.reflect.internal.util.Position
*/
trait MatchTreeMaking extends MatchCodeGen with Debugging {
import global._
- import definitions.{SomeClass, AnyRefClass, UncheckedClass, BooleanClass}
+ import definitions._
final case class Suppression(exhaustive: Boolean, unreachable: Boolean)
object Suppression {
@@ -414,7 +414,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
case SingleType(_, sym) => and(equalsTest(gen.mkAttributedQualifier(expectedTp), testedBinder), typeTest(testedBinder, expectedTp.widen))
// must use == to support e.g. List() == Nil
case ThisType(sym) if sym.isModule => and(equalsTest(CODE.REF(sym), testedBinder), typeTest(testedBinder, expectedTp.widen))
- case ConstantType(Constant(null)) if testedBinder.info.widen <:< AnyRefClass.tpe
+ case ConstantType(Constant(null)) if testedBinder.info.widen <:< AnyRefTpe
=> eqTest(expTp(CODE.NULL), testedBinder)
case ConstantType(const) => equalsTest(expTp(Literal(const)), testedBinder)
case ThisType(sym) => eqTest(expTp(This(sym)), testedBinder)
@@ -428,7 +428,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
// since the types conform, no further checking is required
if (expectedTp.typeSymbol.isPrimitiveValueClass) tru
// have to test outer and non-null only when it's a reference type
- else if (expectedTp <:< AnyRefClass.tpe) {
+ else if (expectedTp <:< AnyRefTpe) {
// do non-null check first to ensure we won't select outer on null
if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
else nonNullTest(testedBinder)
@@ -476,7 +476,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
((casegen: Casegen) => combineExtractors(altTreeMakers :+ TrivialTreeMaker(casegen.one(mkTRUE)))(casegen))
)
- val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanClass.tpe)(combinedAlts, Some(x => mkFALSE))
+ val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanTpe)(combinedAlts, Some(x => mkFALSE))
codegenAlt.ifThenElseZero(findAltMatcher, substitution(next))
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
index 8be8b72130..63834ae51e 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
@@ -129,8 +129,9 @@ trait Interface extends ast.TreeDSL {
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/** Interface with user-defined match monad?
- * if there's a `__match` in scope, we use this as the match strategy, assuming it conforms to MatchStrategy as defined below:
+ * if there's a <code>__match</code> in scope, we use this as the match strategy, assuming it conforms to MatchStrategy as defined below:
+ {{{
type Matcher[P[_], M[+_], A] = {
def flatMap[B](f: P[A] => M[B]): M[B]
def orElse[B >: A](alternative: => M[B]): M[B]
@@ -144,12 +145,14 @@ trait Interface extends ast.TreeDSL {
def one[T](x: P[T]): M[T]
def guard[T](cond: P[Boolean], then: => P[T]): M[T]
}
+ }}}
* P and M are derived from one's signature (`def one[T](x: P[T]): M[T]`)
- * if no `__match` is found, we assume the following implementation (and generate optimized code accordingly)
+ * if no <code>__match</code> is found, we assume the following implementation (and generate optimized code accordingly)
+ {{{
object __match extends MatchStrategy[({type Id[x] = x})#Id, Option] {
def zero = None
def one[T](x: T) = Some(x)
@@ -157,6 +160,7 @@ trait Interface extends ast.TreeDSL {
def guard[T](cond: Boolean, then: => T): Option[T] = if(cond) Some(then) else None
def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x))
}
+ }}}
*/
trait MatchMonadInterface {
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
index a0fb6e82fc..3c7dc79636 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
@@ -8,6 +8,7 @@ package scala.tools.nsc.transform.patmat
import scala.collection.mutable
import scala.reflect.internal.util.Statistics
+import scala.language.postfixOps
// naive CNF translation and simple DPLL solver
trait Solving extends Logic {
diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
index 4210d0b9fb..54e4fefc15 100644
--- a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
@@ -146,7 +146,7 @@ trait AnalyzerPlugins { self: Analyzer =>
/**
* Modify the type of a return expression. By default, return expressions have type
- * NothingClass.tpe.
+ * NothingTpe.
*
* @param tpe The type of the return expression
* @param typer The typer that was used for typing the return tree
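As a user-level illustration (not part of this patch) of the default being documented here: a `return` expression is itself typed as `Nothing`, so it conforms to whatever type the surrounding expression expects.

    object ReturnTypeDemo extends App {
      // The `return x` expression has type Nothing, which conforms to the Unit
      // expected by foreach's function literal; the method still returns an Int.
      def firstPositive(xs: List[Int]): Int = {
        xs.foreach { x => if (x > 0) return x } // `return x` : Nothing
        -1
      }
      println(firstPositive(List(-3, 0, 7))) // prints 7
    }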
diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
index 0686b28079..31a31df764 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
@@ -247,7 +247,7 @@ trait Checkable {
case TypeRef(_, NothingClass | NullClass | AnyValClass, _) => false
case RefinedType(_, decls) if !decls.isEmpty => false
case p =>
- new CheckabilityChecker(AnyClass.tpe, p) isCheckable
+ new CheckabilityChecker(AnyTpe, p) isCheckable
})
)
diff --git a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
index 65bfd8e34e..56ed0ee16c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package typechecker
import java.lang.ArithmeticException
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 89fc55bc2c..7fa199afaf 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -21,47 +21,65 @@ trait ContextErrors {
import global._
import definitions._
- object ErrorKinds extends Enumeration {
- type ErrorKind = Value
- val Normal, Access, Ambiguous, Divergent = Value
- }
-
- import ErrorKinds.ErrorKind
-
- trait AbsTypeError extends Throwable {
+ abstract class AbsTypeError extends Throwable {
def errPos: Position
def errMsg: String
- def kind: ErrorKind
+ override def toString() = "[Type error at:" + errPos + "] " + errMsg
}
- case class NormalTypeError(underlyingTree: Tree, errMsg: String, kind: ErrorKind = ErrorKinds.Normal)
- extends AbsTypeError {
-
- def errPos:Position = underlyingTree.pos
- override def toString() = "[Type error at:" + underlyingTree.pos + "] " + errMsg
+ abstract class TreeTypeError extends AbsTypeError {
+ def underlyingTree: Tree
+ def errPos = underlyingTree.pos
}
- case class SymbolTypeError(underlyingSym: Symbol, errMsg: String, kind: ErrorKind = ErrorKinds.Normal)
+ case class NormalTypeError(underlyingTree: Tree, errMsg: String)
+ extends TreeTypeError
+
+ case class AccessTypeError(underlyingTree: Tree, errMsg: String)
+ extends TreeTypeError
+
+ case class AmbiguousTypeError(errPos: Position, errMsg: String)
+ extends AbsTypeError
+
+ case class SymbolTypeError(underlyingSym: Symbol, errMsg: String)
extends AbsTypeError {
def errPos = underlyingSym.pos
}
- case class TypeErrorWrapper(ex: TypeError, kind: ErrorKind = ErrorKinds.Normal)
+ case class TypeErrorWrapper(ex: TypeError)
extends AbsTypeError {
def errMsg = ex.msg
def errPos = ex.pos
}
- case class TypeErrorWithUnderlyingTree(tree: Tree, ex: TypeError, kind: ErrorKind = ErrorKinds.Normal)
+ case class TypeErrorWithUnderlyingTree(tree: Tree, ex: TypeError)
extends AbsTypeError {
def errMsg = ex.msg
def errPos = tree.pos
}
- case class AmbiguousTypeError(underlyingTree: Tree, errPos: Position, errMsg: String, kind: ErrorKind = ErrorKinds.Ambiguous) extends AbsTypeError
+ // Unlike other type errors diverging implicit expansion
+ // will be re-issued explicitly on failed implicit argument search.
+ // This is because we want to:
+ // 1) provide better error message than just "implicit not found"
+ // 2) provide the type of the implicit parameter for which we got diverging expansion
+ // (pt at the point of divergence gives less information to the user)
+ // Note: it is safe to delay error message generation in this case
+ // because we don't modify implicits' infos.
+ case class DivergentImplicitTypeError(underlyingTree: Tree, pt0: Type, sym: Symbol)
+ extends TreeTypeError {
+ def errMsg: String = errMsgForPt(pt0)
+ def withPt(pt: Type): AbsTypeError = this.copy(pt0 = pt)
+ private def errMsgForPt(pt: Type) =
+ s"diverging implicit expansion for type ${pt}\nstarting with ${sym.fullLocationString}"
+ }
+
+ case class AmbiguousImplicitTypeError(underlyingTree: Tree, errMsg: String)
+ extends TreeTypeError
- case class PosAndMsgTypeError(errPos: Position, errMsg: String, kind: ErrorKind = ErrorKinds.Normal) extends AbsTypeError
+ case class PosAndMsgTypeError(errPos: Position, errMsg: String)
+ extends AbsTypeError
object ErrorUtils {
def issueNormalTypeError(tree: Tree, msg: String)(implicit context: Context) {
@@ -72,21 +90,13 @@ trait ContextErrors {
issueTypeError(SymbolTypeError(sym, msg))
}
- def issueDivergentImplicitsError(tree: Tree, msg: String)(implicit context: Context) {
- issueTypeError(NormalTypeError(tree, msg, ErrorKinds.Divergent))
- }
-
def issueAmbiguousTypeError(pre: Type, sym1: Symbol, sym2: Symbol, err: AmbiguousTypeError)(implicit context: Context) {
context.issueAmbiguousError(pre, sym1, sym2, err)
}
def issueTypeError(err: AbsTypeError)(implicit context: Context) { context.issue(err) }
- def typeErrorMsg(found: Type, req: Type, possiblyMissingArgs: Boolean) = {
- def missingArgsMsg = if (possiblyMissingArgs) "\n possible cause: missing arguments for method or constructor" else ""
-
- "type mismatch" + foundReqMsg(found, req) + missingArgsMsg
- }
+ def typeErrorMsg(found: Type, req: Type) = "type mismatch" + foundReqMsg(found, req)
}
def notAnyRefMessage(found: Type): String = {
@@ -127,7 +137,7 @@ trait ContextErrors {
}
issueNormalTypeError(tree,
"stable identifier required, but "+tree+" found." + (
- if (isStableExceptVolatile(tree)) addendum else ""))
+ if (treeInfo.hasVolatileType(tree)) addendum else ""))
setError(tree)
}
@@ -171,7 +181,7 @@ trait ContextErrors {
}
assert(!foundType.isErroneous && !req.isErroneous, (foundType, req))
- issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(foundType, req, infer.isPossiblyMissingArgs(foundType, req))) )
+ issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(foundType, req)))
infer.explainTypes(foundType, req)
}
@@ -450,7 +460,7 @@ trait ContextErrors {
def AbstractionFromVolatileTypeError(vd: ValDef) =
issueNormalTypeError(vd, "illegal abstraction from value with volatile type "+vd.symbol.tpe)
- private[ContextErrors] def TypedApplyWrongNumberOfTpeParametersErrorMessage(fun: Tree) =
+ private[scala] def TypedApplyWrongNumberOfTpeParametersErrorMessage(fun: Tree) =
"wrong number of type parameters for "+treeSymTypeMsg(fun)
def TypedApplyWrongNumberOfTpeParametersError(tree: Tree, fun: Tree) = {
@@ -466,7 +476,7 @@ trait ContextErrors {
// doTypeApply
//tryNamesDefaults
def NamedAndDefaultArgumentsNotSupportedForMacros(tree: Tree, fun: Tree) =
- NormalTypeError(tree, "macros application do not support named and/or default arguments")
+ NormalTypeError(tree, "macro applications do not support named and/or default arguments")
def TooManyArgsNamesDefaultsError(tree: Tree, fun: Tree) =
NormalTypeError(tree, "too many arguments for "+treeSymTypeMsg(fun))
@@ -561,7 +571,7 @@ trait ContextErrors {
//adapt
def MissingArgsForMethodTpeError(tree: Tree, meth: Symbol) = {
val message =
- if (meth.isMacro) MacroPartialApplicationErrorMessage
+ if (meth.isMacro) MacroTooFewArgumentListsMessage
else "missing arguments for " + meth.fullLocationString + (
if (meth.isConstructor) ""
else ";\nfollow this method with `_' if you want to treat it as a partially applied function"
@@ -592,6 +602,10 @@ trait ContextErrors {
setError(tree)
}
+ // typedPattern
+ def PatternMustBeValue(pat: Tree, pt: Type) =
+ issueNormalTypeError(pat, s"pattern must be a value: $pat"+ typePatternAdvice(pat.tpe.typeSymbol, pt.typeSymbol))
+
// SelectFromTypeTree
def TypeSelectionFromVolatileTypeError(tree: Tree, qual: Tree) = {
val hiBound = qual.tpe.bounds.hi
@@ -678,7 +692,6 @@ trait ContextErrors {
issueNormalTypeError(expandee, s"macro in $role role can only expand into $allowedExpansions")
}
- // same reason as for MacroBodyTypecheckException
case object MacroExpansionException extends Exception with scala.util.control.ControlThrowable
protected def macroExpansionError(expandee: Tree, msg: String, pos: Position = NoPosition) = {
@@ -689,15 +702,24 @@ trait ContextErrors {
throw MacroExpansionException
}
- def MacroPartialApplicationErrorMessage = "macros cannot be partially applied"
- def MacroPartialApplicationError(expandee: Tree) = {
+ private def macroExpansionError2(expandee: Tree, msg: String) = {
// macroExpansionError won't work => swallows positions, hence needed to do issueTypeError
// kinda contradictory to the comment in `macroExpansionError`, but this is how it works
- issueNormalTypeError(expandee, MacroPartialApplicationErrorMessage)
+ issueNormalTypeError(expandee, msg)
setError(expandee)
throw MacroExpansionException
}
+ private def MacroTooFewArgumentListsMessage = "too few argument lists for macro invocation"
+ def MacroTooFewArgumentListsError(expandee: Tree) = macroExpansionError2(expandee, MacroTooFewArgumentListsMessage)
+
+ private def MacroTooManyArgumentListsMessage = "too many argument lists for macro invocation"
+ def MacroTooManyArgumentListsError(expandee: Tree) = macroExpansionError2(expandee, MacroTooManyArgumentListsMessage)
+
+ def MacroTooFewArgumentsError(expandee: Tree) = macroExpansionError2(expandee, "too few arguments for macro invocation")
+
+ def MacroTooManyArgumentsError(expandee: Tree) = macroExpansionError2(expandee, "too many arguments for macro invocation")
+
def MacroGeneratedAbort(expandee: Tree, ex: AbortMacroException) = {
// errors have been reported by the macro itself, so we do nothing here
macroLogVerbose("macro expansion has been aborted")
@@ -760,7 +782,7 @@ trait ContextErrors {
}
def MacroExpansionHasInvalidTypeError(expandee: Tree, expanded: Any) = {
- val expected = "expr"
+ val expected = "expr or tree"
val isPathMismatch = expanded != null && expanded.isInstanceOf[scala.reflect.api.Exprs#Expr[_]]
macroExpansionError(expandee,
s"macro must return a compiler-specific $expected; returned value is " + (
@@ -830,7 +852,7 @@ trait ContextErrors {
underlyingSymbol(sym).fullLocationString + " cannot be accessed in " +
location + explanation
}
- NormalTypeError(tree, errMsg, ErrorKinds.Access)
+ AccessTypeError(tree, errMsg)
}
def NoMethodInstanceError(fn: Tree, args: List[Tree], msg: String) =
@@ -875,7 +897,7 @@ trait ContextErrors {
"argument types " + argtpes.mkString("(", ",", ")") +
(if (pt == WildcardType) "" else " and expected result type " + pt)
val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, msg0)
- issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(tree, pos, msg))
+ issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(pos, msg))
setErrorOnLastTry(lastTry, tree)
} else setError(tree) // do not even try further attempts because they should all fail
// even if this is not the last attempt (because of the SO's possibility on the horizon)
@@ -883,13 +905,13 @@ trait ContextErrors {
}
def NoBestExprAlternativeError(tree: Tree, pt: Type, lastTry: Boolean) = {
- issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(tree.symbol.tpe, pt, isPossiblyMissingArgs(tree.symbol.tpe, pt))))
+ issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(tree.symbol.tpe, pt)))
setErrorOnLastTry(lastTry, tree)
}
def AmbiguousExprAlternativeError(tree: Tree, pre: Type, best: Symbol, firstCompeting: Symbol, pt: Type, lastTry: Boolean) = {
val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, "expected type " + pt)
- issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(tree, pos, msg))
+ issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(pos, msg))
setErrorOnLastTry(lastTry, tree)
}
@@ -903,7 +925,7 @@ trait ContextErrors {
kindErrors.toList.mkString("\n", ", ", ""))
}
- private[ContextErrors] def NotWithinBoundsErrorMessage(prefix: String, targs: List[Type], tparams: List[Symbol], explaintypes: Boolean) = {
+ private[scala] def NotWithinBoundsErrorMessage(prefix: String, targs: List[Type], tparams: List[Symbol], explaintypes: Boolean) = {
if (explaintypes) {
val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, targs).bounds)
(targs, bounds).zipped foreach ((targ, bound) => explainTypes(bound.lo, targ))
@@ -937,33 +959,10 @@ trait ContextErrors {
def IncompatibleScrutineeTypeError(tree: Tree, pattp: Type, pt: Type) =
issueNormalTypeError(tree, "scrutinee is incompatible with pattern type" + foundReqMsg(pattp, pt))
- def PatternTypeIncompatibleWithPtError2(pat: Tree, pt1: Type, pt: Type) = {
- def errMsg = {
- val sym = pat.tpe.typeSymbol
- val clazz = sym.companionClass
- val addendum = (
- if (sym.isModuleClass && clazz.isCaseClass && (clazz isSubClass pt1.typeSymbol)) {
- // TODO: move these somewhere reusable.
- val typeString = clazz.typeParams match {
- case Nil => "" + clazz.name
- case xs => xs map (_ => "_") mkString (clazz.name + "[", ",", "]")
- }
- val caseString = (
- clazz.caseFieldAccessors
- map (_ => "_") // could use the actual param names here
- mkString (clazz.name + "(", ",", ")")
- )
- (
- "\nNote: if you intended to match against the class, try `case _: " +
- typeString + "` or `case " + caseString + "`"
- )
- }
- else ""
- )
- "pattern type is incompatible with expected type"+foundReqMsg(pat.tpe, pt) + addendum
- }
- issueNormalTypeError(pat, errMsg)
- }
+ def PatternTypeIncompatibleWithPtError2(pat: Tree, pt1: Type, pt: Type) =
+ issueNormalTypeError(pat,
+ "pattern type is incompatible with expected type"+ foundReqMsg(pat.tpe, pt) +
+ typePatternAdvice(pat.tpe.typeSymbol, pt1.typeSymbol))
def PolyAlternativeError(tree: Tree, argtypes: List[Type], sym: Symbol, err: PolyAlternativeErrorKind.ErrorType) = {
import PolyAlternativeErrorKind._
@@ -1108,11 +1107,11 @@ trait ContextErrors {
def AbstractMemberWithModiferError(sym: Symbol, flag: Int) =
- issueSymbolTypeError(sym, "abstract member may not have " + Flags.flagsToString(flag) + " modifier")
+ issueSymbolTypeError(sym, "abstract member may not have " + Flags.flagsToString(flag.toLong) + " modifier")
def IllegalModifierCombination(sym: Symbol, flag1: Int, flag2: Int) =
issueSymbolTypeError(sym, "illegal combination of modifiers: %s and %s for: %s".format(
- Flags.flagsToString(flag1), Flags.flagsToString(flag2), sym))
+ Flags.flagsToString(flag1.toLong), Flags.flagsToString(flag2.toLong), sym))
def IllegalDependentMethTpeError(sym: Symbol)(context: Context) = {
val errorAddendum =
@@ -1154,7 +1153,7 @@ trait ContextErrors {
// failures which have nothing to do with implicit conversions
// per se, but which manifest as implicit conversion conflicts
// involving Any, are further explained from foundReqMsg.
- if (AnyRefClass.tpe <:< req) (
+ if (AnyRefTpe <:< req) (
if (sym == AnyClass || sym == UnitClass) (
sm"""|Note: ${sym.name} is not implicitly converted to AnyRef. You can safely
|pattern match `x: AnyRef` or cast `x.asInstanceOf[AnyRef]` to do so."""
@@ -1170,11 +1169,11 @@ trait ContextErrors {
sm"""|Note that implicit conversions are not applicable because they are ambiguous:
|${coreMsg}are possible conversion functions from $found to $req"""
}
- typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req)) + (
+ typeErrorMsg(found, req) + (
if (explanation == "") "" else "\n" + explanation
)
}
- context.issueAmbiguousError(AmbiguousTypeError(tree, tree.pos,
+ context.issueAmbiguousError(AmbiguousImplicitTypeError(tree,
if (isView) viewMsg
else s"ambiguous implicit values:\n${coreMsg}match expected type $pt")
)
@@ -1182,9 +1181,7 @@ trait ContextErrors {
}
def DivergingImplicitExpansionError(tree: Tree, pt: Type, sym: Symbol)(implicit context0: Context) =
- issueDivergentImplicitsError(tree,
- "diverging implicit expansion for type "+pt+"\nstarting with "+
- sym.fullLocationString)
+ issueTypeError(DivergentImplicitTypeError(tree, pt, sym))
}
object NamesDefaultsErrorsGen {
@@ -1235,137 +1232,4 @@ trait ContextErrors {
setError(arg)
}
}
-
- // using an exception here is actually a good idea
- // because the lifespan of this exception is extremely small and controlled
- // moreover exceptions let us avoid an avalanche of "if (!hasError) do stuff" checks
- case object MacroBodyTypecheckException extends Exception with scala.util.control.ControlThrowable
-
- trait MacroErrors {
- self: MacroTyper =>
-
- private implicit val context0 = typer.context
- val context = typer.context
-
- // helpers
-
- private def lengthMsg(flavor: String, violation: String, extra: Symbol) = {
- val noun = if (flavor == "value") "parameter" else "type parameter"
- val message = noun + " lists have different length, " + violation + " extra " + noun
- val suffix = if (extra ne NoSymbol) " " + extra.defString else ""
- message + suffix
- }
-
- private def abbreviateCoreAliases(s: String): String = List("WeakTypeTag", "Expr").foldLeft(s)((res, x) => res.replace("c.universe." + x, "c." + x))
-
- private def showMeth(pss: List[List[Symbol]], restpe: Type, abbreviate: Boolean) = {
- var argsPart = (pss map (ps => ps map (_.defString) mkString ("(", ", ", ")"))).mkString
- if (abbreviate) argsPart = abbreviateCoreAliases(argsPart)
- var retPart = restpe.toString
- if (abbreviate || macroDdef.tpt.tpe == null) retPart = abbreviateCoreAliases(retPart)
- argsPart + ": " + retPart
- }
-
- // not exactly an error generator, but very related
- // and I dearly wanted to push it away from Macros.scala
- private def checkSubType(slot: String, rtpe: Type, atpe: Type) = {
- val ok = if (macroDebugVerbose) {
- withTypesExplained(rtpe <:< atpe)
- } else rtpe <:< atpe
- if (!ok) {
- if (!macroDebugVerbose)
- explainTypes(rtpe, atpe)
- compatibilityError("type mismatch for %s: %s does not conform to %s".format(slot, abbreviateCoreAliases(rtpe.toString), abbreviateCoreAliases(atpe.toString)))
- }
- }
-
- // errors
-
- private def fail() = {
- // need to set the IS_ERROR flag to prohibit spurious expansions
- if (macroDef != null) macroDef setFlag IS_ERROR
- // not setting ErrorSymbol as in `infer.setError`, because we still need to know that it's a macro
- // otherwise assignTypeToTree in Namers might fail if macroDdef.tpt == EmptyTree
- macroDdef setType ErrorType
- throw MacroBodyTypecheckException
- }
-
- private def genericError(tree: Tree, message: String) = {
- issueNormalTypeError(tree, message)
- fail()
- }
-
- private def implRefError(message: String) = {
- val treeInfo.Applied(implRef, _, _) = macroDdef.rhs
- genericError(implRef, message)
- }
-
- private def compatibilityError(message: String) =
- implRefError(
- "macro implementation has wrong shape:"+
- "\n required: " + showMeth(rparamss, rret, abbreviate = true) +
- "\n found : " + showMeth(aparamss, aret, abbreviate = false) +
- "\n" + message)
-
- // Phase I: sanity checks
-
- def MacroDefIsFastTrack() = {
- macroLogVerbose("typecheck terminated unexpectedly: macro is fast track")
- assert(!macroDdef.tpt.isEmpty, "fast track macros must provide result type")
- throw MacroBodyTypecheckException // don't call fail, because we don't need IS_ERROR
- }
-
- def MacroFeatureNotEnabled() = {
- macroLogVerbose("typecheck terminated unexpectedly: language.experimental.macros feature is not enabled")
- fail()
- }
-
- // Phase II: typecheck the right-hand side of the macro def
-
- // do nothing, just fail. relevant typecheck errors have already been reported
- def MacroDefUntypeableBodyError() = fail()
-
- def MacroDefInvalidBodyError() = genericError(macroDdef, "macro body has wrong shape:\n required: macro [<implementation object>].<method name>[[<type args>]]")
-
- def MacroImplNotPublicError() = implRefError("macro implementation must be public")
-
- def MacroImplOverloadedError() = implRefError("macro implementation cannot be overloaded")
-
- def MacroImplWrongNumberOfTypeArgumentsError(macroImplRef: Tree) = implRefError(typer.TyperErrorGen.TypedApplyWrongNumberOfTpeParametersErrorMessage(macroImplRef))
-
- def MacroImplNotStaticError() = implRefError("macro implementation must be in statically accessible object")
-
- // Phase III: check compatibility between the macro def and its macro impl
- // aXXX (e.g. aparams) => characteristics of the macro impl ("a" stands for "actual")
- // rXXX (e.g. rparams) => characteristics of a reference macro impl signature synthesized from the macro def ("r" stands for "reference")
-
- def MacroImplNonTagImplicitParameters(params: List[Symbol]) = compatibilityError("macro implementations cannot have implicit parameters other than WeakTypeTag evidences")
-
- def MacroImplParamssMismatchError() = compatibilityError("number of parameter sections differ")
-
- def MacroImplExtraParamsError(aparams: List[Symbol], rparams: List[Symbol]) = compatibilityError(lengthMsg("value", "found", aparams(rparams.length)))
-
- def MacroImplMissingParamsError(aparams: List[Symbol], rparams: List[Symbol]) = compatibilityError(abbreviateCoreAliases(lengthMsg("value", "required", rparams(aparams.length))))
-
- def checkMacroImplParamTypeMismatch(atpe: Type, rparam: Symbol) = checkSubType("parameter " + rparam.name, rparam.tpe, atpe)
-
- def checkMacroImplResultTypeMismatch(atpe: Type, rret: Type) = checkSubType("return type", atpe, rret)
-
- def MacroImplParamNameMismatchError(aparam: Symbol, rparam: Symbol) = compatibilityError("parameter names differ: " + rparam.name + " != " + aparam.name)
-
- def MacroImplVarargMismatchError(aparam: Symbol, rparam: Symbol) = {
- if (isRepeated(rparam) && !isRepeated(aparam))
- compatibilityError("types incompatible for parameter " + rparam.name + ": corresponding is not a vararg parameter")
- if (!isRepeated(rparam) && isRepeated(aparam))
- compatibilityError("types incompatible for parameter " + aparam.name + ": corresponding is not a vararg parameter")
- }
-
- def MacroImplTargMismatchError(atargs: List[Type], atparams: List[Symbol]) =
- compatibilityError(typer.infer.InferErrorGen.NotWithinBoundsErrorMessage("", atargs, atparams, macroDebugVerbose || settings.explaintypes))
-
- def MacroImplTparamInstantiationError(atparams: List[Symbol], ex: NoInstance) =
- compatibilityError(
- "type parameters "+(atparams map (_.defString) mkString ", ")+" cannot be instantiated\n"+
- ex.getMessage)
- }
}
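To make the diverging-implicit path above concrete, here is a small, self-contained example (not part of this patch) of the kind of implicit search that ends in the "diverging implicit expansion" message built by DivergentImplicitTypeError; the failing line is commented out so the sketch itself compiles.

    object DivergeDemo {
      // Each step of the search requires a strictly larger Wrap[(T, T)],
      // so the compiler cuts it off as a diverging implicit expansion.
      case class Wrap[T](value: String)
      implicit def pairWrap[T](implicit w: Wrap[(T, T)]): Wrap[T] = Wrap(w.value)

      // Uncommenting the next line produces an error of roughly the form:
      //   diverging implicit expansion for type DivergeDemo.Wrap[Int]
      //   starting with method pairWrap in object DivergeDemo
      // implicitly[Wrap[Int]]
    }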
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index e89a860e0f..61967d4cee 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc
package typechecker
-import scala.collection.mutable
+import scala.collection.{ immutable, mutable }
import scala.annotation.tailrec
import scala.reflect.internal.util.shortClassOfInstance
@@ -17,15 +17,20 @@ import scala.reflect.internal.util.shortClassOfInstance
trait Contexts { self: Analyzer =>
import global._
import definitions.{ JavaLangPackage, ScalaPackage, PredefModule }
+ import ContextMode._
- object NoContext extends Context {
- outer = this
+ object NoContext
+ extends Context(EmptyTree, NoSymbol, EmptyScope, NoCompilationUnit,
+ null) { // We can't pass the uninitialized `this`. Instead, we treat null specially in `Context#outer`
enclClass = this
enclMethod = this
+ override val depth = 0
override def nextEnclosing(p: Context => Boolean): Context = this
override def enclosingContextChain: List[Context] = Nil
override def implicitss: List[List[ImplicitInfo]] = Nil
+ override def imports: List[ImportInfo] = Nil
+ override def firstImport: Option[ImportInfo] = None
override def toString = "NoContext"
}
private object RootImports {
@@ -81,282 +86,391 @@ trait Contexts { self: Analyzer =>
if (settings.noimports) Nil
else if (unit.isJava) RootImports.javaList
- else if (settings.nopredef || treeInfo.noPredefImportForUnit(unit.body)) RootImports.javaAndScalaList
+ else if (settings.nopredef || treeInfo.noPredefImportForUnit(unit.body)) {
+ debuglog("Omitted import of Predef._ for " + unit)
+ RootImports.javaAndScalaList
+ }
else RootImports.completeList
}
- def rootContext(unit: CompilationUnit): Context = rootContext(unit, EmptyTree, erasedTypes = false)
- def rootContext(unit: CompilationUnit, tree: Tree): Context = rootContext(unit, tree, erasedTypes = false)
- def rootContext(unit: CompilationUnit, tree: Tree, erasedTypes: Boolean): Context = {
- var sc = startContext
- for (sym <- rootImports(unit)) {
- sc = sc.makeNewImport(sym)
- sc.depth += 1
- }
- val c = sc.make(unit, tree, sc.owner, sc.scope, sc.imports)
+ def rootContext(unit: CompilationUnit, tree: Tree = EmptyTree, erasedTypes: Boolean = false): Context = {
+ val rootImportsContext = (startContext /: rootImports(unit))((c, sym) => c.make(gen.mkWildcardImport(sym)))
+ val c = rootImportsContext.make(tree, unit = unit)
if (erasedTypes) c.setThrowErrors() else c.setReportErrors()
- c.implicitsEnabled = !erasedTypes
- c.enrichmentEnabled = c.implicitsEnabled
+ c(EnrichmentEnabled | ImplicitsEnabled) = !erasedTypes
c
}
def resetContexts() {
- var sc = startContext
- while (sc != NoContext) {
- sc.tree match {
+ startContext.enclosingContextChain foreach { context =>
+ context.tree match {
case Import(qual, _) => qual setType singleType(qual.symbol.owner.thisType, qual.symbol)
case _ =>
}
- sc = sc.outer
+ context.reportBuffer.clearAll()
}
}
- private object Errors {
- final val ReportErrors = 1 << 0
- final val BufferErrors = 1 << 1
- final val AmbiguousErrors = 1 << 2
- final val notThrowMask = ReportErrors | BufferErrors
- final val AllMask = ReportErrors | BufferErrors | AmbiguousErrors
- }
+ /**
+ * A motley collection of the state and loosely associated behaviour of the type checker.
+ * Each `Typer` has an associated context, and as it descends into the tree new `(Typer, Context)`
+ * pairs are spawned.
+ *
+ * Meet the crew; first the state:
+ *
+ * - A tree, symbol, and scope representing the focus of the typechecker
+ * - An enclosing context, `outer`.
+ * - The current compilation unit.
+ * - A variety of bits that track the current error reporting policy (more on this later);
+ * whether or not implicits/macros are enabled, whether we are in a self or super call or
+ * in a constructor suffix. These are represented as bits in the mask `contextMode`.
+ * - Some odds and ends: undetermined type parameters of the current line of type inference;
+ * contextual augmentation for error messages, tracking of the nesting depth.
+ *
+ * And behaviour:
+ *
+ * - The central point for issuing errors and warnings from the typechecker, with a means
+ * to buffer these for use in 'silent' type checking, when some recovery might be possible.
+ * - `Context` is something of a Zipper for the tree we are typechecking: its `enclosingContextChain`
+ * is the path back to the root. This is exactly what we need to resolve names (`lookupSymbol`)
+ * and to collect in-scope implicit definitions (`implicitss`).
+ * Supporting these are `imports`, which represents all `Import` trees in the enclosing context chain.
+ * - In a similar vein, we can assess accessibility (`isAccessible`).
+ *
+ * More on error buffering:
+ * When are type errors recoverable? In quite a few places, it turns out. Some examples:
+ * trying to type an application with/without the expected type, or with/without implicit views
+ * enabled. This is usually mediated by `Typer.silent`, `Inferencer#tryTwice`.
+ *
+ * Initially, starting from the `typer` phase, the contexts either buffer or report errors;
+ * afterwards errors are thrown. This is configured in `rootContext`. Additionally, more
+ * fine-grained control is needed based on the kind of error; ambiguity errors are often
+ * suppressed during exploratory typing, such as determining whether `a == b` in an argument
+ * position is an assignment or a named argument, when `Inferencer#isApplicableSafe` type checks
+ * applications with and without an expected type, or when `Typer#tryTypedApply` tries to fit arguments to
+ * a function type with/without implicit views.
+ *
+ * When the error policy entails error/warning buffering, the mutable [[ReportBuffer]] records
+ * everything that is issued. It is important to note that child Contexts created with `make`
+ * "inherit" the very same `ReportBuffer` instance, whereas children spawned through `makeSilent`
+ * receive a separate, fresh buffer.
+ *
+ * @param tree Tree associated with this context
+ * @param owner The current owner
+ * @param scope The current scope
+ * @param _outer The next outer context.
+ */
+ class Context private[typechecker](val tree: Tree, val owner: Symbol, val scope: Scope,
+ val unit: CompilationUnit, _outer: Context) {
+ private def outerIsNoContext = _outer eq null
+ final def outer: Context = if (outerIsNoContext) NoContext else _outer
- class Context private[typechecker] {
- import Errors._
-
- var unit: CompilationUnit = NoCompilationUnit
- var tree: Tree = _ // Tree associated with this context
- var owner: Symbol = NoSymbol // The current owner
- var scope: Scope = _ // The current scope
- var outer: Context = _ // The next outer context
- var enclClass: Context = _ // The next outer context whose tree is a
- // template or package definition
- @inline final def savingEnclClass[A](c: Context)(a: => A): A = {
+ /** The next outer context whose tree is a template or package definition */
+ var enclClass: Context = _
+
+ @inline private def savingEnclClass[A](c: Context)(a: => A): A = {
val saved = enclClass
enclClass = c
try a finally enclClass = saved
}
- var enclMethod: Context = _ // The next outer context whose tree is a method
- var variance: Variance = Variance.Invariant // Variance relative to enclosing class
- private var _undetparams: List[Symbol] = List() // Undetermined type parameters,
- // not inherited to child contexts
- var depth: Int = 0
- var imports: List[ImportInfo] = List() // currently visible imports
- var openImplicits: List[(Type,Tree)] = List() // types for which implicit arguments
- // are currently searched
- // for a named application block (Tree) the corresponding NamedApplyInfo
+ /** A bitmask containing all the boolean flags in a context, e.g. are implicit views enabled */
+ var contextMode: ContextMode = ContextMode.DefaultMode
+
+ /** Update all modes in `mask` to `value` */
+ def update(mask: ContextMode, value: Boolean) {
+ contextMode = contextMode.set(value, mask)
+ }
+
+ /** Set all modes in the mask `enable` to true, and all in `disable` to false. */
+ def set(enable: ContextMode = NOmode, disable: ContextMode = NOmode): this.type = {
+ contextMode = contextMode.set(true, enable).set(false, disable)
+ this
+ }
+
+ /** Is this context in all modes in the given `mask`? */
+ def apply(mask: ContextMode): Boolean = contextMode.inAll(mask)
+
+ /** The next outer context whose tree is a method */
+ var enclMethod: Context = _
+
+ /** Variance relative to enclosing class */
+ var variance: Variance = Variance.Invariant
+
+ private var _undetparams: List[Symbol] = List()
+
+ protected def outerDepth = if (outerIsNoContext) 0 else outer.depth
+
+ val depth: Int = {
+ val increasesDepth = isRootImport || outerIsNoContext || (outer.scope != scope)
+ ( if (increasesDepth) 1 else 0 ) + outerDepth
+ }
+
+ /** The currently visible imports */
+ def imports: List[ImportInfo] = outer.imports
+ /** Equivalent to `imports.headOption`, but more efficient */
+ def firstImport: Option[ImportInfo] = outer.firstImport
+ def isRootImport: Boolean = false
+
+ /** Types for which implicit arguments are currently searched */
+ var openImplicits: List[OpenImplicit] = List()
+
+ /* For a named application block (`Tree`) the corresponding `NamedApplyInfo`. */
var namedApplyBlockInfo: Option[(Tree, NamedApplyInfo)] = None
var prefix: Type = NoPrefix
- var inConstructorSuffix = false // are we in a secondary constructor
- // after the this constructor call?
- var returnsSeen = false // for method context: were returns encountered?
- var inSelfSuperCall = false // is this context (enclosed in) a constructor call?
- // (the call to the super or self constructor in the first line of a constructor)
- // in this context the object's fields should not be in scope
-
- var diagnostic: List[String] = Nil // these messages are printed when issuing an error
- var implicitsEnabled = false
- var macrosEnabled = true
- var enrichmentEnabled = false // to selectively allow enrichment in patterns, where other kinds of implicit conversions are not allowed
- var checking = false
- var retyping = false
-
- var savedTypeBounds: List[(Symbol, Type)] = List() // saved type bounds
- // for type parameters which are narrowed in a GADT
+ def inSuperInit_=(value: Boolean) = this(SuperInit) = value
+ def inSuperInit = this(SuperInit)
+ def inConstructorSuffix_=(value: Boolean) = this(ConstructorSuffix) = value
+ def inConstructorSuffix = this(ConstructorSuffix)
+ def inPatAlternative_=(value: Boolean) = this(PatternAlternative) = value
+ def inPatAlternative = this(PatternAlternative)
+ def starPatterns_=(value: Boolean) = this(StarPatterns) = value
+ def starPatterns = this(StarPatterns)
+ def returnsSeen_=(value: Boolean) = this(ReturnsSeen) = value
+ def returnsSeen = this(ReturnsSeen)
+ def inSelfSuperCall_=(value: Boolean) = this(SelfSuperCall) = value
+ def inSelfSuperCall = this(SelfSuperCall)
+ def implicitsEnabled_=(value: Boolean) = this(ImplicitsEnabled) = value
+ def implicitsEnabled = this(ImplicitsEnabled)
+ def macrosEnabled_=(value: Boolean) = this(MacrosEnabled) = value
+ def macrosEnabled = this(MacrosEnabled)
+ def enrichmentEnabled_=(value: Boolean) = this(EnrichmentEnabled) = value
+ def enrichmentEnabled = this(EnrichmentEnabled)
+ def checking_=(value: Boolean) = this(Checking) = value
+ def checking = this(Checking)
+ def retyping_=(value: Boolean) = this(ReTyping) = value
+ def retyping = this(ReTyping)
+ def inSecondTry = this(SecondTry)
+ def inSecondTry_=(value: Boolean) = this(SecondTry) = value
+ def inReturnExpr = this(ReturnExpr)
+ def inTypeConstructorAllowed = this(TypeConstructorAllowed)
+
+ def defaultModeForTyped: Mode = if (inTypeConstructorAllowed) Mode.NOmode else Mode.EXPRmode
+
+ /** These messages are printed when issuing an error */
+ var diagnostic: List[String] = Nil
+
+ /** Saved type bounds for type parameters which are narrowed in a GADT. */
+ var savedTypeBounds: List[(Symbol, Type)] = List()
+
+ /** Indentation level, in columns, for output under -Ytyper-debug */
var typingIndentLevel: Int = 0
def typingIndent = " " * typingIndentLevel
- var buffer: mutable.Set[AbsTypeError] = _
- var warningsBuffer: mutable.Set[(Position, String)] = _
-
+ /** The next enclosing context (potentially `this`) that is owned by a class or method */
def enclClassOrMethod: Context =
if ((owner eq NoSymbol) || (owner.isClass) || (owner.isMethod)) this
else outer.enclClassOrMethod
+ /** The next enclosing context (potentially `this`) that has a `CaseDef` as a tree */
def enclosingCaseDef = nextEnclosing(_.tree.isInstanceOf[CaseDef])
+
+ /** ...or an Apply. */
+ def enclosingApply = nextEnclosing(_.tree.isInstanceOf[Apply])
+
+ //
+ // Tracking undetermined type parameters for type argument inference.
+ //
def undetparamsString =
if (undetparams.isEmpty) ""
else undetparams.mkString("undetparams=", ", ", "")
- def undetparams = _undetparams
+ /** Undetermined type parameters. See `Infer#{inferExprInstance, adjustTypeArgs}`. Not inherited to child contexts */
+ def undetparams: List[Symbol] = _undetparams
def undetparams_=(ps: List[Symbol]) = { _undetparams = ps }
- def extractUndetparams() = {
+ /** Return and clear the undetermined type parameters */
+ def extractUndetparams(): List[Symbol] = {
val tparams = undetparams
undetparams = List()
tparams
}
- private[this] var mode = 0
-
- def errBuffer = buffer
- def hasErrors = buffer.nonEmpty
- def hasWarnings = warningsBuffer.nonEmpty
-
- def state: Int = mode
- def restoreState(state0: Int) = mode = state0
-
- def reportErrors = (state & ReportErrors) != 0
- def bufferErrors = (state & BufferErrors) != 0
- def ambiguousErrors = (state & AmbiguousErrors) != 0
- def throwErrors = (state & notThrowMask) == 0
-
- def setReportErrors() = mode = (ReportErrors | AmbiguousErrors)
- def setBufferErrors() = {
- //assert(bufferErrors || !hasErrors, "When entering the buffer state, context has to be clean. Current buffer: " + buffer)
- mode = BufferErrors
+ /** Run `body` with this context with no undetermined type parameters, restoring the
+ * original list afterwards.
+ * @param reportAmbiguous Should ambiguous errors be reported during evaluation of `body`?
+ */
+ def savingUndeterminedTypeParams[A](reportAmbiguous: Boolean = ambiguousErrors)(body: => A): A = {
+ withMode() {
+ this(AmbiguousErrors) = reportAmbiguous
+ val saved = extractUndetparams()
+ try body
+ finally undetparams = saved
+ }
}
- def setThrowErrors() = mode &= (~AllMask)
- def setAmbiguousErrors(report: Boolean) = if (report) mode |= AmbiguousErrors else mode &= notThrowMask
- def updateBuffer(errors: mutable.Set[AbsTypeError]) = buffer ++= errors
- def condBufferFlush(removeP: AbsTypeError => Boolean) {
- val elems = buffer.filter(removeP)
- buffer --= elems
- }
- def flushBuffer() { buffer.clear() }
- def flushAndReturnBuffer(): mutable.Set[AbsTypeError] = {
- val current = buffer.clone()
- buffer.clear()
- current
- }
- def flushAndReturnWarningsBuffer(): mutable.Set[(Position, String)] = {
- val current = warningsBuffer.clone()
- warningsBuffer.clear()
+ //
+ // Error reporting policies and buffer.
+ //
+
+ private var _reportBuffer: ReportBuffer = new ReportBuffer
+ /** A buffer for errors and warnings, used with `this.bufferErrors == true` */
+ def reportBuffer = _reportBuffer
+ /** Discard the current report buffer, and replace with an empty one */
+ def useFreshReportBuffer() = _reportBuffer = new ReportBuffer
+ /** Discard the current report buffer, and replace with `other` */
+ def restoreReportBuffer(other: ReportBuffer) = _reportBuffer = other
+
+ /** The first error, if any, in the report buffer */
+ def firstError: Option[AbsTypeError] = reportBuffer.firstError
+ /** Does the report buffer contain any errors? */
+ def hasErrors = reportBuffer.hasErrors
+
+ def reportErrors = this(ReportErrors)
+ def bufferErrors = this(BufferErrors)
+ def ambiguousErrors = this(AmbiguousErrors)
+ def throwErrors = contextMode.inNone(ReportErrors | BufferErrors)
+
+ def setReportErrors(): Unit = set(enable = ReportErrors | AmbiguousErrors, disable = BufferErrors)
+ def setBufferErrors(): Unit = set(enable = BufferErrors, disable = ReportErrors | AmbiguousErrors)
+ def setThrowErrors(): Unit = this(ReportErrors | AmbiguousErrors | BufferErrors) = false
+ def setAmbiguousErrors(report: Boolean): Unit = this(AmbiguousErrors) = report
+
+ /** Append the given errors to the report buffer */
+ def updateBuffer(errors: Traversable[AbsTypeError]) = reportBuffer ++= errors
+ /** Clear all errors from the report buffer */
+ def flushBuffer() { reportBuffer.clearAllErrors() }
+ /** Return and clear all errors from the report buffer */
+ def flushAndReturnBuffer(): immutable.Seq[AbsTypeError] = {
+ val current = reportBuffer.errors
+ reportBuffer.clearAllErrors()
current
}
- def withImplicitsEnabled[T](op: => T): T = {
- val saved = implicitsEnabled
- implicitsEnabled = true
- try op
- finally implicitsEnabled = saved
- }
-
- def withImplicitsDisabled[T](op: => T): T = {
- val saved = implicitsEnabled
- implicitsEnabled = false
- val savedP = enrichmentEnabled
- enrichmentEnabled = false
- try op
- finally {
- implicitsEnabled = saved
- enrichmentEnabled = savedP
+ /** Issue and clear all warnings from the report buffer */
+ def flushAndIssueWarnings() {
+ reportBuffer.warnings foreach {
+ case (pos, msg) => unit.warning(pos, msg)
}
+ reportBuffer.clearAllWarnings()
}
- def withImplicitsDisabledAllowEnrichment[T](op: => T): T = {
- val saved = implicitsEnabled
- implicitsEnabled = false
- val savedP = enrichmentEnabled
- enrichmentEnabled = true
+ //
+ // Temporary mode adjustment
+ //
+
+ @inline def withMode[T](enabled: ContextMode = NOmode, disabled: ContextMode = NOmode)(op: => T): T = {
+ val saved = contextMode
+ set(enabled, disabled)
try op
- finally {
- implicitsEnabled = saved
- enrichmentEnabled = savedP
- }
+ finally contextMode = saved
}
- def withMacrosEnabled[T](op: => T): T = {
- val saved = macrosEnabled
- macrosEnabled = true
- try op
- finally macrosEnabled = saved
+ @inline final def withImplicitsEnabled[T](op: => T): T = withMode(enabled = ImplicitsEnabled)(op)
+ @inline final def withImplicitsDisabled[T](op: => T): T = withMode(disabled = ImplicitsEnabled | EnrichmentEnabled)(op)
+ @inline final def withImplicitsDisabledAllowEnrichment[T](op: => T): T = withMode(enabled = EnrichmentEnabled, disabled = ImplicitsEnabled)(op)
+ @inline final def withMacrosEnabled[T](op: => T): T = withMode(enabled = MacrosEnabled)(op)
+ @inline final def withMacrosDisabled[T](op: => T): T = withMode(disabled = MacrosEnabled)(op)
+ @inline final def withinStarPatterns[T](op: => T): T = withMode(enabled = StarPatterns)(op)
+ @inline final def withinSuperInit[T](op: => T): T = withMode(enabled = SuperInit)(op)
+ @inline final def withinSecondTry[T](op: => T): T = withMode(enabled = SecondTry)(op)
+ @inline final def withinPatAlternative[T](op: => T): T = withMode(enabled = PatternAlternative)(op)
+
+ /** TypeConstructorAllowed is enabled when we are typing a higher-kinded type.
+ * adapt should then check kind-arity based on the prototypical type's kind
+ * arity. Type arguments should not be inferred.
+ */
+ @inline final def withinTypeConstructorAllowed[T](op: => T): T = withMode(enabled = TypeConstructorAllowed)(op)
+
+ /* TODO - consolidate returnsSeen (which seems only to be used by checkDead)
+ * and ReturnExpr.
+ */
+ @inline final def withinReturnExpr[T](op: => T): T = {
+ enclMethod.returnsSeen = true
+ withMode(enabled = ReturnExpr)(op)
}
- def withMacrosDisabled[T](op: => T): T = {
- val saved = macrosEnabled
- macrosEnabled = false
- try op
- finally macrosEnabled = saved
+ // See comment on FormerNonStickyModes.
+ @inline final def withOnlyStickyModes[T](op: => T): T = withMode(disabled = FormerNonStickyModes)(op)
+
+ /** @return true if the `expr` evaluates to true within a silent Context that incurs no errors */
+ @inline final def inSilentMode(expr: => Boolean): Boolean = {
+ withMode() { // withMode with no arguments to restore the mode mutated by `setBufferErrors`.
+ setBufferErrors()
+ try expr && !hasErrors
+ finally reportBuffer.clearAll()
+ }
}
- def make(unit: CompilationUnit, tree: Tree, owner: Symbol,
- scope: Scope, imports: List[ImportInfo]): Context = {
- val c = new Context
- c.unit = unit
- c.tree = tree
- c.owner = owner
- c.scope = scope
- c.outer = this
-
- tree match {
- case Template(_, _, _) | PackageDef(_, _) =>
- c.enclClass = c
- c.prefix = c.owner.thisType
- c.inConstructorSuffix = false
- case _ =>
- c.enclClass = this.enclClass
- c.prefix =
- if (c.owner != this.owner && c.owner.isTerm) NoPrefix
- else this.prefix
- c.inConstructorSuffix = this.inConstructorSuffix
+ //
+ // Child Context Creation
+ //
+
+ /**
+ * Construct a child context. The parent and child will share the report buffer.
+ * Compare with `makeSilent`, in which the child has a fresh report buffer.
+ *
+ * If `tree` is an `Import`, that import will be available at the head of
+ * `Context#imports`.
+ */
+ def make(tree: Tree = tree, owner: Symbol = owner,
+ scope: Scope = scope, unit: CompilationUnit = unit): Context = {
+ val isTemplateOrPackage = tree match {
+ case _: Template | _: PackageDef => true
+ case _ => false
}
- tree match {
- case DefDef(_, _, _, _, _, _) =>
- c.enclMethod = c
- case _ =>
- c.enclMethod = this.enclMethod
+ val isDefDef = tree match {
+ case _: DefDef => true
+ case _ => false
}
- c.variance = this.variance
- c.depth = if (scope == this.scope) this.depth else this.depth + 1
- c.imports = imports
- c.inSelfSuperCall = inSelfSuperCall
- c.restoreState(this.state)
- c.diagnostic = this.diagnostic
- c.typingIndentLevel = typingIndentLevel
- c.implicitsEnabled = this.implicitsEnabled
- c.macrosEnabled = this.macrosEnabled
- c.enrichmentEnabled = this.enrichmentEnabled
- c.checking = this.checking
- c.retyping = this.retyping
- c.openImplicits = this.openImplicits
- c.buffer = if (this.buffer == null) mutable.LinkedHashSet[AbsTypeError]() else this.buffer // need to initialize
- c.warningsBuffer = if (this.warningsBuffer == null) mutable.LinkedHashSet[(Position, String)]() else this.warningsBuffer
+ val isImport = tree match {
+ case _: Import => true
+ case _ => false
+ }
+ val sameOwner = owner == this.owner
+ val prefixInChild =
+ if (isTemplateOrPackage) owner.thisType
+ else if (!sameOwner && owner.isTerm) NoPrefix
+ else prefix
+
+ // The blank canvas
+ val c = if (isImport)
+ new Context(tree, owner, scope, unit, this) with ImportContext
+ else
+ new Context(tree, owner, scope, unit, this)
+
+ // Fields that are directly propagated
+ c.variance = variance
+ c.diagnostic = diagnostic
+ c.typingIndentLevel = typingIndentLevel
+ c.openImplicits = openImplicits
+ c.contextMode = contextMode // note: ConstructorSuffix, a bit within `mode`, is conditionally overwritten below.
+ c._reportBuffer = reportBuffer
+
+ // Fields that may take on a different value in the child
+ c.prefix = prefixInChild
+ c.enclClass = if (isTemplateOrPackage) c else enclClass
+ c(ConstructorSuffix) = !isTemplateOrPackage && c(ConstructorSuffix)
+ c.enclMethod = if (isDefDef) c else enclMethod
+
registerContext(c.asInstanceOf[analyzer.Context])
debuglog("[context] ++ " + c.unit + " / " + tree.summaryString)
c
}
- def makeNewImport(sym: Symbol): Context =
- makeNewImport(gen.mkWildcardImport(sym))
-
- def makeNewImport(imp: Import): Context = {
- val impInfo = new ImportInfo(imp, depth)
- if (settings.lint && imp.pos.isDefined) // pos.isDefined excludes java.lang/scala/Predef imports
- allImportInfos(unit) ::= impInfo
-
- make(unit, imp, owner, scope, impInfo :: imports)
- }
-
def make(tree: Tree, owner: Symbol, scope: Scope): Context =
+ // TODO SI-7345 Moving this optimization into the main overload of `make` causes all tests to fail.
+ // even if it is extended to check that `unit == this.unit`. Why is this?
if (tree == this.tree && owner == this.owner && scope == this.scope) this
- else make0(tree, owner, scope)
-
- private def make0(tree: Tree, owner: Symbol, scope: Scope): Context =
- make(unit, tree, owner, scope, imports)
+ else make(tree, owner, scope, unit)
+ /** Make a child context that represents a new nested scope */
def makeNewScope(tree: Tree, owner: Symbol): Context =
make(tree, owner, newNestedScope(scope))
- // IDE stuff: distinguish between scopes created for typing and scopes created for naming.
-
- def make(tree: Tree, owner: Symbol): Context =
- make0(tree, owner, scope)
- def make(tree: Tree): Context =
- make(tree, owner)
-
- def makeSilent(reportAmbiguousErrors: Boolean, newtree: Tree = tree): Context = {
+ /** Make a child context that buffers errors and warnings into a fresh report buffer. */
+ def makeSilent(reportAmbiguousErrors: Boolean = ambiguousErrors, newtree: Tree = tree): Context = {
val c = make(newtree)
c.setBufferErrors()
c.setAmbiguousErrors(reportAmbiguousErrors)
- c.buffer = mutable.LinkedHashSet[AbsTypeError]()
+ c._reportBuffer = new ReportBuffer // A fresh buffer so as not to leak errors/warnings into `this`.
c
}
+ /** Make a silent child context that does not allow implicits. Used to prevent chaining of implicit views. */
def makeImplicit(reportAmbiguousErrors: Boolean) = {
val c = makeSilent(reportAmbiguousErrors)
- c.implicitsEnabled = false
- c.enrichmentEnabled = false
+ c(ImplicitsEnabled | EnrichmentEnabled) = false
c
}
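The refactor in the hunk above collapses a pile of boolean fields into a single bit mask manipulated through `set`, `inAll`, `inNone` and the save/restore helper `withMode`. The following standalone sketch (illustrative only; the real ContextMode in this file may differ in detail) shows the idiom those methods rely on.

    object ContextModeSketch extends App {
      // A mask is just an Int wrapper; set/inAll/inNone are bit operations.
      final case class Mode(bits: Int) {
        def |(that: Mode): Mode = Mode(bits | that.bits)
        def set(value: Boolean, mask: Mode): Mode =
          if (value) Mode(bits | mask.bits) else Mode(bits & ~mask.bits)
        def inAll(mask: Mode): Boolean  = (bits & mask.bits) == mask.bits
        def inNone(mask: Mode): Boolean = (bits & mask.bits) == 0
      }
      val ImplicitsEnabled = Mode(1 << 0)
      val MacrosEnabled    = Mode(1 << 1)
      val BufferErrors     = Mode(1 << 2)

      var current = Mode(0).set(true, ImplicitsEnabled | MacrosEnabled)

      // Shaped like Context#withMode: mutate, run, restore even on exception.
      def withMode[T](enabled: Mode)(op: => T): T = {
        val saved = current
        current = current.set(true, enabled)
        try op finally current = saved
      }

      withMode(BufferErrors) { println(current.inAll(BufferErrors)) } // true
      println(current.inNone(BufferErrors))                           // true: restored
      println(current.inAll(ImplicitsEnabled | MacrosEnabled))        // true
    }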
@@ -371,12 +485,10 @@ trait Contexts { self: Analyzer =>
* accessible.
*/
def makeConstructorContext = {
- var baseContext = enclClass.outer
- while (baseContext.tree.isInstanceOf[Template])
- baseContext = baseContext.outer
+ val baseContext = enclClass.outer.nextEnclosing(!_.tree.isInstanceOf[Template])
val argContext = baseContext.makeNewScope(tree, owner)
+ argContext.contextMode = contextMode
argContext.inSelfSuperCall = true
- argContext.restoreState(this.state)
def enterElems(c: Context) {
def enterLocalElems(e: ScopeEntry) {
if (e != null && e.owner == c.scope) {
@@ -384,7 +496,7 @@ trait Contexts { self: Analyzer =>
argContext.scope enter e.sym
}
}
- if (c.owner.isTerm && !c.owner.isLocalDummy) {
+ if (c.isLocal && !c.owner.isLocalDummy) {
enterElems(c.outer)
enterLocalElems(c.scope.elems)
}
@@ -395,6 +507,10 @@ trait Contexts { self: Analyzer =>
argContext
}
+ //
+ // Error and warning issuance
+ //
+
private def addDiagString(msg: String) = {
val ds =
if (diagnostic.isEmpty) ""
@@ -411,16 +527,18 @@ trait Contexts { self: Analyzer =>
(new Exception).printStackTrace()
}
if (pf isDefinedAt err) pf(err)
- else if (bufferErrors) { buffer += err }
+ else if (bufferErrors) { reportBuffer += err }
else throw new TypeError(err.errPos, err.errMsg)
}
+ /** Issue/buffer/throw the given type error according to the current mode for error reporting. */
def issue(err: AbsTypeError) {
issueCommon(err) { case _ if reportErrors =>
unitError(err.errPos, addDiagString(err.errMsg))
}
}
+ /** Issue/buffer/throw the given implicit ambiguity error according to the current mode for error reporting. */
def issueAmbiguousError(pre: Type, sym1: Symbol, sym2: Symbol, err: AbsTypeError) {
issueCommon(err) { case _ if ambiguousErrors =>
if (!pre.isErroneous && !sym1.isErroneous && !sym2.isErroneous)
@@ -428,35 +546,31 @@ trait Contexts { self: Analyzer =>
}
}
+ /** Issue/buffer/throw the given implicit ambiguity error according to the current mode for error reporting. */
def issueAmbiguousError(err: AbsTypeError) {
issueCommon(err) { case _ if ambiguousErrors => unitError(err.errPos, addDiagString(err.errMsg)) }
}
- // TODO remove
+ /** Issue/throw the given `err` according to the current mode for error reporting. */
def error(pos: Position, err: Throwable) =
if (reportErrors) unitError(pos, addDiagString(err.getMessage()))
else throw err
+ /** Issue/throw the given error message according to the current mode for error reporting. */
def error(pos: Position, msg: String) = {
val msg1 = addDiagString(msg)
if (reportErrors) unitError(pos, msg1)
else throw new TypeError(pos, msg1)
}
- def warning(pos: Position, msg: String): Unit = warning(pos, msg, force = false)
- def warning(pos: Position, msg: String, force: Boolean) {
+ /** Issue/buffer the given warning message according to the current mode for error reporting. */
+ def warning(pos: Position, msg: String, force: Boolean = false) {
if (reportErrors || force) unit.warning(pos, msg)
- else if (bufferErrors) warningsBuffer += ((pos, msg))
+ else if (bufferErrors) reportBuffer += (pos -> msg)
}
- def isLocal(): Boolean = tree match {
- case Block(_,_) => true
- case PackageDef(_, _) => false
- case EmptyTree => false
- case _ => outer.isLocal()
- }
-
- def isNameInScope(name: Name) = lookupSymbol(name, _ => true).isSuccess
+ /** Is the owning symbol of this context a term? */
+ final def isLocal: Boolean = owner.isTerm
// nextOuter determines which context is searched next for implicits
// (after `this`, which contributes `newImplicits` below.) In
@@ -482,14 +596,34 @@ trait Contexts { self: Analyzer =>
def enclosingContextChain: List[Context] = this :: outer.enclosingContextChain
- override def toString = "Context(%s@%s unit=%s scope=%s errors=%b, reportErrors=%b, throwErrors=%b)".format(
- owner.fullName, tree.shortClass, unit, scope.##, hasErrors, reportErrors, throwErrors
- )
- /** Is `sub` a subclass of `base` or a companion object of such a subclass?
- */
- def isSubClassOrCompanion(sub: Symbol, base: Symbol) =
+ private def treeTruncated = tree.toString.replaceAll("\\s+", " ").lines.mkString("\\n").take(70)
+ private def treeIdString = if (settings.uniqid.value) "#" + System.identityHashCode(tree).toString.takeRight(3) else ""
+ private def treeString = tree match {
+ case x: Import => "" + x
+ case Template(parents, `emptyValDef`, body) =>
+ val pstr = if ((parents eq null) || parents.isEmpty) "Nil" else parents mkString " "
+ val bstr = if (body eq null) "" else body.length + " stats"
+ s"""Template($pstr, _, $bstr)"""
+ case x => s"${tree.shortClass}${treeIdString}:${treeTruncated}"
+ }
+
+ override def toString =
+ sm"""|Context($unit) {
+ | owner = $owner
+ | tree = $treeString
+ | scope = ${scope.size} decls
+ | contextMode = $contextMode
+ | outer.owner = ${outer.owner}
+ |}"""
+
+ //
+ // Accessibility checking
+ //
+
+ /** Is `sub` a subclass of `base` or a companion object of such a subclass? */
+ private def isSubClassOrCompanion(sub: Symbol, base: Symbol) =
sub.isNonBottomSubClass(base) ||
- sub.isModuleClass && sub.linkedClassOfClass.isNonBottomSubClass(base)
+ sub.isModuleClass && sub.linkedClassOfClass.isNonBottomSubClass(base)
/** Return the closest enclosing context that defines a subclass of `clazz`
* or a companion object thereof, or `NoContext` if no such context exists.
@@ -501,8 +635,7 @@ trait Contexts { self: Analyzer =>
c
}
- /** Is `sym` accessible as a member of `pre` in current context?
- */
+ /** Is `sym` accessible as a member of `pre` in current context? */
def isAccessible(sym: Symbol, pre: Type, superAccess: Boolean = false): Boolean = {
lastAccessCheckDetails = ""
// Console.println("isAccessible(%s, %s, %s)".format(sym, pre, superAccess))
@@ -515,7 +648,7 @@ trait Contexts { self: Analyzer =>
(linked ne NoSymbol) && accessWithin(linked)
}
- /** Are we inside definition of `ab`? */
+ /* Are we inside definition of `ab`? */
def accessWithin(ab: Symbol) = {
// #3663: we must disregard package nesting if sym isJavaDefined
if (sym.isJavaDefined) {
@@ -582,6 +715,10 @@ trait Contexts { self: Analyzer =>
}
}
+ //
+ // Type bound management
+ //
+
def pushTypeBounds(sym: Symbol) {
sym.info match {
case tb: TypeBounds => if (!tb.isEmptyBounds) log(s"Saving $sym info=$tb")
@@ -618,6 +755,10 @@ trait Contexts { self: Analyzer =>
}
}
+ //
+ // Implicit collection
+ //
+
private var implicitsCache: List[List[ImplicitInfo]] = null
private var implicitsRunId = NoRunId
@@ -677,6 +818,8 @@ trait Contexts { self: Analyzer =>
* filtered out later by `eligibleInfos` (SI-4270 / 9129cfe9), as they don't type-check.
*/
def implicitss: List[List[ImplicitInfo]] = {
+ val imports = this.imports
+ val nextOuter = this.nextOuter
if (implicitsRunId != currentRunId) {
implicitsRunId = currentRunId
implicitsCache = List()
@@ -693,8 +836,8 @@ trait Contexts { self: Analyzer =>
} else if (scope != nextOuter.scope && !owner.isPackageClass) {
debuglog("collect local implicits " + scope.toList)//DEBUG
collectImplicits(scope, NoPrefix)
- } else if (imports != nextOuter.imports) {
- assert(imports.tail == nextOuter.imports, (imports, nextOuter.imports))
+ } else if (firstImport != nextOuter.firstImport) {
+ assert(imports.tail.headOption == nextOuter.firstImport, (imports, nextOuter.imports))
collectImplicitImports(imports.head)
} else if (owner.isPackageClass) {
// the corresponding package object may contain implicit members.
@@ -706,6 +849,10 @@ trait Contexts { self: Analyzer =>
implicitsCache
}
+ //
+ // Imports and symbol lookup
+ //
+
/** It's possible that seemingly conflicting identifiers are
* identifiably the same after type normalization. In such cases,
* allow compilation to proceed. A typical example is:
@@ -815,6 +962,8 @@ trait Contexts { self: Analyzer =>
)
}
+ def isNameInScope(name: Name) = lookupSymbol(name, _ => true).isSuccess
+
/** Find the symbol of a simple name starting from this context.
* All names are filtered through the "qualifies" predicate,
* the search continuing as long as no qualifying name is found.
@@ -1021,6 +1170,75 @@ trait Contexts { self: Analyzer =>
}
} //class Context
+ /** A `Context` focussed on an `Import` tree */
+ trait ImportContext extends Context {
+ private val impInfo: ImportInfo = {
+ val info = new ImportInfo(tree.asInstanceOf[Import], outerDepth)
+ if (settings.lint && !isRootImport) // excludes java.lang/scala/Predef imports
+ allImportInfos(unit) ::= info
+ info
+ }
+ override final def imports = impInfo :: super.imports
+ override final def firstImport = Some(impInfo)
+ override final def isRootImport = !tree.pos.isDefined
+ override final def toString = s"ImportContext { $impInfo; outer.owner = ${outer.owner} }"
+ }
+
+ /** A buffer for warnings and errors that are accumulated during speculative type checking. */
+ final class ReportBuffer {
+ type Error = AbsTypeError
+ type Warning = (Position, String)
+
+ private def newBuffer[A] = mutable.LinkedHashSet.empty[A] // Important to use LinkedHS for stable results.
+
+ // [JZ] Contexts, pre- the SI-7345 refactor, avoided allocating the buffers until needed. This
+ // is replicated here out of conservatism.
+ private var _errorBuffer: mutable.LinkedHashSet[Error] = _
+ private def errorBuffer = {if (_errorBuffer == null) _errorBuffer = newBuffer; _errorBuffer}
+ def errors: immutable.Seq[Error] = errorBuffer.toVector
+
+ private var _warningBuffer: mutable.LinkedHashSet[Warning] = _
+ private def warningBuffer = {if (_warningBuffer == null) _warningBuffer = newBuffer; _warningBuffer}
+ def warnings: immutable.Seq[Warning] = warningBuffer.toVector
+
+ def +=(error: AbsTypeError): this.type = {
+ errorBuffer += error
+ this
+ }
+ def ++=(errors: Traversable[AbsTypeError]): this.type = {
+ errorBuffer ++= errors
+ this
+ }
+ def +=(warning: Warning): this.type = {
+ warningBuffer += warning
+ this
+ }
+
+ def clearAll(): this.type = {
+ clearAllErrors(); clearAllWarnings();
+ }
+
+ def clearAllErrors(): this.type = {
+ errorBuffer.clear()
+ this
+ }
+ def clearErrors(removeF: PartialFunction[AbsTypeError, Boolean]): this.type = {
+ errorBuffer.retain(!PartialFunction.cond(_)(removeF))
+ this
+ }
+ def retainErrors(leaveF: PartialFunction[AbsTypeError, Boolean]): this.type = {
+ errorBuffer.retain(PartialFunction.cond(_)(leaveF))
+ this
+ }
+ def clearAllWarnings(): this.type = {
+ warningBuffer.clear()
+ this
+ }
+
+ def hasErrors = errorBuffer.nonEmpty
+ def firstError = errorBuffer.headOption
+ }
+
class ImportInfo(val tree: Import, val depth: Int) {
def pos = tree.pos
def posOf(sel: ImportSelector) = tree.pos withPoint sel.namePos
@@ -1107,3 +1325,111 @@ trait Contexts { self: Analyzer =>
override def safeToString = "ImportType("+expr+")"
}
}
+
+object ContextMode {
+ private implicit def liftIntBitsToContextState(bits: Int): ContextMode = apply(bits)
+ def apply(bits: Int): ContextMode = new ContextMode(bits)
+ final val NOmode: ContextMode = 0
+
+ final val ReportErrors: ContextMode = 1 << 0
+ final val BufferErrors: ContextMode = 1 << 1
+ final val AmbiguousErrors: ContextMode = 1 << 2
+
+ /** Are we in a secondary constructor after the this constructor call? */
+ final val ConstructorSuffix: ContextMode = 1 << 3
+
+ /** For method context: were returns encountered? */
+ final val ReturnsSeen: ContextMode = 1 << 4
+
+ /** Is this context (enclosed in) a constructor call?
+ * (the call to the super or self constructor in the first line of a constructor.)
+ * In such a context, the object's fields should not be in scope.
+ */
+ final val SelfSuperCall: ContextMode = 1 << 5
+
+ // TODO harvest documentation for this
+ final val ImplicitsEnabled: ContextMode = 1 << 6
+
+ final val MacrosEnabled: ContextMode = 1 << 7
+
+ /** To selectively allow enrichment in patterns, where other kinds of implicit conversions are not allowed */
+ final val EnrichmentEnabled: ContextMode = 1 << 8
+
+ /** Are we in a run of [[scala.tools.nsc.typechecker.TreeCheckers]]? */
+ final val Checking: ContextMode = 1 << 9
+
+ /** Are we retypechecking arguments independently from the function applied to them? See `Typer.tryTypedApply`
+ * TODO - iron out distinction/overlap with SecondTry.
+ */
+ final val ReTyping: ContextMode = 1 << 10
+
+ /** Are we typechecking pattern alternatives? Formerly ALTmode. */
+ final val PatternAlternative: ContextMode = 1 << 11
+
+ /** Are star patterns allowed? Formerly STARmode. */
+ final val StarPatterns: ContextMode = 1 << 12
+
+ /** Are we typing the "super" in a superclass constructor call super.<init>? Formerly SUPERCONSTRmode. */
+ final val SuperInit: ContextMode = 1 << 13
+
+ /* Is this the second attempt to type this tree? In that case functions
+ * may no longer be coerced with implicit views. Formerly SNDTRYmode.
+ */
+ final val SecondTry: ContextMode = 1 << 14
+
+ /** Are we in return position? Formerly RETmode. */
+ final val ReturnExpr: ContextMode = 1 << 15
+
+ /** Are unapplied type constructors allowed here? Formerly HKmode. */
+ final val TypeConstructorAllowed: ContextMode = 1 << 16
+
+ /** TODO: The "sticky modes" are EXPRmode, PATTERNmode, TYPEmode.
+ * To mimic the sticky mode behavior, when captain stickyfingers
+ * comes around we need to propagate those modes but forget the other
+ * context modes which were once mode bits; those being so far the
+ * ones listed here.
+ */
+ final val FormerNonStickyModes: ContextMode = (
+ PatternAlternative | StarPatterns | SuperInit | SecondTry | ReturnExpr | TypeConstructorAllowed
+ )
+
+ final val DefaultMode: ContextMode = MacrosEnabled
+
+ private val contextModeNameMap = Map(
+ ReportErrors -> "ReportErrors",
+ BufferErrors -> "BufferErrors",
+ AmbiguousErrors -> "AmbiguousErrors",
+ ConstructorSuffix -> "ConstructorSuffix",
+ SelfSuperCall -> "SelfSuperCall",
+ ImplicitsEnabled -> "ImplicitsEnabled",
+ MacrosEnabled -> "MacrosEnabled",
+ Checking -> "Checking",
+ ReTyping -> "ReTyping",
+ PatternAlternative -> "PatternAlternative",
+ StarPatterns -> "StarPatterns",
+ SuperInit -> "SuperInit",
+ SecondTry -> "SecondTry",
+ TypeConstructorAllowed -> "TypeConstructorAllowed"
+ )
+}
+
+/**
+ * A value class to carry the boolean flags of a context, such as whether errors should
+ * be buffered or reported.
+ */
+final class ContextMode private (val bits: Int) extends AnyVal {
+ import ContextMode._
+
+ def &(other: ContextMode): ContextMode = new ContextMode(bits & other.bits)
+ def |(other: ContextMode): ContextMode = new ContextMode(bits | other.bits)
+ def &~(other: ContextMode): ContextMode = new ContextMode(bits & ~(other.bits))
+ def set(value: Boolean, mask: ContextMode) = if (value) |(mask) else &~(mask)
+
+ def inAll(required: ContextMode) = (this & required) == required
+ def inAny(required: ContextMode) = (this & required) != NOmode
+ def inNone(prohibited: ContextMode) = (this & prohibited) == NOmode
+
+ override def toString =
+ if (bits == 0) "NOmode"
+ else (contextModeNameMap filterKeys inAll).values.toList.sorted mkString " "
+}
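
As a side note, here is a minimal self-contained sketch of the bit-mask pattern that the ContextMode value class above relies on. The Mode class and flag names below are illustrative stand-ins for this sketch only, not part of the compiler's API.

    object ModeDemo {
      // Simplified stand-in for ContextMode: an AnyVal wrapper around an Int bit set.
      final class Mode private (val bits: Int) extends AnyVal {
        def |(other: Mode): Mode  = new Mode(bits | other.bits)
        def &(other: Mode): Mode  = new Mode(bits & other.bits)
        def &~(other: Mode): Mode = new Mode(bits & ~other.bits)
        def set(on: Boolean, mask: Mode): Mode = if (on) this | mask else this &~ mask
        def inAll(required: Mode): Boolean     = (this & required).bits == required.bits
        def inNone(prohibited: Mode): Boolean  = (this & prohibited).bits == 0
      }
      object Mode {
        val NoMode: Mode        = new Mode(0)
        val ReportErrors: Mode  = new Mode(1 << 0)
        val BufferErrors: Mode  = new Mode(1 << 1)
        val MacrosEnabled: Mode = new Mode(1 << 2)
      }
      def main(args: Array[String]): Unit = {
        import Mode._
        val m = NoMode.set(on = true, MacrosEnabled) | BufferErrors
        println(m.inAll(BufferErrors | MacrosEnabled))               // true
        println(m.inNone(ReportErrors))                              // true
        println(m.set(on = false, BufferErrors).inAll(BufferErrors)) // false
      }
    }

Because Mode is a value class, the flag arithmetic generally avoids allocating a wrapper object, which is the same motivation behind making ContextMode extend AnyVal.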
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index b9e4b4f591..95b771a8a5 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -17,7 +17,7 @@ import scala.collection.{ mutable, immutable }
*/
abstract class Duplicators extends Analyzer {
import global._
- import definitions.{ AnyRefClass, AnyValClass }
+ import definitions._
/** Retype the given tree in the given context. Use this method when retyping
* a method in a different class. The typer will replace references to the this of
@@ -331,7 +331,7 @@ abstract class Duplicators extends Analyzer {
super.typed(atPos(tree.pos)(tree1))
*/
case Match(scrut, cases) =>
- val scrut1 = typed(scrut, EXPRmode | BYVALmode, WildcardType)
+ val scrut1 = typedByValueExpr(scrut)
val scrutTpe = scrut1.tpe.widen
val cases1 = {
if (scrutTpe.isFinalType) cases filter {
@@ -346,8 +346,8 @@ abstract class Duplicators extends Analyzer {
// Without this, AnyRef specializations crash on patterns like
// case _: Boolean => ...
// Not at all sure this is safe.
- else if (scrutTpe <:< AnyRefClass.tpe)
- cases filterNot (_.pat.tpe <:< AnyValClass.tpe)
+ else if (scrutTpe <:< AnyRefTpe)
+ cases filterNot (_.pat.tpe <:< AnyValTpe)
else
cases
}
@@ -361,7 +361,7 @@ abstract class Duplicators extends Analyzer {
case _ =>
debuglog("Duplicators default case: " + tree.summaryString)
debuglog(" ---> " + tree)
- if (tree.hasSymbolField && tree.symbol != NoSymbol && (tree.symbol.owner == definitions.AnyClass)) {
+ if (tree.hasSymbolField && tree.symbol != NoSymbol && (tree.symbol.owner == AnyClass)) {
tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any (see AnyVal members, like ==)
}
val ntree = castType(tree, pt)
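
Before the Implicits changes below, here is a minimal standalone sketch of the buffered-error pattern that the new ReportBuffer (added to Contexts above) provides and that the implicit search below consumes: errors raised during speculative type checking are accumulated instead of reported, and the caller later retains only the kinds it still cares about. The Err hierarchy and Buffer class are invented for illustration only.

    import scala.collection.mutable

    object BufferDemo {
      sealed trait Err { def msg: String }
      case class DivergentErr(msg: String) extends Err
      case class AmbiguousErr(msg: String) extends Err
      case class OtherErr(msg: String)     extends Err

      final class Buffer {
        // LinkedHashSet keeps insertion order, so the reported errors are stable across runs.
        private val errs = mutable.LinkedHashSet.empty[Err]
        def +=(e: Err): this.type                = { errs += e; this }
        def errors: Vector[Err]                  = errs.toVector
        def firstError: Option[Err]              = errs.headOption
        def retain(p: Err => Boolean): this.type = { errs.retain(p); this }
      }

      def main(args: Array[String]): Unit = {
        val buf = new Buffer
        buf += OtherErr("does not type check") += DivergentErr("diverging expansion")
        // Keep only divergence, as the speculative search does before trying further candidates.
        buf.retain {
          case _: DivergentErr => true
          case _               => false
        }
        println(buf.errors)     // Vector(DivergentErr(diverging expansion))
        println(buf.firstError) // Some(DivergentErr(diverging expansion))
      }
    }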
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 85e31347be..8e79b56814 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -8,7 +8,8 @@
//todo: disallow C#D in superclass
//todo: treat :::= correctly
-package scala.tools.nsc
+package scala
+package tools.nsc
package typechecker
import scala.annotation.tailrec
@@ -80,9 +81,12 @@ trait Implicits {
printTyping("typing implicit: %s %s".format(tree, context.undetparamsString))
val implicitSearchContext = context.makeImplicit(reportAmbiguous)
val result = new ImplicitSearch(tree, pt, isView, implicitSearchContext, pos).bestImplicit
- if (saveAmbiguousDivergent && implicitSearchContext.hasErrors) {
- context.updateBuffer(implicitSearchContext.errBuffer.filter(err => err.kind == ErrorKinds.Ambiguous || err.kind == ErrorKinds.Divergent))
- debuglog("update buffer: " + implicitSearchContext.errBuffer)
+ if (result.isFailure && saveAmbiguousDivergent && implicitSearchContext.hasErrors) {
+ context.updateBuffer(implicitSearchContext.reportBuffer.errors.collect {
+ case dte: DivergentImplicitTypeError => dte
+ case ate: AmbiguousImplicitTypeError => ate
+ })
+ debuglog("update buffer: " + implicitSearchContext.reportBuffer.errors)
}
printInference("[infer implicit] inferred " + result)
context.undetparams = context.undetparams filterNot result.subst.from.contains
@@ -96,6 +100,21 @@ trait Implicits {
result
}
+ /** A friendly wrapper over inferImplicit to be used in macro contexts and toolboxes.
+ */
+ def inferImplicit(tree: Tree, pt: Type, isView: Boolean, context: Context, silent: Boolean, withMacrosDisabled: Boolean, pos: Position, onError: (Position, String) => Unit): Tree = {
+ val wrapper1 = if (!withMacrosDisabled) (context.withMacrosEnabled[SearchResult] _) else (context.withMacrosDisabled[SearchResult] _)
+ def wrapper(inference: => SearchResult) = wrapper1(inference)
+ val result = wrapper(inferImplicit(tree, pt, reportAmbiguous = true, isView = isView, context = context, saveAmbiguousDivergent = !silent, pos = pos))
+ if (result.isFailure && !silent) {
+ val err = context.firstError
+ val errPos = err.map(_.errPos).getOrElse(pos)
+ val errMsg = err.map(_.errMsg).getOrElse("implicit search has failed. to find out the reason, turn on -Xlog-implicits")
+ onError(errPos, errMsg)
+ }
+ result.tree
+ }
+
/** Find all views from type `tp` (in which `tpars` are free)
*
* Note that the trees in the search results in the returned list share the same type variables.
@@ -112,7 +131,7 @@ trait Implicits {
val tvars = tpars map (TypeVar untouchable _)
val tpSubsted = tp.subst(tpars, tvars)
- val search = new ImplicitSearch(EmptyTree, functionType(List(tpSubsted), AnyClass.tpe), true, context.makeImplicit(reportAmbiguousErrors = false))
+ val search = new ImplicitSearch(EmptyTree, functionType(List(tpSubsted), AnyTpe), true, context.makeImplicit(reportAmbiguousErrors = false))
search.allImplicitsPoly(tvars)
}
@@ -125,6 +144,15 @@ trait Implicits {
private val infoMapCache = new LinkedHashMap[Symbol, InfoMap]
private val improvesCache = perRunCaches.newMap[(ImplicitInfo, ImplicitInfo), Boolean]()
+ private def isInvalidConversionTarget(tpe: Type): Boolean = tpe match {
+ case Function1(_, out) => AnyRefClass.tpe <:< out
+ case _ => false
+ }
+ private def isInvalidConversionSource(tpe: Type): Boolean = tpe match {
+ case Function1(in, _) => in <:< NullClass.tpe
+ case _ => false
+ }
+
def resetImplicits() {
implicitsCache.clear()
infoMapCache.clear()
@@ -152,6 +180,7 @@ trait Implicits {
def isFailure = false
def isAmbiguousFailure = false
+ def isDivergent = false
final def isSuccess = !isFailure
}
@@ -159,6 +188,11 @@ trait Implicits {
override def isFailure = true
}
+ lazy val DivergentSearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter) {
+ override def isFailure = true
+ override def isDivergent = true
+ }
+
lazy val AmbiguousSearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter) {
override def isFailure = true
override def isAmbiguousFailure = true
@@ -208,9 +242,16 @@ trait Implicits {
case _ => false
}
override def hashCode = name.## + pre.## + sym.##
- override def toString = name + ": " + tpe
+ override def toString = (
+ if (tpeCache eq null) name + ": ?"
+ else name + ": " + tpe
+ )
}
+ /** A class which is used to track pending implicits to prevent infinite implicit searches.
+ */
+ case class OpenImplicit(info: ImplicitInfo, pt: Type, tree: Tree)
+
/** A sentinel indicating no implicit was found */
val NoImplicitInfo = new ImplicitInfo(null, NoType, NoSymbol) {
// equals used to be implemented in ImplicitInfo with an `if(this eq NoImplicitInfo)`
@@ -241,7 +282,8 @@ trait Implicits {
/** An extractor for types of the form ? { name: (? >: argtpe <: Any*)restp }
*/
object HasMethodMatching {
- val dummyMethod = NoSymbol.newTermSymbol(newTermName("typer$dummy"))
+ val dummyMethod = NoSymbol.newTermSymbol("typer$dummy") setInfo NullaryMethodType(AnyTpe)
+
def templateArgType(argtpe: Type) = new BoundedWildcardType(TypeBounds.lower(argtpe))
def apply(name: Name, argtpes: List[Type], restpe: Type): Type = {
@@ -394,25 +436,33 @@ trait Implicits {
* @pre `info.tpe` does not contain an error
*/
private def typedImplicit(info: ImplicitInfo, ptChecked: Boolean, isLocal: Boolean): SearchResult = {
- (context.openImplicits find { case (tp, tree1) => tree1.symbol == tree.symbol && dominates(pt, tp)}) match {
+ // SI-7167 let implicit macros decide what amounts to a divergent implicit search
+ // imagine a macro writer which wants to synthesize a complex implicit Complex[T] by making recursive calls to Complex[U] for its parts
+ // e.g. we have `class Foo(val bar: Bar)` and `class Bar(val x: Int)`
+ // then it's quite reasonable for the macro writer to synthesize Complex[Foo] by calling `inferImplicitValue(typeOf[Complex[Bar]])`
+ // however if we didn't insert the `info.sym.isMacro` check here, then under some circumstances
+ // (e.g. as described here http://groups.google.com/group/scala-internals/browse_thread/thread/545462b377b0ac0a)
+ // `dominates` might decide that `Bar` dominates `Foo` and therefore a recursive implicit search should be prohibited
+ // now when we yield control of divergent expansions to the macro writer, what happens next?
+ // in the worst case, if the macro writer is careless, we'll get a StackOverflowError from repeated macro calls
+ // otherwise, the macro writer could check `c.openMacros` and `c.openImplicits` and do `c.abort` when expansions are deemed to be divergent
+ // upon receiving `c.abort` the typechecker will decide that the corresponding implicit search has failed
+ // which will fail the entire stack of implicit searches, producing a nice error message provided by the programmer
+ (context.openImplicits find { case OpenImplicit(info, tp, tree1) => !info.sym.isMacro && tree1.symbol == tree.symbol && dominates(pt, tp)}) match {
case Some(pending) =>
//println("Pending implicit "+pending+" dominates "+pt+"/"+undetParams) //@MDEBUG
- throw DivergentImplicit
+ DivergentSearchFailure
case None =>
try {
- context.openImplicits = (pt, tree) :: context.openImplicits
+ context.openImplicits = OpenImplicit(info, pt, tree) :: context.openImplicits
// println(" "*context.openImplicits.length+"typed implicit "+info+" for "+pt) //@MDEBUG
- typedImplicit0(info, ptChecked, isLocal)
- } catch {
- case ex: DivergentImplicit =>
+ val result = typedImplicit0(info, ptChecked, isLocal)
+ if (result.isDivergent) {
//println("DivergentImplicit for pt:"+ pt +", open implicits:"+context.openImplicits) //@MDEBUG
- if (context.openImplicits.tail.isEmpty) {
- if (!(pt.isErroneous))
- DivergingImplicitExpansionError(tree, pt, info.sym)(context)
- SearchFailure
- } else {
- throw DivergentImplicit
- }
+ if (context.openImplicits.tail.isEmpty && !pt.isErroneous)
+ DivergingImplicitExpansionError(tree, pt, info.sym)(context)
+ }
+ result
} finally {
context.openImplicits = context.openImplicits.tail
}
@@ -584,9 +634,11 @@ trait Implicits {
)
case _ => fallback
}
- if (context.hasErrors) {
- log("implicit adapt failed: " + context.errBuffer.head.errMsg)
- return fail(context.errBuffer.head.errMsg)
+ context.firstError match { // using match rather than foreach to avoid non local return.
+ case Some(err) =>
+ log("implicit adapt failed: " + err.errMsg)
+ return fail(err.errMsg)
+ case None =>
}
if (Statistics.canEnable) Statistics.incCounter(typedImplicits)
@@ -609,7 +661,7 @@ trait Implicits {
}
if (context.hasErrors)
- fail("hasMatchingSymbol reported error: " + context.errBuffer.head.errMsg)
+ fail("hasMatchingSymbol reported error: " + context.firstError.get.errMsg)
else if (isLocal && !hasMatchingSymbol(itree1))
fail("candidate implicit %s is shadowed by %s".format(
info.sym.fullLocationString, itree1.symbol.fullLocationString))
@@ -632,8 +684,11 @@ trait Implicits {
// #2421: check that we correctly instantiated type parameters outside of the implicit tree:
checkBounds(itree2, NoPrefix, NoSymbol, undetParams, targs, "inferred ")
- if (context.hasErrors)
- return fail("type parameters weren't correctly instantiated outside of the implicit tree: " + context.errBuffer.head.errMsg)
+ context.firstError match {
+ case Some(err) =>
+ return fail("type parameters weren't correctly instantiated outside of the implicit tree: " + err.errMsg)
+ case None =>
+ }
// filter out failures from type inference, don't want to remove them from undetParams!
// we must be conservative in leaving type params in undetparams
@@ -668,13 +723,14 @@ trait Implicits {
case t => t
}
- if (context.hasErrors)
- fail("typing TypeApply reported errors for the implicit tree: " + context.errBuffer.head.errMsg)
- else {
- val result = new SearchResult(itree2, subst)
- if (Statistics.canEnable) Statistics.incCounter(foundImplicits)
- printInference("[success] found %s for pt %s".format(result, ptInstantiated))
- result
+ context.firstError match {
+ case Some(err) =>
+ fail("typing TypeApply reported errors for the implicit tree: " + err.errMsg)
+ case None =>
+ val result = new SearchResult(itree2, subst)
+ if (Statistics.canEnable) Statistics.incCounter(foundImplicits)
+ printInference("[success] found %s for pt %s".format(result, ptInstantiated))
+ result
}
}
else fail("incompatible: %s does not match expected type %s".format(itree2.tpe, ptInstantiated))
@@ -783,15 +839,20 @@ trait Implicits {
/** Preventing a divergent implicit from terminating implicit search,
* so that if there is a best candidate it can still be selected.
*/
- private var divergence = false
- private val divergenceHandler: PartialFunction[Throwable, SearchResult] = {
- var remaining = 1;
- { case x: DivergentImplicit if remaining > 0 =>
- remaining -= 1
- divergence = true
- log("discarding divergent implicit during implicit search")
+ object DivergentImplicitRecovery {
+ // symbol of the implicit that caused the divergence.
+ // Initially null, will be saved on first diverging expansion.
+ private var implicitSym: Symbol = _
+ private var countdown: Int = 1
+
+ def sym: Symbol = implicitSym
+ def apply(search: SearchResult, i: ImplicitInfo): SearchResult =
+ if (search.isDivergent && countdown > 0) {
+ countdown -= 1
+ implicitSym = i.sym
+ log("discarding divergent implicit ${implicitSym} during implicit search")
SearchFailure
- }
+ } else search
}
/** Sorted list of eligible implicits.
@@ -820,15 +881,15 @@ trait Implicits {
@tailrec private def rankImplicits(pending: Infos, acc: Infos): Infos = pending match {
case Nil => acc
case i :: is =>
- def tryImplicitInfo(i: ImplicitInfo) =
- try typedImplicit(i, ptChecked = true, isLocal)
- catch divergenceHandler
-
- tryImplicitInfo(i) match {
+ DivergentImplicitRecovery(typedImplicit(i, ptChecked = true, isLocal), i) match {
+ case sr if sr.isDivergent =>
+ Nil
case sr if sr.isFailure =>
// We don't want errors that occur during checking implicit info
// to influence the check of further infos.
- context.condBufferFlush(_.kind != ErrorKinds.Divergent)
+ context.reportBuffer.retainErrors {
+ case err: DivergentImplicitTypeError => true
+ }
rankImplicits(is, acc)
case newBest =>
best = newBest
@@ -876,13 +937,14 @@ trait Implicits {
/* If there is no winner, and we witnessed and caught divergence,
* now we can throw it for the error message.
*/
- if (divergence)
- throw DivergentImplicit
+ if (DivergentImplicitRecovery.sym != null) {
+ DivergingImplicitExpansionError(tree, pt, DivergentImplicitRecovery.sym)(context)
+ }
if (invalidImplicits.nonEmpty)
setAddendum(pos, () =>
- "\n Note: implicit "+invalidImplicits.head+" is not applicable here"+
- " because it comes after the application point and it lacks an explicit result type")
+ s"\n Note: implicit ${invalidImplicits.head} is not applicable here because it comes after the application point and it lacks an explicit result type"
+ )
}
best
@@ -1085,8 +1147,10 @@ trait Implicits {
try {
val tree1 = typedPos(pos.focus)(arg)
- if (context.hasErrors) processMacroExpansionError(context.errBuffer.head.errPos, context.errBuffer.head.errMsg)
- else new SearchResult(tree1, EmptyTreeTypeSubstituter)
+ context.firstError match {
+ case Some(err) => processMacroExpansionError(err.errPos, err.errMsg)
+ case None => new SearchResult(tree1, EmptyTreeTypeSubstituter)
+ }
} catch {
case ex: TypeError =>
processMacroExpansionError(ex.pos, ex.msg)
@@ -1194,8 +1258,8 @@ trait Implicits {
// looking for a manifest of a type parameter that hasn't been inferred by now,
// can't do much, but let's not fail
else if (undetParams contains sym) {
- // #3859: need to include the mapping from sym -> NothingClass.tpe in the SearchResult
- mot(NothingClass.tpe, sym :: from, NothingClass.tpe :: to)
+ // #3859: need to include the mapping from sym -> NothingTpe in the SearchResult
+ mot(NothingTpe, sym :: from, NothingTpe :: to)
} else {
// a manifest should have been found by normal searchImplicit
EmptyTree
@@ -1302,10 +1366,10 @@ trait Implicits {
val wasAmbigious = result.isAmbiguousFailure // SI-6667, never search companions after an ambiguous error in in-scope implicits
result = materializeImplicit(pt)
-
// `materializeImplicit` does some preprocessing for `pt`
// is it only meant for manifests/tags or we need to do the same for `implicitsOfExpectedType`?
- if (result.isFailure && !wasAmbigious) result = searchImplicit(implicitsOfExpectedType, isLocal = false)
+ if (result.isFailure && !wasAmbigious)
+ result = searchImplicit(implicitsOfExpectedType, isLocal = false)
if (result.isFailure) {
context.updateBuffer(previousErrs)
@@ -1315,9 +1379,18 @@ trait Implicits {
if (Statistics.canEnable) Statistics.incCounter(oftypeImplicitHits)
}
}
-
- if (result.isFailure && settings.debug)
- log("no implicits found for "+pt+" "+pt.typeSymbol.info.baseClasses+" "+implicitsOfExpectedType)
+ if (result.isSuccess && isView) {
+ if (isInvalidConversionTarget(pt)) {
+ context.issueAmbiguousError(AmbiguousImplicitTypeError(tree, "the result type of an implicit conversion must be more specific than AnyRef"))
+ result = SearchFailure
+ }
+ else if (isInvalidConversionSource(pt)) {
+ context.issueAmbiguousError(AmbiguousImplicitTypeError(tree, "an expression of type Null is ineligible for implicit conversion"))
+ result = SearchFailure
+ }
+ }
+ if (result.isFailure)
+ debuglog("no implicits found for "+pt+" "+pt.typeSymbol.info.baseClasses+" "+implicitsOfExpectedType)
result
}
@@ -1430,6 +1503,3 @@ object ImplicitsStats {
val implicitCacheAccs = Statistics.newCounter ("implicit cache accesses", "typer")
val implicitCacheHits = Statistics.newSubCounter("implicit cache hits", implicitCacheAccs)
}
-
-class DivergentImplicit extends Exception
-object DivergentImplicit extends DivergentImplicit
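
To close out the Implicits changes, here is a small standalone model of the control-flow shift above: divergence is carried as a flag on the search result instead of a thrown DivergentImplicit, and a one-shot recovery object downgrades the first divergent candidate to an ordinary failure so the remaining candidates can still be ranked. All names here (Result, Recovery, Candidate) are toy stand-ins, not compiler API.

    object DivergenceDemo {
      sealed trait Result { def isDivergent: Boolean = false; def isFailure: Boolean = true }
      case class Found(value: String) extends Result { override def isFailure = false }
      case object Failed    extends Result
      case object Divergent extends Result { override def isDivergent = true }

      final case class Candidate(name: String, run: () => Result)

      // One-shot recovery: the first divergent result is remembered and converted to Failed.
      final class Recovery {
        private var countdown = 1
        private var culprit: Option[String] = None
        def blame: Option[String] = culprit
        def apply(res: Result, c: Candidate): Result =
          if (res.isDivergent && countdown > 0) { countdown -= 1; culprit = Some(c.name); Failed }
          else res
      }

      def search(candidates: List[Candidate]): Result = {
        val recovery = new Recovery
        candidates.iterator
          .map(c => recovery(c.run(), c))
          .collectFirst { case found: Found => found }
          .getOrElse(if (recovery.blame.isDefined) Divergent else Failed)
      }

      def main(args: Array[String]): Unit = {
        val candidates = List(
          Candidate("loops", () => Divergent),        // the first divergence is swallowed once
          Candidate("good",  () => Found("implicit"))
        )
        println(search(candidates)) // Found(implicit)
      }
    }

If no winner is found but a divergence was swallowed, the toy search surfaces Divergent at the end, mirroring how the patch issues DivergingImplicitExpansionError for DivergentImplicitRecovery.sym once ranking is over.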
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 8d7830897d..961ef484d8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -6,12 +6,11 @@
package scala.tools.nsc
package typechecker
-import scala.collection.immutable
-import scala.collection.mutable.ListBuffer
+import scala.collection.{ mutable, immutable }
import scala.util.control.ControlThrowable
import symtab.Flags._
-/** This trait ...
+/** This trait contains methods related to type parameter inference.
*
* @author Martin Odersky
* @version 1.0
@@ -24,11 +23,6 @@ trait Infer extends Checkable {
import typer.printInference
import typeDebug.ptBlock
-/* -- Type parameter inference utility functions --------------------------- */
-
- private def assertNonCyclic(tvar: TypeVar) =
- assert(tvar.constr.inst != tvar, tvar.origin)
-
/** The formal parameter types corresponding to `formals`.
* If `formals` has a repeated last parameter, a list of
* (nargs - params.length + 1) copies of its type is returned.
@@ -38,10 +32,7 @@ trait Infer extends Checkable {
* @param removeRepeated allows keeping repeated parameter (if there's one argument). Used in NamesDefaults.
*/
def formalTypes(formals: List[Type], nargs: Int, removeByName: Boolean = true, removeRepeated: Boolean = true): List[Type] = {
- val formals1 = if (removeByName) formals mapConserve {
- case TypeRef(_, ByNameParamClass, List(arg)) => arg
- case formal => formal
- } else formals
+ val formals1 = if (removeByName) formals mapConserve dropByName else formals
if (isVarArgTypes(formals1) && (removeRepeated || formals.length != nargs)) {
val ft = formals1.last.dealiasWiden.typeArgs.head
formals1.init ::: (for (i <- List.range(formals1.length - 1, nargs)) yield ft)
@@ -54,9 +45,9 @@ trait Infer extends Checkable {
*/
private def bestAlternatives(alternatives: List[Symbol])(isBetter: (Symbol, Symbol) => Boolean): List[Symbol] = {
def improves(sym1: Symbol, sym2: Symbol) = (
- sym2 == NoSymbol
+ (sym2 eq NoSymbol)
|| sym2.isError
- || sym2.hasAnnotation(BridgeClass)
+ || (sym2 hasAnnotation BridgeClass)
|| isBetter(sym1, sym2)
)
@@ -66,12 +57,26 @@ trait Infer extends Checkable {
}
}
+ // we must not allow CyclicReference to be thrown when sym.info is called
+ // in checkAccessible, because that would mark the symbol erroneous, which it
+ // is not. But if it's a true CyclicReference then macro def will report it.
+ // See comments to TypeSigError for an explanation of this special case.
+ // [Eugene] is there a better way?
+ private object CheckAccessibleMacroCycle extends TypeCompleter {
+ val tree = EmptyTree
+ override def complete(sym: Symbol) = ()
+ }
+
/** Returns `(formals, formalsExpanded)` where `formalsExpanded` are the expected types
* for the `nbSubPats` sub-patterns of an extractor pattern, of which the corresponding
* unapply[Seq] call is assumed to have result type `resTp`.
*
* `formals` are the formal types before expanding a potential repeated parameter (must come last in `formals`, if at all)
*
+ * @param nbSubPats The number of arguments to the extractor pattern
+ * @param effectiveNbSubPats `nbSubPats`, unless there is one sub-pattern which, after unwrapping
+ * bind patterns, is a Tuple pattern, in which case it is the number of
+ * elements. Used to issue warnings about binding a `TupleN` to a single value.
* @throws TypeError when the unapply[Seq] definition is ill-typed
* @returns (null, null) when the expected number of sub-patterns cannot be satisfied by the given extractor
*
@@ -101,7 +106,8 @@ trait Infer extends Checkable {
* `T_1, ..., T_m, U, ..., U`. Here, `U` is repeated `n-m` times.
*
*/
- def extractorFormalTypes(pos: Position, resTp: Type, nbSubPats: Int, unappSym: Symbol): (List[Type], List[Type]) = {
+ def extractorFormalTypes(pos: Position, resTp: Type, nbSubPats: Int,
+ unappSym: Symbol, effectiveNbSubPats: Int): (List[Type], List[Type]) = {
val isUnapplySeq = unappSym.name == nme.unapplySeq
val booleanExtractor = resTp.typeSymbolDirect == BooleanClass
@@ -131,8 +137,9 @@ trait Infer extends Checkable {
else if (optionArgs.nonEmpty)
if (nbSubPats == 1) {
val productArity = productArgs.size
- if (productArity > 1 && settings.lint)
- global.currentUnit.warning(pos, s"extractor pattern binds a single value to a Product${productArity} of type ${optionArgs.head}")
+ if (productArity > 1 && productArity != effectiveNbSubPats && settings.lint)
+ global.currentUnit.warning(pos,
+ s"extractor pattern binds a single value to a Product${productArity} of type ${optionArgs.head}")
optionArgs
}
// TODO: update spec to reflect we allow any ProductN, not just TupleN
@@ -167,46 +174,34 @@ trait Infer extends Checkable {
*/
object instantiate extends TypeMap {
private var excludedVars = immutable.Set[TypeVar]()
+ private def applyTypeVar(tv: TypeVar): Type = tv match {
+ case TypeVar(origin, constr) if !constr.instValid => throw new DeferredNoInstance(() => s"no unique instantiation of type variable $origin could be found")
+ case _ if excludedVars(tv) => throw new NoInstance("cyclic instantiation")
+ case TypeVar(_, constr) =>
+ excludedVars += tv
+ try apply(constr.inst)
+ finally excludedVars -= tv
+ }
def apply(tp: Type): Type = tp match {
- case WildcardType | BoundedWildcardType(_) | NoType =>
- throw new NoInstance("undetermined type")
- case tv @ TypeVar(origin, constr) if !tv.untouchable =>
- if (constr.inst == NoType) {
- throw new DeferredNoInstance(() =>
- s"no unique instantiation of type variable $origin could be found")
- } else if (excludedVars(tv)) {
- throw new NoInstance("cyclic instantiation")
- } else {
- excludedVars += tv
- try apply(constr.inst)
- finally excludedVars -= tv
- }
- case _ =>
- mapOver(tp)
+ case WildcardType | BoundedWildcardType(_) | NoType => throw new NoInstance("undetermined type")
+ case tv: TypeVar if !tv.untouchable => applyTypeVar(tv)
+ case _ => mapOver(tp)
}
}
+ @inline final def falseIfNoInstance(body: => Boolean): Boolean =
+ try body catch { case _: NoInstance => false }
+
/** Is type fully defined, i.e. no embedded anytypes or wildcards in it?
*/
private[typechecker] def isFullyDefined(tp: Type): Boolean = tp match {
- case WildcardType | BoundedWildcardType(_) | NoType =>
- false
- case NoPrefix | ThisType(_) | ConstantType(_) =>
- true
- case TypeRef(pre, sym, args) =>
- isFullyDefined(pre) && (args forall isFullyDefined)
- case SingleType(pre, sym) =>
- isFullyDefined(pre)
- case RefinedType(ts, decls) =>
- ts forall isFullyDefined
- case TypeVar(origin, constr) if (constr.inst == NoType) =>
- false
- case _ =>
- try {
- instantiate(tp); true
- } catch {
- case ex: NoInstance => false
- }
+ case WildcardType | BoundedWildcardType(_) | NoType => false
+ case NoPrefix | ThisType(_) | ConstantType(_) => true
+ case TypeRef(pre, _, args) => isFullyDefined(pre) && (args forall isFullyDefined)
+ case SingleType(pre, _) => isFullyDefined(pre)
+ case RefinedType(ts, _) => ts forall isFullyDefined
+ case TypeVar(_, constr) if constr.inst == NoType => false
+ case _ => falseIfNoInstance({ instantiate(tp) ; true })
}
/** Solve constraint collected in types `tvars`.
@@ -258,18 +253,12 @@ trait Infer extends Checkable {
* This method seems to be performance critical.
*/
def normalize(tp: Type): Type = tp match {
- case pt @ PolyType(tparams, restpe) =>
- logResult(s"Normalizing $tp in infer")(normalize(restpe))
- case mt @ MethodType(params, restpe) if mt.isImplicit =>
- normalize(restpe)
- case mt @ MethodType(_, restpe) if !mt.isDependentMethodType =>
- functionType(mt.paramTypes, normalize(restpe))
- case NullaryMethodType(restpe) =>
- normalize(restpe)
- case ExistentialType(tparams, qtpe) =>
- newExistentialType(tparams, normalize(qtpe))
- case tp1 =>
- tp1 // @MAT aliases already handled by subtyping
+ case PolyType(_, restpe) => logResult(s"Normalizing $tp in infer")(normalize(restpe))
+ case mt @ MethodType(_, restpe) if mt.isImplicit => normalize(restpe)
+ case mt @ MethodType(_, restpe) if !mt.isDependentMethodType => functionType(mt.paramTypes, normalize(restpe))
+ case NullaryMethodType(restpe) => normalize(restpe)
+ case ExistentialType(tparams, qtpe) => newExistentialType(tparams, normalize(qtpe))
+ case _ => tp // @MAT aliases already handled by subtyping
}
private lazy val stdErrorClass = rootMirror.RootClass.newErrorClass(tpnme.ERROR)
@@ -281,13 +270,13 @@ trait Infer extends Checkable {
/* -- Error Messages --------------------------------------------------- */
def setError[T <: Tree](tree: T): T = {
- debuglog("set error: "+ tree)
- // this breaks -Ydebug pretty radically
- // if (settings.debug.value) { // DEBUG
- // println("set error: "+tree);
- // throw new Error()
- // }
- def name = newTermName("<error: " + tree.symbol + ">")
+ // SI-7388, one can incur a cycle calling sym.toString
+ // (but it'd be nicer if that weren't so)
+ def name = {
+ val sym = tree.symbol
+ val nameStr = try sym.toString catch { case _: CyclicReference => sym.nameString }
+ newTermName(s"<error: $nameStr>")
+ }
def errorClass = if (context.reportErrors) context.owner.newErrorClass(name.toTypeName) else stdErrorClass
def errorValue = if (context.reportErrors) context.owner.newErrorValue(name) else stdErrorValue
def errorSym = if (tree.isType) errorClass else errorValue
@@ -302,102 +291,87 @@ trait Infer extends Checkable {
def issue(err: AbsTypeError): Unit = context.issue(err)
- def isPossiblyMissingArgs(found: Type, req: Type) = (
- false
- /* However it is that this condition is expected to imply
- * "is possibly missing args", it is too weak. It is
- * better to say nothing than to offer misleading guesses.
-
- * (found.resultApprox ne found) && isWeaklyCompatible(found.resultApprox, req)
- */
- )
-
def explainTypes(tp1: Type, tp2: Type) = {
if (context.reportErrors)
withDisambiguation(List(), tp1, tp2)(global.explainTypes(tp1, tp2))
}
+ // Called when filtering sym down to the accessible alternatives leaves us empty-handed.
+ private def checkAccessibleError(tree: Tree, sym: Symbol, pre: Type, site: Tree): Tree = {
+ if (settings.debug) {
+ Console.println(context)
+ Console.println(tree)
+ Console.println("" + pre + " " + sym.owner + " " + context.owner + " " + context.outer.enclClass.owner + " " + sym.owner.thisType + (pre =:= sym.owner.thisType))
+ }
+ ErrorUtils.issueTypeError(AccessError(tree, sym, pre, context.enclClass.owner,
+ if (settings.check.isDefault)
+ analyzer.lastAccessCheckDetails
+ else
+ ptBlock("because of an internal error (no accessible symbol)",
+ "sym.ownerChain" -> sym.ownerChain,
+ "underlyingSymbol(sym)" -> underlyingSymbol(sym),
+ "pre" -> pre,
+ "site" -> site,
+ "tree" -> tree,
+ "sym.accessBoundary(sym.owner)" -> sym.accessBoundary(sym.owner),
+ "context.owner" -> context.owner,
+ "context.outer.enclClass.owner" -> context.outer.enclClass.owner
+ )
+ ))(context)
+
+ setError(tree)
+ }
+
/* -- Tests & Checks---------------------------------------------------- */
/** Check that `sym` is defined and accessible as a member of
* tree `site` with type `pre` in current context.
+ * @PP: In case it's not abundantly obvious to anyone who might read
+ * this, the method does a lot more than "check" these things, as does
+ * nearly every method in the compiler, so don't act all shocked.
+ * This particular example "checks" its way to assigning both the
+ * symbol and type of the incoming tree, in addition to forcing lots
+ * of symbol infos on its way to transforming java raw types (but
+ * only of terms - why?)
*
* Note: pre is not refchecked -- moreover, refchecking the resulting tree may not refcheck pre,
* since pre may not occur in its type (callers should wrap the result in a TypeTreeWithDeferredRefCheck)
*/
- def checkAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): Tree =
- if (sym.isError) {
- tree setSymbol sym setType ErrorType
- } else {
- if (context.unit.exists)
- context.unit.depends += sym.enclosingTopLevelClass
-
- var sym1 = sym filter (alt => context.isAccessible(alt, pre, site.isInstanceOf[Super]))
- // Console.println("check acc " + (sym, sym1) + ":" + (sym.tpe, sym1.tpe) + " from " + pre);//DEBUG
- if (sym1 == NoSymbol && sym.isJavaDefined && context.unit.isJava) // don't try to second guess Java; see #4402
- sym1 = sym
-
- if (sym1 == NoSymbol) {
- if (settings.debug) {
- Console.println(context)
- Console.println(tree)
- Console.println("" + pre + " " + sym.owner + " " + context.owner + " " + context.outer.enclClass.owner + " " + sym.owner.thisType + (pre =:= sym.owner.thisType))
- }
- ErrorUtils.issueTypeError(AccessError(tree, sym, pre, context.enclClass.owner,
- if (settings.check.isDefault)
- analyzer.lastAccessCheckDetails
- else
- ptBlock("because of an internal error (no accessible symbol)",
- "sym.ownerChain" -> sym.ownerChain,
- "underlyingSymbol(sym)" -> underlyingSymbol(sym),
- "pre" -> pre,
- "site" -> site,
- "tree" -> tree,
- "sym.accessBoundary(sym.owner)" -> sym.accessBoundary(sym.owner),
- "context.owner" -> context.owner,
- "context.outer.enclClass.owner" -> context.outer.enclClass.owner
- )
- ))(context)
- setError(tree)
- }
- else {
- if (context.owner.isTermMacro && (sym1 hasFlag LOCKED)) {
- // we must not let CyclicReference to be thrown from sym1.info
- // because that would mark sym1 erroneous, which it is not
- // but if it's a true CyclicReference then macro def will report it
- // see comments to TypeSigError for an explanation of this special case
- // [Eugene] is there a better way?
- val dummy = new TypeCompleter { val tree = EmptyTree; override def complete(sym: Symbol) {} }
- throw CyclicReference(sym1, dummy)
- }
+ def checkAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): Tree = {
+ def malformed(ex: MalformedType, instance: Type): Type = {
+ val what = if (ex.msg contains "malformed type") "is malformed" else s"contains a ${ex.msg}"
+ val message = s"\n because its instance type $instance $what"
+ val error = AccessError(tree, sym, pre, context.enclClass.owner, message)
+ ErrorUtils.issueTypeError(error)(context)
+ ErrorType
+ }
+ def accessible = sym filter (alt => context.isAccessible(alt, pre, site.isInstanceOf[Super])) match {
+ case NoSymbol if sym.isJavaDefined && context.unit.isJava => sym // don't try to second guess Java; see #4402
+ case sym1 => sym1
+ }
+ // XXX So... what's this for exactly?
+ if (context.unit.exists)
+ context.unit.depends += sym.enclosingTopLevelClass
- if (sym1.isTerm)
- sym1.cookJavaRawInfo() // xform java rawtypes into existentials
-
- val owntype = {
- try pre.memberType(sym1)
- catch {
- case ex: MalformedType =>
- if (settings.debug) ex.printStackTrace
- val sym2 = underlyingSymbol(sym1)
- val itype = pre.memberType(sym2)
- ErrorUtils.issueTypeError(
- AccessError(tree, sym, pre, context.enclClass.owner,
- "\n because its instance type "+itype+
- (if ("malformed type: "+itype.toString==ex.msg) " is malformed"
- else " contains a "+ex.msg)))(context)
- ErrorType
- }
- }
- tree setSymbol sym1 setType {
+ if (sym.isError)
+ tree setSymbol sym setType ErrorType
+ else accessible match {
+ case NoSymbol => checkAccessibleError(tree, sym, pre, site)
+ case sym if context.owner.isTermMacro && (sym hasFlag LOCKED) => throw CyclicReference(sym, CheckAccessibleMacroCycle)
+ case sym =>
+ val sym1 = if (sym.isTerm) sym.cookJavaRawInfo() else sym // xform java rawtypes into existentials
+ val owntype = (
+ try pre memberType sym1
+ catch { case ex: MalformedType => malformed(ex, pre memberType underlyingSymbol(sym)) }
+ )
+ tree setSymbol sym1 setType (
pre match {
case _: SuperType => owntype map (tp => if (tp eq pre) site.symbol.thisType else tp)
case _ => owntype
}
- }
- }
+ )
}
-
+ }
/** "Compatible" means conforming after conversions.
* "Raising to a thunk" is not implicit; therefore, for purposes of applicability and
@@ -408,45 +382,38 @@ trait Infer extends Checkable {
* since that induces a tie between m(=>A) and m(=>A,B*) [SI-3761]
*/
private def isCompatible(tp: Type, pt: Type): Boolean = {
- def isCompatibleByName(tp: Type, pt: Type): Boolean = pt match {
- case TypeRef(_, ByNameParamClass, List(res)) if !isByNameParamType(tp) => isCompatible(tp, res)
- case _ => false
- }
+ def isCompatibleByName(tp: Type, pt: Type): Boolean = (
+ isByNameParamType(pt)
+ && !isByNameParamType(tp)
+ && isCompatible(tp, dropByName(pt))
+ )
val tp1 = normalize(tp)
- (tp1 weak_<:< pt) || isCoercible(tp1, pt) || isCompatibleByName(tp, pt)
+
+ ( (tp1 weak_<:< pt)
+ || isCoercible(tp1, pt)
+ || isCompatibleByName(tp, pt)
+ )
}
- def isCompatibleArgs(tps: List[Type], pts: List[Type]) =
- (tps corresponds pts)(isCompatible)
+ def isCompatibleArgs(tps: List[Type], pts: List[Type]) = (tps corresponds pts)(isCompatible)
- def isWeaklyCompatible(tp: Type, pt: Type): Boolean =
- pt.typeSymbol == UnitClass || // can perform unit coercion
- isCompatible(tp, pt) ||
- tp.isInstanceOf[MethodType] && // can perform implicit () instantiation
- tp.params.isEmpty && isCompatible(tp.resultType, pt)
+ def isWeaklyCompatible(tp: Type, pt: Type): Boolean = {
+ def isCompatibleNoParamsMethod = tp match {
+ case MethodType(Nil, restpe) => isCompatible(restpe, pt)
+ case _ => false
+ }
+ ( pt.typeSymbol == UnitClass // can perform unit coercion
+ || isCompatible(tp, pt)
+ || isCompatibleNoParamsMethod // can perform implicit () instantiation
+ )
+ }
- /** Like weakly compatible but don't apply any implicit conversions yet.
+ /* Like weakly compatible but don't apply any implicit conversions yet.
* Used when comparing the result type of a method with its prototype.
- *
- * [Martin] I think Infer is also created by Erasure, with the default
- * implementation of isCoercible
- * [Paulp] (Assuming the above must refer to my comment on isCoercible)
- * Nope, I examined every occurrence of Inferencer in trunk. It
- * appears twice as a self-type, once at its definition, and once
- * where it is instantiated in Typers. There are no others.
- *
- % ack -A0 -B0 --no-filename '\bInferencer\b' src
- self: Inferencer =>
- self: Inferencer =>
- class Inferencer(context: Context) extends InferencerContextErrors with InferCheckable {
- val infer = new Inferencer(context0) {
*/
def isConservativelyCompatible(tp: Type, pt: Type): Boolean =
context.withImplicitsDisabled(isWeaklyCompatible(tp, pt))
- /** This is overridden in the Typer.infer with some logic, but since
- * that's the only place in the compiler an Inferencer is ever created,
- * I suggest this should either be abstract or have the implementation.
- */
+ // Overridden at the point of instantiation, where inferView is visible.
def isCoercible(tp: Type, pt: Type): Boolean = false
/* -- Type instantiation------------------------------------------------ */
@@ -455,49 +422,50 @@ trait Infer extends Checkable {
* by existentially bound variables.
*/
def makeFullyDefined(tp: Type): Type = {
- val tparams = new ListBuffer[Symbol]
+ var tparams: List[Symbol] = Nil
def addTypeParam(bounds: TypeBounds): Type = {
val tparam = context.owner.newExistential(newTypeName("_"+tparams.size), context.tree.pos.focus) setInfo bounds
- tparams += tparam
+ tparams ::= tparam
tparam.tpe
}
val tp1 = tp map {
- case WildcardType =>
- addTypeParam(TypeBounds.empty)
- case BoundedWildcardType(bounds) =>
- addTypeParam(bounds)
- case t => t
+ case WildcardType => addTypeParam(TypeBounds.empty)
+ case BoundedWildcardType(bounds) => addTypeParam(bounds)
+ case t => t
}
- existentialAbstraction(tparams.toList, tp1)
+ if (tp eq tp1) tp
+ else existentialAbstraction(tparams.reverse, tp1)
}
+ def ensureFullyDefined(tp: Type): Type = if (isFullyDefined(tp)) tp else makeFullyDefined(tp)
/** Return inferred type arguments of polymorphic expression, given
- * its type parameters and result type and a prototype `pt`.
- * If no minimal type variables exist that make the
- * instantiated type a subtype of `pt`, return null.
+ * type vars, its type parameters and result type and a prototype `pt`.
+ * If the type variables cannot be instantiated such that the type
+ * conforms to `pt`, return null.
*/
- private def exprTypeArgs(tparams: List[Symbol], restpe: Type, pt: Type, useWeaklyCompatible: Boolean = false): (List[Type], List[TypeVar]) = {
- val tvars = tparams map freshVar
- val instResTp = restpe.instantiateTypeParams(tparams, tvars)
- if ( if (useWeaklyCompatible) isWeaklyCompatible(instResTp, pt) else isCompatible(instResTp, pt) ) {
- try {
- // If the restpe is an implicit method, and the expected type is fully defined
- // optimize type variables wrt to the implicit formals only; ignore the result type.
- // See test pos/jesper.scala
- val varianceType = restpe match {
- case mt: MethodType if mt.isImplicit && isFullyDefined(pt) =>
- MethodType(mt.params, AnyClass.tpe)
- case _ =>
- restpe
- }
- //println("try to solve "+tvars+" "+tparams)
- (solvedTypes(tvars, tparams, tparams map varianceInType(varianceType),
- upper = false, lubDepth(List(restpe, pt))), tvars)
- } catch {
- case ex: NoInstance => (null, null)
- }
- } else (null, null)
+ private def exprTypeArgs(tvars: List[TypeVar], tparams: List[Symbol], restpe: Type, pt: Type, useWeaklyCompatible: Boolean): List[Type] = {
+ def restpeInst = restpe.instantiateTypeParams(tparams, tvars)
+ def conforms = if (useWeaklyCompatible) isWeaklyCompatible(restpeInst, pt) else isCompatible(restpeInst, pt)
+ // If the restpe is an implicit method, and the expected type is fully defined
+ // optimize type variables wrt to the implicit formals only; ignore the result type.
+ // See test pos/jesper.scala
+ def variance = restpe match {
+ case mt: MethodType if mt.isImplicit && isFullyDefined(pt) => MethodType(mt.params, AnyTpe)
+ case _ => restpe
+ }
+ def solve() = solvedTypes(tvars, tparams, tparams map varianceInType(variance), upper = false, lubDepth(restpe :: pt :: Nil))
+
+ if (conforms)
+ try solve() catch { case _: NoInstance => null }
+ else
+ null
}
+ /** Overload which allocates fresh type vars.
+ * The other one exists because apparently inferExprInstance needs access to the typevars
+ * after the call, and its wasteful to return a tuple and throw it away almost every time.
+ */
+ private def exprTypeArgs(tparams: List[Symbol], restpe: Type, pt: Type, useWeaklyCompatible: Boolean): List[Type] =
+ exprTypeArgs(tparams map freshVar, tparams, restpe, pt, useWeaklyCompatible)
/** Return inferred proto-type arguments of function, given
* its type and value parameters and result type, and a
@@ -508,22 +476,20 @@ trait Infer extends Checkable {
* If instantiation of a type parameter fails,
* take WildcardType for the proto-type argument.
*/
- def protoTypeArgs(tparams: List[Symbol], formals: List[Type], restpe: Type,
- pt: Type): List[Type] = {
- /* Map type variable to its instance, or, if `variance` is covariant/contravariant,
- * to its upper/lower bound */
+ def protoTypeArgs(tparams: List[Symbol], formals: List[Type], restpe: Type, pt: Type): List[Type] = {
+ // Map type variable to its instance, or, if `variance` is variant,
+ // to its upper or lower bound
def instantiateToBound(tvar: TypeVar, variance: Variance): Type = {
lazy val hiBounds = tvar.constr.hiBounds
lazy val loBounds = tvar.constr.loBounds
- lazy val upper = glb(hiBounds)
- lazy val lower = lub(loBounds)
+ lazy val upper = glb(hiBounds)
+ lazy val lower = lub(loBounds)
def setInst(tp: Type): Type = {
tvar setInst tp
- assertNonCyclic(tvar)//debug
+ assert(tvar.constr.inst != tvar, tvar.origin)
instantiate(tvar.constr.inst)
}
- //Console.println("instantiate "+tvar+tvar.constr+" variance = "+variance);//DEBUG
- if (tvar.constr.inst != NoType)
+ if (tvar.constr.instValid)
instantiate(tvar.constr.inst)
else if (loBounds.nonEmpty && variance.isContravariant)
setInst(lower)
@@ -532,6 +498,7 @@ trait Infer extends Checkable {
else
WildcardType
}
+
val tvars = tparams map freshVar
if (isConservativelyCompatible(restpe.instantiateTypeParams(tparams, tvars), pt))
map2(tparams, tvars)((tparam, tvar) =>
@@ -546,8 +513,8 @@ trait Infer extends Checkable {
* and the code is not exactly readable.
*/
object AdjustedTypeArgs {
- val Result = scala.collection.mutable.LinkedHashMap
- type Result = scala.collection.mutable.LinkedHashMap[Symbol, Option[Type]]
+ val Result = mutable.LinkedHashMap
+ type Result = mutable.LinkedHashMap[Symbol, Option[Type]]
def unapply(m: Result): Some[(List[Symbol], List[Type])] = Some(toLists(
(m collect {case (p, Some(a)) => (p, a)}).unzip ))
@@ -564,7 +531,7 @@ trait Infer extends Checkable {
def unapply(m: Result): Some[(List[Symbol], List[Type], List[Type], List[Symbol])] = Some(toLists{
val (ok, nok) = m.map{case (p, a) => (p, a.getOrElse(null))}.partition(_._2 ne null)
val (okArgs, okTparams) = ok.unzip
- (okArgs, okTparams, m.values.map(_.getOrElse(NothingClass.tpe)), nok.keys)
+ (okArgs, okTparams, m.values.map(_.getOrElse(NothingTpe)), nok.keys)
})
}
@@ -614,7 +581,7 @@ trait Infer extends Checkable {
/** Return inferred type arguments, given type parameters, formal parameters,
* argument types, result type and expected result type.
* If this is not possible, throw a `NoInstance` exception.
- * Undetermined type arguments are represented by `definitions.NothingClass.tpe`.
+ * Undetermined type arguments are represented by `definitions.NothingTpe`.
* No check that inferred parameters conform to their bounds is made here.
*
* @param tparams the type parameters of the method
@@ -726,15 +693,13 @@ trait Infer extends Checkable {
val restp1 = followApply(restp)
if (restp1 eq restp) tp else restp1
case _ =>
- val appmeth = {
- //OPT cut down on #closures by special casing non-overloaded case
- // was: tp.nonPrivateMember(nme.apply) filter (_.isPublic)
- val result = tp.nonPrivateMember(nme.apply)
- if ((result eq NoSymbol) || !result.isOverloaded && result.isPublic) result
- else result filter (_.isPublic)
+ //OPT cut down on #closures by special casing non-overloaded case
+ // was: tp.nonPrivateMember(nme.apply) filter (_.isPublic)
+ tp nonPrivateMember nme.apply match {
+ case NoSymbol => tp
+ case sym if !sym.isOverloaded && sym.isPublic => OverloadedType(tp, sym.alternatives)
+ case sym => OverloadedType(tp, sym filter (_.isPublic) alternatives)
}
- if (appmeth == NoSymbol) tp
- else OverloadedType(tp, appmeth.alternatives)
}
/**
@@ -748,7 +713,7 @@ trait Infer extends Checkable {
* to the corresponding position in params
* - namesOK is false when there's an invalid use of named arguments
*/
- private def checkNames(argtpes: List[Type], params: List[Symbol]) = {
+ private def checkNames(argtpes: List[Type], params: List[Symbol]): (List[Type], Array[Int], Boolean) = {
val argPos = Array.fill(argtpes.length)(-1)
var positionalAllowed, namesOK = true
var index = 0
@@ -760,7 +725,7 @@ trait Infer extends Checkable {
if (pos == -1) {
if (positionalAllowed) { // treat assignment as positional argument
argPos(index) = index
- res = UnitClass.tpe
+ res = UnitTpe
} else // unknown parameter name
namesOK = false
} else if (argPos.contains(pos)) { // parameter specified twice
@@ -808,10 +773,10 @@ trait Infer extends Checkable {
* @pre: the argument list is eligible for tuple conversion.
*/
private def typeAfterTupleConversion(argtpes: List[Type]): Type = (
- if (argtpes.isEmpty) UnitClass.tpe // aka "Tuple0"
+ if (argtpes.isEmpty) UnitTpe // aka "Tuple0"
else tupleType(argtpes map {
- case NamedType(name, tp) => UnitClass.tpe // not a named arg - only assignments here
- case RepeatedType(tp) => tp // but probably shouldn't be tupling a call containing :_*
+ case NamedType(name, tp) => UnitTpe // not a named arg - only assignments here
+ case RepeatedType(tp) => tp // but probably shouldn't be tupling a call containing :_*
case tp => tp
})
)
@@ -827,9 +792,50 @@ trait Infer extends Checkable {
argtpes
}
- /** Is there an instantiation of free type variables `undetparams`
- * such that function type `ftpe` is applicable to
- * `argtpes` and its result conform to `pt`?
+ private def isApplicableToMethod(undetparams: List[Symbol], mt: MethodType, argtpes0: List[Type], pt: Type): Boolean = {
+ val formals = formalTypes(mt.paramTypes, argtpes0.length, removeByName = false)
+ def missingArgs = missingParams[Type](argtpes0, mt.params, x => Some(x) collect { case NamedType(n, _) => n })
+ def argsTupled = tupleIfNecessary(mt.paramTypes, argtpes0)
+ def argsPlusDefaults = missingArgs match {
+ case (args, _) if args forall (_.hasDefault) => argtpes0 ::: makeNamedTypes(args)
+ case _ => argsTupled
+ }
+ // If args eq the incoming arg types, fail; otherwise recurse with these args.
+ def tryWithArgs(args: List[Type]) = (
+ (args ne argtpes0)
+ && isApplicable(undetparams, mt, args, pt)
+ )
+ def tryInstantiating(args: List[Type]) = falseIfNoInstance {
+ val restpe = mt resultType args
+ val AdjustedTypeArgs.Undets(okparams, okargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, args, pt)
+ val restpeInst = restpe.instantiateTypeParams(okparams, okargs)
+ // #2665: must use weak conformance, not regular one (follow the monomorphic case above)
+ exprTypeArgs(leftUndet, restpeInst, pt, useWeaklyCompatible = true) match {
+ case null => false
+ case _ => isWithinBounds(NoPrefix, NoSymbol, okparams, okargs)
+ }
+ }
+ def typesCompatible(args: List[Type]) = undetparams match {
+ case Nil => isCompatibleArgs(args, formals) && isWeaklyCompatible(mt resultType args, pt)
+ case _ => tryInstantiating(args)
+ }
+
+ // when using named application, the vararg param has to be specified exactly once
+ def reorderedTypesCompatible = checkNames(argtpes0, mt.params) match {
+ case (_, _, false) => false // names are not ok
+ case (_, pos, _) if !allArgsArePositional(pos) && !sameLength(formals, mt.params) => false // different length lists and all args not positional
+ case (args, pos, _) => typesCompatible(reorderArgs(args, pos))
+ }
+ compareLengths(argtpes0, formals) match {
+ case 0 if containsNamedType(argtpes0) => reorderedTypesCompatible // right number of args, wrong order
+ case 0 => typesCompatible(argtpes0) // fast track if no named arguments are used
+ case x if x > 0 => tryWithArgs(argsTupled) // too many args, try tupling
+ case _ => tryWithArgs(argsPlusDefaults) // too few args, try adding defaults or tupling
+ }
+ }
+
+ /** Is there an instantiation of free type variables `undetparams` such that
+ * function type `ftpe` is applicable to `argtpes0` and its result conform to `pt`?
*
* @param ftpe the type of the function (often a MethodType)
* @param argtpes0 the argument types; a NamedType(name, tp) for named
@@ -837,174 +843,94 @@ trait Infer extends Checkable {
* type is set to `Unit`, i.e. the corresponding argument is treated as
* an assignment expression (@see checkNames).
*/
- private def isApplicable(undetparams: List[Symbol], ftpe: Type,
- argtpes0: List[Type], pt: Type): Boolean =
+ private def isApplicable(undetparams: List[Symbol], ftpe: Type, argtpes0: List[Type], pt: Type): Boolean = (
ftpe match {
- case OverloadedType(pre, alts) =>
- alts exists (alt => isApplicable(undetparams, pre memberType alt, argtpes0, pt))
- case ExistentialType(tparams, qtpe) =>
- isApplicable(undetparams, qtpe, argtpes0, pt)
- case mt @ MethodType(params, _) =>
- val argslen = argtpes0.length
- val formals = formalTypes(mt.paramTypes, argslen, removeByName = false)
-
- def tryTupleApply = {
- val tupled = tupleIfNecessary(mt.paramTypes, argtpes0)
- (tupled ne argtpes0) && isApplicable(undetparams, ftpe, tupled, pt)
- }
- def typesCompatible(argtpes: List[Type]) = {
- val restpe = ftpe.resultType(argtpes)
- if (undetparams.isEmpty) {
- isCompatibleArgs(argtpes, formals) && isWeaklyCompatible(restpe, pt)
- } else {
- try {
- val AdjustedTypeArgs.Undets(okparams, okargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, argtpes, pt)
- // #2665: must use weak conformance, not regular one (follow the monomorphic case above)
- (exprTypeArgs(leftUndet, restpe.instantiateTypeParams(okparams, okargs), pt, useWeaklyCompatible = true)._1 ne null) &&
- isWithinBounds(NoPrefix, NoSymbol, okparams, okargs)
- } catch {
- case ex: NoInstance => false
- }
- }
- }
-
- // very similar logic to doTypedApply in typechecker
- val lencmp = compareLengths(argtpes0, formals)
- if (lencmp > 0) tryTupleApply
- else if (lencmp == 0) {
- // fast track if no named arguments are used
- if (!containsNamedType(argtpes0))
- typesCompatible(argtpes0)
- else {
- // named arguments are used
- val (argtpes1, argPos, namesOK) = checkNames(argtpes0, params)
- // when using named application, the vararg param has to be specified exactly once
- ( namesOK
- && (allArgsArePositional(argPos) || sameLength(formals, params))
- && typesCompatible(reorderArgs(argtpes1, argPos)) // nb. arguments and names are OK, check if types are compatible
- )
- }
- }
- else {
- // not enough arguments, check if applicable using defaults
- val missing = missingParams[Type](argtpes0, params, {
- case NamedType(name, _) => Some(name)
- case _ => None
- })._1
- if (missing forall (_.hasDefault)) {
- // add defaults as named arguments
- val argtpes1 = argtpes0 ::: (missing map (p => NamedType(p.name, p.tpe)))
- isApplicable(undetparams, ftpe, argtpes1, pt)
- }
- else tryTupleApply
- }
-
- case NullaryMethodType(restpe) => // strip nullary method type, which used to be done by the polytype case below
- isApplicable(undetparams, restpe, argtpes0, pt)
- case PolyType(tparams, restpe) =>
- createFromClonedSymbols(tparams, restpe)((tps1, restpe1) => isApplicable(tps1 ::: undetparams, restpe1, argtpes0, pt))
- case ErrorType =>
- true
- case _ =>
- false
+ case OverloadedType(pre, alts) => alts exists (alt => isApplicable(undetparams, pre memberType alt, argtpes0, pt))
+ case ExistentialType(_, qtpe) => isApplicable(undetparams, qtpe, argtpes0, pt)
+ case mt @ MethodType(_, _) => isApplicableToMethod(undetparams, mt, argtpes0, pt)
+ case NullaryMethodType(restpe) => isApplicable(undetparams, restpe, argtpes0, pt)
+ case PolyType(tparams, restpe) => createFromClonedSymbols(tparams, restpe)((tps1, res1) => isApplicable(tps1 ::: undetparams, res1, argtpes0, pt))
+ case ErrorType => true
+ case _ => false
}
+ )
/**
- * Todo: Try to make isApplicable always safe (i.e. not cause TypeErrors).
- * The chance of TypeErrors should be reduced through context errors
+ * Are arguments of the given types applicable to `ftpe`? Type argument inference
+ * is tried twice: firstly with the given expected type, and secondly with `WildcardType`.
*/
- private[typechecker] def isApplicableSafe(undetparams: List[Symbol], ftpe: Type,
- argtpes0: List[Type], pt: Type): Boolean = {
- val silentContext = context.makeSilent(reportAmbiguousErrors = false)
- val typer0 = newTyper(silentContext)
- val res1 = typer0.infer.isApplicable(undetparams, ftpe, argtpes0, pt)
- if (pt != WildcardType && silentContext.hasErrors) {
- silentContext.flushBuffer()
- val res2 = typer0.infer.isApplicable(undetparams, ftpe, argtpes0, WildcardType)
- if (silentContext.hasErrors) false else res2
- } else res1
+ // Todo: Try to make isApplicable always safe (i.e. not cause TypeErrors).
+ // The chance of TypeErrors should be reduced through context errors
+ private[typechecker] def isApplicableSafe(undetparams: List[Symbol], ftpe: Type, argtpes0: List[Type], pt: Type): Boolean = {
+ def applicableExpectingPt(pt: Type): Boolean = {
+ val silent = context.makeSilent(reportAmbiguousErrors = false)
+ val result = newTyper(silent).infer.isApplicable(undetparams, ftpe, argtpes0, pt)
+ if (silent.hasErrors && !pt.isWildcard)
+ applicableExpectingPt(WildcardType) // second try
+ else
+ result
+ }
+ applicableExpectingPt(pt)
}
/** Is type `ftpe1` strictly more specific than type `ftpe2`
* when both are alternatives in an overloaded function?
* @see SLS (sec:overloading-resolution)
*/
- def isAsSpecific(ftpe1: Type, ftpe2: Type): Boolean = ftpe1 match {
- case OverloadedType(pre, alts) =>
- alts exists (alt => isAsSpecific(pre memberType alt, ftpe2))
- case et: ExistentialType =>
- isAsSpecific(ftpe1.skolemizeExistential, ftpe2)
- //et.withTypeVars(isAsSpecific(_, ftpe2))
- case NullaryMethodType(res) =>
- isAsSpecific(res, ftpe2)
- case mt: MethodType if mt.isImplicit =>
- isAsSpecific(ftpe1.resultType, ftpe2)
- case mt @ MethodType(params, _) if params.nonEmpty =>
- var argtpes = mt.paramTypes
- if (isVarArgsList(params) && isVarArgsList(ftpe2.params))
- argtpes = argtpes map (argtpe =>
- if (isRepeatedParamType(argtpe)) argtpe.typeArgs.head else argtpe)
- isApplicable(List(), ftpe2, argtpes, WildcardType)
- case PolyType(tparams, NullaryMethodType(res)) =>
- isAsSpecific(PolyType(tparams, res), ftpe2)
- case PolyType(tparams, mt: MethodType) if mt.isImplicit =>
- isAsSpecific(PolyType(tparams, mt.resultType), ftpe2)
- case PolyType(_, (mt @ MethodType(params, _))) if params.nonEmpty =>
- isApplicable(List(), ftpe2, mt.paramTypes, WildcardType)
- // case NullaryMethodType(res) =>
- // isAsSpecific(res, ftpe2)
- case ErrorType =>
- true
- case _ =>
- ftpe2 match {
- case OverloadedType(pre, alts) =>
- alts forall (alt => isAsSpecific(ftpe1, pre memberType alt))
- case et: ExistentialType =>
- et.withTypeVars(isAsSpecific(ftpe1, _))
- case mt: MethodType =>
- !mt.isImplicit || isAsSpecific(ftpe1, mt.resultType)
- case NullaryMethodType(res) =>
- isAsSpecific(ftpe1, res)
- case PolyType(tparams, NullaryMethodType(res)) =>
- isAsSpecific(ftpe1, PolyType(tparams, res))
- case PolyType(tparams, mt: MethodType) =>
- !mt.isImplicit || isAsSpecific(ftpe1, PolyType(tparams, mt.resultType))
- case _ =>
- isAsSpecificValueType(ftpe1, ftpe2, List(), List())
- }
+ def isAsSpecific(ftpe1: Type, ftpe2: Type): Boolean = {
+ def checkIsApplicable(argtpes: List[Type]) = isApplicable(Nil, ftpe2, argtpes, WildcardType)
+ def bothAreVarargs = isVarArgsList(ftpe1.params) && isVarArgsList(ftpe2.params)
+ def onRight = ftpe2 match {
+ case OverloadedType(pre, alts) => alts forall (alt => isAsSpecific(ftpe1, pre memberType alt))
+ case et: ExistentialType => et.withTypeVars(isAsSpecific(ftpe1, _))
+ case mt @ MethodType(_, restpe) => !mt.isImplicit || isAsSpecific(ftpe1, restpe)
+ case NullaryMethodType(res) => isAsSpecific(ftpe1, res)
+ case PolyType(tparams, NullaryMethodType(restpe)) => isAsSpecific(ftpe1, PolyType(tparams, restpe))
+ case PolyType(tparams, mt @ MethodType(_, restpe)) => !mt.isImplicit || isAsSpecific(ftpe1, PolyType(tparams, restpe))
+ case _ => isAsSpecificValueType(ftpe1, ftpe2, Nil, Nil)
+ }
+ ftpe1 match {
+ case OverloadedType(pre, alts) => alts exists (alt => isAsSpecific(pre memberType alt, ftpe2))
+ case et: ExistentialType => isAsSpecific(et.skolemizeExistential, ftpe2)
+ case NullaryMethodType(restpe) => isAsSpecific(restpe, ftpe2)
+ case mt @ MethodType(_, restpe) if mt.isImplicit => isAsSpecific(restpe, ftpe2)
+ case mt @ MethodType(_, _) if bothAreVarargs => checkIsApplicable(mt.paramTypes mapConserve repeatedToSingle)
+ case mt @ MethodType(params, _) if params.nonEmpty => checkIsApplicable(mt.paramTypes)
+ case PolyType(tparams, NullaryMethodType(restpe)) => isAsSpecific(PolyType(tparams, restpe), ftpe2)
+ case PolyType(tparams, mt @ MethodType(_, restpe)) if mt.isImplicit => isAsSpecific(PolyType(tparams, restpe), ftpe2)
+ case PolyType(_, mt @ MethodType(params, _)) if params.nonEmpty => checkIsApplicable(mt.paramTypes)
+ case ErrorType => true
+ case _ => onRight
+ }
}
- private def isAsSpecificValueType(tpe1: Type, tpe2: Type, undef1: List[Symbol], undef2: List[Symbol]): Boolean = (tpe1, tpe2) match {
- case (PolyType(tparams1, rtpe1), _) =>
+ private def isAsSpecificValueType(tpe1: Type, tpe2: Type, undef1: List[Symbol], undef2: List[Symbol]): Boolean = tpe1 match {
+ case PolyType(tparams1, rtpe1) =>
isAsSpecificValueType(rtpe1, tpe2, undef1 ::: tparams1, undef2)
- case (_, PolyType(tparams2, rtpe2)) =>
- isAsSpecificValueType(tpe1, rtpe2, undef1, undef2 ::: tparams2)
- case _ =>
- existentialAbstraction(undef1, tpe1) <:< existentialAbstraction(undef2, tpe2)
+ case _ =>
+ tpe2 match {
+ case PolyType(tparams2, rtpe2) => isAsSpecificValueType(tpe1, rtpe2, undef1, undef2 ::: tparams2)
+ case _ => existentialAbstraction(undef1, tpe1) <:< existentialAbstraction(undef2, tpe2)
+ }
}
-
-/*
- def isStrictlyMoreSpecific(ftpe1: Type, ftpe2: Type): Boolean =
- ftpe1.isError || isAsSpecific(ftpe1, ftpe2) &&
- (!isAsSpecific(ftpe2, ftpe1) ||
- !ftpe1.isInstanceOf[OverloadedType] && ftpe2.isInstanceOf[OverloadedType] ||
- phase.erasedTypes && covariantReturnOverride(ftpe1, ftpe2))
-*/
/** Is sym1 (or its companion class in case it is a module) a subclass of
* sym2 (or its companion class in case it is a module)?
*/
def isProperSubClassOrObject(sym1: Symbol, sym2: Symbol): Boolean = (
- (sym1 != sym2) && (sym1 != NoSymbol) && (
- (sym1 isSubClass sym2)
- || (sym1.isModuleClass && isProperSubClassOrObject(sym1.linkedClassOfClass, sym2))
- || (sym2.isModuleClass && isProperSubClassOrObject(sym1, sym2.linkedClassOfClass))
- )
+ (sym1 ne sym2)
+ && (sym1 ne NoSymbol)
+ && ( (sym1 isSubClass sym2)
+ || (sym1.isModuleClass && isProperSubClassOrObject(sym1.linkedClassOfClass, sym2))
+ || (sym2.isModuleClass && isProperSubClassOrObject(sym1, sym2.linkedClassOfClass))
+ )
)
/** is symbol `sym1` defined in a proper subclass of symbol `sym2`?
*/
- def isInProperSubClassOrObject(sym1: Symbol, sym2: Symbol) =
- sym2 == NoSymbol || isProperSubClassOrObject(sym1.owner, sym2.owner)
+ def isInProperSubClassOrObject(sym1: Symbol, sym2: Symbol) = (
+ (sym2 eq NoSymbol)
+ || isProperSubClassOrObject(sym1.safeOwner, sym2.owner)
+ )
def isStrictlyMoreSpecific(ftpe1: Type, ftpe2: Type, sym1: Symbol, sym2: Symbol): Boolean = {
// ftpe1 / ftpe2 are OverloadedTypes (possibly with one single alternative) if they
@@ -1017,90 +943,34 @@ trait Infer extends Checkable {
(!phase.erasedTypes || covariantReturnOverride(ftpe1, ftpe2))) 1 else 0)
val subClassCount = (if (isInProperSubClassOrObject(sym1, sym2)) 1 else 0) -
(if (isInProperSubClassOrObject(sym2, sym1)) 1 else 0)
-// println("is more specific? "+sym1+":"+ftpe1+sym1.locationString+"/"+sym2+":"+ftpe2+sym2.locationString+":"+
-// specificCount+"/"+subClassCount)
specificCount + subClassCount > 0
}
}
-/*
- ftpe1.isError || {
- if (isAsSpecific(ftpe1, ftpe2))
- (!isAsSpecific(ftpe2, ftpe1) ||
- isProperSubClassOrObject(sym1.owner, sym2.owner) ||
- !ftpe1.isInstanceOf[OverloadedType] && ftpe2.isInstanceOf[OverloadedType] ||
- phase.erasedTypes && covariantReturnOverride(ftpe1, ftpe2))
- else
- !isAsSpecific(ftpe2, ftpe1) &&
- isProperSubClassOrObject(sym1.owner, sym2.owner)
- }
-*/
- private def covariantReturnOverride(ftpe1: Type, ftpe2: Type): Boolean = (ftpe1, ftpe2) match {
- case (MethodType(_, rtpe1), MethodType(_, rtpe2)) =>
- rtpe1 <:< rtpe2 || rtpe2.typeSymbol == ObjectClass
- case _ =>
- false
- }
-/*
- /** Is type `tpe1` a strictly better expression alternative than type `tpe2`?
- */
- def isStrictlyBetterExpr(tpe1: Type, tpe2: Type) = {
- isMethod(tpe2) && !isMethod(tpe1) ||
- isNullary(tpe1) && !isNullary(tpe2) ||
- isStrictlyBetter(tpe1, tpe2)
- }
- /** Is type `tpe1` a strictly better alternative than type `tpe2`?
- * non-methods are always strictly better than methods
- * nullary methods are always strictly better than non-nullary
- * if both are non-nullary methods, then tpe1 is strictly better than tpe2 if
- * - tpe1 specializes tpe2 and tpe2 does not specialize tpe1
- * - tpe1 and tpe2 specialize each other and tpe1 has a strictly better resulttype than
- * tpe2
- */
- def isStrictlyBetter(tpe1: Type, tpe2: Type) = {
- def isNullary(tpe: Type): Boolean = tpe match {
- case tp: RewrappingTypeProxy => isNullary(tp.underlying)
- case _ => tpe.paramSectionCount == 0 || tpe.params.isEmpty
- }
- def isMethod(tpe: Type): Boolean = tpe match {
- case tp: RewrappingTypeProxy => isMethod(tp.underlying)
- case MethodType(_, _) | PolyType(_, _) => true
- case _ => false
- }
- def hasStrictlyBetterResult =
- resultIsBetter(tpe1, tpe2, List(), List()) && !resultIsBetter(tpe2, tpe1, List(), List())
- if (!isMethod(tpe1))
- isMethod(tpe2) || hasStrictlyBetterResult
-
- isNullary(tpe1) && !isNullary(tpe2) ||
- is
-
- else if (isNullary(tpe1))
- isMethod(tpe2) && (!isNullary(tpe2) || hasStrictlyBetterResult)
- else
- specializes(tpe1, tpe2) && (!specializes(tpe2, tpe1) || hasStrictlyBetterResult)
+ private def covariantReturnOverride(ftpe1: Type, ftpe2: Type): Boolean = ftpe1 match {
+ case MethodType(_, rtpe1) =>
+ ftpe2 match {
+ case MethodType(_, rtpe2) => rtpe1 <:< rtpe2 || rtpe2.typeSymbol == ObjectClass
+ case _ => false
+ }
+ case _ => false
}
-*/
/** error if arguments not within bounds. */
- def checkBounds(tree: Tree, pre: Type, owner: Symbol,
- tparams: List[Symbol], targs: List[Type], prefix: String): Boolean =
- if ((targs exists (_.isErroneous)) || (tparams exists (_.isErroneous))) true
- else {
- //@M validate variances & bounds of targs wrt variances & bounds of tparams
- //@M TODO: better place to check this?
- //@M TODO: errors for getters & setters are reported separately
- val kindErrors = checkKindBounds(tparams, targs, pre, owner)
- kindErrors match {
- case Nil =>
- def notWithinBounds() = NotWithinBounds(tree, prefix, targs, tparams, Nil)
- isWithinBounds(pre, owner, tparams, targs) || {notWithinBounds(); false}
- case errors =>
- def kindBoundErrors() = KindBoundErrors(tree, prefix, targs, tparams, errors)
- (targs contains WildcardType) || {kindBoundErrors(); false}
- }
+ def checkBounds(tree: Tree, pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type], prefix: String): Boolean = {
+ def issueBoundsError() = { NotWithinBounds(tree, prefix, targs, tparams, Nil) ; false }
+ def issueKindBoundErrors(errs: List[String]) = { KindBoundErrors(tree, prefix, targs, tparams, errs) ; false }
+ //@M validate variances & bounds of targs wrt variances & bounds of tparams
+ //@M TODO: better place to check this?
+ //@M TODO: errors for getters & setters are reported separately
+ def check() = checkKindBounds(tparams, targs, pre, owner) match {
+ case Nil => isWithinBounds(pre, owner, tparams, targs) || issueBoundsError()
+ case errs => (targs contains WildcardType) || issueKindBoundErrors(errs)
}
+ targs.exists(_.isErroneous) || tparams.exists(_.isErroneous) || check()
+ }
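
At the source level, the failures reported here correspond to type arguments that escape their declared bounds; a minimal illustrative example (not from this change):

    object BoundsCases {
      class Box[A <: AnyVal](val a: A)

      new Box[Int](1)          // fine: Int is within the bound A <: AnyVal
      // new Box[String]("")   // rejected: String is outside the declared bound,
      //                       // which is the case reported via NotWithinBounds above
    }
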
+
def checkKindBounds(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): List[String] = {
checkKindBounds0(tparams, targs, pre, owner, explainErrors = true) map {
case (targ, tparam, kindErrors) =>
@@ -1126,9 +996,9 @@ trait Infer extends Checkable {
"lenientPt" -> lenientPt
)
)
- var targs = exprTypeArgs(undetparams, tree.tpe, strictPt)._1
+ var targs = exprTypeArgs(undetparams, tree.tpe, strictPt, useWeaklyCompatible = false)
if ((targs eq null) || !(tree.tpe.subst(undetparams, targs) <:< strictPt))
- targs = exprTypeArgs(undetparams, tree.tpe, lenientPt)._1
+ targs = exprTypeArgs(undetparams, tree.tpe, lenientPt, useWeaklyCompatible = false)
substExpr(tree, undetparams, targs, lenientPt)
printInference("[inferArgumentInstance] finished, targs = " + targs)
@@ -1140,8 +1010,9 @@ trait Infer extends Checkable {
* If passed, infers against specified type `treeTp` instead of `tree.tp`.
*/
def inferExprInstance(tree: Tree, tparams: List[Symbol], pt: Type = WildcardType, treeTp0: Type = null, keepNothings: Boolean = true, useWeaklyCompatible: Boolean = false): List[Symbol] = {
- val treeTp = if(treeTp0 eq null) tree.tpe else treeTp0 // can't refer to tree in default for treeTp0
- val (targs, tvars) = exprTypeArgs(tparams, treeTp, pt, useWeaklyCompatible)
+ val treeTp = if (treeTp0 eq null) tree.tpe else treeTp0 // can't refer to tree in default for treeTp0
+ val tvars = tparams map freshVar
+ val targs = exprTypeArgs(tvars, tparams, treeTp, pt, useWeaklyCompatible)
printInference(
ptBlock("inferExprInstance",
"tree" -> tree,
@@ -1200,7 +1071,7 @@ trait Infer extends Checkable {
try {
val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
val formals = formalTypes(mt.paramTypes, args.length)
- val argtpes = tupleIfNecessary(formals, args map (x => elimAnonymousClass(x.tpe.deconst)))
+ val argtpes = tupleIfNecessary(formals, args map (x => elimAnonymousClass(x.tpe.deconst)))
val restpe = fn.tpe.resultType(argtpes)
val AdjustedTypeArgs.AllArgsAndUndets(okparams, okargs, allargs, leftUndet) =
@@ -1303,17 +1174,14 @@ trait Infer extends Checkable {
}
} else None
- val inferred = inferFor(pt) orElse inferForApproxPt
-
- inferred match {
+ inferFor(pt) orElse inferForApproxPt match {
case Some(targs) =>
- new TreeTypeSubstituter(undetparams, targs).traverse(tree)
- notifyUndetparamsInferred(undetparams, targs)
+ new TreeTypeSubstituter(undetparams, targs).traverse(tree)
+ notifyUndetparamsInferred(undetparams, targs)
case _ =>
- def full = if (isFullyDefined(pt)) "(fully defined)" else "(not fully defined)"
- devWarning(s"failed inferConstructorInstance for $tree: ${tree.tpe} undet=$undetparams, pt=$pt $full")
- // if (settings.explaintypes.value) explainTypes(resTp.instantiateTypeParams(undetparams, tvars), pt)
- ConstrInstantiationError(tree, resTp, pt)
+ def not = if (isFullyDefined(pt)) "" else "not "
+ devWarning(s"failed inferConstructorInstance for $tree: ${tree.tpe} undet=$undetparams, pt=$pt (${not}fully defined)")
+ ConstrInstantiationError(tree, resTp, pt)
}
}
@@ -1484,38 +1352,24 @@ trait Infer extends Checkable {
/* -- Overload Resolution ---------------------------------------------- */
-/*
- def checkNotShadowed(pos: Position, pre: Type, best: Symbol, eligible: List[Symbol]) =
- if (!phase.erasedTypes)
- for (alt <- eligible) {
- if (isProperSubClassOrObject(alt.owner, best.owner))
- error(pos,
- "erroneous reference to overloaded definition,\n"+
- "most specific definition is: "+best+best.locationString+" of type "+pre.memberType(best)+
- ",\nyet alternative definition "+alt+alt.locationString+" of type "+pre.memberType(alt)+
- "\nis defined in a subclass")
- }
-*/
-
/** Assign `tree` the symbol and type of the alternative which
* matches prototype `pt`, if it exists.
* If several alternatives match `pt`, take parameterless one.
* If no alternative matches `pt`, take the parameterless one anyway.
*/
- def inferExprAlternative(tree: Tree, pt: Type) = tree.tpe match {
- case OverloadedType(pre, alts) => tryTwice { isSecondTry =>
- val alts0 = alts filter (alt => isWeaklyCompatible(pre.memberType(alt), pt))
+ def inferExprAlternative(tree: Tree, pt: Type): Tree = {
+ def tryOurBests(pre: Type, alts: List[Symbol], isSecondTry: Boolean): Unit = {
+ val alts0 = alts filter (alt => isWeaklyCompatible(pre memberType alt, pt))
val alts1 = if (alts0.isEmpty) alts else alts0
-
val bests = bestAlternatives(alts1) { (sym1, sym2) =>
- val tp1 = pre.memberType(sym1)
- val tp2 = pre.memberType(sym2)
+ val tp1 = pre memberType sym1
+ val tp2 = pre memberType sym2
- ( tp2 == ErrorType
- || (!isWeaklyCompatible(tp2, pt) && isWeaklyCompatible(tp1, pt))
+ ( (tp2 eq ErrorType)
+ || isWeaklyCompatible(tp1, pt) && !isWeaklyCompatible(tp2, pt)
|| isStrictlyMoreSpecific(tp1, tp2, sym1, sym2)
)
- }
+ }
// todo: missing test case for bests.isEmpty
bests match {
case best :: Nil => tree setSymbol best setType (pre memberType best)
@@ -1527,18 +1381,12 @@ trait Infer extends Checkable {
if (pt.isErroneous) setError(tree)
else AmbiguousExprAlternativeError(tree, pre, best, competing, pt, isSecondTry)
case _ => if (bests.isEmpty || alts0.isEmpty) NoBestExprAlternativeError(tree, pt, isSecondTry)
- }
}
}
-
- @inline private def inSilentMode(context: Context)(expr: => Boolean): Boolean = {
- val oldState = context.state
- context.setBufferErrors()
- val res = expr
- val contextWithErrors = context.hasErrors
- context.flushBuffer()
- context.restoreState(oldState)
- res && !contextWithErrors
+ tree.tpe match {
+ case OverloadedType(pre, alts) => tryTwice(tryOurBests(pre, alts, _)) ; tree
+ case _ => tree
+ }
}
// Checks against the name of the parameter and also any @deprecatedName.
@@ -1573,7 +1421,7 @@ trait Infer extends Checkable {
val namesMatch = namesOfNamedArguments(argtpes) match {
case Nil => Nil
case names => eligible filter (m => names forall (name => m.info.params exists (p => paramMatchesName(p, name))))
- }
+ }
if (namesMatch.nonEmpty)
namesMatch
else if (eligible.isEmpty || eligible.tail.isEmpty)
@@ -1582,7 +1430,7 @@ trait Infer extends Checkable {
eligible filter (alt =>
!alt.hasDefault && isApplicableBasedOnArity(alt.tpe, argtpes.length, varargsStar, tuplingAllowed = true)
)
- }
+ }
/** Assign `tree` the type of an alternative which is applicable
* to `argtpes`, and whose result type is compatible with `pt`.
@@ -1606,21 +1454,21 @@ trait Infer extends Checkable {
val argtpes = argtpes0 mapConserve {
case RepeatedType(tp) => varargsStar = true ; tp
case tp => tp
- }
+ }
def followType(sym: Symbol) = followApply(pre memberType sym)
def bestForExpectedType(pt: Type, isLastTry: Boolean): Unit = {
- val applicable0 = alts filter (alt => inSilentMode(context)(isApplicable(undetparams, followType(alt), argtpes, pt)))
+ val applicable0 = alts filter (alt => context inSilentMode isApplicable(undetparams, followType(alt), argtpes, pt))
val applicable = overloadsToConsiderBySpecificity(applicable0, argtpes, varargsStar)
val ranked = bestAlternatives(applicable)((sym1, sym2) =>
isStrictlyMoreSpecific(followType(sym1), followType(sym2), sym1, sym2)
)
ranked match {
- case best :: competing :: _ => AmbiguousMethodAlternativeError(tree, pre, best, competing, argtpes, pt, isLastTry) // ambiguous
- case best :: Nil => tree setSymbol best setType (pre memberType best) // success
- case Nil if pt eq WildcardType => NoBestMethodAlternativeError(tree, argtpes, pt, isLastTry) // failed
- case Nil => bestForExpectedType(WildcardType, isLastTry) // failed, but retry with WildcardType
- }
- }
+ case best :: competing :: _ => AmbiguousMethodAlternativeError(tree, pre, best, competing, argtpes, pt, isLastTry) // ambiguous
+ case best :: Nil => tree setSymbol best setType (pre memberType best) // success
+ case Nil if pt.isWildcard => NoBestMethodAlternativeError(tree, argtpes, pt, isLastTry) // failed
+ case Nil => bestForExpectedType(WildcardType, isLastTry) // failed, but retry with WildcardType
+ }
+ }
// This potentially makes up to four attempts: tryTwice may execute
// with and without views enabled, and bestForExpectedType will try again
// with pt = WildcardType if it fails with pt != WildcardType.
@@ -1628,7 +1476,7 @@ trait Infer extends Checkable {
val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
debuglog(s"infer method alt ${tree.symbol} with alternatives ${alts map pre.memberType} argtpes=$argtpes pt=$pt")
bestForExpectedType(pt, isLastTry)
- }
+ }
}
/** Try inference twice, once without views and once with views,
@@ -1636,7 +1484,7 @@ trait Infer extends Checkable {
*/
def tryTwice(infer: Boolean => Unit): Unit = {
if (context.implicitsEnabled) {
- val saved = context.state
+ val savedContextMode = context.contextMode
var fallback = false
context.setBufferErrors()
// We cache the current buffer because it is impossible to
@@ -1650,17 +1498,17 @@ trait Infer extends Checkable {
context.withImplicitsDisabled(infer(false))
if (context.hasErrors) {
fallback = true
- context.restoreState(saved)
+ context.contextMode = savedContextMode
context.flushBuffer()
infer(true)
}
} catch {
case ex: CyclicReference => throw ex
case ex: TypeError => // recoverable cyclic references
- context.restoreState(saved)
+ context.contextMode = savedContextMode
if (!fallback) infer(true) else ()
} finally {
- context.restoreState(saved)
+ context.contextMode = savedContextMode
context.updateBuffer(errorsToRestore)
}
}
@@ -1668,44 +1516,41 @@ trait Infer extends Checkable {
}
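
The two passes become observable when no overloaded alternative is applicable without an implicit view; a hedged, standalone sketch (the conversion and overloads below are illustrative):

    object TryTwiceCases {
      import scala.language.implicitConversions
      implicit def intToString(i: Int): String = i.toString

      def len(s: String): Int = s.length
      def len(cs: Seq[Char]): Int = cs.length

      len(42)  // the first pass, with views disabled, finds no applicable alternative for an Int argument;
               // the retry with views enabled applies intToString and selects len(String)
    }
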
/** Assign `tree` the type of all polymorphic alternatives
- * with `nparams` as the number of type parameters, if it exists.
+ * which have as many type parameters as there are `argtypes`,
+ * and for which all `argtypes` are within the corresponding type parameter bounds.
 * If no such polymorphic alternative exists, error.
*/
def inferPolyAlternatives(tree: Tree, argtypes: List[Type]): Unit = {
val OverloadedType(pre, alts) = tree.tpe
- val sym0 = tree.symbol filter (alt => sameLength(alt.typeParams, argtypes))
- def fail(kind: PolyAlternativeErrorKind.ErrorType) =
- PolyAlternativeError(tree, argtypes, sym0, kind)
-
- if (sym0 == NoSymbol) return (
- if (alts exists (_.typeParams.nonEmpty))
- fail(PolyAlternativeErrorKind.WrongNumber)
- else fail(PolyAlternativeErrorKind.NoParams))
-
- val (resSym, resTpe) = {
- if (!sym0.isOverloaded)
- (sym0, pre.memberType(sym0))
- else {
- val sym = sym0 filter (alt => isWithinBounds(pre, alt.owner, alt.typeParams, argtypes))
- if (sym == NoSymbol) {
- if (argtypes forall (x => !x.isErroneous))
- fail(PolyAlternativeErrorKind.ArgsDoNotConform)
- return
- }
- else if (sym.isOverloaded) {
- val xs = sym.alternatives
- val tparams = newAsSeenFromMap(pre, xs.head.owner) mapOver xs.head.typeParams
- val bounds = tparams map (_.tpeHK) // see e.g., #1236
- val tpe = PolyType(tparams, OverloadedType(AntiPolyType(pre, bounds), xs))
-
- (sym setInfo tpe, tpe)
- }
- else (sym, pre.memberType(sym))
- }
+ // Alternatives with a matching length type parameter list
+ val matchingLength = tree.symbol filter (alt => sameLength(alt.typeParams, argtypes))
+ def allMonoAlts = alts forall (_.typeParams.isEmpty)
+ def errorKind = matchingLength match {
+ case NoSymbol if allMonoAlts => PolyAlternativeErrorKind.NoParams // no polymorphic method alternative
+ case NoSymbol => PolyAlternativeErrorKind.WrongNumber // wrong number of tparams
+ case _ => PolyAlternativeErrorKind.ArgsDoNotConform // didn't conform to bounds
+ }
+ def fail() = PolyAlternativeError(tree, argtypes, matchingLength, errorKind)
+ def finish(sym: Symbol, tpe: Type) = tree setSymbol sym setType tpe
+ // Alternatives which conform to bounds
+ def checkWithinBounds(sym: Symbol) = sym.alternatives match {
+ case Nil if argtypes.exists(_.isErroneous) =>
+ case Nil => fail()
+ case alt :: Nil => finish(alt, pre memberType alt)
+ case alts @ (hd :: _) =>
+ log(s"Attaching AntiPolyType-carrying overloaded type to $sym")
+ // Multiple alternatives which are within bounds; spin up an
+ // overloaded type which carries an "AntiPolyType" as a prefix.
+ val tparams = newAsSeenFromMap(pre, hd.owner) mapOver hd.typeParams
+ val bounds = tparams map (_.tpeHK) // see e.g., #1236
+ val tpe = PolyType(tparams, OverloadedType(AntiPolyType(pre, bounds), alts))
+ finish(sym setInfo tpe, tpe)
+ }
+ matchingLength.alternatives match {
+ case Nil => fail()
+ case alt :: Nil => finish(alt, pre memberType alt)
+ case _ => checkWithinBounds(matchingLength filter (alt => isWithinBounds(pre, alt.owner, alt.typeParams, argtypes)))
}
- // Side effects tree with symbol and type
- tree setSymbol resSym setType resTpe
}
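
At the source level this amounts to selecting overloads by the arity of their type parameter lists when explicit type arguments are given; a small illustrative example (not from this change):

    object PolyAltCases {
      def pick[A](a: A): A = a
      def pick[A, B](a: A, b: B): (A, B) = (a, b)

      pick[Int](1)               // only the one-type-parameter alternative matches [Int]
      pick[Int, String](1, "a")  // only the two-type-parameter alternative matches [Int, String]
    }
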
}
}
-
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 2dbfa1d0d3..86ba3d2164 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -1,6 +1,7 @@
package scala.tools.nsc
package typechecker
+import java.lang.Math.min
import symtab.Flags._
import scala.tools.nsc.util._
import scala.reflect.runtime.ReflectionUtils
@@ -9,7 +10,12 @@ import scala.reflect.ClassTag
import scala.reflect.internal.util.Statistics
import scala.reflect.macros.util._
import scala.util.control.ControlThrowable
-import scala.reflect.macros.runtime.AbortMacroException
+import scala.reflect.macros.runtime.{AbortMacroException, MacroRuntimes}
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.macros.compiler.DefaultMacroCompiler
+import scala.tools.reflect.FastTrack
+import scala.runtime.ScalaRunTime
+import Fingerprint._
/**
* Code to deal with macros, namely with:
@@ -36,7 +42,7 @@ import scala.reflect.macros.runtime.AbortMacroException
* (Expr(elems))
* (TypeTag(Int))
*/
-trait Macros extends scala.tools.reflect.FastTrack with Traces {
+trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
self: Analyzer =>
import global._
@@ -74,7 +80,9 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
* Includes a path to load the implementation via Java reflection,
* and various accounting information necessary when composing an argument list for the reflective invocation.
*/
- private case class MacroImplBinding(
+ case class MacroImplBinding(
+ // Is this macro impl a bundle (a trait extending Macro) or a vanilla def?
+ val isBundle: Boolean,
// Java class name of the class that contains the macro implementation
// is used to load the corresponding object with Java reflection
className: String,
@@ -82,14 +90,22 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
// `className` and `methName` are all we need to reflectively invoke a macro implementation
// because macro implementations cannot be overloaded
methName: String,
- // flattens the macro impl's parameter lists having symbols replaced with metadata
- // currently metadata is an index of the type parameter corresponding to that type tag (if applicable)
- // f.ex. for: def impl[T: WeakTypeTag, U: WeakTypeTag, V](c: Context)(x: c.Expr[T]): (U, V) = ???
- // `signature` will be equal to List(-1, -1, 0, 1)
- signature: List[Int],
+ // flattens the macro impl's parameter lists, with symbols replaced by their fingerprints
+ // currently fingerprints are calculated solely from types of the symbols:
+ // * c.Expr[T] => LiftedTyped
+ // * c.Tree => LiftedUntyped
+ // * c.WeakTypeTag[T] => Tagged(index of the type parameter corresponding to that type tag)
+ // * everything else (e.g. scala.reflect.macros.Context) => Other
+ // f.ex. for: def impl[T: WeakTypeTag, U, V: WeakTypeTag](c: Context)(x: c.Expr[T], y: c.Tree): (U, V) = ???
+ // `signature` will be equal to List(List(Other), List(LiftedTyped, LiftedUntyped), List(Tagged(0), Tagged(2)))
+ signature: List[List[Fingerprint]],
// type arguments part of a macro impl ref (the right-hand side of a macro definition)
// these trees don't refer to a macro impl, so we can pickle them as is
- targs: List[Tree])
+ targs: List[Tree]) {
+
+ // Was this binding derived from a `def ... = macro ???` definition?
+ def is_??? = className == Predef_???.owner.javaClassName && methName == Predef_???.name.encoded
+ }
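
A standalone sketch of the fingerprint encoding described in the comment above (the nested ADT below is illustrative; the compiler's own Fingerprint type, imported earlier in this file, is defined elsewhere in this change): every flattened macro impl parameter is classified by the shape of its type, and tag evidence parameters remember which type parameter they witness.

    object FingerprintSketch {
      sealed trait Fingerprint
      case object Other extends Fingerprint                        // e.g. the Context parameter
      case object LiftedTyped extends Fingerprint                  // c.Expr[T]
      case object LiftedUntyped extends Fingerprint                // c.Tree
      final case class Tagged(paramPos: Int) extends Fingerprint   // c.WeakTypeTag[T]

      // For the impl shown in the comment above, the pickled signature would be:
      val exampleSignature: List[List[Fingerprint]] =
        List(List(Other), List(LiftedTyped, LiftedUntyped), List(Tagged(0), Tagged(2)))
    }
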
/** Macro def -> macro impl bindings are serialized into a `macroImpl` annotation
* with synthetic content that carries the payload described in `MacroImplBinding`.
@@ -102,31 +118,35 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
*
* @scala.reflect.macros.internal.macroImpl(
* `macro`(
- * "signature" = List(-1),
+ * "isBundle" = false,
+ * "signature" = List(Other),
* "methodName" = "impl",
- * "versionFormat" = 1,
+ * "versionFormat" = <current version format>,
* "className" = "Macros$"))
*/
- private object MacroImplBinding {
- val versionFormat = 1
+ object MacroImplBinding {
+ val versionFormat = 5.0
def pickleAtom(obj: Any): Tree =
obj match {
case list: List[_] => Apply(Ident(ListModule), list map pickleAtom)
case s: String => Literal(Constant(s))
- case i: Int => Literal(Constant(i))
+ case d: Double => Literal(Constant(d))
+ case b: Boolean => Literal(Constant(b))
+ case f: Fingerprint => Literal(Constant(f.value))
}
def unpickleAtom(tree: Tree): Any =
tree match {
case Apply(list @ Ident(_), args) if list.symbol == ListModule => args map unpickleAtom
case Literal(Constant(s: String)) => s
- case Literal(Constant(i: Int)) => i
+ case Literal(Constant(d: Double)) => d
+ case Literal(Constant(b: Boolean)) => b
+ case Literal(Constant(i: Int)) => new Fingerprint(i)
}
def pickle(macroImplRef: Tree): Tree = {
- val MacroImplReference(owner, macroImpl, targs) = macroImplRef
- val paramss = macroImpl.paramss
+ val MacroImplReference(isBundle, owner, macroImpl, targs) = macroImplRef
// todo. refactor when fixing SI-5498
def className: String = {
@@ -142,13 +162,21 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
loop(owner)
}
- def signature: List[Int] = {
- val transformed = transformTypeTagEvidenceParams(paramss, (param, tparam) => tparam)
- transformed.flatten map (p => if (p.isTerm) -1 else p.paramPos)
+ def signature: List[List[Fingerprint]] = {
+ def fingerprint(tpe: Type): Fingerprint = tpe.dealiasWiden match {
+ case TypeRef(_, RepeatedParamClass, underlying :: Nil) => fingerprint(underlying)
+ case ExprClassOf(_) => LiftedTyped
+ case TreeType() => LiftedUntyped
+ case _ => Other
+ }
+
+ val transformed = transformTypeTagEvidenceParams(macroImplRef, (param, tparam) => tparam)
+ mmap(transformed)(p => if (p.isTerm) fingerprint(p.info) else Tagged(p.paramPos))
}
val payload = List[(String, Any)](
"versionFormat" -> versionFormat,
+ "isBundle" -> isBundle,
"className" -> className,
"methodName" -> macroImpl.name.toString,
"signature" -> signature
@@ -188,350 +216,119 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
val Apply(_, pickledPayload) = wrapped
val payload = pickledPayload.map{ case Assign(k, v) => (unpickleAtom(k), unpickleAtom(v)) }.toMap
- val pickleVersionFormat = payload("versionFormat").asInstanceOf[Int]
- if (versionFormat != pickleVersionFormat) throw new Error(s"macro impl binding format mismatch: expected $versionFormat, actual $pickleVersionFormat")
+ def fail(msg: String) = abort(s"bad macro impl binding: $msg")
+ def unpickle[T](field: String, clazz: Class[T]): T = {
+ def failField(msg: String) = fail(s"$field $msg")
+ if (!payload.contains(field)) failField("is supposed to be there")
+ val raw: Any = payload(field)
+ if (raw == null) failField(s"is not supposed to be null")
+ val expected = ScalaRunTime.box(clazz)
+ val actual = raw.getClass
+ if (!expected.isAssignableFrom(actual)) failField(s"has wrong type: expected $expected, actual $actual")
+ raw.asInstanceOf[T]
+ }
+
+ val pickleVersionFormat = unpickle("versionFormat", classOf[Double])
+ if (versionFormat != pickleVersionFormat) fail(s"expected version format $versionFormat, actual $pickleVersionFormat")
- val className = payload("className").asInstanceOf[String]
- val methodName = payload("methodName").asInstanceOf[String]
- val signature = payload("signature").asInstanceOf[List[Int]]
- MacroImplBinding(className, methodName, signature, targs)
+ val isBundle = unpickle("isBundle", classOf[Boolean])
+ val className = unpickle("className", classOf[String])
+ val methodName = unpickle("methodName", classOf[String])
+ val signature = unpickle("signature", classOf[List[List[Fingerprint]]])
+ MacroImplBinding(isBundle, className, methodName, signature, targs)
}
}
- private def bindMacroImpl(macroDef: Symbol, macroImplRef: Tree): Unit = {
+ def bindMacroImpl(macroDef: Symbol, macroImplRef: Tree): Unit = {
val pickle = MacroImplBinding.pickle(macroImplRef)
macroDef withAnnotation AnnotationInfo(MacroImplAnnotation.tpe, List(pickle), Nil)
}
- private def loadMacroImplBinding(macroDef: Symbol): MacroImplBinding = {
+ def loadMacroImplBinding(macroDef: Symbol): MacroImplBinding = {
val Some(AnnotationInfo(_, List(pickle), _)) = macroDef.getAnnotation(MacroImplAnnotation)
MacroImplBinding.unpickle(pickle)
}
- /** Transforms parameters lists of a macro impl.
- * The `transform` function is invoked only for WeakTypeTag evidence parameters.
- *
- * The transformer takes two arguments: a value parameter from the parameter list
- * and a type parameter that is witnesses by the value parameter.
- *
- * If the transformer returns a NoSymbol, the value parameter is not included from the result.
- * If the transformer returns something else, this something else is included in the result instead of the value parameter.
- *
- * Despite of being highly esoteric, this function significantly simplifies signature analysis.
- * For example, it can be used to strip macroImpl.paramss from the evidences (necessary when checking def <-> impl correspondence)
- * or to streamline creation of the list of macro arguments.
- */
- private def transformTypeTagEvidenceParams(paramss: List[List[Symbol]], transform: (Symbol, Symbol) => Symbol): List[List[Symbol]] = {
- if (paramss.isEmpty || paramss.last.isEmpty) return paramss // no implicit parameters in the signature => nothing to do
- if (paramss.head.isEmpty || !(paramss.head.head.tpe <:< MacroContextClass.tpe)) return paramss // no context parameter in the signature => nothing to do
- def transformTag(param: Symbol): Symbol = param.tpe.dealias match {
- case TypeRef(SingleType(SingleType(NoPrefix, c), universe), WeakTypeTagClass, targ :: Nil)
- if c == paramss.head.head && universe == MacroContextUniverse =>
- transform(param, targ.typeSymbol)
- case _ =>
- param
- }
- val transformed = paramss.last map transformTag filter (_ ne NoSymbol)
- if (transformed.isEmpty) paramss.init else paramss.init :+ transformed
- }
-
- def computeMacroDefTypeFromMacroImpl(macroDdef: DefDef, macroImpl: Symbol): Type = {
- // Step I. Transform c.Expr[T] to T
- var runtimeType = macroImpl.tpe.finalResultType.dealias match {
- case TypeRef(_, ExprClass, runtimeType :: Nil) => runtimeType
- case _ => AnyTpe // so that macro impls with rhs = ??? don't screw up our inference
- }
-
- // Step II. Transform type parameters of a macro implementation into type arguments in a macro definition's body
- runtimeType = runtimeType.substituteTypes(macroImpl.typeParams, loadMacroImplBinding(macroDdef.symbol).targs.map(_.tpe))
-
- // Step III. Transform c.prefix.value.XXX to this.XXX and implParam.value.YYY to defParam.YYY
- def unsigma(tpe: Type): Type =
- transformTypeTagEvidenceParams(macroImpl.paramss, (param, tparam) => NoSymbol) match {
- case (implCtxParam :: Nil) :: implParamss =>
- val implToDef = flatMap2(implParamss, macroDdef.vparamss)(map2(_, _)((_, _))).toMap
- object UnsigmaTypeMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) =>
- val pre1 = pre match {
- case SingleType(SingleType(SingleType(NoPrefix, c), prefix), value) if c == implCtxParam && prefix == MacroContextPrefix && value == ExprValue =>
- ThisType(macroDdef.symbol.owner)
- case SingleType(SingleType(NoPrefix, implParam), value) if value == ExprValue =>
- implToDef get implParam map (defParam => SingleType(NoPrefix, defParam.symbol)) getOrElse pre
+ def computeMacroDefTypeFromMacroImplRef(macroDdef: DefDef, macroImplRef: Tree): Type = {
+ macroImplRef match {
+ case MacroImplReference(_, _, macroImpl, targs) =>
+ // Step I. Transform c.Expr[T] to T and everything else to Any
+ var runtimeType = decreaseMetalevel(macroImpl.info.finalResultType)
+
+ // Step II. Transform type parameters of a macro implementation into type arguments in a macro definition's body
+ runtimeType = runtimeType.substituteTypes(macroImpl.typeParams, targs map (_.tpe))
+
+ // Step III. Transform c.prefix.value.XXX to this.XXX and implParam.value.YYY to defParam.YYY
+ def unsigma(tpe: Type): Type =
+ transformTypeTagEvidenceParams(macroImplRef, (param, tparam) => NoSymbol) match {
+ case (implCtxParam :: Nil) :: implParamss =>
+ val implToDef = flatMap2(implParamss, macroDdef.vparamss)(map2(_, _)((_, _))).toMap
+ object UnsigmaTypeMap extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(pre, sym, args) =>
+ val pre1 = pre match {
+ case SingleType(SingleType(SingleType(NoPrefix, c), prefix), value) if c == implCtxParam && prefix == MacroContextPrefix && value == ExprValue =>
+ ThisType(macroDdef.symbol.owner)
+ case SingleType(SingleType(NoPrefix, implParam), value) if value == ExprValue =>
+ implToDef get implParam map (defParam => SingleType(NoPrefix, defParam.symbol)) getOrElse pre
+ case _ =>
+ pre
+ }
+ val args1 = args map mapOver
+ TypeRef(pre1, sym, args1)
case _ =>
- pre
+ mapOver(tp)
}
- val args1 = args map mapOver
- TypeRef(pre1, sym, args1)
- case _ =>
- mapOver(tp)
- }
- }
-
- UnsigmaTypeMap(tpe)
- case _ =>
- tpe
- }
+ }
- unsigma(runtimeType)
- }
+ UnsigmaTypeMap(tpe)
+ case _ =>
+ tpe
+ }
- /** A reference macro implementation signature compatible with a given macro definition.
- *
- * In the example above for the following macro def:
- * def foo[T](xs: List[T]): T = macro fooBar
- *
- * This function will return:
- * (c: scala.reflect.macros.Context)(xs: c.Expr[List[T]]): c.Expr[T]
- *
- * Note that type tag evidence parameters are not included into the result.
- * Type tag context bounds for macro impl tparams are optional.
- * Therefore compatibility checks ignore such parameters, and we don't need to bother about them here.
- *
- * @param macroDef The macro definition symbol
- * @param tparams The type parameters of the macro definition
- * @param vparamss The value parameters of the macro definition
- * @param retTpe The return type of the macro definition
- */
- private def macroImplSig(macroDef: Symbol, tparams: List[TypeDef], vparamss: List[List[ValDef]], retTpe: Type): (List[List[Symbol]], Type) = {
- // had to move method's body to an object because of the recursive dependencies between sigma and param
- object SigGenerator {
- def WeakTagClass = getMember(MacroContextClass, tpnme.WeakTypeTag)
- def ExprClass = getMember(MacroContextClass, tpnme.Expr)
- val cache = scala.collection.mutable.Map[Symbol, Symbol]()
- val ctxParam = makeParam(nme.macroContext, macroDef.pos, MacroContextClass.tpe, SYNTHETIC)
- val paramss = List(ctxParam) :: mmap(vparamss)(param)
- val implReturnType = typeRef(singleType(NoPrefix, ctxParam), ExprClass, List(sigma(retTpe)))
-
- object SigmaTypeMap extends TypeMap {
- def mapPrefix(pre: Type) = pre match {
- case ThisType(sym) if sym == macroDef.owner =>
- singleType(singleType(singleType(NoPrefix, ctxParam), MacroContextPrefix), ExprValue)
- case SingleType(NoPrefix, sym) =>
- mfind(vparamss)(_.symbol == sym).fold(pre)(p => singleType(singleType(NoPrefix, param(p)), ExprValue))
- case _ =>
- mapOver(pre)
- }
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) =>
- val pre1 = mapPrefix(pre)
- val args1 = mapOverArgs(args, sym.typeParams)
- if ((pre eq pre1) && (args eq args1)) tp
- else typeRef(pre1, sym, args1)
- case _ =>
- mapOver(tp)
- }
- }
- def sigma(tpe: Type): Type = SigmaTypeMap(tpe)
-
- def makeParam(name: Name, pos: Position, tpe: Type, flags: Long) =
- macroDef.newValueParameter(name.toTermName, pos, flags) setInfo tpe
- def implType(isType: Boolean, origTpe: Type): Type = {
- def tsym = if (isType) WeakTagClass else ExprClass
- def targ = origTpe.typeArgs.headOption getOrElse NoType
-
- if (isRepeatedParamType(origTpe))
- scalaRepeatedType(implType(isType, sigma(targ)))
- else
- typeRef(singleType(NoPrefix, ctxParam), tsym, List(sigma(origTpe)))
- }
- def param(tree: Tree): Symbol = (
- cache.getOrElseUpdate(tree.symbol, {
- val sym = tree.symbol
- makeParam(sym.name, sym.pos, implType(sym.isType, sym.tpe), sym.flags)
- })
- )
+ unsigma(runtimeType)
+ case _ =>
+ ErrorType
}
-
- import SigGenerator._
- macroTraceVerbose("generating macroImplSigs for: ")(macroDef)
- macroTraceVerbose("tparams are: ")(tparams)
- macroTraceVerbose("vparamss are: ")(vparamss)
- macroTraceVerbose("retTpe is: ")(retTpe)
- macroTraceVerbose("macroImplSig is: ")((paramss, implReturnType))
}
- /** Verifies that the body of a macro def typechecks to a reference to a static public non-overloaded method,
+ /** Verifies that the body of a macro def typechecks to a reference to a static public non-overloaded method or a top-level macro bundle,
* and that that method is signature-wise compatible with the given macro definition.
*
- * @return Typechecked rhs of the given macro definition if everything is okay.
+ * @return Macro impl reference for the given macro definition if everything is okay.
* EmptyTree if an error occurs.
*/
- def typedMacroBody(typer: Typer, macroDdef: DefDef): Tree =
- try new MacroTyper(typer, macroDdef).typed
- catch { case MacroBodyTypecheckException => EmptyTree }
-
- class MacroTyper(val typer: Typer, val macroDdef: DefDef) extends MacroErrors {
- // Phase I: sanity checks
+ def typedMacroBody(typer: Typer, macroDdef: DefDef): Tree = {
val macroDef = macroDdef.symbol
- macroLogVerbose("typechecking macro def %s at %s".format(macroDef, macroDdef.pos))
assert(macroDef.isMacro, macroDdef)
- if (fastTrack contains macroDef) MacroDefIsFastTrack()
- if (!typer.checkFeature(macroDdef.pos, MacrosFeature, immediate = true)) MacroFeatureNotEnabled()
-
- // we use typed1 instead of typed, because otherwise adapt is going to mess us up
- // if adapt sees <qualifier>.<method>, it will want to perform eta-expansion and will fail
- // unfortunately, this means that we have to manually trigger macro expansion
- // because it's adapt which is responsible for automatic expansion during typechecking
- def typecheckRhs(rhs: Tree): Tree = {
- try {
- // interestingly enough, just checking isErroneous doesn't cut it
- // e.g. a "type arguments [U] do not conform to method foo's type parameter bounds" error
- // doesn't manifest itself as an error in the resulting tree
- val prevNumErrors = reporter.ERROR.count
- var rhs1 = typer.typed1(rhs, EXPRmode, WildcardType)
- def rhsNeedsMacroExpansion = rhs1.symbol != null && rhs1.symbol.isMacro && !rhs1.symbol.isErroneous
- while (rhsNeedsMacroExpansion) {
- rhs1 = macroExpand1(typer, rhs1) match {
- case Success(expanded) =>
- try {
- val typechecked = typer.typed1(expanded, EXPRmode, WildcardType)
- macroLogVerbose("typechecked1:%n%s%n%s".format(typechecked, showRaw(typechecked)))
- typechecked
- } finally {
- popMacroContext()
- }
- case Fallback(fallback) =>
- typer.typed1(fallback, EXPRmode, WildcardType)
- case Delayed(delayed) =>
- delayed
- case Skipped(skipped) =>
- skipped
- case Failure(failure) =>
- failure
- }
- }
- val typecheckedWithErrors = (rhs1 exists (_.isErroneous)) || reporter.ERROR.count != prevNumErrors
- if (typecheckedWithErrors) MacroDefUntypeableBodyError()
- rhs1
- } catch {
- case ex: TypeError =>
- typer.reportTypeError(context, rhs.pos, ex)
- MacroDefUntypeableBodyError()
- }
- }
-
- // Phase II: typecheck the right-hand side of the macro def
- val typed = typecheckRhs(macroDdef.rhs)
- typed match {
- case MacroImplReference(owner, meth, targs) =>
- if (!meth.isMethod) MacroDefInvalidBodyError()
- if (!meth.isPublic) MacroImplNotPublicError()
- if (meth.isOverloaded) MacroImplOverloadedError()
- if (!owner.isStaticOwner && !owner.moduleClass.isStaticOwner) MacroImplNotStaticError()
- if (meth.typeParams.length != targs.length) MacroImplWrongNumberOfTypeArgumentsError(typed)
- bindMacroImpl(macroDef, typed)
- case _ =>
- MacroDefInvalidBodyError()
- }
-
- // Phase III: check compatibility between the macro def and its macro impl
- // this check ignores type tag evidence parameters, because type tag context bounds are optional
- // aXXX (e.g. aparamss) => characteristics of the macro impl ("a" stands for "actual")
- // rXXX (e.g. rparamss) => characteristics of a reference macro impl signature synthesized from the macro def ("r" stands for "reference")
- val macroImpl = typed.symbol
- val aparamss = transformTypeTagEvidenceParams(macroImpl.paramss, (param, tparam) => NoSymbol)
- val aret = macroImpl.tpe.finalResultType
- val macroDefRet =
- if (!macroDdef.tpt.isEmpty) typer.typedType(macroDdef.tpt).tpe
- else computeMacroDefTypeFromMacroImpl(macroDdef, macroImpl)
- val (rparamss, rret) = macroImplSig(macroDef, macroDdef.tparams, macroDdef.vparamss, macroDefRet)
-
- val implicitParams = aparamss.flatten filter (_.isImplicit)
- if (implicitParams.nonEmpty) MacroImplNonTagImplicitParameters(implicitParams)
- if (aparamss.length != rparamss.length) MacroImplParamssMismatchError()
-
- val atparams = macroImpl.typeParams
- val atvars = atparams map freshVar
- def atpeToRtpe(atpe: Type) = atpe.substSym(aparamss.flatten, rparamss.flatten).instantiateTypeParams(atparams, atvars)
-
- try {
- map2(aparamss, rparamss)((aparams, rparams) => {
- if (aparams.length < rparams.length) MacroImplMissingParamsError(aparams, rparams)
- if (rparams.length < aparams.length) MacroImplExtraParamsError(aparams, rparams)
- })
-
- // cannot fuse these loops because if aparamss.flatten != rparamss.flatten
- // then `atpeToRtpe` is going to fail with an unsound substitution
- map2(aparamss.flatten, rparamss.flatten)((aparam, rparam) => {
- if (aparam.name != rparam.name && !rparam.isSynthetic) MacroImplParamNameMismatchError(aparam, rparam)
- if (isRepeated(aparam) ^ isRepeated(rparam)) MacroImplVarargMismatchError(aparam, rparam)
- val aparamtpe = aparam.tpe.dealias match {
- case RefinedType(List(tpe), Scope(sym)) if tpe =:= MacroContextClass.tpe && sym.allOverriddenSymbols.contains(MacroContextPrefixType) => tpe
- case tpe => tpe
- }
- checkMacroImplParamTypeMismatch(atpeToRtpe(aparamtpe), rparam)
- })
-
- checkMacroImplResultTypeMismatch(atpeToRtpe(aret), rret)
-
- val maxLubDepth = lubDepth(aparamss.flatten map (_.tpe)) max lubDepth(rparamss.flatten map (_.tpe))
- val atargs = solvedTypes(atvars, atparams, atparams map varianceInType(aret), upper = false, depth = maxLubDepth)
- val boundsOk = typer.silent(_.infer.checkBounds(macroDdef, NoPrefix, NoSymbol, atparams, atargs, ""))
- boundsOk match {
- case SilentResultValue(true) => // do nothing, success
- case SilentResultValue(false) | SilentTypeError(_) => MacroImplTargMismatchError(atargs, atparams)
- }
- } catch {
- case ex: NoInstance => MacroImplTparamInstantiationError(atparams, ex)
- }
- }
- /** Macro classloader that is used to resolve and run macro implementations.
- * Loads classes from from -cp (aka the library classpath).
- * Is also capable of detecting REPL and reusing its classloader.
- */
- lazy val macroClassloader: ClassLoader = findMacroClassLoader()
-
- /** Produces a function that can be used to invoke macro implementation for a given macro definition:
- * 1) Looks up macro implementation symbol in this universe.
- * 2) Loads its enclosing class from the macro classloader.
- * 3) Loads the companion of that enclosing class from the macro classloader.
- * 4) Resolves macro implementation within the loaded companion.
- *
- * @return Requested runtime if macro implementation can be loaded successfully from either of the mirrors,
- * `null` otherwise.
- */
- type MacroRuntime = MacroArgs => Any
- private val macroRuntimesCache = perRunCaches.newWeakMap[Symbol, MacroRuntime]()
- private def macroRuntime(macroDef: Symbol): MacroRuntime = {
- macroTraceVerbose("looking for macro implementation: ")(macroDef)
+ macroLogVerbose("typechecking macro def %s at %s".format(macroDef, macroDdef.pos))
if (fastTrack contains macroDef) {
- macroLogVerbose("macro expansion is serviced by a fast track")
- fastTrack(macroDef)
+ macroLogVerbose("typecheck terminated unexpectedly: macro is fast track")
+ assert(!macroDdef.tpt.isEmpty, "fast track macros must provide result type")
+ EmptyTree
} else {
- macroRuntimesCache.getOrElseUpdate(macroDef, {
- val binding = loadMacroImplBinding(macroDef)
- val className = binding.className
- val methName = binding.methName
- macroLogVerbose(s"resolved implementation as $className.$methName")
-
- // I don't use Scala reflection here, because it seems to interfere with JIT magic
- // whenever you instantiate a mirror (and not do anything with in, just instantiate), performance drops by 15-20%
- // I'm not sure what's the reason - for me it's pure voodoo
- // upd. my latest experiments show that everything's okay
- // it seems that in 2.10.1 we can easily switch to Scala reflection
- try {
- macroTraceVerbose("loading implementation class: ")(className)
- macroTraceVerbose("classloader is: ")(ReflectionUtils.show(macroClassloader))
- val implObj = ReflectionUtils.staticSingletonInstance(macroClassloader, className)
- // relies on the fact that macro impls cannot be overloaded
- // so every methName can resolve to at maximum one method
- val implMeths = implObj.getClass.getDeclaredMethods.find(_.getName == methName)
- val implMeth = implMeths getOrElse { throw new NoSuchMethodException(s"$className.$methName") }
- macroLogVerbose("successfully loaded macro impl as (%s, %s)".format(implObj, implMeth))
- args => implMeth.invoke(implObj, ((args.c +: args.others) map (_.asInstanceOf[AnyRef])): _*)
- } catch {
- case ex: Exception =>
- macroTraceVerbose(s"macro runtime failed to load: ")(ex.toString)
- macroDef setFlag IS_ERROR
- null
- }
- })
+ def fail() = { if (macroDef != null) macroDef setFlag IS_ERROR; macroDdef setType ErrorType; EmptyTree }
+ def success(macroImplRef: Tree) = { bindMacroImpl(macroDef, macroImplRef); macroImplRef }
+
+ if (!typer.checkFeature(macroDdef.pos, MacrosFeature, immediate = true)) {
+ macroLogVerbose("typecheck terminated unexpectedly: language.experimental.macros feature is not enabled")
+ fail()
+ } else {
+ val macroDdef1: macroDdef.type = macroDdef
+ val typer1: typer.type = typer
+ val macroCompiler = new {
+ val global: self.global.type = self.global
+ val typer: self.global.analyzer.Typer = typer1.asInstanceOf[self.global.analyzer.Typer]
+ val macroDdef: self.global.DefDef = macroDdef1
+ } with DefaultMacroCompiler
+ val macroImplRef = macroCompiler.resolveMacroImpl
+ if (macroImplRef.isEmpty) fail() else success(macroImplRef)
+ }
}
}
- private def macroContext(typer: Typer, prefixTree: Tree, expandeeTree: Tree): MacroContext = {
+ def macroContext(typer: Typer, prefixTree: Tree, expandeeTree: Tree): MacroContext = {
new {
val universe: self.global.type = self.global
val callsiteTyper: universe.analyzer.Typer = typer.asInstanceOf[global.analyzer.Typer]
@@ -548,92 +345,112 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
case class MacroArgs(c: MacroContext, others: List[Any])
private def macroArgs(typer: Typer, expandee: Tree): MacroArgs = {
- val macroDef = expandee.symbol
- val prefixTree = expandee.collect{ case Select(qual, name) => qual }.headOption.getOrElse(EmptyTree)
- val context = expandee.attachments.get[MacroRuntimeAttachment].flatMap(_.macroContext).getOrElse(macroContext(typer, prefixTree, expandee))
- var typeArgs = List[Tree]()
- val exprArgs = ListBuffer[List[Expr[_]]]()
- def collectMacroArgs(tree: Tree): Unit = tree match {
- case Apply(fn, args) =>
- // todo. infer precise typetag for this Expr, namely the declared type of the corresponding macro impl argument
- exprArgs.prepend(args map (arg => context.Expr[Nothing](arg)(TypeTag.Nothing)))
- collectMacroArgs(fn)
- case TypeApply(fn, args) =>
- typeArgs = args
- collectMacroArgs(fn)
- case _ =>
- }
- collectMacroArgs(expandee)
-
- val argcDoesntMatch = macroDef.paramss.length != exprArgs.length
- val nullaryArgsEmptyParams = exprArgs.isEmpty && macroDef.paramss == ListOfNil
- if (argcDoesntMatch && !nullaryArgsEmptyParams) { typer.TyperErrorGen.MacroPartialApplicationError(expandee) }
+ val macroDef = expandee.symbol
+ val paramss = macroDef.paramss
+ val treeInfo.Applied(core, targs, argss) = expandee
+ val prefix = core match { case Select(qual, _) => qual; case _ => EmptyTree }
+ val context = expandee.attachments.get[MacroRuntimeAttachment].flatMap(_.macroContext).getOrElse(macroContext(typer, prefix, expandee))
+
+ macroLogVerbose(sm"""
+ |context: $context
+ |prefix: $prefix
+ |targs: $targs
+ |argss: $argss
+ |paramss: $paramss
+ """.trim)
- val argss: List[List[Any]] = exprArgs.toList
- macroTraceVerbose("context: ")(context)
- macroTraceVerbose("argss: ")(argss)
+ import typer.TyperErrorGen._
+ val isNullaryArgsEmptyParams = argss.isEmpty && paramss == ListOfNil
+ if (paramss.length < argss.length) MacroTooManyArgumentListsError(expandee)
+ if (paramss.length > argss.length && !isNullaryArgsEmptyParams) MacroTooFewArgumentListsError(expandee)
- val preparedArgss: List[List[Any]] =
+ val macroImplArgs: List[Any] =
if (fastTrack contains macroDef) {
// Take a dry run of the fast track implementation
- if (fastTrack(macroDef) validate expandee) argss
- else typer.TyperErrorGen.MacroPartialApplicationError(expandee)
+ if (fastTrack(macroDef) validate expandee) argss.flatten
+ else MacroTooFewArgumentListsError(expandee)
}
else {
- // if paramss have typetag context bounds, add an arglist to argss if necessary and instantiate the corresponding evidences
- // consider the following example:
- //
- // class D[T] {
- // class C[U] {
- // def foo[V] = macro Impls.foo[T, U, V]
- // }
- // }
- //
- // val outer1 = new D[Int]
- // val outer2 = new outer1.C[String]
- // outer2.foo[Boolean]
- //
- // then T and U need to be inferred from the lexical scope of the call using `asSeenFrom`
- // whereas V won't be resolved by asSeenFrom and need to be loaded directly from `expandee` which needs to contain a TypeApply node
- // also, macro implementation reference may contain a regular type as a type argument, then we pass it verbatim
- val binding = loadMacroImplBinding(macroDef)
- macroTraceVerbose("binding: ")(binding)
- val tags = binding.signature filter (_ != -1) map (paramPos => {
- val targ = binding.targs(paramPos).tpe.typeSymbol
- val tpe = if (targ.isTypeParameterOrSkolem) {
- if (targ.owner == macroDef) {
- // doesn't work when macro def is compiled separately from its usages
- // then targ is not a skolem and isn't equal to any of macroDef.typeParams
- // val argPos = targ.deSkolemize.paramPos
- val argPos = macroDef.typeParams.indexWhere(_.name == targ.name)
- typeArgs(argPos).tpe
+ def calculateMacroArgs(binding: MacroImplBinding) = {
+ val signature = if (binding.isBundle) binding.signature else binding.signature.tail
+ macroLogVerbose(s"binding: $binding")
+
+ // STEP I: prepare value arguments of the macro expansion
+ // wrap argss in c.Expr if necessary (i.e. if corresponding macro impl param is of type c.Expr[T])
+ // expand varargs (nb! varargs can apply to any parameter section, not necessarily to the last one)
+ val trees = map3(argss, paramss, signature)((args, defParams, implParams) => {
+ val isVarargs = isVarArgsList(defParams)
+ if (isVarargs) {
+ if (defParams.length > args.length + 1) MacroTooFewArgumentsError(expandee)
+ } else {
+ if (defParams.length < args.length) MacroTooManyArgumentsError(expandee)
+ if (defParams.length > args.length) MacroTooFewArgumentsError(expandee)
+ }
+
+ val wrappedArgs = mapWithIndex(args)((arg, j) => {
+ val fingerprint = implParams(min(j, implParams.length - 1))
+ fingerprint match {
+ case LiftedTyped => context.Expr[Nothing](arg)(TypeTag.Nothing) // TODO: SI-5752
+ case LiftedUntyped => arg
+ case _ => abort(s"unexpected fingerprint $fingerprint in $binding with paramss being $paramss " +
+ s"corresponding to arg $arg in $argss")
+ }
+ })
+
+ if (isVarargs) {
+ val (normal, varargs) = wrappedArgs splitAt (defParams.length - 1)
+ normal :+ varargs // pack all varargs into a single Seq argument (varargs Scala style)
+ } else wrappedArgs
+ })
+ macroLogVerbose(s"trees: $trees")
+
+ // STEP II: prepare type arguments of the macro expansion
+ // if paramss have typetag context bounds, add an arglist to argss if necessary and instantiate the corresponding evidences
+ // consider the following example:
+ //
+ // class D[T] {
+ // class C[U] {
+ // def foo[V] = macro Impls.foo[T, U, V]
+ // }
+ // }
+ //
+ // val outer1 = new D[Int]
+ // val outer2 = new outer1.C[String]
+ // outer2.foo[Boolean]
+ //
+ // then T and U need to be inferred from the lexical scope of the call using `asSeenFrom`
+      // whereas V won't be resolved by asSeenFrom and needs to be loaded directly from `expandee`, which needs to contain a TypeApply node
+      // also, a macro implementation reference may contain a regular type as a type argument, in which case we pass it verbatim
+ val tags = signature.flatten collect { case f if f.isTag => f.paramPos } map (paramPos => {
+ val targ = binding.targs(paramPos).tpe.typeSymbol
+ val tpe = if (targ.isTypeParameterOrSkolem) {
+ if (targ.owner == macroDef) {
+ // doesn't work when macro def is compiled separately from its usages
+ // then targ is not a skolem and isn't equal to any of macroDef.typeParams
+ // val argPos = targ.deSkolemize.paramPos
+ val argPos = macroDef.typeParams.indexWhere(_.name == targ.name)
+ targs(argPos).tpe
+ } else
+ targ.tpe.asSeenFrom(
+ if (prefix == EmptyTree) macroDef.owner.tpe else prefix.tpe,
+ macroDef.owner)
} else
- targ.tpe.asSeenFrom(
- if (prefixTree == EmptyTree) macroDef.owner.tpe else prefixTree.tpe,
- macroDef.owner)
- } else
- targ.tpe
- context.WeakTypeTag(tpe)
- })
- macroTraceVerbose("tags: ")(tags)
-
- // transforms argss taking into account varargness of paramss
- // note that typetag context bounds are only declared on macroImpls
- // so this optional arglist might not match macroDef's paramlist
- // nb! varargs can apply to any parameter section, not necessarily to the last one
- mapWithIndex(argss :+ tags)((as, i) => {
- val mapsToParamss = macroDef.paramss.indices contains i
- if (mapsToParamss) {
- val ps = macroDef.paramss(i)
- if (isVarArgsList(ps)) {
- val (normal, varargs) = as splitAt (ps.length - 1)
- normal :+ varargs // pack all varargs into a single List argument
- } else as
- } else as
- })
+ targ.tpe
+ context.WeakTypeTag(tpe)
+ })
+ macroLogVerbose(s"tags: $tags")
+
+ // if present, tags always come in a separate parameter/argument list
+ // that's because macro impls can't have implicit parameters other than c.WeakTypeTag[T]
+ (trees :+ tags).flatten
+ }
+
+ val binding = loadMacroImplBinding(macroDef)
+ if (binding.is_???) Nil
+ else calculateMacroArgs(binding)
}
- macroTraceVerbose("preparedArgss: ")(preparedArgss)
- MacroArgs(context, preparedArgss.flatten)
+ macroLogVerbose(s"macroImplArgs: $macroImplArgs")
+ MacroArgs(context, macroImplArgs)
}
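For orientation, the impl-side shape that macroImplArgs is lined up against can be sketched with an ordinary def macro (hypothetical names; assumes the user-facing scala.reflect.macros.Context API of this era): a vararg section of the macro def reaches the impl packed as a single repeated c.Expr parameter, and the WeakTypeTag evidence sits in the separate trailing implicit list.

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object Impls {
      // varargs in the macro def surface as one repeated c.Expr parameter (a single "packed" argument);
      // the type tag evidence lives in its own trailing implicit list
      def firstOrZeroImpl[T](c: Context)(xs: c.Expr[Int]*)(implicit tt: c.WeakTypeTag[T]): c.Expr[Int] = {
        import c.universe._
        xs.headOption.getOrElse(c.Expr[Int](Literal(Constant(0))))
      }
    }

    object Macros {
      def firstOrZero[T](xs: Int*): Int = macro Impls.firstOrZeroImpl[T]
    }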
/** Keeps track of macros in-flight.
@@ -668,7 +485,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
* 1) If necessary desugars the `expandee` to fit into `macroExpand1`
*
* Then `macroExpand1`:
- * 2) Checks whether the expansion needs to be delayed (see `mustDelayMacroExpansion`)
+ * 2) Checks whether the expansion needs to be delayed
* 3) Loads macro implementation using `macroMirror`
* 4) Synthesizes invocation arguments for the macro implementation
* 5) Checks that the result is a tree or an expr bound to this universe
@@ -769,15 +586,38 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
if (macroDebugVerbose) println(s"typecheck #1 (against expectedTpe = $expectedTpe): $expanded")
val expanded1 = typer.context.withImplicitsEnabled(typer.typed(expanded, mode, expectedTpe))
if (expanded1.isErrorTyped) {
- if (macroDebugVerbose) println(s"typecheck #1 has failed: ${typer.context.errBuffer}")
+ if (macroDebugVerbose) println(s"typecheck #1 has failed: ${typer.context.reportBuffer.errors}")
expanded1
} else {
if (macroDebugVerbose) println(s"typecheck #2 (against pt = $pt): $expanded1")
val expanded2 = typer.context.withImplicitsEnabled(super.onSuccess(expanded1))
- if (macroDebugVerbose && expanded2.isErrorTyped) println(s"typecheck #2 has failed: ${typer.context.errBuffer}")
+ if (macroDebugVerbose && expanded2.isErrorTyped) println(s"typecheck #2 has failed: ${typer.context.reportBuffer.errors}")
expanded2
}
}
+ override def onDelayed(delayed: Tree) = {
+ // If we've been delayed (i.e. bailed out of the expansion because of undetermined type params present in the expandee),
+ // then there are two possible situations we're in:
+ // 1) We're in POLYmode, when the typer tests the waters wrt type inference
+ // (e.g. as in typedArgToPoly in doTypedApply).
+ // 2) We're out of POLYmode, which means that the typer is out of tricks to infer our type
+ // (e.g. if we're an argument to a function call, then this means that no previous argument lists
+ // can determine our type variables for us).
+ //
+ // Situation #1 is okay for us, since there's no pressure. In POLYmode we're just verifying that
+ // there's nothing outrageously wrong with our undetermined type params (from what I understand!).
+ //
+      // Situation #2 requires measures to be taken. If we're in it, then no one's going to help us infer
+      // the undetermined type params. Therefore we need to do something ourselves, or otherwise this
+      // expandee will forever remain unexpanded (see SI-5692). A traditional way out of this conundrum
+ // is to call `instantiate` and let the inferencer try to find the way out. It works for simple cases,
+ // but sometimes, if the inferencer lacks information, it will be forced to approximate. This prevents
+ // an important class of macros, fundep materializers, from working, which I perceive is a problem we need to solve.
+ // For details see SI-7470.
+ val shouldInstantiate = typer.context.undetparams.nonEmpty && !mode.inPolyMode
+ if (shouldInstantiate) typer.instantiatePossiblyExpectingUnit(delayed, mode, pt)
+ else delayed
+ }
}
expander(expandee)
}
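To make the delayed-expansion scenario above concrete, here is a sketch with hypothetical names (assuming the scala.reflect.macros.Context API, with the macro compiled in an earlier run): the macro's type argument is fixed only by the enclosing call, so once the typer leaves POLYmode it must instantiate the remaining undetermined param before the expansion can go through.

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object Nils {
      def empty[T]: List[T] = macro Nils.emptyImpl[T]
      def emptyImpl[T](c: Context)(implicit tt: c.WeakTypeTag[T]): c.Expr[List[T]] = {
        import c.universe._
        reify(List.empty[T]) // T is spliced in from the WeakTypeTag evidence
      }
    }

    // in a later compilation run:
    object Use {
      def firstChar(xs: List[String]) = xs.headOption.map(_.head)
      firstChar(Nils.empty) // while the argument is typed, T is still undetermined, so the
                            // expansion is delayed; instantiating pins T to String and lets it expand
    }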
@@ -802,7 +642,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
withInfoLevel(nodePrinters.InfoLevel.Quiet) {
if (expandee.symbol.isErroneous || (expandee exists (_.isErroneous))) {
val reason = if (expandee.symbol.isErroneous) "not found or incompatible macro implementation" else "erroneous arguments"
- macroTraceVerbose("cancelled macro expansion because of %s: ".format(reason))(expandee)
+ macroLogVerbose(s"cancelled macro expansion because of $reason: $expandee")
Cancel(typer.infer.setError(expandee))
}
else try {
@@ -853,6 +693,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
}
expanded match {
case expanded: Expr[_] if expandee.symbol.isTermMacro => validateResultingTree(expanded.tree)
+ case expanded: Tree if expandee.symbol.isTermMacro => validateResultingTree(expanded)
case _ => MacroExpansionHasInvalidTypeError(expandee, expanded)
}
} catch {
@@ -877,7 +718,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
private def macroExpandWithoutRuntime(typer: Typer, expandee: Tree): MacroStatus = {
import typer.TyperErrorGen._
val fallbackSym = expandee.symbol.nextOverriddenSymbol orElse MacroImplementationNotFoundError(expandee)
- macroTraceLite("falling back to: ")(fallbackSym)
+ macroLogLite(s"falling back to: $fallbackSym")
def mkFallbackTree(tree: Tree): Tree = {
tree match {
@@ -929,7 +770,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
undetparams --= undetNoMore map (_.id)
if (undetparams.isEmpty) {
hasPendingMacroExpansions = true
- macroTraceVerbose("macro expansion is pending: ")(expandee)
+ macroLogVerbose(s"macro expansion is pending: $expandee")
}
case _ =>
// do nothing
@@ -962,3 +803,24 @@ object MacrosStats {
val macroExpandCount = Statistics.newCounter ("#macro expansions", "typer")
val macroExpandNanos = Statistics.newSubTimer("time spent in macroExpand", typerNanos)
}
+
+class Fingerprint(val value: Int) extends AnyVal {
+ def paramPos = { assert(isTag, this); value }
+ def isTag = value >= 0
+ def isOther = this == Other
+ def isExpr = this == LiftedTyped
+ def isTree = this == LiftedUntyped
+ override def toString = this match {
+ case Other => "Other"
+ case LiftedTyped => "Expr"
+ case LiftedUntyped => "Tree"
+ case _ => s"Tag($value)"
+ }
+}
+
+object Fingerprint {
+ def Tagged(tparamPos: Int) = new Fingerprint(tparamPos)
+ val Other = new Fingerprint(-1)
+ val LiftedTyped = new Fingerprint(-2)
+ val LiftedUntyped = new Fingerprint(-3)
+}
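Read against the definitions just added, the fingerprint encoding comes out as follows (an illustrative reading, not additional patch code):

    val tag0 = Fingerprint.Tagged(0)           // WeakTypeTag evidence for type param #0 of the macro def
    assert(tag0.isTag && tag0.paramPos == 0)
    assert(Fingerprint.LiftedTyped.isExpr)     // a c.Expr[T] impl param: the argument gets wrapped in c.Expr
    assert(Fingerprint.LiftedUntyped.isTree)   // a c.Tree impl param: the argument is passed verbatim
    assert(Fingerprint.Other.isOther)          // anything else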
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index 50383a1e78..646bf3a153 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -103,7 +103,7 @@ trait MethodSynthesis {
createMethod(original)(m => gen.mkMethodCall(newMethod, transformArgs(m.paramss.head map Ident)))
def createSwitchMethod(name: Name, range: Seq[Int], returnType: Type)(f: Int => Tree) = {
- createMethod(name, List(IntClass.tpe), returnType) { m =>
+ createMethod(name, List(IntTpe), returnType) { m =>
val arg0 = Ident(m.firstParam)
val default = DEFAULT ==> THROW(IndexOutOfBoundsExceptionClass, arg0)
val cases = range.map(num => CASE(LIT(num)) ==> f(num)).toList :+ default
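For reference, the switch such a method encodes corresponds to the hand-written equivalent below (illustrative source for a two-field class; the synthesizer emits it as a tree):

    class P(val a: Int, val b: String) {
      def productElement(n: Int): Any = n match {
        case 0 => a
        case 1 => b
        case _ => throw new IndexOutOfBoundsException(n.toString)
      }
    }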
@@ -382,7 +382,7 @@ trait MethodSynthesis {
def name = tree.name
def category = GetterTargetClass
def flagsMask = GetterFlags
- def flagsExtra = ACCESSOR | ( if (tree.mods.isMutable) 0 else STABLE )
+ def flagsExtra = ACCESSOR.toLong | ( if (tree.mods.isMutable) 0 else STABLE )
override def validate() {
assert(derivedSym != NoSymbol, tree)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index de3010c371..0305aab844 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -9,6 +9,7 @@ package typechecker
import scala.collection.mutable
import scala.annotation.tailrec
import symtab.Flags._
+import scala.language.postfixOps
/** This trait declares methods to create symbols and to enter them into scopes.
*
@@ -47,7 +48,6 @@ trait Namers extends MethodSynthesis {
private class NormalNamer(context: Context) extends Namer(context)
def newNamer(context: Context): Namer = new NormalNamer(context)
- def newNamerFor(context: Context, tree: Tree): Namer = newNamer(context.makeNewScope(tree, tree.symbol))
abstract class Namer(val context: Context) extends MethodSynth with NamerContextErrors { thisNamer =>
// overridden by the presentation compiler
@@ -254,7 +254,7 @@ trait Namers extends MethodSynthesis {
case DocDef(_, defn) => enterSym(defn)
case tree @ Import(_, _) =>
assignSymbol(tree)
- returnContext = context.makeNewImport(tree)
+ returnContext = context.make(tree)
case _ =>
}
returnContext
@@ -765,7 +765,7 @@ trait Namers extends MethodSynthesis {
def accessorTypeCompleter(tree: ValDef, isSetter: Boolean) = mkTypeCompleter(tree) { sym =>
logAndValidate(sym) {
sym setInfo {
- val tp = if (isSetter) MethodType(List(sym.newSyntheticValueParam(typeSig(tree))), UnitClass.tpe)
+ val tp = if (isSetter) MethodType(List(sym.newSyntheticValueParam(typeSig(tree))), UnitTpe)
else NullaryMethodType(typeSig(tree))
pluginsTypeSigAccessor(tp, typer, tree, sym)
}
@@ -863,7 +863,7 @@ trait Namers extends MethodSynthesis {
private def templateSig(templ: Template): Type = {
val clazz = context.owner
def checkParent(tpt: Tree): Type = {
- if (tpt.tpe.isError) AnyRefClass.tpe
+ if (tpt.tpe.isError) AnyRefTpe
else tpt.tpe
}
@@ -1267,7 +1267,7 @@ trait Namers extends MethodSynthesis {
val defaultTree = atPos(vparam.pos.focus) {
DefDef(
- Modifiers(meth.flags & DefaultGetterFlags) | SYNTHETIC | DEFAULTPARAM | oflag,
+ Modifiers(meth.flags & DefaultGetterFlags) | (SYNTHETIC | DEFAULTPARAM | oflag).toLong,
name, deftParams, defvParamss, defTpt, defRhs)
}
if (!isConstr)
@@ -1342,13 +1342,16 @@ trait Namers extends MethodSynthesis {
private def importSig(imp: Import) = {
val Import(expr, selectors) = imp
val expr1 = typer.typedQualifier(expr)
- typer checkStable expr1
+
if (expr1.symbol != null && expr1.symbol.isRootPackage)
RootImportError(imp)
if (expr1.isErrorTyped)
ErrorType
else {
+ if (!treeInfo.isStableIdentifierPattern(expr1))
+ typer.TyperErrorGen.UnstableTreeError(expr1)
+
val newImport = treeCopy.Import(imp, expr1, selectors).asInstanceOf[Import]
checkSelectors(newImport)
transformed(imp) = newImport
@@ -1490,8 +1493,8 @@ trait Namers extends MethodSynthesis {
private object RestrictJavaArraysMap extends TypeMap {
def apply(tp: Type): Type = tp match {
case TypeRef(pre, ArrayClass, List(elemtp))
- if elemtp.typeSymbol.isAbstractType && !(elemtp <:< ObjectClass.tpe) =>
- TypeRef(pre, ArrayClass, List(intersectionType(List(elemtp, ObjectClass.tpe))))
+ if elemtp.typeSymbol.isAbstractType && !(elemtp <:< ObjectTpe) =>
+ TypeRef(pre, ArrayClass, List(intersectionType(List(elemtp, ObjectTpe))))
case _ =>
mapOver(tp)
}
@@ -1513,7 +1516,7 @@ trait Namers extends MethodSynthesis {
AbstractMemberWithModiferError(sym, flag)
}
def checkNoConflict(flag1: Int, flag2: Int) {
- if (sym hasAllFlags flag1 | flag2)
+ if (sym hasAllFlags flag1.toLong | flag2)
IllegalModifierCombination(sym, flag1, flag2)
}
if (sym.isImplicit) {
@@ -1629,7 +1632,7 @@ trait Namers extends MethodSynthesis {
// @M an abstract type's type parameters are entered.
// TODO: change to isTypeMember ?
if (defnSym.isAbstractType)
- newNamerFor(ctx, tree) enterSyms tparams //@M
+ newNamer(ctx.makeNewScope(tree, tree.symbol)) enterSyms tparams //@M
restp complete sym
}
}
@@ -1683,8 +1686,13 @@ trait Namers extends MethodSynthesis {
* call this method?
*/
def companionSymbolOf(original: Symbol, ctx: Context): Symbol = {
+ val owner = original.owner
+ // SI-7264 Force the info of owners from previous compilation runs.
+    // Doing this generally would trigger cycles; that's why we also
+    // use the lower-level scan through the current Context as a fallback.
+ if (!currentRun.compiles(owner)) owner.initialize
original.companionSymbol orElse {
- ctx.lookup(original.name.companionName, original.owner).suchThat(sym =>
+ ctx.lookup(original.name.companionName, owner).suchThat(sym =>
(original.isTerm || sym.hasModuleFlag) &&
(sym isCoDefinedWith original)
)
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 6921f8ce27..8e9933f734 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -19,6 +19,7 @@ trait NamesDefaults { self: Analyzer =>
import global._
import definitions._
import NamesDefaultsErrorsGen._
+ import treeInfo.WildcardStarArg
// Default getters of constructors are added to the companion object in the
// typeCompleter of the constructor (methodSig). To compute the signature,
@@ -201,7 +202,7 @@ trait NamesDefaults { self: Analyzer =>
if (module == NoSymbol) None
else {
val ref = atPos(pos.focus)(gen.mkAttributedRef(pre, module))
- if (module.isStable && pre.isStable) // fixes #4524. the type checker does the same for
+ if (treeInfo.admitsTypeSelection(ref)) // fixes #4524. the type checker does the same for
ref.setType(singleType(pre, module)) // typedSelect, it calls "stabilize" on the result.
Some(ref)
}
@@ -278,14 +279,16 @@ trait NamesDefaults { self: Analyzer =>
val repeated = isScalaRepeatedParamType(paramTpe)
val argTpe = (
if (repeated) arg match {
- case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => expr.tpe
- case _ => seqType(arg.tpe)
+ case WildcardStarArg(expr) => expr.tpe
+ case _ => seqType(arg.tpe)
}
- else
- // Note stabilizing can lead to a non-conformant argument when existentials are involved, e.g. neg/t3507-old.scala, hence the filter.
- gen.stableTypeFor(arg).filter(_ <:< paramTpe).getOrElse(arg.tpe)
- // We have to deconst or types inferred from literal arguments will be Constant(_), e.g. pos/z1730.scala.
- ).deconst
+ else {
+        // TODO In 83c9c764b, we tried to use a stable type here to fix SI-7234. But the resulting TypeTree over a
+ // singleton type without an original TypeTree fails to retypecheck after a resetLocalAttrs (SI-7516),
+ // which is important for (at least) macros.
+ arg.tpe
+ }
+ ).widen // have to widen or types inferred from literal defaults will be singletons
val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos, newFlags = ARTIFACT) setInfo (
if (byName) functionType(Nil, argTpe) else argTpe
)
@@ -302,11 +305,8 @@ trait NamesDefaults { self: Analyzer =>
} else {
new ChangeOwnerTraverser(context.owner, sym) traverse arg // fixes #4502
if (repeated) arg match {
- case Typed(expr, Ident(tpnme.WILDCARD_STAR)) =>
- expr
- case _ =>
- val factory = Select(gen.mkAttributedRef(SeqModule), nme.apply)
- blockTyper.typed(Apply(factory, List(resetLocalAttrs(arg))))
+ case WildcardStarArg(expr) => expr
+ case _ => blockTyper typed gen.mkSeqApply(resetLocalAttrs(arg))
} else arg
}
Some(atPos(body.pos)(ValDef(sym, body).setType(NoType)))
@@ -330,9 +330,12 @@ trait NamesDefaults { self: Analyzer =>
// type the application without names; put the arguments in definition-site order
val typedApp = doTypedApply(tree, funOnly, reorderArgs(namelessArgs, argPos), mode, pt)
typedApp match {
- // Extract the typed arguments, restore the call-site evaluation order (using
- // ValDef's in the block), change the arguments to these local values.
- case Apply(expr, typedArgs) if !(typedApp :: typedArgs).exists(_.isErrorTyped) => // bail out with erroneous args, see SI-7238
+ case Apply(expr, typedArgs) if (typedApp :: typedArgs).exists(_.isErrorTyped) =>
+            setError(tree) // bail out with an erroneous Apply *or* erroneous arguments, see SI-7238, SI-7509
+ case Apply(expr, typedArgs) =>
+ // Extract the typed arguments, restore the call-site evaluation order (using
+ // ValDef's in the block), change the arguments to these local values.
+
// typedArgs: definition-site order
val formals = formalTypes(expr.tpe.paramTypes, typedArgs.length, removeByName = false, removeRepeated = false)
// valDefs: call-site order
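The transformation described in those comments amounts to the following hand-written illustration (hypothetical names): arguments are evaluated in call-site order via synthetic vals, then passed in definition-site order.

    object NamedArgsDemo {
      def draw(x: Int, y: Int, color: String = "black") = s"$x,$y,$color"
      def nextX() = 1
      def nextY() = 2

      draw(y = nextY(), x = nextX())
      // is typed roughly as the block
      //   { val x$1 = nextY(); val x$2 = nextX(); draw(x$2, x$1, "black") }
      // preserving call-site evaluation order while the call itself uses definition-site order
    }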
@@ -370,6 +373,8 @@ trait NamesDefaults { self: Analyzer =>
}
}
+ def makeNamedTypes(syms: List[Symbol]) = syms map (sym => NamedType(sym.name, sym.tpe))
+
def missingParams[T](args: List[T], params: List[Symbol], argName: T => Option[Name] = nameOfNamedArg _): (List[Symbol], Boolean) = {
val namedArgs = args.dropWhile(arg => {
val n = argName(arg)
@@ -451,20 +456,6 @@ trait NamesDefaults { self: Analyzer =>
} else NoSymbol
}
- private def savingUndeterminedTParams[T](context: Context)(fn: List[Symbol] => T): T = {
- val savedParams = context.extractUndetparams()
- val savedReporting = context.ambiguousErrors
-
- context.setAmbiguousErrors(false)
- try fn(savedParams)
- finally {
- context.setAmbiguousErrors(savedReporting)
- //@M note that we don't get here when an ambiguity was detected (during the computation of res),
- // as errorTree throws an exception
- context.undetparams = savedParams
- }
- }
-
/** A full type check is very expensive; let's make sure there's a name
* somewhere which could potentially be ambiguous before we go that route.
*/
@@ -479,12 +470,10 @@ trait NamesDefaults { self: Analyzer =>
// def f[T](x: T) = x
// var x = 0
// f(x = 1) << "x = 1" typechecks with expected type WildcardType
- savingUndeterminedTParams(context) { udp =>
+ val udp = context.undetparams
+ context.savingUndeterminedTypeParams(reportAmbiguous = false) {
val subst = new SubstTypeMap(udp, udp map (_ => WildcardType)) {
- override def apply(tp: Type): Type = super.apply(tp match {
- case TypeRef(_, ByNameParamClass, x :: Nil) => x
- case _ => tp
- })
+ override def apply(tp: Type): Type = super.apply(dropByName(tp))
}
// This throws an exception which is caught in `tryTypedApply` (as it
// uses `silent`) - unfortunately, tryTypedApply recovers from the
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index efd4fd804f..c9849eebb5 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -59,7 +59,22 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
override def changesBaseClasses = false
override def transformInfo(sym: Symbol, tp: Type): Type = {
- if (sym.isModule && !sym.isStatic) sym setFlag (lateMETHOD | STABLE)
+ // !!! This is a sketchy way to do things.
+ // It would be better to replace the module symbol with a method symbol
+ // rather than creating this module/method hybrid which must be special
+ // cased all over the place. Look for the call sites which use(d) some
+ // variation of "isMethod && !isModule", which to an observer looks like
+ // a nonsensical condition. (It is now "isModuleNotMethod".)
+ if (sym.isModule && !sym.isStatic) {
+ sym setFlag lateMETHOD | STABLE
+ // Note that this as far as we can see it works equally well
+ // to set the METHOD flag here and dump lateMETHOD, but it does
+ // mean that under separate compilation the typer will see
+      // Note that, as far as we can see, it works equally well
+ // So for now lateMETHOD lives while we try to convince ourselves
+ // we can live without it or deliver that info some other way.
+ log(s"Stabilizing module method for ${sym.fullLocationString}")
+ }
super.transformInfo(sym, tp)
}
@@ -111,7 +126,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val defaultMethodNames = defaultGetters map (sym => nme.defaultGetterToMethod(sym.name))
defaultMethodNames.toList.distinct foreach { name =>
- val methods = clazz.info.findMember(name, 0L, METHOD, false).alternatives
+ val methods = clazz.info.findMember(name, 0L, METHOD, stableOnly = false).alternatives
def hasDefaultParam(tpe: Type): Boolean = tpe match {
case MethodType(params, restpe) => (params exists (_.hasDefault)) || hasDefaultParam(restpe)
case _ => false
@@ -144,7 +159,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// This has become noisy with implicit classes.
if (settings.lint && settings.developer) {
clazz.info.decls filter (x => x.isImplicit && x.typeParams.nonEmpty) foreach { sym =>
- val alts = clazz.info.decl(sym.name).alternatives
+ // implicit classes leave both a module symbol and a method symbol as residue
+ val alts = clazz.info.decl(sym.name).alternatives filterNot (_.isModule)
if (alts.size > 1)
alts foreach (x => unit.warning(x.pos, "parameterized overloaded implicit methods are not visible as view bounds"))
}
@@ -390,7 +406,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
overrideError("cannot be used here - classes can only override abstract types")
} else if (other.isEffectivelyFinal) { // (1.2)
overrideError("cannot override final member")
- } else if (!other.isDeferred && !member.isAnyOverride && !member.isSynthetic) { // (*)
+ } else if (!other.isDeferred && !other.hasFlag(DEFAULTMETHOD) && !member.isAnyOverride && !member.isSynthetic) { // (*)
// (*) Synthetic exclusion for (at least) default getters, fixes SI-5178. We cannot assign the OVERRIDE flag to
// the default getter: one default getter might sometimes override, sometimes not. Example in comment on ticket.
if (isNeitherInClass && !(other.owner isSubClass member.owner))
@@ -495,7 +511,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
if (member.isStable && !otherTp.isVolatile) {
- if (memberTp.isVolatile)
+ // (1.4), pt 2 -- member.isStable && memberTp.isVolatile started being possible after SI-6815
+ // (before SI-6815, !symbol.tpe.isVolatile was implied by symbol.isStable)
+ // TODO: allow overriding when @uncheckedStable?
+ if (memberTp.isVolatile)
overrideError("has a volatile type; cannot override a member with non-volatile type")
else memberTp.dealiasWiden.resultType match {
case rt: RefinedType if !(rt =:= otherTp) && !(checkedCombinations contains rt.parents) =>
@@ -585,8 +604,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
def stubImplementations: List[String] = {
// Grouping missing methods by the declaring class
val regrouped = missingMethods.groupBy(_.owner).toList
- def membersStrings(members: List[Symbol]) =
- members.sortBy("" + _.name) map (m => m.defStringSeenAs(clazz.tpe memberType m) + " = ???")
+ def membersStrings(members: List[Symbol]) = {
+ members foreach fullyInitializeSymbol
+ members.sortBy(_.name) map (m => m.defStringSeenAs(clazz.tpe_* memberType m) + " = ???")
+ }
if (regrouped.tail.isEmpty)
membersStrings(regrouped.head._2)
@@ -729,7 +750,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// Have to use matchingSymbol, not a method involving overridden symbols,
// because the scala type system understands that an abstract method here does not
// override a concrete method in Object. The jvm, however, does not.
- val overridden = decl.matchingSymbol(ObjectClass, ObjectClass.tpe)
+ val overridden = decl.matchingSymbol(ObjectClass, ObjectTpe)
if (overridden.isFinal)
unit.error(decl.pos, "trait cannot redefine final method from class AnyRef")
}
@@ -926,9 +947,14 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case _ =>
}
+ private def isObjectOrAnyComparisonMethod(sym: Symbol) = sym match {
+ case Object_eq | Object_ne | Object_== | Object_!= | Any_== | Any_!= => true
+ case _ => false
+ }
def checkSensible(pos: Position, fn: Tree, args: List[Tree]) = fn match {
- case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 =>
- def isReferenceOp = name == nme.eq || name == nme.ne
+ case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 && isObjectOrAnyComparisonMethod(fn.symbol) =>
+ // Make sure the 'eq' or 'ne' method is the one in AnyRef.
+ def isReferenceOp = fn.symbol == Object_eq || fn.symbol == Object_ne
def isNew(tree: Tree) = tree match {
case Function(_, _)
| Apply(Select(New(_), nme.CONSTRUCTOR), _) => true
@@ -965,12 +991,15 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val s = fn.symbol
(s == Object_==) || (s == Object_!=) || (s == Any_==) || (s == Any_!=)
}
+ def haveSubclassRelationship = (actual isSubClass receiver) || (receiver isSubClass actual)
+
// Whether the operands+operator represent a warnable combo (assuming anyrefs)
// Looking for comparisons performed with ==/!= in combination with either an
// equals method inherited from Object or a case class synthetic equals (for
// which we know the logic.)
def isWarnable = isReferenceOp || (isUsingDefaultScalaOp && isUsingWarnableEquals)
- def isEitherNullable = (NullClass.tpe <:< receiver.info) || (NullClass.tpe <:< actual.info)
+ def isEitherNullable = (NullTpe <:< receiver.info) || (NullTpe <:< actual.info)
+ def isEitherValueClass = actual.isDerivedValueClass || receiver.isDerivedValueClass
def isBoolean(s: Symbol) = unboxedValueClass(s) == BooleanClass
def isUnit(s: Symbol) = unboxedValueClass(s) == UnitClass
def isNumeric(s: Symbol) = isNumericValueClass(unboxedValueClass(s)) || isAnyNumber(s)
@@ -982,6 +1011,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// used to short-circuit unrelatedTypes check if both sides are special
def isSpecial(s: Symbol) = isMaybeAnyValue(s) || isAnyNumber(s)
val nullCount = onSyms(_ filter (_ == NullClass) size)
+ def isNonsenseValueClassCompare = (
+ !haveSubclassRelationship
+ && isUsingDefaultScalaOp
+ && isEitherValueClass
+ && !isCaseEquals
+ )
def nonSensibleWarning(what: String, alwaysEqual: Boolean) = {
val msg = alwaysEqual == (name == nme.EQ || name == nme.eq)
@@ -993,10 +1028,13 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
def nonSensiblyNeq() = nonSensible("", false)
def nonSensiblyNew() = nonSensibleWarning("a fresh object", false)
+ def unrelatedMsg = name match {
+ case nme.EQ | nme.eq => "never compare equal"
+ case _ => "always compare unequal"
+ }
def unrelatedTypes() = {
- val msg = if (name == nme.EQ || name == nme.eq)
- "never compare equal" else "always compare unequal"
- unit.warning(pos, typesString + " are unrelated: they will most likely " + msg)
+ val weaselWord = if (isEitherValueClass) "" else " most likely"
+ unit.warning(pos, s"$typesString are unrelated: they will$weaselWord $unrelatedMsg")
}
if (nullCount == 2) // null == null
@@ -1035,15 +1073,19 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
}
+ // warn if one but not the other is a derived value class
+ // this is especially important to enable transitioning from
+ // regular to value classes without silent failures.
+ if (isNonsenseValueClassCompare)
+ unrelatedTypes()
// possibleNumericCount is insufficient or this will warn on e.g. Boolean == j.l.Boolean
- if (isWarnable && nullCount == 0 && !(isSpecial(receiver) && isSpecial(actual))) {
+ else if (isWarnable && nullCount == 0 && !(isSpecial(receiver) && isSpecial(actual))) {
// better to have lubbed and lost
def warnIfLubless(): Unit = {
val common = global.lub(List(actual.tpe, receiver.tpe))
- if (ObjectClass.tpe <:< common)
+ if (ObjectTpe <:< common)
unrelatedTypes()
}
- def eitherSubclasses = (actual isSubClass receiver) || (receiver isSubClass actual)
// warn if actual has a case parent that is not same as receiver's;
// if actual is not a case, then warn if no common supertype, as below
if (isCaseEquals) {
@@ -1056,14 +1098,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
//else
// if a class, it must be super to thisCase (and receiver) since not <: thisCase
if (!actual.isTrait && !(receiver isSubClass actual)) nonSensiblyNeq()
- else if (!eitherSubclasses) warnIfLubless()
+ else if (!haveSubclassRelationship) warnIfLubless()
case _ =>
}
}
- else if (actual isSubClass receiver) ()
- else if (receiver isSubClass actual) ()
// warn only if they have no common supertype below Object
- else {
+ else if (!haveSubclassRelationship) {
warnIfLubless()
}
}
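The comparison the new isNonsenseValueClassCompare branch is aimed at looks like this (illustrative):

    class Meters(val value: Int) extends AnyVal
    object Demo {
      def check = new Meters(1) == 1 // one side is a derived value class and the types are
                                     // unrelated, so this now warns: they will never compare equal
    }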
@@ -1261,6 +1301,16 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
}
+ private def checkDelayedInitSelect(qual: Tree, sym: Symbol, pos: Position) = {
+ def isLikelyUninitialized = (
+ (sym.owner isSubClass DelayedInitClass)
+ && !qual.tpe.isInstanceOf[ThisType]
+ && sym.accessedOrSelf.isVal
+ )
+ if (settings.lint.value && isLikelyUninitialized)
+ unit.warning(pos, s"Selecting ${sym} from ${sym.owner}, which extends scala.DelayedInit, is likely to yield an uninitialized value")
+ }
+
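The access pattern checkDelayedInitSelect warns about, sketched with scala.App (which extends DelayedInit, so an App object's vals are initialized lazily via delayedInit):

    object Conf extends App {
      val port = 8080
    }
    object Server {
      def start() = Conf.port // under -Xlint: selecting port from Conf, which extends
                              // scala.DelayedInit, is likely to yield an uninitialized value
    }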
private def lessAccessible(otherSym: Symbol, memberSym: Symbol): Boolean = (
(otherSym != NoSymbol)
&& !otherSym.isProtected
@@ -1464,6 +1514,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if(settings.Xmigration.value != NoScalaVersion)
checkMigration(sym, tree.pos)
checkCompileTimeOnly(sym, tree.pos)
+ checkDelayedInitSelect(qual, sym, tree.pos)
if (sym eq NoSymbol)
devWarning("Select node has NoSymbol! " + tree + " / " + tree.tpe)
@@ -1484,7 +1535,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
private def transformIf(tree: If): Tree = {
val If(cond, thenpart, elsepart) = tree
def unitIfEmpty(t: Tree): Tree =
- if (t == EmptyTree) Literal(Constant(())).setPos(tree.pos).setType(UnitClass.tpe) else t
+ if (t == EmptyTree) Literal(Constant(())).setPos(tree.pos).setType(UnitTpe) else t
cond.tpe match {
case ConstantType(value) =>
@@ -1511,10 +1562,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// Verify classes extending AnyVal meet the requirements
private def checkAnyValSubclass(clazz: Symbol) = {
- if ((clazz isSubClass AnyValClass) && !isPrimitiveValueClass(clazz)) {
+ if (clazz.isDerivedValueClass) {
if (clazz.isTrait)
unit.error(clazz.pos, "Only classes (not traits) are allowed to extend AnyVal")
- else if ((clazz != AnyValClass) && clazz.hasFlag(ABSTRACT))
+ else if (clazz.hasAbstractFlag)
unit.error(clazz.pos, "`abstract' modifier cannot be used with value classes")
}
}
@@ -1591,7 +1642,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
enterReference(tree.pos, tpt.tpe.typeSymbol)
tree
- case Typed(_, Ident(tpnme.WILDCARD_STAR)) if !isRepeatedParamArg(tree) =>
+ case treeInfo.WildcardStarArg(_) if !isRepeatedParamArg(tree) =>
unit.error(tree.pos, "no `: _*' annotation allowed here\n"+
"(such annotations are only allowed in arguments to *-parameters)")
tree
diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
index 64fcda3b80..c616ded704 100644
--- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
@@ -10,7 +10,7 @@ trait StdAttachments {
* At times we need to store this info, because macro expansion can be delayed until its targs are inferred.
* After a macro application has been successfully expanded, this attachment is destroyed.
*/
- type UnaffiliatedMacroContext = scala.reflect.macros.runtime.Context
+ type UnaffiliatedMacroContext = scala.reflect.macros.contexts.Context
type MacroContext = UnaffiliatedMacroContext { val universe: self.global.type }
case class MacroRuntimeAttachment(delayed: Boolean, typerContext: Context, macroContext: Option[MacroContext])
@@ -127,4 +127,31 @@ trait StdAttachments {
/** Determines whether the given tree has an associated SuperArgsAttachment.
*/
def hasSuperArgs(tree: Tree): Boolean = superArgs(tree).nonEmpty
+
+ /** @see markMacroImplRef
+ */
+ case object MacroImplRefAttachment
+
+ /** Marks the tree as a macro impl reference, which is a naked reference to a method.
+ *
+ * This is necessary for typechecking macro impl references (see `DefaultMacroCompiler.defaultResolveMacroImpl`),
+   * because otherwise typing a naked reference will result in "follow this method with `_' if you want to
+ * treat it as a partially applied function" errors.
+ *
+   * This mark suppresses adapt, except when the marked tree is a macro application.
+ */
+ def markMacroImplRef(tree: Tree): Tree = tree.updateAttachment(MacroImplRefAttachment)
+
+ /** Unmarks the tree as a macro impl reference (see `markMacroImplRef` for more information).
+ *
+   * This is necessary when a tree that was previously deemed to be a macro impl reference
+ * typechecks to be a macro application. Then we need to unmark it, expand it and try to treat
+ * its expansion as a macro impl reference.
+ */
+ def unmarkMacroImplRef(tree: Tree): Tree = tree.removeAttachment[MacroImplRefAttachment.type]
+
+ /** Determines whether a tree should or should not be adapted,
+ * because someone has put MacroImplRefAttachment on it.
+ */
+ def isMacroImplRef(tree: Tree): Boolean = tree.attachments.get[MacroImplRefAttachment.type].isDefined
} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index b4270ea322..6933b10a0a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -4,7 +4,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package typechecker
import scala.collection.{ mutable, immutable }
@@ -29,7 +30,7 @@ import symtab.Flags._
*/
abstract class SuperAccessors extends transform.Transform with transform.TypingTransformers {
import global._
- import definitions.{ UnitClass, ObjectClass, isRepeatedParamType, isByNameParamType, Any_asInstanceOf }
+ import definitions._
import analyzer.{ restrictionError }
/** the following two members override abstract members in Transform */
@@ -64,8 +65,8 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
debuglog(s"add super acc ${sym.fullLocationString} to $clazz")
val acc = clazz.newMethod(supername, sel.pos, SUPERACCESSOR | PRIVATE | ARTIFACT) setAlias sym
val tpe = clazz.thisType memberType sym match {
- case t if sym.isModule && !sym.isMethod => NullaryMethodType(t)
- case t => t
+ case t if sym.isModuleNotMethod => NullaryMethodType(t)
+ case t => t
}
acc setInfoAndEnter (tpe cloneInfo acc)
// Diagnostic for SI-7091
@@ -81,32 +82,11 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
private def transformArgs(params: List[Symbol], args: List[Tree]) = {
treeInfo.mapMethodParamsAndArgs(params, args) { (param, arg) =>
if (isByNameParamType(param.tpe))
- withInvalidOwner { checkPackedConforms(transform(arg), param.tpe.typeArgs.head) }
+ withInvalidOwner(transform(arg))
else transform(arg)
}
}
- private def checkPackedConforms(tree: Tree, pt: Type): Tree = {
- def typeError(typer: analyzer.Typer, pos: Position, found: Type, req: Type) {
- if (!found.isErroneous && !req.isErroneous) {
- val msg = analyzer.ErrorUtils.typeErrorMsg(found, req, typer.infer.isPossiblyMissingArgs(found, req))
- typer.context.error(pos, analyzer.withAddendum(pos)(msg))
- if (settings.explaintypes)
- explainTypes(found, req)
- }
- }
-
- if (tree.tpe exists (_.typeSymbol.isExistentialSkolem)) {
- val packed = localTyper.packedType(tree, NoSymbol)
- if (!(packed <:< pt)) {
- val errorContext = localTyper.context.make(localTyper.context.tree)
- errorContext.setReportErrors()
- typeError(analyzer.newTyper(errorContext), tree.pos, packed, pt)
- }
- }
- tree
- }
-
/** Check that a class and its companion object do not both define
 *  a class or module with the same name
*/
@@ -256,9 +236,16 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
}
- // direct calls to aliases of param accessors to the superclass in order to avoid
+
+ def isAccessibleFromSuper(sym: Symbol) = {
+ val pre = SuperType(sym.owner.tpe, qual.tpe)
+ localTyper.context.isAccessible(sym, pre, superAccess = true)
+ }
+
+ // Direct calls to aliases of param accessors to the superclass in order to avoid
// duplicating fields.
- if (sym.isParamAccessor && sym.alias != NoSymbol) {
+ // ... but, only if accessible (SI-6793)
+ if (sym.isParamAccessor && sym.alias != NoSymbol && isAccessibleFromSuper(sym.alias)) {
val result = (localTyper.typedPos(tree.pos) {
Select(Super(qual, tpnme.EMPTY) setPos qual.pos, sym.alias)
}).asInstanceOf[Select]
@@ -471,7 +458,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
val protAcc = clazz.newMethod(accName, field.pos, newFlags = ARTIFACT)
val paramTypes = List(clazz.typeOfThis, field.tpe)
val params = protAcc newSyntheticValueParams paramTypes
- val accessorType = MethodType(params, UnitClass.tpe)
+ val accessorType = MethodType(params, UnitTpe)
protAcc setInfoAndEnter accessorType
val obj :: value :: Nil = params
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index c531caa2e8..61295b5abd 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -9,6 +9,7 @@ package typechecker
import scala.collection.{ mutable, immutable }
import symtab.Flags._
import scala.collection.mutable.ListBuffer
+import scala.language.postfixOps
/** Synthetic method implementations for case classes and case objects.
*
@@ -94,12 +95,12 @@ trait SyntheticMethods extends ast.TreeDSL {
// which they shouldn't.
val accessorLub = (
if (settings.Xexperimental) {
- global.weakLub(accessors map (_.tpe.finalResultType))._1 match {
+ global.weakLub(accessors map (_.tpe.finalResultType)) match {
case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents)
case tp => tp
}
}
- else AnyClass.tpe
+ else AnyTpe
)
def forwardToRuntime(method: Symbol): Tree =
@@ -130,7 +131,7 @@ trait SyntheticMethods extends ast.TreeDSL {
def perElementMethod(name: Name, returnType: Type)(caseFn: Symbol => Tree): Tree =
createSwitchMethod(name, accessors.indices, returnType)(idx => caseFn(accessors(idx)))
- // def productElementNameMethod = perElementMethod(nme.productElementName, StringClass.tpe)(x => LIT(x.name.toString))
+ // def productElementNameMethod = perElementMethod(nme.productElementName, StringTpe)(x => LIT(x.name.toString))
var syntheticCanEqual = false
@@ -139,7 +140,7 @@ trait SyntheticMethods extends ast.TreeDSL {
*/
def canEqualMethod: Tree = {
syntheticCanEqual = true
- createMethod(nme.canEqual_, List(AnyClass.tpe), BooleanClass.tpe)(m =>
+ createMethod(nme.canEqual_, List(AnyTpe), BooleanTpe)(m =>
Ident(m.firstParam) IS_OBJ classExistentialType(clazz))
}
@@ -199,7 +200,7 @@ trait SyntheticMethods extends ast.TreeDSL {
* }
* }
*/
- def equalsCaseClassMethod: Tree = createMethod(nme.equals_, List(AnyClass.tpe), BooleanClass.tpe) { m =>
+ def equalsCaseClassMethod: Tree = createMethod(nme.equals_, List(AnyTpe), BooleanTpe) { m =>
if (accessors.isEmpty)
if (clazz.isFinal) thatTest(m)
else thatTest(m) AND ((thatCast(m) DOT nme.canEqual_)(mkThis))
@@ -213,14 +214,14 @@ trait SyntheticMethods extends ast.TreeDSL {
* val x$1 = that.asInstanceOf[this.C]
* (this.underlying == that.underlying
*/
- def equalsDerivedValueClassMethod: Tree = createMethod(nme.equals_, List(AnyClass.tpe), BooleanClass.tpe) { m =>
+ def equalsDerivedValueClassMethod: Tree = createMethod(nme.equals_, List(AnyTpe), BooleanTpe) { m =>
equalsCore(m, List(clazz.derivedValueClassUnbox))
}
/* The hashcode method for value classes
* def hashCode(): Int = this.underlying.hashCode
*/
- def hashCodeDerivedValueClassMethod: Tree = createMethod(nme.hashCode_, Nil, IntClass.tpe) { m =>
+ def hashCodeDerivedValueClassMethod: Tree = createMethod(nme.hashCode_, Nil, IntTpe) { m =>
Select(mkThisSelect(clazz.derivedValueClassUnbox), nme.hashCode_)
}
@@ -253,19 +254,20 @@ trait SyntheticMethods extends ast.TreeDSL {
def hashcodeImplementation(sym: Symbol): Tree = {
sym.tpe.finalResultType.typeSymbol match {
- case UnitClass | NullClass => Literal(Constant(0))
- case BooleanClass => If(Ident(sym), Literal(Constant(1231)), Literal(Constant(1237)))
- case IntClass | ShortClass | ByteClass | CharClass => Ident(sym)
- case LongClass => callStaticsMethod("longHash")(Ident(sym))
- case DoubleClass => callStaticsMethod("doubleHash")(Ident(sym))
- case FloatClass => callStaticsMethod("floatHash")(Ident(sym))
- case _ => callStaticsMethod("anyHash")(Ident(sym))
+ case UnitClass | NullClass => Literal(Constant(0))
+ case BooleanClass => If(Ident(sym), Literal(Constant(1231)), Literal(Constant(1237)))
+ case IntClass => Ident(sym)
+ case ShortClass | ByteClass | CharClass => Select(Ident(sym), nme.toInt)
+ case LongClass => callStaticsMethod("longHash")(Ident(sym))
+ case DoubleClass => callStaticsMethod("doubleHash")(Ident(sym))
+ case FloatClass => callStaticsMethod("floatHash")(Ident(sym))
+ case _ => callStaticsMethod("anyHash")(Ident(sym))
}
}
def specializedHashcode = {
- createMethod(nme.hashCode_, Nil, IntClass.tpe) { m =>
- val accumulator = m.newVariable(newTermName("acc"), m.pos, SYNTHETIC) setInfo IntClass.tpe
+ createMethod(nme.hashCode_, Nil, IntTpe) { m =>
+ val accumulator = m.newVariable(newTermName("acc"), m.pos, SYNTHETIC) setInfo IntTpe
val valdef = ValDef(accumulator, Literal(Constant(0xcafebabe)))
val mixes = accessors map (acc =>
Assign(
@@ -336,11 +338,13 @@ trait SyntheticMethods extends ast.TreeDSL {
def shouldGenerate(m: Symbol) = {
!hasOverridingImplementation(m) || {
clazz.isDerivedValueClass && (m == Any_hashCode || m == Any_equals) && {
- if (settings.lint) {
- (clazz.info nonPrivateMember m.name) filter (m => (m.owner != AnyClass) && (m.owner != clazz) && !m.isDeferred) andAlso { m =>
- currentUnit.warning(clazz.pos, s"Implementation of ${m.name} inherited from ${m.owner} overridden in $clazz to enforce value class semantics")
- }
- }
+ // Without a means to suppress this warning, I've thought better of it.
+ //
+ // if (settings.lint) {
+ // (clazz.info nonPrivateMember m.name) filter (m => (m.owner != AnyClass) && (m.owner != clazz) && !m.isDeferred) andAlso { m =>
+ // currentUnit.warning(clazz.pos, s"Implementation of ${m.name} inherited from ${m.owner} overridden in $clazz to enforce value class semantics")
+ // }
+ // }
true
}
}
@@ -353,7 +357,7 @@ trait SyntheticMethods extends ast.TreeDSL {
// This method should be generated as private, but apparently if it is, then
// it is name mangled afterward. (Wonder why that is.) So it's only protected.
// For sure special methods like "readResolve" should not be mangled.
- List(createMethod(nme.readResolve, Nil, ObjectClass.tpe)(m => { m setFlag PRIVATE ; REF(clazz.sourceModule) }))
+ List(createMethod(nme.readResolve, Nil, ObjectTpe)(m => { m setFlag PRIVATE ; REF(clazz.sourceModule) }))
}
else Nil
)
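Taken together, the methods this file synthesizes for a small case class correspond roughly to the hand-written code below (illustrative; the hashCode shape assumes the specialized all-primitive path, seeded with 0xcafebabe as above):

    class C(val a: Int) {
      def canEqual(that: Any): Boolean = that.isInstanceOf[C]
      override def equals(that: Any): Boolean = that match {
        case other: C => (this eq other) || (a == other.a && other.canEqual(this))
        case _        => false
      }
      override def hashCode(): Int = {
        var acc = 0xcafebabe                       // the seed used by specializedHashcode above
        acc = scala.runtime.Statics.mix(acc, a)    // Int fields are mixed in as-is
        scala.runtime.Statics.finalizeHash(acc, 1) // finalized with the arity
      }
    }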
diff --git a/src/compiler/scala/tools/nsc/typechecker/Tags.scala b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
index d2d7f57aef..32a66aa4dd 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Tags.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
@@ -30,7 +30,7 @@ trait Tags {
* However we found out that we don't really need this concept, so it got removed.
*
* @param pos Position for error reporting. Please, provide meaningful value.
- * @param tp Type we're looking a ClassTag for, e.g. resolveClassTag(pos, IntClass.tpe) will look for ClassTag[Int].
+   * @param tp Type we're looking up a ClassTag for, e.g. resolveClassTag(pos, IntTpe) will look for ClassTag[Int].
* @param allowMaterialization If true (default) then the resolver is allowed to launch materialization macros when there's no class tag in scope.
* If false then materialization macros are prohibited from running.
*
@@ -49,7 +49,7 @@ trait Tags {
* @param pre Prefix that represents a universe this type tag will be bound to.
* If `pre` is set to `NoType`, then any type tag in scope will do, regardless of its affiliation.
* If `pre` is set to `NoType`, and tag resolution involves materialization, then `mkRuntimeUniverseRef` will be used.
- * @param tp Type we're looking a TypeTag for, e.g. resolveTypeTag(pos, mkRuntimeUniverseRef, IntClass.tpe, false) will look for scala.reflect.runtime.universe.TypeTag[Int].
+   * @param tp Type we're looking up a TypeTag for, e.g. resolveTypeTag(pos, mkRuntimeUniverseRef, IntTpe, false) will look for scala.reflect.runtime.universe.TypeTag[Int].
* @param concrete If true then the result must not contain unresolved (i.e. not spliced) type parameters and abstract type members.
* If false then the function will always succeed (abstract types will be reified as free types).
* @param allowMaterialization If true (default) then the resolver is allowed to launch materialization macros when there's no type tag in scope.
@@ -69,4 +69,4 @@ trait Tags {
resolveTag(pos, taggedTp, allowMaterialization)
}
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 5f45fead91..a93baabc51 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -126,7 +126,7 @@ trait TypeDiagnostics {
else if (!member.isDeferred) member.accessed
else {
val getter = if (member.isSetter) member.getter(member.owner) else member
- val flags = if (getter.setter(member.owner) != NoSymbol) DEFERRED | MUTABLE else DEFERRED
+ val flags = if (getter.setter(member.owner) != NoSymbol) DEFERRED.toLong | MUTABLE else DEFERRED
getter.owner.newValue(getter.name.toTermName, getter.pos, flags) setInfo getter.tpe.resultType
}
@@ -253,7 +253,7 @@ trait TypeDiagnostics {
// For found/required errors where AnyRef would have sufficed:
// explain in greater detail.
def explainAnyVsAnyRef(found: Type, req: Type): String = {
- if (AnyRefClass.tpe <:< req) notAnyRefMessage(found) else ""
+ if (AnyRefTpe <:< req) notAnyRefMessage(found) else ""
}
// TODO - figure out how to avoid doing any work at all
@@ -271,6 +271,24 @@ trait TypeDiagnostics {
)
}
+ def typePatternAdvice(sym: Symbol, ptSym: Symbol) = {
+ val clazz = if (sym.isModuleClass) sym.companionClass else sym
+ val caseString =
+ if (clazz.isCaseClass && (clazz isSubClass ptSym))
+ ( clazz.caseFieldAccessors
+ map (_ => "_") // could use the actual param names here
+ mkString (s"`case ${clazz.name}(", ",", ")`")
+ )
+ else
+ "`case _: " + (clazz.typeParams match {
+ case Nil => "" + clazz.name
+ case xs => xs map (_ => "_") mkString (clazz.name + "[", ",", "]")
+ })+ "`"
+
+ "\nNote: if you intended to match against the class, try "+ caseString
+
+ }
+
case class TypeDiag(tp: Type, sym: Symbol) extends Ordered[TypeDiag] {
// save the name because it will be mutated until it has been
// distinguished from the other types in the same error message
@@ -551,11 +569,7 @@ trait TypeDiagnostics {
}
// The checkDead call from typedArg is more selective.
- def inMode(mode: Mode, tree: Tree): Tree = {
- val modeOK = (mode & (EXPRmode | BYVALmode | POLYmode)) == (EXPRmode | BYVALmode)
- if (modeOK) apply(tree)
- else tree
- }
+ def inMode(mode: Mode, tree: Tree): Tree = if (mode.typingMonoExprByValue) apply(tree) else tree
}
private def symWasOverloaded(sym: Symbol) = sym.owner.isClass && sym.owner.info.member(sym.name).isOverloaded
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala
index eb05486dca..afe6875218 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala
@@ -153,7 +153,7 @@ trait TypeStrings {
private type JClass = java.lang.Class[_]
private val ObjectClass = classOf[java.lang.Object]
private val primitives = Set[String]("byte", "char", "short", "int", "long", "float", "double", "boolean", "void")
- private val primitiveMap = primitives.toList map { x =>
+ private val primitiveMap = (primitives.toList map { x =>
val key = x match {
case "int" => "Integer"
case "char" => "Character"
@@ -165,7 +165,7 @@ trait TypeStrings {
}
("java.lang." + key) -> ("scala." + value)
- } toMap
+ }).toMap
def isAnonClass(cl: Class[_]) = {
val xs = cl.getName.reverse takeWhile (_ != '$')
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 36542cc9d9..cb3a12b60d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -9,7 +9,8 @@
// Added: Thu Apr 12 18:23:58 2007
//todo: disallow C#D in superclass
//todo: treat :::= correctly
-package scala.tools.nsc
+package scala
+package tools.nsc
package typechecker
import scala.collection.mutable
@@ -33,9 +34,9 @@ trait Typers extends Adaptations with Tags {
import TypersStats._
final def forArgMode(fun: Tree, mode: Mode) =
- if (treeInfo.isSelfOrSuperConstrCall(fun)) mode | SCCmode
- else mode
+ if (treeInfo.isSelfOrSuperConstrCall(fun)) mode | SCCmode else mode
+ // printResult(s"forArgMode($fun, $mode) gets SCCmode")(mode | SCCmode)
// namer calls typer.computeType(rhs) on DefDef / ValDef when tpt is empty. the result
// is cached here and re-used in typedDefDef / typedValDef
// Also used to cache imports type-checked by namer.
@@ -106,6 +107,11 @@ trait Typers extends Adaptations with Tags {
import typeDebug.{ ptTree, ptBlock, ptLine }
import TyperErrorGen._
+ val infer = new Inferencer(context0) {
+ // See SI-3281 re undoLog
+ override def isCoercible(tp: Type, pt: Type) = undoLog undo viewExists(tp, pt)
+ }
+
/** Overridden to false in scaladoc and/or interactive. */
def canAdaptConstantTypeToLiteral = true
def canTranslateEmptyListToNil = true
@@ -114,14 +120,6 @@ trait Typers extends Adaptations with Tags {
def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree =
typed(docDef.definition, mode, pt)
- val infer = new Inferencer(context0) {
- override def isCoercible(tp: Type, pt: Type): Boolean = undoLog undo { // #3281
- tp.isError || pt.isError ||
- context0.implicitsEnabled && // this condition prevents chains of views
- inferView(EmptyTree, tp, pt, reportAmbiguous = false) != EmptyTree
- }
- }
-
/** Find implicit arguments and pass them to given tree.
*/
def applyImplicitArgs(fun: Tree): Tree = fun.tpe match {
@@ -150,15 +148,19 @@ trait Typers extends Adaptations with Tags {
} else {
mkArg = gen.mkNamedArg // don't pass the default argument (if any) here, but start emitting named arguments for the following args
if (!param.hasDefault && !paramFailed) {
- context.errBuffer.find(_.kind == ErrorKinds.Divergent) match {
- case Some(divergentImplicit) =>
+ context.reportBuffer.errors.collectFirst {
+ case dte: DivergentImplicitTypeError => dte
+ } match {
+ case Some(divergent) =>
// DivergentImplicit error has higher priority than "no implicit found"
// no need to issue the problem again if we are still in silent mode
if (context.reportErrors) {
- context.issue(divergentImplicit)
- context.condBufferFlush(_.kind == ErrorKinds.Divergent)
+ context.issue(divergent.withPt(paramTp))
+ context.reportBuffer.clearErrors {
+ case dte: DivergentImplicitTypeError => true
+ }
}
- case None =>
+ case _ =>
NoImplicitFoundError(fun, param)
}
paramFailed = true
@@ -184,6 +186,13 @@ trait Typers extends Adaptations with Tags {
fun
}
+ def viewExists(from: Type, to: Type): Boolean = (
+ !from.isError
+ && !to.isError
+ && context.implicitsEnabled
+ && (inferView(EmptyTree, from, to, reportAmbiguous = false) != EmptyTree)
+ )
+
def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean): Tree =
inferView(tree, from, to, reportAmbiguous, saveErrors = true)
@@ -202,10 +211,10 @@ trait Typers extends Adaptations with Tags {
debuglog("infer view from "+from+" to "+to)//debug
if (isPastTyper) EmptyTree
else from match {
- case MethodType(_, _) => EmptyTree
+ case MethodType(_, _) => EmptyTree
case OverloadedType(_, _) => EmptyTree
- case PolyType(_, _) => EmptyTree
- case _ =>
+ case PolyType(_, _) => EmptyTree
+ case _ =>
def wrapImplicit(from: Type): Tree = {
val result = inferImplicit(tree, functionType(from.withoutAnnotations :: Nil, to), reportAmbiguous, isView = true, context, saveAmbiguousDivergent = saveErrors)
if (result.subst != EmptyTreeTypeSubstituter) {
@@ -245,29 +254,6 @@ trait Typers extends Adaptations with Tags {
case _ => tp
}
- /** Check that `tree` is a stable expression.
- */
- def checkStable(tree: Tree): Tree = (
- if (treeInfo.isExprSafeToInline(tree)) tree
- else if (tree.isErrorTyped) tree
- else UnstableTreeError(tree)
- )
-
- /** Would tree be a stable (i.e. a pure expression) if the type
- * of its symbol was not volatile?
- */
- protected def isStableExceptVolatile(tree: Tree) = {
- tree.hasSymbolField && tree.symbol != NoSymbol && tree.tpe.isVolatile &&
- { val savedTpe = tree.symbol.info
- val savedSTABLE = tree.symbol getFlag STABLE
- tree.symbol setInfo AnyRefClass.tpe
- tree.symbol setFlag STABLE
- val result = treeInfo.isExprSafeToInline(tree)
- tree.symbol setInfo savedTpe
- tree.symbol setFlag savedSTABLE
- result
- }
- }
private def errorNotClass(tpt: Tree, found: Type) = { ClassTypeRequiredError(tpt, found); false }
private def errorNotStable(tpt: Tree, found: Type) = { TypeNotAStablePrefixError(tpt, found); false }
@@ -490,14 +476,6 @@ trait Typers extends Adaptations with Tags {
}
@inline
- final def typerReportAnyContextErrors[T](c: Context)(f: Typer => T): T = {
- val res = f(newTyper(c))
- if (c.hasErrors)
- context.issue(c.errBuffer.head)
- res
- }
-
- @inline
final def withSavedContext[T](c: Context)(f: => T) = {
val savedErrors = c.flushAndReturnBuffer()
val res = f
@@ -515,8 +493,6 @@ trait Typers extends Adaptations with Tags {
typer1
} else this
- final val xtypes = false
-
/** Is symbol defined and not stale?
*/
def reallyExists(sym: Symbol) = {
@@ -537,13 +513,19 @@ trait Typers extends Adaptations with Tags {
/** Does the context of tree `tree` require a stable type?
*/
- private def isStableContext(tree: Tree, mode: Mode, pt: Type) =
- isNarrowable(tree.tpe) && mode.inExprMode && mode.inNone(LHSmode) &&
- (xtypes ||
- (pt.isStable ||
- mode.inAll(QUALmode) && !tree.symbol.isConstant ||
- pt.typeSymbol.isAbstractType && pt.bounds.lo.isStable && !(tree.tpe <:< pt)) ||
- pt.typeSymbol.isRefinementClass && !(tree.tpe <:< pt))
+ private def isStableContext(tree: Tree, mode: Mode, pt: Type) = {
+ def ptSym = pt.typeSymbol
+ def expectsStable = (
+ pt.isStable
+ || mode.inQualMode && !tree.symbol.isConstant
+ || !(tree.tpe <:< pt) && (ptSym.isAbstractType && pt.bounds.lo.isStable || ptSym.isRefinementClass)
+ )
+
+ ( isNarrowable(tree.tpe)
+ && mode.typingExprNotLhs
+ && expectsStable
+ )
+ }
/** Make symbol accessible. This means:
* If symbol refers to package object, insert `.package` as second to last selector.
@@ -590,46 +572,58 @@ trait Typers extends Adaptations with Tags {
}
/** Post-process an identifier or selection node, performing the following:
- * 1. Check that non-function pattern expressions are stable
+ * 1. Check that non-function pattern expressions are stable (ignoring volatility concerns -- SI-6815)
+ * (and narrow the type of modules: a module reference in a pattern has type Foo.type, not "object Foo")
* 2. Check that packages and static modules are not used as values
* 3. Turn tree type into stable type if possible and required by context.
* 4. Give getClass calls a more precise type based on the type of the target of the call.
*/
private def stabilize(tree: Tree, pre: Type, mode: Mode, pt: Type): Tree = {
+ // Side-effect warning: don't move "val sym = tree.symbol" above this
+ // call, because inferExprAlternative side-effects the tree's symbol.
if (tree.symbol.isOverloaded && !mode.inFunMode)
inferExprAlternative(tree, pt)
val sym = tree.symbol
- def fail() = NotAValueError(tree, sym)
+ val isStableIdPattern = mode.typingPatternNotConstructor && tree.isTerm
- if (tree.isErrorTyped) tree
- else if (mode.inPatternNotFunMode && tree.isTerm) { // (1)
- if (sym.isValue) {
- val tree1 = checkStable(tree)
- // A module reference in a pattern has type Foo.type, not "object Foo"
- if (sym.isModule && !sym.isMethod) tree1 setType singleType(pre, sym)
- else tree1
- }
- else fail()
- } else if ((mode & (EXPRmode | QUALmode)) == EXPRmode && !sym.isValue && !phase.erasedTypes) { // (2)
- fail()
- } else {
- if (sym.isStable && pre.isStable && !isByNameParamType(tree.tpe) &&
- (isStableContext(tree, mode, pt) || sym.isModule && !sym.isMethod))
- tree.setType(singleType(pre, sym))
- // To fully benefit from special casing the return type of
- // getClass, we have to catch it immediately so expressions
- // like x.getClass().newInstance() are typed with the type of x.
- else if ( tree.symbol.name == nme.getClass_
- && tree.tpe.params.isEmpty
- // TODO: If the type of the qualifier is inaccessible, we can cause private types
- // to escape scope here, e.g. pos/t1107. I'm not sure how to properly handle this
- // so for now it requires the type symbol be public.
- && pre.typeSymbol.isPublic)
- tree setType MethodType(Nil, getClassReturnType(pre))
- else
- tree
- }
+ def isModuleTypedExpr = (
+ treeInfo.admitsTypeSelection(tree)
+ && (isStableContext(tree, mode, pt) || sym.isModuleNotMethod)
+ )
+ def isStableValueRequired = (
+ isStableIdPattern
+ || mode.in(all = EXPRmode, none = QUALmode) && !phase.erasedTypes
+ )
+ // To fully benefit from special casing the return type of
+ // getClass, we have to catch it immediately so expressions like
+ // x.getClass().newInstance() are typed with the type of x. TODO: If the
+ // type of the qualifier is inaccessible, we can cause private types to
+ // escape scope here, e.g. pos/t1107. I'm not sure how to properly handle
+ // this so for now it requires the type symbol be public.
+ def isGetClassCall = isGetClass(sym) && pre.typeSymbol.isPublic
+
+ def narrowIf(tree: Tree, condition: Boolean) =
+ if (condition) tree setType singleType(pre, sym) else tree
+
+ def checkStable(tree: Tree): Tree =
+ if (treeInfo.isStableIdentifierPattern(tree)) tree
+ else UnstableTreeError(tree)
+
+ if (tree.isErrorTyped)
+ tree
+ else if (!sym.isValue && isStableValueRequired) // (2)
+ NotAValueError(tree, sym)
+ else if (isStableIdPattern) // (1)
+ // A module reference in a pattern has type Foo.type, not "object Foo"
+ narrowIf(checkStable(tree), sym.isModuleNotMethod)
+ else if (isModuleTypedExpr) // (3)
+ narrowIf(tree, true)
+ else if (isGetClassCall) // (4)
+ tree setType MethodType(Nil, getClassReturnType(pre))
+ else
+ tree
}
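
Illustrative note: two hypothetical snippets showing branches (1) and (4) of `stabilize` above. A module reference used as a pattern is narrowed to its singleton type, and `getClass` calls get a sharper return type based on the receiver.

    object StabilizeDemo {
      def isEmpty(xs: List[Int]): Boolean = xs match {
        case Nil => true          // (1) Nil is narrowed to the singleton type Nil.type here
        case _   => false
      }
      val c: Class[_ <: String] = "abc".getClass   // (4) sharper than Class[_]
    }
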
private def isNarrowable(tpe: Type): Boolean = unwrapWrapperTypes(tpe) match {
@@ -641,12 +635,17 @@ trait Typers extends Adaptations with Tags {
val sym = tree.symbol
val pre = tree match {
case Select(qual, _) => qual.tpe
- case _ => NoPrefix
+ case _ => NoPrefix
+ }
+ def stabilizable = (
+ pre.isStable
+ && sym.tpe.params.isEmpty
+ && (isStableContext(tree, mode, pt) || sym.isModule)
+ )
+ tree.tpe match {
+ case MethodType(_, _) if stabilizable => tree setType MethodType(Nil, singleType(pre, sym)) // TODO: should this be a NullaryMethodType?
+ case _ => tree
}
- if (tree.tpe.isInstanceOf[MethodType] && pre.isStable && sym.tpe.params.isEmpty &&
- (isStableContext(tree, mode, pt) || sym.isModule))
- tree.setType(MethodType(List(), singleType(pre, sym))) // TODO: should this be a NullaryMethodType?
- else tree
}
/** The member with given name of given qualifier tree */
@@ -687,17 +686,14 @@ trait Typers extends Adaptations with Tags {
context.undetparams = context1.undetparams
context.savedTypeBounds = context1.savedTypeBounds
context.namedApplyBlockInfo = context1.namedApplyBlockInfo
- if (context1.hasErrors) {
- stopStats()
- SilentTypeError(context1.errBuffer.head)
- } else {
- // If we have a successful result, emit any warnings it created.
- if (context1.hasWarnings) {
- context1.flushAndReturnWarningsBuffer() foreach {
- case (pos, msg) => unit.warning(pos, msg)
- }
- }
- SilentResultValue(result)
+ context1.firstError match {
+ case Some(err) =>
+ stopStats()
+ SilentTypeError(err)
+ case None =>
+ // If we have a successful result, emit any warnings it created.
+ context1.flushAndIssueWarnings()
+ SilentResultValue(result)
}
} else {
assert(context.bufferErrors || isPastTyper, "silent mode is not available past typer")
@@ -740,16 +736,19 @@ trait Typers extends Adaptations with Tags {
if (!OK) {
val Some(AnnotationInfo(_, List(Literal(Constant(featureDesc: String)), Literal(Constant(required: Boolean))), _)) =
featureTrait getAnnotation LanguageFeatureAnnot
- val req = if (required) "needs to" else "should"
- var raw = featureDesc + " " + req + " be enabled\n" +
- "by making the implicit value language." + featureName + " visible."
- if (!(currentRun.reportedFeature contains featureTrait))
- raw += "\nThis can be achieved by adding the import clause 'import scala.language." + featureName + "'\n" +
- "or by setting the compiler option -language:" + featureName + ".\n" +
- "See the Scala docs for value scala.language." + featureName + " for a discussion\n" +
- "why the feature " + req + " be explicitly enabled."
+ val req = if (required) "needs to" else "should"
+ val fqname = "scala.language." + featureName
+ val explain = (
+ if (currentRun.reportedFeature contains featureTrait) "" else
+ s"""|
+ |This can be achieved by adding the import clause 'import $fqname'
+ |or by setting the compiler option -language:$featureName.
+ |See the Scala docs for value $fqname for a discussion
+ |why the feature $req be explicitly enabled.""".stripMargin
+ )
currentRun.reportedFeature += featureTrait
- val msg = raw replace ("#", construct)
+
+ val msg = s"$featureDesc $req be enabled\nby making the implicit value $fqname visible.$explain" replace ("#", construct)
if (required) unit.error(pos, msg)
else currentRun.featureWarnings.warn(pos, msg)
}
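
Illustrative note: the message assembled above points users at the standard remedies. Taking the higherKinds feature as an example, either of the following silences the warning.

    import scala.language.higherKinds          // at the use site
    // or compile with: scalac -language:higherKinds ...
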
@@ -806,9 +805,11 @@ trait Typers extends Adaptations with Tags {
* If all this fails, error
*/
protected def adapt(tree: Tree, mode: Mode, pt: Type, original: Tree = EmptyTree): Tree = {
+ def hasUndets = context.undetparams.nonEmpty
+ def hasUndetsInMonoMode = hasUndets && !mode.inPolyMode
def adaptToImplicitMethod(mt: MethodType): Tree = {
- if (context.undetparams.nonEmpty) { // (9) -- should revisit dropped condition `(mode & POLYmode) == 0`
+ if (hasUndets) { // (9) -- should revisit dropped condition `hasUndetsInMonoMode`
// dropped so that type args of implicit method are inferred even if polymorphic expressions are allowed
// needed for implicits in 2.8 collection library -- maybe once #3346 is fixed, we can reinstate the condition?
context.undetparams = inferExprInstance(tree, context.extractUndetparams(), pt,
@@ -825,23 +826,24 @@ trait Typers extends Adaptations with Tags {
withCondConstrTyper(treeInfo.isSelfOrSuperConstrCall(tree))(typer1 =>
if (original != EmptyTree && pt != WildcardType) (
typer1 silent { tpr =>
- val withImplicitArgs = tpr.applyImplicitArgs(tree)
- if (tpr.context.hasErrors) tree // silent will wrap it in SilentTypeError anyway
- else tpr.typed(withImplicitArgs, mode, pt)
+ val withImplicitArgs = tpr.applyImplicitArgs(tree)
+ if (tpr.context.hasErrors) tree // silent will wrap it in SilentTypeError anyway
+ else tpr.typed(withImplicitArgs, mode, pt)
}
orElse { _ =>
- debuglog("fallback on implicits: " + tree + "/" + resetAllAttrs(original))
- val tree1 = typed(resetAllAttrs(original), mode, WildcardType)
+ val resetTree = resetLocalAttrs(original)
+ debuglog(s"fallback on implicits: ${tree}/$resetTree")
+ val tree1 = typed(resetTree, mode)
// Q: `typed` already calls `pluginsTyped` and `adapt`. The only difference here is that
// we pass `EmptyTree` as the `original`. Intended? Added in 2009 (53d98e7d42) by Martin.
tree1 setType pluginsTyped(tree1.tpe, this, tree1, mode, pt)
if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, EmptyTree)
- }
+ }
)
- else
- typer1.typed(typer1.applyImplicitArgs(tree), mode, pt)
+ else
+ typer1.typed(typer1.applyImplicitArgs(tree), mode, pt)
)
- }
+ }
def instantiateToMethodType(mt: MethodType): Tree = {
val meth = tree match {
@@ -850,59 +852,67 @@ trait Typers extends Adaptations with Tags {
case _ => tree.symbol
}
if (!meth.isConstructor && isFunctionType(pt)) { // (4.2)
- debuglog("eta-expanding " + tree + ":" + tree.tpe + " to " + pt)
+ debuglog(s"eta-expanding $tree: ${tree.tpe} to $pt")
checkParamsConvertible(tree, tree.tpe)
val tree0 = etaExpand(context.unit, tree, this)
- if (context.undetparams.nonEmpty) {
- // #2624: need to infer type arguments for eta expansion of a polymorphic method
- // context.undetparams contains clones of meth.typeParams (fresh ones were generated in etaExpand)
- // need to run typer on tree0, since etaExpansion sets the tpe's of its subtrees to null
- // can't type with the expected type, as we can't recreate the setup in (3) without calling typed
- // (note that (3) does not call typed to do the polymorphic type instantiation --
- // it is called after the tree has been typed with a polymorphic expected result type)
- instantiate(typed(tree0, mode, WildcardType), mode, pt)
- } else
+ // #2624: need to infer type arguments for eta expansion of a polymorphic method
+ // context.undetparams contains clones of meth.typeParams (fresh ones were generated in etaExpand)
+ // need to run typer on tree0, since etaExpansion sets the tpe's of its subtrees to null
+ // can't type with the expected type, as we can't recreate the setup in (3) without calling typed
+ // (note that (3) does not call typed to do the polymorphic type instantiation --
+ // it is called after the tree has been typed with a polymorphic expected result type)
+ if (hasUndets)
+ instantiate(typed(tree0, mode), mode, pt)
+ else
typed(tree0, mode, pt)
- } else if (!meth.isConstructor && mt.params.isEmpty) { // (4.3)
- adapt(typed(Apply(tree, List()) setPos tree.pos), mode, pt, original)
- } else if (context.implicitsEnabled) {
+ }
+ else if (!meth.isConstructor && mt.params.isEmpty) // (4.3)
+ adapt(typed(Apply(tree, Nil) setPos tree.pos), mode, pt, original)
+ else if (context.implicitsEnabled)
MissingArgsForMethodTpeError(tree, meth)
- } else {
+ else
setError(tree)
- }
}
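
Illustrative note: a small, hypothetical sketch of the #2624 situation handled in (4.2) above. Eta-expanding a polymorphic method leaves its type parameter undetermined, so the expansion is typed first and then instantiated against the expected function type.

    object EtaDemo {
      def wrap[A](a: A): List[A] = List(a)
      val f: Int => List[Int] = wrap   // eta-expansion; A is inferred as Int from the expected type
    }
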
def adaptType(): Tree = {
- if (mode.inFunMode) {
- // todo. the commented line below makes sense for typechecking, say, TypeApply(Ident(`some abstract type symbol`), List(...))
- // because otherwise Ident will have its tpe set to a TypeRef, not to a PolyType, and `typedTypeApply` will fail
- // but this needs additional investigation, because it crashes t5228, gadts1 and maybe something else
- // tree setType tree.tpe.normalize
+ // @M When not typing a type constructor (!context.inTypeConstructorAllowed)
+ // or raw type (tree.symbol.isJavaDefined && context.unit.isJava), types must be of kind *,
+ // and thus parameterized types must be applied to their type arguments
+ // @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't?
+ def properTypeRequired = (
+ tree.hasSymbolField
+ && !context.inTypeConstructorAllowed
+ && !(tree.symbol.isJavaDefined && context.unit.isJava)
+ )
+ // @M: don't check tree.tpe.symbol.typeParams. check tree.tpe.typeParams!!!
+ // (e.g., m[Int] --> tree.tpe.symbol.typeParams.length == 1, tree.tpe.typeParams.length == 0!)
+ // @M: removed check for tree.hasSymbolField and replace tree.symbol by tree.tpe.symbol
+ // (TypeTree's must also be checked here, and they don't directly have a symbol)
+ def kindArityMismatch = (
+ context.inTypeConstructorAllowed
+ && !sameLength(tree.tpe.typeParams, pt.typeParams)
+ )
+ // Note that we treat Any and Nothing as kind-polymorphic.
+ // We can't perform this check when typing type arguments to an overloaded method before the overload is resolved
+ // (or in the case of an error type) -- this is indicated by pt == WildcardType (see case TypeApply in typed1).
+ def kindArityMismatchOk = tree.tpe.typeSymbol match {
+ case NothingClass | AnyClass => true
+ case _ => pt == WildcardType
+ }
+
+ // todo. It would make sense when mode.inFunMode to instead use
+ // tree setType tree.tpe.normalize
+ // when typechecking, say, TypeApply(Ident(`some abstract type symbol`), List(...))
+ // because otherwise Ident will have its tpe set to a TypeRef, not to a PolyType, and `typedTypeApply` will fail
+ // but this needs additional investigation, because it crashes t5228, gadts1 and maybe something else
+ if (mode.inFunMode)
tree
- } else if (tree.hasSymbolField && !tree.symbol.typeParams.isEmpty && !mode.inHKMode &&
- !(tree.symbol.isJavaDefined && context.unit.isJava)) { // (7)
- // @M When not typing a higher-kinded type ((mode & HKmode) == 0)
- // or raw type (tree.symbol.isJavaDefined && context.unit.isJava), types must be of kind *,
- // and thus parameterized types must be applied to their type arguments
- // @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't?
+ else if (properTypeRequired && tree.symbol.typeParams.nonEmpty) // (7)
MissingTypeParametersError(tree)
- } else if ( // (7.1) @M: check kind-arity
- // @M: removed check for tree.hasSymbolField and replace tree.symbol by tree.tpe.symbol (TypeTree's must also be checked here, and they don't directly have a symbol)
- mode.inHKMode &&
- // @M: don't check tree.tpe.symbol.typeParams. check tree.tpe.typeParams!!!
- // (e.g., m[Int] --> tree.tpe.symbol.typeParams.length == 1, tree.tpe.typeParams.length == 0!)
- !sameLength(tree.tpe.typeParams, pt.typeParams) &&
- !(tree.tpe.typeSymbol == AnyClass ||
- tree.tpe.typeSymbol == NothingClass ||
- pt == WildcardType)) {
- // Check that the actual kind arity (tree.symbol.typeParams.length) conforms to the expected
- // kind-arity (pt.typeParams.length). Full checks are done in checkKindBounds in Infer.
- // Note that we treat Any and Nothing as kind-polymorphic.
- // We can't perform this check when typing type arguments to an overloaded method before the overload is resolved
- // (or in the case of an error type) -- this is indicated by pt == WildcardType (see case TypeApply in typed1).
+ else if (kindArityMismatch && !kindArityMismatchOk) // (7.1) @M: check kind-arity
KindArityMismatchError(tree, pt)
- } else tree match { // (6)
+ else tree match { // (6)
case TypeTree() => tree
case _ => TypeTree(tree.tpe) setOriginal tree
}
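
Illustrative note: a hypothetical example of the kind-arity check (7.1). A type of the wrong kind cannot be passed where a type constructor is expected, while Any and Nothing are treated as kind-polymorphic.

    import scala.language.higherKinds
    object KindDemo {
      trait Functor[F[_]]
      class ListFunctor extends Functor[List]    // ok: List has kind * -> *
      // class IntFunctor extends Functor[Int]   // rejected: Int takes no type parameters
    }
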
@@ -1024,9 +1034,10 @@ trait Typers extends Adaptations with Tags {
}
def insertApply(): Tree = {
- assert(!mode.inHKMode, mode) //@M
+ assert(!context.inTypeConstructorAllowed, mode) //@M
val adapted = adaptToName(tree, nme.apply)
- def stabilize0(pre: Type): Tree = stabilize(adapted, pre, EXPRmode | QUALmode, WildcardType)
+ def stabilize0(pre: Type): Tree = stabilize(adapted, pre, MonoQualifierModes, WildcardType)
+
// TODO reconcile the overlap between Typers#stabilize and TreeGen.stabilize
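placeholder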
val qual = adapted match {
case This(_) =>
@@ -1047,31 +1058,199 @@ trait Typers extends Adaptations with Tags {
Select(qual setPos tree.pos.makeTransparent, nme.apply)
}
}
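
Illustrative note: what `insertApply` does at the user level, sketched with a hypothetical value. Applying something that is not a method is rewritten into a call to its `apply` member.

    object ApplyDemo {
      val double = (n: Int) => n * 2
      val six = double(3)   // typed as double.apply(3) after apply insertion
    }
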
+ def adaptConstant(value: Constant): Tree = {
+ val sym = tree.symbol
+ if (sym != null && sym.isDeprecated) {
+ val msg = sym.toString + sym.locationString + " is deprecated: " + sym.deprecationMessage.getOrElse("")
+ unit.deprecationWarning(tree.pos, msg)
+ }
+ treeCopy.Literal(tree, value)
+ }
+
+ // Ignore type errors raised in later phases that are due to mismatching types with existential skolems
+ // We have lift crashing in 2.9 with an adapt failure in the pattern matcher.
+ // Here's my hypothesis for why this happens. The pattern matcher defines a variable of type
+ //
+ // val x: T = expr
+ //
+ // where T is the type of expr, but T contains existential skolems ts.
+ // In that case, this value definition does not typecheck.
+ // The value definition
+ //
+ // val x: T forSome { ts } = expr
+ //
+ // would typecheck. Or one can simply leave out the type of the `val`:
+ //
+ // val x = expr
+ //
+ // SI-6029 shows another case where we also fail (in uncurry), but this time the expected
+ // type is an existential type.
+ //
+ // The reason for both failures has to do with the way we (don't) transform
+ // skolem types along with the trees that contain them. We'd need a
+ // radically different approach to do it. But before investing a lot of time
+ // to do this (I have already sunk 3 full days into ultimately futile attempts
+ // to consistently transform skolems and fix 6029), I'd like to
+ // investigate ways to avoid skolems completely.
+ //
+ // upd. The same problem happens when we try to typecheck the result of macro expansion against its expected type
+ // (which is the return type of the macro definition instantiated in the context of expandee):
+ //
+ // Test.scala:2: error: type mismatch;
+ // found : $u.Expr[Class[_ <: Object]]
+ // required: reflect.runtime.universe.Expr[Class[?0(in value <local Test>)]] where type ?0(in value <local Test>) <: Object
+ // scala.reflect.runtime.universe.reify(new Object().getClass)
+ // ^
+ // Therefore following Martin's advice I use this logic to recover from skolem errors after macro expansions
+ // (by adding the ` || tree.attachments.get[MacroExpansionAttachment].isDefined` clause to the conditional above).
+ //
+ def adaptMismatchedSkolems() = {
+ def canIgnoreMismatch = (
+ !context.reportErrors && isPastTyper
+ || tree.attachments.get[MacroExpansionAttachment].isDefined
+ )
+ def bound = pt match {
+ case ExistentialType(qs, _) => qs
+ case _ => Nil
+ }
+ def msg = sm"""
+ |Recovering from existential or skolem type error in
+ | $tree
+ |with type: ${tree.tpe}
+ | pt: $pt
+ | context: ${context.tree}
+ | adapted
+ """.trim
+
+ val boundOrSkolems = if (canIgnoreMismatch) bound ++ pt.skolemsExceptMethodTypeParams else Nil
+ boundOrSkolems match {
+ case Nil => AdaptTypeError(tree, tree.tpe, pt) ; setError(tree)
+ case _ => logResult(msg)(adapt(tree, mode, deriveTypeWithWildcards(boundOrSkolems)(pt)))
+ }
+ }
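
Illustrative note: a compiling sketch (hypothetical names) of the workaround described in the comment above. Ascribing the existential, or leaving the type off entirely, sidesteps the skolem mismatch.

    import scala.language.existentials
    object SkolemDemo {
      def anyList: List[_] = List(1)
      val inferred = anyList                               // ok: type is inferred
      val explicit: List[t] forSome { type t } = anyList   // ok: explicit existential
    }
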
+
+ def fallbackAfterVanillaAdapt(): Tree = {
+ def isPopulatedPattern = {
+ if ((tree.symbol ne null) && tree.symbol.isModule)
+ inferModulePattern(tree, pt)
+
+ isPopulated(tree.tpe, approximateAbstracts(pt))
+ }
+ if (mode.inPatternMode && isPopulatedPattern)
+ return tree
+
+ val tree1 = constfold(tree, pt) // (10) (11)
+ if (tree1.tpe <:< pt)
+ return adapt(tree1, mode, pt, original)
+
+ if (mode.typingExprNotFun) {
+ // The <: Any requirement inhibits attempts to adapt continuation types
+ // to non-continuation types.
+ if (tree.tpe <:< AnyTpe) pt.dealias match {
+ case TypeRef(_, UnitClass, _) => // (12)
+ if (settings.warnValueDiscard)
+ context.unit.warning(tree.pos, "discarded non-Unit value")
+ return typedPos(tree.pos, mode, pt)(Block(List(tree), Literal(Constant(()))))
+ case TypeRef(_, sym, _) if isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt) =>
+ if (settings.warnNumericWiden)
+ context.unit.warning(tree.pos, "implicit numeric widening")
+ return typedPos(tree.pos, mode, pt)(Select(tree, "to" + sym.name))
+ case _ =>
+ }
+ if (pt.dealias.annotations.nonEmpty && canAdaptAnnotations(tree, this, mode, pt)) // (13)
+ return typed(adaptAnnotations(tree, this, mode, pt), mode, pt)
+
+ if (hasUndets)
+ return instantiate(tree, mode, pt)
+
+ if (context.implicitsEnabled && !pt.isError && !tree.isErrorTyped) {
+ // (14); the condition prevents chains of views
+ debuglog("inferring view from " + tree.tpe + " to " + pt)
+ inferView(tree, tree.tpe, pt, reportAmbiguous = true) match {
+ case EmptyTree =>
+ case coercion =>
+ def msg = "inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe
+ if (settings.logImplicitConv)
+ unit.echo(tree.pos, msg)
+
+ debuglog(msg)
+ val silentContext = context.makeImplicit(context.ambiguousErrors)
+ val res = newTyper(silentContext).typed(
+ new ApplyImplicitView(coercion, List(tree)) setPos tree.pos, mode, pt)
+ silentContext.firstError match {
+ case Some(err) => context.issue(err)
+ case None => return res
+ }
+ }
+ }
+ }
+
+ debuglog("error tree = " + tree)
+ if (settings.debug && settings.explaintypes)
+ explainTypes(tree.tpe, pt)
+
+ if (tree.tpe.isErroneous || pt.isErroneous)
+ setError(tree)
+ else
+ adaptMismatchedSkolems()
+ }
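
Illustrative note: hypothetical user code hitting branch (12) and the numeric-widening branch of `fallbackAfterVanillaAdapt`. The settings consulted above correspond to the -Ywarn-value-discard and -Ywarn-numeric-widen compiler options.

    object AdaptDemo {
      def compute(): Int = 42
      val u: Unit   = compute()   // (12) non-Unit value discarded
      val d: Double = compute()   // implicit numeric widening Int => Double
    }
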
+
+ def vanillaAdapt(tree: Tree) = {
+ def applyPossible = {
+ def applyMeth = member(adaptToName(tree, nme.apply), nme.apply)
+ def hasPolymorphicApply = applyMeth.alternatives exists (_.tpe.typeParams.nonEmpty)
+ def hasMonomorphicApply = applyMeth.alternatives exists (_.tpe.paramSectionCount > 0)
+
+ dyna.acceptsApplyDynamic(tree.tpe) || (
+ if (mode.inTappMode)
+ tree.tpe.typeParams.isEmpty && hasPolymorphicApply
+ else
+ hasMonomorphicApply
+ )
+ }
+ def shouldInsertApply(tree: Tree) = mode.typingExprFun && {
+ tree.tpe match {
+ case _: MethodType | _: OverloadedType | _: PolyType => false
+ case _ => applyPossible
+ }
+ }
+ if (tree.isType)
+ adaptType()
+ else if (mode.typingExprNotFun && treeInfo.isMacroApplication(tree))
+ macroExpandApply(this, tree, mode, pt)
+ else if (mode.typingConstructorPattern)
+ adaptConstrPattern()
+ else if (shouldInsertApply(tree))
+ insertApply()
+ else if (hasUndetsInMonoMode) { // (9)
+ assert(!context.inTypeConstructorAllowed, context) //@M
+ instantiatePossiblyExpectingUnit(tree, mode, pt)
+ }
+ else if (tree.tpe <:< pt)
+ tree
+ else
+ fallbackAfterVanillaAdapt()
+ }
// begin adapt
- tree.tpe match {
+ if (isMacroImplRef(tree)) {
+ if (treeInfo.isMacroApplication(tree)) adapt(unmarkMacroImplRef(tree), mode, pt, original)
+ else tree
+ } else tree.tpe match {
case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (-1)
adaptAnnotations(tree, this, mode, pt)
case ct @ ConstantType(value) if mode.inNone(TYPEmode | FUNmode) && (ct <:< pt) && canAdaptConstantTypeToLiteral => // (0)
- val sym = tree.symbol
- if (sym != null && sym.isDeprecated) {
- val msg = sym.toString + sym.locationString + " is deprecated: " + sym.deprecationMessage.getOrElse("")
- unit.deprecationWarning(tree.pos, msg)
- }
- treeCopy.Literal(tree, value)
+ adaptConstant(value)
case OverloadedType(pre, alts) if !mode.inFunMode => // (1)
inferExprAlternative(tree, pt)
adapt(tree, mode, pt, original)
case NullaryMethodType(restpe) => // (2)
adapt(tree setType restpe, mode, pt, original)
- case TypeRef(_, ByNameParamClass, List(arg)) if mode.inExprMode => // (2)
+ case TypeRef(_, ByNameParamClass, arg :: Nil) if mode.inExprMode => // (2)
adapt(tree setType arg, mode, pt, original)
- case tr @ TypeRef(_, sym, _) if sym.isAliasType && tr.dealias.isInstanceOf[ExistentialType] &&
- ((mode & (EXPRmode | LHSmode)) == EXPRmode) =>
- adapt(tree setType tr.dealias.skolemizeExistential(context.owner, tree), mode, pt, original)
- case et @ ExistentialType(_, _) if ((mode & (EXPRmode | LHSmode)) == EXPRmode) =>
- adapt(tree setType et.skolemizeExistential(context.owner, tree), mode, pt, original)
- case PolyType(tparams, restpe) if mode.inNone(TAPPmode | PATTERNmode | HKmode) => // (3)
+ case tp if mode.typingExprNotLhs && isExistentialType(tp) =>
+ adapt(tree setType tp.dealias.skolemizeExistential(context.owner, tree), mode, pt, original)
+ case PolyType(tparams, restpe) if mode.inNone(TAPPmode | PATTERNmode) && !context.inTypeConstructorAllowed => // (3)
// assert((mode & HKmode) == 0) //@M a PolyType in HKmode represents an anonymous type function,
// we're in HKmode since a higher-kinded type is expected --> hence, don't implicitly apply it to type params!
// ticket #2197 triggered turning the assert into a guard
@@ -1080,171 +1259,21 @@ trait Typers extends Adaptations with Tags {
// -- are we sure we want to expand aliases this early?
// -- what caused this change in behaviour??
val tparams1 = cloneSymbols(tparams)
- val tree1 = if (tree.isType) tree
- else TypeApply(tree, tparams1 map (tparam =>
- TypeTree(tparam.tpeHK) setPos tree.pos.focus)) setPos tree.pos
+ val tree1 = (
+ if (tree.isType) tree
+ else TypeApply(tree, tparams1 map (tparam => TypeTree(tparam.tpeHK) setPos tree.pos.focus)) setPos tree.pos
+ )
context.undetparams ++= tparams1
notifyUndetparamsAdded(tparams1)
adapt(tree1 setType restpe.substSym(tparams, tparams1), mode, pt, original)
- case mt: MethodType if mt.isImplicit && ((mode & (EXPRmode | FUNmode | LHSmode)) == EXPRmode) => // (4.1)
- adaptToImplicitMethod(mt)
- case mt: MethodType if (((mode & (EXPRmode | FUNmode | LHSmode)) == EXPRmode) &&
- (context.undetparams.isEmpty || mode.inPolyMode)) && !treeInfo.isMacroApplicationOrBlock(tree) =>
+ case mt: MethodType if mode.typingExprNotFunNotLhs && mt.isImplicit => // (4.1)
+ adaptToImplicitMethod(mt)
+ case mt: MethodType if mode.typingExprNotFunNotLhs && !hasUndetsInMonoMode && !treeInfo.isMacroApplicationOrBlock(tree) =>
instantiateToMethodType(mt)
-
case _ =>
- def vanillaAdapt(tree: Tree) = {
- def shouldInsertApply(tree: Tree) = mode.inAll(EXPRmode | FUNmode) && (tree.tpe match {
- case _: MethodType | _: OverloadedType | _: PolyType => false
- case _ => applyPossible
- })
- def applyPossible = {
- def applyMeth = member(adaptToName(tree, nme.apply), nme.apply)
- dyna.acceptsApplyDynamic(tree.tpe) || (
- if (mode.inAll(TAPPmode))
- tree.tpe.typeParams.isEmpty && applyMeth.filter(!_.tpe.typeParams.isEmpty) != NoSymbol
- else
- applyMeth.filter(_.tpe.paramSectionCount > 0) != NoSymbol
- )
- }
- if (tree.isType)
- adaptType()
- else if (mode.inAll(PATTERNmode | FUNmode))
- adaptConstrPattern()
- else if (shouldInsertApply(tree))
- insertApply()
- else if (!context.undetparams.isEmpty && !mode.inPolyMode) { // (9)
- assert(!mode.inHKMode, mode) //@M
- if (mode.inExprModeButNot(FUNmode) && pt.typeSymbol == UnitClass)
- instantiateExpectingUnit(tree, mode)
- else
- instantiate(tree, mode, pt)
- } else if (tree.tpe <:< pt) {
- tree
- } else {
- def fallBack: Tree = {
- if (mode.inPatternMode) {
- if ((tree.symbol ne null) && tree.symbol.isModule)
- inferModulePattern(tree, pt)
- if (isPopulated(tree.tpe, approximateAbstracts(pt)))
- return tree
- }
- val tree1 = constfold(tree, pt) // (10) (11)
- if (tree1.tpe <:< pt) adapt(tree1, mode, pt, original)
- else {
- if (mode.inExprModeButNot(FUNmode)) {
- pt.dealias match {
- // The <: Any requirement inhibits attempts to adapt continuation types
- // to non-continuation types.
- case TypeRef(_, sym, _) if tree.tpe <:< AnyClass.tpe =>
- // note: was if (pt.typeSymbol == UnitClass) but this leads to a potentially
- // infinite expansion if pt is constant type ()
- if (sym == UnitClass) { // (12)
- if (settings.warnValueDiscard)
- context.unit.warning(tree.pos, "discarded non-Unit value")
- return typedPos(tree.pos, mode, pt) {
- Block(List(tree), Literal(Constant(())))
- }
- }
- else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt)) {
- if (settings.warnNumericWiden)
- context.unit.warning(tree.pos, "implicit numeric widening")
- return typedPos(tree.pos, mode, pt) {
- Select(tree, "to" + sym.name)
- }
- }
- case AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (13)
- return typed(adaptAnnotations(tree, this, mode, pt), mode, pt)
- case _ =>
- }
- if (!context.undetparams.isEmpty) {
- return instantiate(tree, mode, pt)
- }
- if (context.implicitsEnabled && !pt.isError && !tree.isErrorTyped) {
- // (14); the condition prevents chains of views
- debuglog("inferring view from " + tree.tpe + " to " + pt)
- val coercion = inferView(tree, tree.tpe, pt, reportAmbiguous = true)
- if (coercion != EmptyTree) {
- def msg = "inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe
- if (settings.logImplicitConv)
- unit.echo(tree.pos, msg)
-
- debuglog(msg)
- val silentContext = context.makeImplicit(context.ambiguousErrors)
- val res = newTyper(silentContext).typed(
- new ApplyImplicitView(coercion, List(tree)) setPos tree.pos, mode, pt)
- if (silentContext.hasErrors) context.issue(silentContext.errBuffer.head) else return res
- }
- }
- }
- if (settings.debug) {
- log("error tree = " + tree)
- if (settings.explaintypes) explainTypes(tree.tpe, pt)
- }
-
- val found = tree.tpe
- if (!found.isErroneous && !pt.isErroneous) {
- if ((!context.reportErrors && isPastTyper) || tree.attachments.get[MacroExpansionAttachment].isDefined) {
- val bound = pt match {
- case ExistentialType(qs, _) => qs
- case _ => Nil
- }
- val boundOrSkolems = bound ++ pt.skolemsExceptMethodTypeParams
- if (boundOrSkolems.nonEmpty) {
- // Ignore type errors raised in later phases that are due to mismatching types with existential skolems
- // We have lift crashing in 2.9 with an adapt failure in the pattern matcher.
- // Here's my hypothsis why this happens. The pattern matcher defines a variable of type
- //
- // val x: T = expr
- //
- // where T is the type of expr, but T contains existential skolems ts.
- // In that case, this value definition does not typecheck.
- // The value definition
- //
- // val x: T forSome { ts } = expr
- //
- // would typecheck. Or one can simply leave out the type of the `val`:
- //
- // val x = expr
- //
- // SI-6029 shows another case where we also fail (in uncurry), but this time the expected
- // type is an existential type.
- //
- // The reason for both failures have to do with the way we (don't) transform
- // skolem types along with the trees that contain them. We'd need a
- // radically different approach to do it. But before investing a lot of time to
- // to do this (I have already sunk 3 full days with in the end futile attempts
- // to consistently transform skolems and fix 6029), I'd like to
- // investigate ways to avoid skolems completely.
- //
- // upd. The same problem happens when we try to typecheck the result of macro expansion against its expected type
- // (which is the return type of the macro definition instantiated in the context of expandee):
- //
- // Test.scala:2: error: type mismatch;
- // found : $u.Expr[Class[_ <: Object]]
- // required: reflect.runtime.universe.Expr[Class[?0(in value <local Test>)]] where type ?0(in value <local Test>) <: Object
- // scala.reflect.runtime.universe.reify(new Object().getClass)
- // ^
- // Therefore following Martin's advice I use this logic to recover from skolem errors after macro expansions
- // (by adding the ` || tree.attachments.get[MacroExpansionAttachment].isDefined` clause to the conditional above).
- //
- log("recovering from existential or skolem type error in tree \n" + tree + "\nwith type " + tree.tpe + "\n expected type = " + pt + "\n context = " + context.tree)
- return adapt(tree, mode, deriveTypeWithWildcards(boundOrSkolems)(pt))
- }
- }
- // create an actual error
- AdaptTypeError(tree, found, pt)
- }
- setError(tree)
- }
- }
- fallBack
- }
+ vanillaAdapt(tree)
}
- val tree1 = if (mode.inExprModeButNot(FUNmode) && treeInfo.isMacroApplication(tree)) macroExpandApply(this, tree, mode, pt) else tree
- if (tree == tree1) vanillaAdapt(tree1) else tree1
- }
}
def instantiate(tree: Tree, mode: Mode, pt: Type): Tree = {
@@ -1257,13 +1286,20 @@ trait Typers extends Adaptations with Tags {
*/
def instantiateExpectingUnit(tree: Tree, mode: Mode): Tree = {
val savedUndetparams = context.undetparams
- silent(_.instantiate(tree, mode, UnitClass.tpe)) orElse { _ =>
- context.undetparams = savedUndetparams
- val valueDiscard = atPos(tree.pos)(Block(List(instantiate(tree, mode, WildcardType)), Literal(Constant(()))))
- typed(valueDiscard, mode, UnitClass.tpe)
+ silent(_.instantiate(tree, mode, UnitTpe)) orElse { _ =>
+ context.undetparams = savedUndetparams
+ val valueDiscard = atPos(tree.pos)(Block(List(instantiate(tree, mode, WildcardType)), Literal(Constant(()))))
+ typed(valueDiscard, mode, UnitTpe)
}
}
+ def instantiatePossiblyExpectingUnit(tree: Tree, mode: Mode, pt: Type): Tree = {
+ if (mode.typingExprNotFun && pt.typeSymbol == UnitClass)
+ instantiateExpectingUnit(tree, mode)
+ else
+ instantiate(tree, mode, pt)
+ }
+
private def isAdaptableWithView(qual: Tree) = {
val qtpe = qual.tpe.widen
( !isPastTyper
@@ -1331,15 +1367,15 @@ trait Typers extends Adaptations with Tags {
*/
def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Mode, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = {
def onError(reportError: => Tree): Tree = context.tree match {
- case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty =>
+ case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty =>
( silent (_.typedArgs(args.map(_.duplicate), mode))
filter (xs => !(xs exists (_.isErrorTyped)))
map (xs => adaptToArguments(qual, name, xs, WildcardType, reportAmbiguous, saveErrors))
orElse ( _ => reportError)
)
- case _ =>
- reportError
- }
+ case _ =>
+ reportError
+ }
silent(_.adaptToMember(qual, HasMember(name), reportAmbiguous = false)) orElse (err =>
onError {
@@ -1347,7 +1383,7 @@ trait Typers extends Adaptations with Tags {
setError(tree)
}
)
- }
+ }
/** Try to apply an implicit conversion to `qual` to that it contains a
* member `name` of arbitrary type.
@@ -1517,7 +1553,7 @@ trait Typers extends Adaptations with Tags {
*/
private def typedParentType(encodedtpt: Tree, templ: Template, inMixinPosition: Boolean): Tree = {
val app = treeInfo.dissectApplied(encodedtpt)
- val (treeInfo.Applied(core, targs, argss), decodedtpt) = ((app, app.callee))
+ val (treeInfo.Applied(core, _, argss), decodedtpt) = ((app, app.callee))
val argssAreTrivial = argss == Nil || argss == ListOfNil
// we cannot avoid cyclic references with `initialize` here, because when type macros arrive,
@@ -1537,9 +1573,9 @@ trait Typers extends Adaptations with Tags {
}
typedType(decodedtpt)
} else {
- var supertpt = typedTypeConstructor(decodedtpt)
+ val supertpt = typedTypeConstructor(decodedtpt)
val supertparams = if (supertpt.hasSymbolField) supertpt.symbol.typeParams else Nil
- if (supertparams.nonEmpty) {
+ def inferParentTypeArgs: Tree = {
typedPrimaryConstrBody(templ) {
val supertpe = PolyType(supertparams, appliedType(supertpt.tpe, supertparams map (_.tpeHK)))
val supercall = New(supertpe, mmap(argss)(_.duplicate))
@@ -1547,14 +1583,17 @@ trait Typers extends Adaptations with Tags {
ctor setType supertpe // this is an essential hack, otherwise it will occasionally fail to typecheck
atPos(supertpt.pos.focus)(supercall)
} match {
- case EmptyTree => MissingTypeArgumentsParentTpeError(supertpt)
- case tpt => supertpt = TypeTree(tpt.tpe) setPos supertpt.pos // SI-7224: don't .focus positions of the TypeTree of a parent that exists in source
+ case EmptyTree => MissingTypeArgumentsParentTpeError(supertpt); supertpt
+ case tpt => TypeTree(tpt.tpe) setPos supertpt.pos // SI-7224: don't .focus positions of the TypeTree of a parent that exists in source
}
}
+
+ val supertptWithTargs = if (supertparams.isEmpty || context.unit.isJava) supertpt else inferParentTypeArgs
+
// this is the place where we tell the typer what argss should be used for the super call
// if argss are nullary or empty, then (see the docs for `typedPrimaryConstrBody`)
// the super call dummy is already good enough, so we don't need to do anything
- if (argssAreTrivial) supertpt else supertpt updateAttachment SuperArgsAttachment(argss)
+ if (argssAreTrivial) supertptWithTargs else supertptWithTargs updateAttachment SuperArgsAttachment(argss)
}
}
@@ -1632,22 +1671,27 @@ trait Typers extends Adaptations with Tags {
/** Makes sure that the first type tree in the list of parent types is always a class.
* If the first parent is a trait, prepend its supertype to the list until it's a class.
-*/
- private def normalizeFirstParent(parents: List[Tree]): List[Tree] = parents match {
- case first :: rest if treeInfo.isTraitRef(first) =>
- def explode(supertpt: Tree, acc: List[Tree]): List[Tree] = {
- if (treeInfo.isTraitRef(supertpt)) {
- val supertpt1 = typedType(supertpt)
- if (!supertpt1.isErrorTyped) {
- val supersupertpt = TypeTree(supertpt1.tpe.firstParent) setPos supertpt.pos.focus
- return explode(supersupertpt, supertpt1 :: acc)
- }
- }
- if (supertpt.tpe.typeSymbol == AnyClass) supertpt setType AnyRefClass.tpe
- supertpt :: acc
- }
- explode(first, Nil) ++ rest
- case _ => parents
+ */
+ private def normalizeFirstParent(parents: List[Tree]): List[Tree] = {
+ @annotation.tailrec
+ def explode0(parents: List[Tree]): List[Tree] = {
+ val supertpt :: rest = parents // parents is always non-empty here - it only grows
+ if (supertpt.tpe.typeSymbol == AnyClass) {
+ supertpt setType AnyRefTpe
+ parents
+ } else if (treeInfo isTraitRef supertpt) {
+ val supertpt1 = typedType(supertpt)
+ def supersuper = TypeTree(supertpt1.tpe.firstParent) setPos supertpt.pos.focus
+ if (supertpt1.isErrorTyped) rest
+ else explode0(supersuper :: supertpt1 :: rest)
+ } else parents
+ }
+
+ def explode(parents: List[Tree]) =
+ if (treeInfo isTraitRef parents.head) explode0(parents)
+ else parents
+
+ if (parents.isEmpty) Nil else explode(parents)
}
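
Illustrative note: a hypothetical example of the normalization above. When the first parent of a template is a trait, the typer walks up through that trait's own first parents until the list is headed by a class.

    object ParentDemo {
      class Service
      trait Auditing extends Service
      val a = new Auditing {}   // template parents normalized to: Service, Auditing
    }
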
/** Certain parents are added in the parser before it is known whether
@@ -1669,7 +1713,7 @@ trait Typers extends Adaptations with Tags {
}
def typedParentTypes(templ: Template): List[Tree] = templ.parents match {
- case Nil => List(atPos(templ.pos)(TypeTree(AnyRefClass.tpe)))
+ case Nil => List(atPos(templ.pos)(TypeTree(AnyRefTpe)))
case first :: rest =>
try {
val supertpts = fixDuplicateSyntheticParents(normalizeFirstParent(
@@ -1695,7 +1739,7 @@ trait Typers extends Adaptations with Tags {
log("Type error calculating parents in template " + templ)
log("Error: " + ex)
ParentTypesError(templ, ex)
- List(TypeTree(AnyRefClass.tpe))
+ List(TypeTree(AnyRefTpe))
}
}
@@ -1737,14 +1781,16 @@ trait Typers extends Adaptations with Tags {
if (psym.isFinal)
pending += ParentFinalInheritanceError(parent, psym)
- if (psym.hasDeprecatedInheritanceAnnotation) {
+ val sameSourceFile = context.unit.source.file == psym.sourceFile
+
+ if (psym.hasDeprecatedInheritanceAnnotation && !sameSourceFile) {
val suffix = psym.deprecatedInheritanceMessage map (": " + _) getOrElse ""
val msg = s"inheritance from ${psym.fullLocationString} is deprecated$suffix"
unit.deprecationWarning(parent.pos, msg)
}
if (psym.isSealed && !phase.erasedTypes)
- if (context.unit.source.file == psym.sourceFile)
+ if (sameSourceFile)
psym addChild context.owner
else
pending += ParentSealedInheritanceError(parent, psym)
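
Illustrative note: the `sameSourceFile` check above in user terms (hypothetical types). Children of a sealed class or trait must be defined in the same source file, otherwise a ParentSealedInheritanceError is reported.

    sealed trait Shape
    final case class Circle(r: Double) extends Shape   // ok: same file as Shape
    // In a different file, `class Square extends Shape` would be rejected.
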
@@ -1752,7 +1798,6 @@ trait Typers extends Adaptations with Tags {
if (!(selfType <:< parent.tpe.typeOfThis) &&
!phase.erasedTypes &&
!context.owner.isSynthetic && // don't check synthetic concrete classes for virtuals (part of DEVIRTUALIZE)
- !settings.noSelfCheck && // setting to suppress this very check
!selfType.isErroneous &&
!parent.tpe.isErroneous)
{
@@ -1780,7 +1825,7 @@ trait Typers extends Adaptations with Tags {
for (tparam <- clazz.typeParams) {
if (classinfo.expansiveRefs(tparam) contains tparam) {
val newinfo = ClassInfoType(
- classinfo.parents map (_.instantiateTypeParams(List(tparam), List(AnyRefClass.tpe))),
+ classinfo.parents map (_.instantiateTypeParams(List(tparam), List(AnyRefTpe))),
classinfo.decls,
clazz)
clazz.setInfo {
@@ -1800,9 +1845,7 @@ trait Typers extends Adaptations with Tags {
assert(clazz != NoSymbol, cdef)
reenterTypeParams(cdef.tparams)
val tparams1 = cdef.tparams mapConserve (typedTypeDef)
- val impl1 = typerReportAnyContextErrors(context.make(cdef.impl, clazz, newScope)) {
- _.typedTemplate(cdef.impl, typedParentTypes(cdef.impl))
- }
+ val impl1 = newTyper(context.make(cdef.impl, clazz, newScope)).typedTemplate(cdef.impl, typedParentTypes(cdef.impl))
val impl2 = finishMethodSynthesis(impl1, clazz, context)
if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass)
checkEphemeral(clazz, impl2.body)
@@ -1843,19 +1886,21 @@ trait Typers extends Adaptations with Tags {
|| !linkedClass.isSerializable
|| clazz.isSerializable
)
- val impl1 = typerReportAnyContextErrors(context.make(mdef.impl, clazz, newScope)) {
- _.typedTemplate(mdef.impl, {
- typedParentTypes(mdef.impl) ++ (
- if (noSerializable) Nil
- else {
- clazz.makeSerializable()
- List(TypeTree(SerializableClass.tpe) setPos clazz.pos.focus)
- }
- )
- })
- }
+ val impl1 = newTyper(context.make(mdef.impl, clazz, newScope)).typedTemplate(mdef.impl, {
+ typedParentTypes(mdef.impl) ++ (
+ if (noSerializable) Nil
+ else {
+ clazz.makeSerializable()
+ List(TypeTree(SerializableTpe) setPos clazz.pos.focus)
+ }
+ )
+ })
+
val impl2 = finishMethodSynthesis(impl1, clazz, context)
+ if (mdef.symbol == PredefModule)
+ ensurePredefParentsAreInSameSourceFile(impl2)
+
// SI-5954. On second compile of a companion class contained in a package object we end up
// with some confusion of names which leads to having two symbols with the same name in the
// same owner. Until that can be straightened out we will warn on companion objects in package
@@ -1884,6 +1929,12 @@ trait Typers extends Adaptations with Tags {
treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType
}
+
+ private def ensurePredefParentsAreInSameSourceFile(template: Template) = {
+ val parentSyms = template.parents map (_.symbol) filterNot (_ == AnyRefClass)
+ if (parentSyms exists (_.associatedFile != PredefModule.associatedFile))
+ unit.error(template.pos, s"All parents of Predef must be defined in ${PredefModule.associatedFile}.")
+ }
/** In order to override this in the TreeCheckers Typer so synthetics aren't re-added
* all the time, it is exposed here; the module/class typing methods go through it.
* ...but it turns out it's also the ideal spot for namer/typer coordination for
@@ -2097,7 +2148,7 @@ trait Typers extends Adaptations with Tags {
orElse (superAcc getter superAcc.owner)
filter (alias => superClazz.info.nonPrivateMember(alias.name) == alias)
)
- if (alias.exists && !alias.accessed.isVariable) {
+ if (alias.exists && !alias.accessed.isVariable && !isRepeatedParamType(alias.accessed.info)) {
val ownAcc = clazz.info decl name suchThat (_.isParamAccessor) match {
case acc if !acc.isDeferred && acc.hasAccessorFlag => acc.accessed
case acc => acc
@@ -2331,7 +2382,7 @@ trait Typers extends Adaptations with Tags {
case ldef @ LabelDef(_, _, _) =>
if (ldef.symbol == NoSymbol)
ldef.symbol = namer.enterInScope(
- context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), UnitClass.tpe))
+ context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), UnitTpe))
case _ =>
}
}
@@ -2454,7 +2505,7 @@ trait Typers extends Adaptations with Tags {
namer.enterIfNotThere(bind.symbol)
val guard1: Tree = if (cdef.guard == EmptyTree) EmptyTree
- else typed(cdef.guard, BooleanClass.tpe)
+ else typed(cdef.guard, BooleanTpe)
var body1: Tree = typed(cdef.body, pt)
if (context.enclosingCaseDef.savedTypeBounds.nonEmpty) {
@@ -2477,20 +2528,25 @@ trait Typers extends Adaptations with Tags {
def adaptCase(cdef: CaseDef, mode: Mode, tpe: Type): CaseDef = deriveCaseDef(cdef)(adapt(_, mode, tpe))
- def ptOrLub(tps: List[Type], pt: Type ) = if (isFullyDefined(pt)) (pt, false) else weakLub(tps map (_.deconst))
- def ptOrLubPacked(trees: List[Tree], pt: Type) = if (isFullyDefined(pt)) (pt, false) else weakLub(trees map (c => packedType(c, context.owner).deconst))
+ def packedTypes(trees: List[Tree]): List[Type] = trees map (c => packedType(c, context.owner).deconst)
// takes untyped sub-trees of a match and type checks them
def typedMatch(selector: Tree, cases: List[CaseDef], mode: Mode, pt: Type, tree: Tree = EmptyTree): Match = {
- val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
+ val selector1 = checkDead(typedByValueExpr(selector))
val selectorTp = packCaptured(selector1.tpe.widen).skolemizeExistential(context.owner, selector)
val casesTyped = typedCases(cases, selectorTp, pt)
- val (resTp, needAdapt) = ptOrLubPacked(casesTyped, pt)
+ def finish(cases: List[CaseDef], matchType: Type) =
+ treeCopy.Match(tree, selector1, cases) setType matchType
- val casesAdapted = if (!needAdapt) casesTyped else casesTyped map (adaptCase(_, mode, resTp))
-
- treeCopy.Match(tree, selector1, casesAdapted) setType resTp
+ if (isFullyDefined(pt))
+ finish(casesTyped, pt)
+ else packedTypes(casesTyped) match {
+ case packed if sameWeakLubAsLub(packed) => finish(casesTyped, lub(packed))
+ case packed =>
+ val lub = weakLub(packed)
+ finish(casesTyped map (adaptCase(_, mode, lub)), lub)
+ }
}
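
Illustrative note: a small, hypothetical example of the weak-lub adaptation performed above. With no fully defined expected type and branches of type Int and Double, the packed types' weak lub is Double and the Int branch is adapted to it.

    object LubDemo {
      val r = (3: Any) match {
        case s: String => s.length   // Int branch, adapted to Double
        case _         => 0.5        // Double branch
      }
      // r: Double, the weak lub of Int and Double
    }
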
// match has been typed -- virtualize it during type checking so the full context is available
@@ -2500,7 +2556,7 @@ trait Typers extends Adaptations with Tags {
// TODO: add fallback __match sentinel to predef
val matchStrategy: Tree =
if (!(newPatternMatching && settings.Xexperimental && context.isNameInScope(vpmName._match))) null // fast path, avoiding the next line if there's no __match to be seen
- else newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match), EXPRmode, WildcardType), reportAmbiguousErrors = false) orElse (_ => null)
+ else newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match)), reportAmbiguousErrors = false) orElse (_ => null)
if (matchStrategy ne null) // virtualize
typed((new PureMatchTranslator(this.asInstanceOf[patmat.global.analyzer.Typer] /*TODO*/, matchStrategy)).translateMatch(match_), mode, pt)
@@ -2546,7 +2602,7 @@ trait Typers extends Adaptations with Tags {
val argTp :: resTp :: Nil = targs
// targs must conform to Any for us to synthesize an applyOrElse (fallback to apply otherwise -- typically for @cps annotated targs)
- val targsValidParams = targs forall (_ <:< AnyClass.tpe)
+ val targsValidParams = targs forall (_ <:< AnyTpe)
val anonClass = (context.owner
newAnonymousFunctionClass tree.pos
@@ -2674,12 +2730,12 @@ trait Typers extends Adaptations with Tags {
val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
methodBodyTyper.context.scope enter paramSym
- methodSym setInfo MethodType(List(paramSym), BooleanClass.tpe)
+ methodSym setInfo MethodType(List(paramSym), BooleanTpe)
val defaultCase = mkDefaultCase(FALSE)
- val match_ = methodBodyTyper.typedMatch(selector, casesTrue :+ defaultCase, mode, BooleanClass.tpe)
+ val match_ = methodBodyTyper.typedMatch(selector, casesTrue :+ defaultCase, mode, BooleanTpe)
- DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, BooleanClass.tpe))
+ DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, BooleanTpe))
}
// only used for @cps annotated partial functions
@@ -2688,7 +2744,7 @@ trait Typers extends Adaptations with Tags {
val methodSym = anonClass.newMethod(nme.apply, tree.pos, FINAL | OVERRIDE)
val paramSym = mkParam(methodSym)
- methodSym setInfo MethodType(List(paramSym), AnyClass.tpe)
+ methodSym setInfo MethodType(List(paramSym), AnyTpe)
val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym))
// should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
@@ -2822,8 +2878,8 @@ trait Typers extends Adaptations with Tags {
templ updateAttachment att.copy(stats = stats1)
for (stat <- stats1 if stat.isDef && stat.symbol.isOverridingSymbol)
stat.symbol setFlag OVERRIDE
- }
- }
+ }
+ }
def typedImport(imp : Import) : Import = (transformed remove imp) match {
case Some(imp1: Import) => imp1
@@ -2843,7 +2899,7 @@ trait Typers extends Adaptations with Tags {
case imp @ Import(_, _) =>
imp.symbol.initialize
if (!imp.symbol.isError) {
- context = context.makeNewImport(imp)
+ context = context.make(imp)
typedImport(imp)
} else EmptyTree
case _ =>
@@ -2857,7 +2913,7 @@ trait Typers extends Adaptations with Tags {
} else newTyper(context.make(stat, exprOwner))
// XXX this creates a spurious dead code warning if an exception is thrown
// in a constructor, even if it is the only thing in the constructor.
- val result = checkDead(localTyper.typed(stat, EXPRmode | BYVALmode, WildcardType))
+ val result = checkDead(localTyper.typedByValueExpr(stat))
if (treeInfo.isSelfOrSuperConstrCall(result)) {
context.inConstructorSuffix = true
@@ -2971,7 +3027,7 @@ trait Typers extends Adaptations with Tags {
def typedArg(arg: Tree, mode: Mode, newmode: Mode, pt: Type): Tree = {
val typedMode = mode.onlySticky | newmode
- val t = withCondConstrTyper((mode & SCCmode) != NOmode)(_.typed(arg, typedMode, pt))
+ val t = withCondConstrTyper(mode.inSccMode)(_.typed(arg, typedMode, pt))
checkDead.inMode(typedMode, t)
}
@@ -2987,19 +3043,18 @@ trait Typers extends Adaptations with Tags {
* (docs reverse-engineered -- AM)
*/
def typedArgs(args0: List[Tree], mode: Mode, formals0: List[Type], adapted0: List[Type]): List[Tree] = {
- val sticky = mode.onlySticky
def loop(args: List[Tree], formals: List[Type], adapted: List[Type]): List[Tree] = {
if (args.isEmpty || adapted.isEmpty) Nil
else {
// No formals left or * indicates varargs.
val isVarArgs = formals.isEmpty || formals.tail.isEmpty && isRepeatedParamType(formals.head)
- val typedMode = sticky | (
- if (isVarArgs) STARmode | BYVALmode
- else if (isByNameParamType(formals.head)) NOmode
- else BYVALmode
- )
+ val isByName = formals.nonEmpty && isByNameParamType(formals.head)
+ def typedMode = if (isByName) mode.onlySticky else mode.onlySticky | BYVALmode
+ def body = typedArg(args.head, mode, typedMode, adapted.head)
+ def arg1 = if (isVarArgs) context.withinStarPatterns(body) else body
+
// formals may be empty, so don't call tail
- typedArg(args.head, mode, typedMode, adapted.head) :: loop(args.tail, formals drop 1, adapted.tail)
+ arg1 :: loop(args.tail, formals drop 1, adapted.tail)
}
}
loop(args0, formals0, adapted0)
@@ -3056,11 +3111,11 @@ trait Typers extends Adaptations with Tags {
// less expensive than including them in inferMethodAlternative (see below).
def shapeType(arg: Tree): Type = arg match {
case Function(vparams, body) =>
- functionType(vparams map (_ => AnyClass.tpe), shapeType(body))
+ functionType(vparams map (_ => AnyTpe), shapeType(body))
case AssignOrNamedArg(Ident(name), rhs) =>
NamedType(name, shapeType(rhs))
case _ =>
- NothingClass.tpe
+ NothingTpe
}
val argtypes = args map shapeType
val pre = fun.symbol.tpe.prefix
@@ -3096,30 +3151,29 @@ trait Typers extends Adaptations with Tags {
fun.tpe match {
case OverloadedType(pre, alts) =>
def handleOverloaded = {
- val undetparams = context.extractUndetparams()
- val argtpes = new ListBuffer[Type]
- val amode = forArgMode(fun, mode)
- val args1 = args map {
- case arg @ AssignOrNamedArg(Ident(name), rhs) =>
- // named args: only type the righthand sides ("unknown identifier" errors otherwise)
- val rhs1 = typedArg(rhs, amode, BYVALmode, WildcardType)
- argtpes += NamedType(name, rhs1.tpe.deconst)
- // the assign is untyped; that's ok because we call doTypedApply
- treeCopy.AssignOrNamedArg(arg, arg.lhs, rhs1)
- case arg @ Typed(repeated, Ident(tpnme.WILDCARD_STAR)) =>
- val arg1 = typedArg(arg, amode, BYVALmode, WildcardType)
- argtpes += RepeatedType(arg1.tpe.deconst)
- arg1
- case arg =>
- val arg1 = typedArg(arg, amode, BYVALmode, WildcardType)
- argtpes += arg1.tpe.deconst
- arg1
+ val undetparams = context.undetparams
+ val (args1, argTpes) = context.savingUndeterminedTypeParams() {
+ val amode = forArgMode(fun, mode)
+ def typedArg0(tree: Tree) = typedArg(tree, amode, BYVALmode, WildcardType)
+ args.map {
+ case arg @ AssignOrNamedArg(Ident(name), rhs) =>
+ // named args: only type the righthand sides ("unknown identifier" errors otherwise)
+ val rhs1 = typedArg0(rhs)
+ // the assign is untyped; that's ok because we call doTypedApply
+ val arg1 = treeCopy.AssignOrNamedArg(arg, arg.lhs, rhs1)
+ (arg1, NamedType(name, rhs1.tpe.deconst))
+ case arg @ treeInfo.WildcardStarArg(repeated) =>
+ val arg1 = typedArg0(arg)
+ (arg1, RepeatedType(arg1.tpe.deconst))
+ case arg =>
+ val arg1 = typedArg0(arg)
+ (arg1, arg1.tpe.deconst)
+ }.unzip
}
- context.undetparams = undetparams
if (context.hasErrors)
setError(tree)
else {
- inferMethodAlternative(fun, undetparams, argtpes.toList, pt)
+ inferMethodAlternative(fun, undetparams, argTpes, pt)
doTypedApply(tree, adapt(fun, mode.forFunMode, WildcardType), args1, mode, pt)
}
}
@@ -3135,7 +3189,7 @@ trait Typers extends Adaptations with Tags {
* to that. This is the last thing which is tried (after
* default arguments)
*/
- def tryTupleApply: Option[Tree] = (
+ def tryTupleApply: Tree = (
if (eligibleForTupleConversion(paramTypes, argslen) && !phase.erasedTypes) {
val tupleArgs = List(atPos(tree.pos.makeTransparent)(gen.mkTuple(args)))
// expected one argument, but got 0 or >1 ==> try applying to tuple
@@ -3144,14 +3198,15 @@ trait Typers extends Adaptations with Tags {
silent(_.doTypedApply(tree, fun, tupleArgs, mode, pt)) map { t =>
// Depending on user options, may warn or error here if
// a Unit or tuple was inserted.
- Some(t) filter (tupledTree =>
- !mode.inExprModeButNot(FUNmode)
- || tupledTree.symbol == null
- || checkValidAdaptation(tupledTree, args)
+ val keepTree = (
+ !mode.typingExprNotFun
+ || t.symbol == null
+ || checkValidAdaptation(t, args)
)
- } orElse { _ => context.undetparams = savedUndetparams ; None }
- }
- else None
+ if (keepTree) t else EmptyTree
+ } orElse { _ => context.undetparams = savedUndetparams ; EmptyTree }
+ }
+ else EmptyTree
)
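
Illustrative note: the user-level shape of the tuple conversion attempted by `tryTupleApply`, using a hypothetical method. A call with several arguments is retried with those arguments packed into a single tuple, and the result is kept only if `checkValidAdaptation` allows it (which may warn, depending on compiler options).

    object TupleDemo {
      def show(pair: (Int, String)): Unit = println(pair)
      show(1, "one")   // adapted to show((1, "one"))
    }
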
/* Treats an application which uses named or default arguments.
@@ -3164,7 +3219,7 @@ trait Typers extends Adaptations with Tags {
def checkNotMacro() = {
if (treeInfo.isMacroApplication(fun))
- tryTupleApply getOrElse duplErrorTree(NamedAndDefaultArgumentsNotSupportedForMacros(tree, fun))
+ tryTupleApply orElse duplErrorTree(NamedAndDefaultArgumentsNotSupportedForMacros(tree, fun))
}
if (mt.isErroneous) duplErrTree
@@ -3172,7 +3227,7 @@ trait Typers extends Adaptations with Tags {
// #2064
duplErrorTree(WrongNumberOfArgsError(tree, fun))
} else if (lencmp > 0) {
- tryTupleApply getOrElse duplErrorTree(TooManyArgsNamesDefaultsError(tree, fun))
+ tryTupleApply orElse duplErrorTree(TooManyArgsNamesDefaultsError(tree, fun))
} else if (lencmp == 0) {
// we don't need defaults. names were used, so this application is transformed
// into a block (@see transformNamedApplication in NamesDefaults)
@@ -3225,7 +3280,7 @@ trait Typers extends Adaptations with Tags {
if (!(context.diagnostic contains note)) context.diagnostic = note :: context.diagnostic
doTypedApply(tree, if (blockIsEmpty) fun else fun1, allArgs, mode, pt)
} else {
- tryTupleApply getOrElse duplErrorTree(NotEnoughArgsError(tree, fun, missing))
+ tryTupleApply orElse duplErrorTree(NotEnoughArgsError(tree, fun, missing))
}
}
}
@@ -3261,7 +3316,7 @@ trait Typers extends Adaptations with Tags {
// instantiate dependent method types, must preserve singleton types where possible (stableTypeFor) -- example use case:
// val foo = "foo"; def precise(x: String)(y: x.type): x.type = {...}; val bar : foo.type = precise(foo)(foo)
// precise(foo) : foo.type => foo.type
- val restpe = mt.resultType(args1 map (arg => gen.stableTypeFor(arg) getOrElse arg.tpe))
+ val restpe = mt.resultType(args1 map (arg => gen stableTypeFor arg orElse arg.tpe))
def ifPatternSkipFormals(tp: Type) = tp match {
case MethodType(_, rtp) if (mode.inPatternMode) => rtp
case _ => tp
@@ -3395,13 +3450,12 @@ trait Typers extends Adaptations with Tags {
else {
val resTp = fun1.tpe.finalResultType.dealiasWiden
val nbSubPats = args.length
-
- val (formals, formalsExpanded) = extractorFormalTypes(fun0.pos, resTp, nbSubPats, fun1.symbol)
+ val (formals, formalsExpanded) =
+ extractorFormalTypes(fun0.pos, resTp, nbSubPats, fun1.symbol, treeInfo.effectivePatternArity(args))
if (formals == null) duplErrorTree(WrongNumberOfArgsError(tree, fun))
else {
val args1 = typedArgs(args, mode, formals, formalsExpanded)
- val pt1 = if (isFullyDefined(pt)) pt else makeFullyDefined(pt) // SI-1048
-
+ val pt1 = ensureFullyDefined(pt) // SI-1048
val itype = glb(List(pt1, arg.tpe))
arg setType pt1 // restore type (arg is a dummy tree, just needs to pass typechecking)
val unapply = UnApply(fun1, args1) setPos tree.pos setType itype
@@ -3490,7 +3544,7 @@ trait Typers extends Adaptations with Tags {
def tryConst(tr: Tree, pt: Type): Option[LiteralAnnotArg] = {
// The typed tree may be relevantly different than the tree `tr`,
// e.g. it may have encountered an implicit conversion.
- val ttree = typed(constfold(tr), EXPRmode, pt)
+ val ttree = typed(constfold(tr), pt)
val const: Constant = ttree match {
case l @ Literal(c) if !l.isErroneous => c
case tree => tree.tpe match {
@@ -3529,7 +3583,7 @@ trait Typers extends Adaptations with Tags {
// use of Array.apply[T: ClassTag](xs: T*): Array[T]
// and Array.apply(x: Int, xs: Int*): Array[Int] (and similar)
case Apply(fun, args) =>
- val typedFun = typed(fun, mode.forFunMode, WildcardType)
+ val typedFun = typed(fun, mode.forFunMode)
if (typedFun.symbol.owner == ArrayModule.moduleClass && typedFun.symbol.name == nme.apply)
pt match {
case TypeRef(_, ArrayClass, targ :: _) =>
@@ -3560,7 +3614,7 @@ trait Typers extends Adaptations with Tags {
val treeInfo.Applied(fun0, targs, argss) = ann
if (fun0.isErroneous)
return finish(ErroneousAnnotation)
- val typedFun0 = typed(fun0, mode.forFunMode, WildcardType)
+ val typedFun0 = typed(fun0, mode.forFunMode)
val typedFunPart = (
// If there are dummy type arguments in typeFun part, it suggests we
// must type the actual constructor call, not only the select. The value
@@ -3678,79 +3732,11 @@ trait Typers extends Adaptations with Tags {
})
}
- def isRawParameter(sym: Symbol) = // is it a type parameter leaked by a raw type?
- sym.isTypeParameter && sym.owner.isJavaDefined
-
- /** If we map a set of hidden symbols to their existential bounds, we
- * have a problem: the bounds may themselves contain references to the
- * hidden symbols. So this recursively calls existentialBound until
- * the typeSymbol is not amongst the symbols being hidden.
- */
- def existentialBoundsExcludingHidden(hidden: List[Symbol]): Map[Symbol, Type] = {
- def safeBound(t: Type): Type =
- if (hidden contains t.typeSymbol) safeBound(t.typeSymbol.existentialBound.bounds.hi) else t
-
- def hiBound(s: Symbol): Type = safeBound(s.existentialBound.bounds.hi) match {
- case tp @ RefinedType(parents, decls) =>
- val parents1 = parents mapConserve safeBound
- if (parents eq parents1) tp
- else copyRefinedType(tp, parents1, decls)
- case tp => tp
- }
-
- // Hanging onto lower bound in case anything interesting
- // happens with it.
- mapFrom(hidden)(s => s.existentialBound match {
- case TypeBounds(lo, hi) => TypeBounds(lo, hiBound(s))
- case _ => hiBound(s)
- })
- }
-
- /** Given a set `rawSyms` of term- and type-symbols, and a type
- * `tp`, produce a set of fresh type parameters and a type so that
- * it can be abstracted to an existential type. Every type symbol
- * `T` in `rawSyms` is mapped to a clone. Every term symbol `x` of
- * type `T` in `rawSyms` is given an associated type symbol of the
- * following form:
- *
- * type x.type <: T with Singleton
- *
- * The name of the type parameter is `x.type`, to produce nice
- * diagnostics. The Singleton parent ensures that the type
- * parameter is still seen as a stable type. Type symbols in
- * rawSyms are fully replaced by the new symbols. Term symbols are
- * also replaced, except for term symbols of an Ident tree, where
- * only the type of the Ident is changed.
- */
- protected def existentialTransform[T](rawSyms: List[Symbol], tp: Type)(creator: (List[Symbol], Type) => T): T = {
- val allBounds = existentialBoundsExcludingHidden(rawSyms)
- val typeParams: List[Symbol] = rawSyms map { sym =>
- val name = sym.name match {
- case x: TypeName => x
- case x => tpnme.singletonName(x)
- }
- val bound = allBounds(sym)
- val sowner = if (isRawParameter(sym)) context.owner else sym.owner
- val quantified = sowner.newExistential(name, sym.pos)
-
- quantified setInfo bound.cloneInfo(quantified)
- }
- // Higher-kinded existentials are not yet supported, but this is
- // tpeHK for when they are: "if a type constructor is expected/allowed,
- // tpeHK must be called instead of tpe."
- val typeParamTypes = typeParams map (_.tpeHK)
- def doSubst(info: Type) = info.subst(rawSyms, typeParamTypes)
-
- creator(typeParams map (_ modifyInfo doSubst), doSubst(tp))
- }
-
/** Compute an existential type from raw hidden symbols `syms` and type `tp`
*/
- def packSymbols(hidden: List[Symbol], tp: Type): Type =
- if (hidden.isEmpty) tp
- else existentialTransform(hidden, tp)(existentialAbstraction)
+ def packSymbols(hidden: List[Symbol], tp: Type): Type = global.packSymbols(hidden, tp, context0.owner)
- def isReferencedFrom(ctx: Context, sym: Symbol): Boolean =
+ def isReferencedFrom(ctx: Context, sym: Symbol): Boolean = (
ctx.owner.isTerm &&
(ctx.scope.exists { dcl => dcl.isInitialized && (dcl.info contains sym) }) ||
{
@@ -3758,13 +3744,15 @@ trait Typers extends Adaptations with Tags {
while ((ctx1 != NoContext) && (ctx1.scope eq ctx.scope)) ctx1 = ctx1.outer
(ctx1 != NoContext) && isReferencedFrom(ctx1, sym)
}
+ )
- def isCapturedExistential(sym: Symbol) =
- (sym hasAllFlags (EXISTENTIAL | CAPTURED)) && {
- val start = if (Statistics.canEnable) Statistics.startTimer(isReferencedNanos) else null
- try !isReferencedFrom(context, sym)
- finally if (Statistics.canEnable) Statistics.stopTimer(isReferencedNanos, start)
- }
+ def isCapturedExistential(sym: Symbol) = (
+ (sym hasAllFlags EXISTENTIAL | CAPTURED) && {
+ val start = if (Statistics.canEnable) Statistics.startTimer(isReferencedNanos) else null
+ try !isReferencedFrom(context, sym)
+ finally if (Statistics.canEnable) Statistics.stopTimer(isReferencedNanos, start)
+ }
+ )
def packCaptured(tpe: Type): Type = {
val captured = mutable.Set[Symbol]()
@@ -3867,8 +3855,16 @@ trait Typers extends Adaptations with Tags {
if (vd.symbol.tpe.isVolatile)
AbstractionFromVolatileTypeError(vd)
val tpt1 = typedType(tree.tpt, mode)
- existentialTransform(whereClauses1 map (_.symbol), tpt1.tpe)((tparams, tp) =>
- TypeTree(newExistentialType(tparams, tp)) setOriginal tree
+ existentialTransform(whereClauses1 map (_.symbol), tpt1.tpe)((tparams, tp) => {
+ val original = tpt1 match {
+ case tpt : TypeTree => atPos(tree.pos)(ExistentialTypeTree(tpt.original, tree.whereClauses))
+ case _ => {
+ debuglog(s"cannot reconstruct the original for $tree, because $tpt1 is not a TypeTree")
+ tree
+ }
+ }
+ TypeTree(newExistentialType(tparams, tp)) setOriginal original
+ }
)
}
@@ -3882,7 +3878,7 @@ trait Typers extends Adaptations with Tags {
// as we don't know which alternative to choose... here we do
map2Conserve(args, tparams) {
//@M! the polytype denotes the expected kind
- (arg, tparam) => typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyClass.tpe))
+ (arg, tparam) => typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyTpe))
}
} else // @M: there's probably something wrong when args.length != tparams.length... (triggered by bug #320)
// Martin, I'm using fake trees, because, if you use args or arg.map(typedType),
@@ -3903,7 +3899,7 @@ trait Typers extends Adaptations with Tags {
if (!isPastTyper && fun.symbol == Any_isInstanceOf && targs.nonEmpty) {
val scrutineeType = fun match {
case Select(qual, _) => qual.tpe
- case _ => AnyClass.tpe
+ case _ => AnyTpe
}
checkCheckable(tree, targs.head, scrutineeType, inPattern = false)
}
@@ -4020,7 +4016,7 @@ trait Typers extends Adaptations with Tags {
case Apply(fn, args) if matches(fn) => Some((applyOp(args), fn))
case Assign(lhs, _) if matches(lhs) => Some((nme.updateDynamic, lhs))
case _ if matches(t) => Some((nme.selectDynamic, t))
- case _ => t.children flatMap findSelection headOption
+ case _ => (t.children flatMap findSelection).headOption
}
findSelection(cxTree) match {
case Some((opName, treeInfo.Applied(_, targs, _))) =>
@@ -4048,13 +4044,9 @@ trait Typers extends Adaptations with Tags {
}
def typed1(tree: Tree, mode: Mode, pt: Type): Tree = {
- def isPatternMode = mode.inPatternMode
- def inPatternConstructor = mode.inAll(PATTERNmode | FUNmode)
- def isQualifierMode = mode.inAll(QUALmode)
-
// Lookup in the given class using the root mirror.
def lookupInOwner(owner: Symbol, name: Name): Symbol =
- if (isQualifierMode) rootMirror.missingHook(owner, name) else NoSymbol
+ if (mode.inQualMode) rootMirror.missingHook(owner, name) else NoSymbol
// Lookup in the given qualifier. Used in last-ditch efforts by typedIdent and typedSelect.
def lookupInRoot(name: Name): Symbol = lookupInOwner(rootMirror.RootClass, name)
@@ -4171,7 +4163,9 @@ trait Typers extends Adaptations with Tags {
else context.owner.newValue(name, tree.pos)
if (name != nme.WILDCARD) {
- if (mode.inAll(ALTmode)) VariableInPatternAlternativeError(tree)
+ if (context.inPatAlternative)
+ VariableInPatternAlternativeError(tree)
+
namer.enterInScope(sym)
}
@@ -4204,7 +4198,7 @@ trait Typers extends Adaptations with Tags {
}
def typedAssign(lhs: Tree, rhs: Tree): Tree = {
- val lhs1 = typed(lhs, EXPRmode | LHSmode, WildcardType)
+ val lhs1 = typed(lhs, EXPRmode | LHSmode)
val varsym = lhs1.symbol
// see #2494 for double error message example
@@ -4229,47 +4223,50 @@ trait Typers extends Adaptations with Tags {
// // setter-rewrite has been done above, so rule out methods here, but, wait a minute, why are we assigning to non-variables after erasure?!
// (phase.erasedTypes && varsym.isValue && !varsym.isMethod)) {
if (varsym.isVariable || varsym.isValue && phase.erasedTypes) {
- val rhs1 = typed(rhs, EXPRmode | BYVALmode, lhs1.tpe)
- treeCopy.Assign(tree, lhs1, checkDead(rhs1)) setType UnitClass.tpe
+ val rhs1 = typedByValueExpr(rhs, lhs1.tpe)
+ treeCopy.Assign(tree, lhs1, checkDead(rhs1)) setType UnitTpe
}
else if(dyna.isDynamicallyUpdatable(lhs1)) {
- val rhs1 = typed(rhs, EXPRmode | BYVALmode, WildcardType)
+ val rhs1 = typedByValueExpr(rhs)
val t = Apply(lhs1, List(rhs1))
dyna.wrapErrors(t, _.typed1(t, mode, pt))
}
else fail()
}
- def typedIf(tree: If) = {
- val cond1 = checkDead(typed(tree.cond, EXPRmode | BYVALmode, BooleanClass.tpe))
- val thenp = tree.thenp
- val elsep = tree.elsep
- if (elsep.isEmpty) { // in the future, should be unnecessary
- val thenp1 = typed(thenp, UnitClass.tpe)
- treeCopy.If(tree, cond1, thenp1, elsep) setType thenp1.tpe
- } else {
- var thenp1 = typed(thenp, pt)
- var elsep1 = typed(elsep, pt)
- def thenTp = packedType(thenp1, context.owner)
- def elseTp = packedType(elsep1, context.owner)
-
- // println("typedIf: "+(thenp1.tpe, elsep1.tpe, ptOrLub(List(thenp1.tpe, elsep1.tpe)),"\n", thenTp, elseTp, thenTp =:= elseTp))
- val (owntype, needAdapt) =
- // in principle we should pack the types of each branch before lubbing, but lub doesn't really work for existentials anyway
- // in the special (though common) case where the types are equal, it pays to pack before comparing
- // especially virtpatmat needs more aggressive unification of skolemized types
- // this breaks src/library/scala/collection/immutable/TrieIterator.scala
- if (!isPastTyper && thenp1.tpe.annotations.isEmpty && elsep1.tpe.annotations.isEmpty // annotated types need to be lubbed regardless (at least, continations break if you by pass them like this)
- && thenTp =:= elseTp
- ) (thenp1.tpe.deconst, false) // use unpacked type. Important to deconst, as is done in ptOrLub, otherwise `if (???) 0 else 0` evaluates to 0 (SI-6331)
- // TODO: skolemize (lub of packed types) when that no longer crashes on files/pos/t4070b.scala
- else ptOrLub(thenp1.tpe :: elsep1.tpe :: Nil, pt)
-
- if (needAdapt) { //isNumericValueType(owntype)) {
- thenp1 = adapt(thenp1, mode, owntype)
- elsep1 = adapt(elsep1, mode, owntype)
- }
- treeCopy.If(tree, cond1, thenp1, elsep1) setType owntype
+ def typedIf(tree: If): If = {
+ val cond1 = checkDead(typedByValueExpr(tree.cond, BooleanTpe))
+ // One-legged ifs don't need a lot of analysis
+ if (tree.elsep.isEmpty)
+ return treeCopy.If(tree, cond1, typed(tree.thenp, UnitTpe), tree.elsep) setType UnitTpe
+
+ val thenp1 = typed(tree.thenp, pt)
+ val elsep1 = typed(tree.elsep, pt)
+
+ // in principle we should pack the types of each branch before lubbing, but lub doesn't really work for existentials anyway
+ // in the special (though common) case where the types are equal, it pays to pack before comparing
+ // especially virtpatmat needs more aggressive unification of skolemized types
+ // this breaks src/library/scala/collection/immutable/TrieIterator.scala
+      // annotated types need to be lubbed regardless (at least, continuations break if you bypass them like this)
+ def samePackedTypes = (
+ !isPastTyper
+ && thenp1.tpe.annotations.isEmpty
+ && elsep1.tpe.annotations.isEmpty
+ && packedType(thenp1, context.owner) =:= packedType(elsep1, context.owner)
+ )
+ def finish(ownType: Type) = treeCopy.If(tree, cond1, thenp1, elsep1) setType ownType
+ // TODO: skolemize (lub of packed types) when that no longer crashes on files/pos/t4070b.scala
+ // @PP: This was doing the samePackedTypes check BEFORE the isFullyDefined check,
+ // which based on everything I see everywhere else was a bug. I reordered it.
+ if (isFullyDefined(pt))
+ finish(pt)
+ // Important to deconst, otherwise `if (???) 0 else 0` evaluates to 0 (SI-6331)
+ else thenp1.tpe.deconst :: elsep1.tpe.deconst :: Nil match {
+ case tp :: _ if samePackedTypes => finish(tp)
+ case tpes if sameWeakLubAsLub(tpes) => finish(lub(tpes))
+ case tpes =>
+ val lub = weakLub(tpes)
+ treeCopy.If(tree, cond1, adapt(thenp1, mode, lub), adapt(elsep1, mode, lub)) setType lub
}
}
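
A hypothetical user-level illustration of the branch typing the rewritten typedIf performs: branch types are combined with a (weak) lub, and constant types are deconst-ed so that two literal-0 branches still yield Int (SI-6331).

object IfLubDemo {
  def main(args: Array[String]): Unit = {
    val a = if (args.isEmpty) 1 else 2.0   // weak lub of Int and Double: Double
    val b = if (args.isEmpty) 0 else 0     // deconst: Int rather than the constant type 0
    println((a, b))
  }
}
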
@@ -4310,9 +4307,9 @@ trait Typers extends Adaptations with Tags {
val DefDef(_, name, _, _, restpt, _) = enclMethod.tree
if (restpt.tpe eq null) {
ReturnWithoutTypeError(tree, enclMethod.owner)
- } else {
- context.enclMethod.returnsSeen = true
- val expr1: Tree = typed(expr, EXPRmode | BYVALmode | RETmode, restpt.tpe)
+ }
+ else {
+ val expr1 = context withinReturnExpr typedByValueExpr(expr, restpt.tpe)
// Warn about returning a value if no value can be returned.
if (restpt.tpe.typeSymbol == UnitClass) {
// The typing in expr1 says expr is Unit (it has already been coerced if
@@ -4322,7 +4319,7 @@ trait Typers extends Adaptations with Tags {
unit.warning(tree.pos, "enclosing method " + name + " has result type Unit: return value discarded")
}
val res = treeCopy.Return(tree, checkDead(expr1)).setSymbol(enclMethod.owner)
- val tp = pluginsTypedReturn(NothingClass.tpe, this, res, restpt.tpe)
+ val tp = pluginsTypedReturn(NothingTpe, this, res, restpt.tpe)
res.setType(tp)
}
}
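
A hypothetical user-level example of the warning preserved here: returning a value from a method whose declared result type is Unit discards that value.

object ReturnDemo {
  def log(msg: String): Unit = {
    if (msg.isEmpty) return      // fine: no value returned
    println(msg)
    // return 42                 // would warn: enclosing method log has result type Unit: return value discarded
  }
  def main(args: Array[String]): Unit = log("hello")
}
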
@@ -4416,11 +4413,9 @@ trait Typers extends Adaptations with Tags {
case ex: CyclicReference =>
throw ex
case te: TypeError =>
- // @H some of typer erros can still leak,
+ // @H some of typer errors can still leak,
// for instance in continuations
None
- } finally {
- c.flushBuffer()
}
}
@@ -4470,7 +4465,7 @@ trait Typers extends Adaptations with Tags {
else qual
if (qual1 ne qual) {
val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos
- return typed1(tree1, mode | SNDTRYmode, pt)
+ return context withinSecondTry typed1(tree1, mode, pt)
}
case _ => ()
}
@@ -4483,64 +4478,56 @@ trait Typers extends Adaptations with Tags {
}
def normalTypedApply(tree: Tree, fun: Tree, args: List[Tree]) = {
+ // TODO: replace `fun.symbol.isStable` by `treeInfo.isStableIdentifierPattern(fun)`
val stableApplication = (fun.symbol ne null) && fun.symbol.isMethod && fun.symbol.isStable
- if (stableApplication && isPatternMode) {
- // treat stable function applications f() as expressions.
- typed1(tree, (mode &~ PATTERNmode) | EXPRmode, pt)
- } else {
- val funpt = if (isPatternMode) pt else WildcardType
- val appStart = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null
- val opeqStart = if (Statistics.canEnable) Statistics.startTimer(failedOpEqNanos) else null
-
- def onError(reportError: => Tree): Tree = {
- fun match {
- case Select(qual, name)
- if !isPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) =>
- val qual1 = typedQualifier(qual)
- if (treeInfo.isVariableOrGetter(qual1)) {
- if (Statistics.canEnable) Statistics.stopTimer(failedOpEqNanos, opeqStart)
- convertToAssignment(fun, qual1, name, args)
- } else {
- if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
- reportError
- }
- case _ =>
- if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
- reportError
- }
- }
- val silentResult = silent(
- op = _.typed(fun, mode.forFunMode, funpt),
- reportAmbiguousErrors = !mode.inExprMode && context.ambiguousErrors,
- newtree = if (mode.inExprMode) tree else context.tree
- )
- silentResult match {
- case SilentResultValue(fun1) =>
- val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1
- if (Statistics.canEnable) Statistics.incCounter(typedApplyCount)
- val noSecondTry = (
- isPastTyper
- || (fun2.symbol ne null) && fun2.symbol.isConstructor
- || (fun2.tpe match { case mt: MethodType => mt.isImplicit case _ => false })
- )
- val isFirstTry = !noSecondTry && (
- fun2 match {
- case Select(_, _) => mode inExprModeButNot SNDTRYmode
- case _ => false
- }
- )
- if (isFirstTry)
- tryTypedApply(fun2, args)
- else
- doTypedApply(tree, fun2, args, mode, pt)
-
- case SilentTypeError(err) =>
- onError({issue(err); setError(tree)})
- }
+ val funpt = if (mode.inPatternMode) pt else WildcardType
+ val appStart = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null
+ val opeqStart = if (Statistics.canEnable) Statistics.startTimer(failedOpEqNanos) else null
+
+ def onError(reportError: => Tree): Tree = fun match {
+ case Select(qual, name) if !mode.inPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) =>
+ val qual1 = typedQualifier(qual)
+ if (treeInfo.isVariableOrGetter(qual1)) {
+ if (Statistics.canEnable) Statistics.stopTimer(failedOpEqNanos, opeqStart)
+ convertToAssignment(fun, qual1, name, args)
+ }
+ else {
+ if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
+ reportError
+ }
+ case _ =>
+ if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
+ reportError
+ }
+ val silentResult = silent(
+ op = _.typed(fun, mode.forFunMode, funpt),
+ reportAmbiguousErrors = !mode.inExprMode && context.ambiguousErrors,
+ newtree = if (mode.inExprMode) tree else context.tree
+ )
+ silentResult match {
+ case SilentResultValue(fun1) =>
+ val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1
+ if (Statistics.canEnable) Statistics.incCounter(typedApplyCount)
+ val noSecondTry = (
+ isPastTyper
+ || context.inSecondTry
+ || (fun2.symbol ne null) && fun2.symbol.isConstructor
+ || isImplicitMethodType(fun2.tpe)
+ )
+ val isFirstTry = fun2 match {
+ case Select(_, _) => !noSecondTry && mode.inExprMode
+ case _ => false
+ }
+ if (isFirstTry)
+ tryTypedApply(fun2, args)
+ else
+ doTypedApply(tree, fun2, args, mode, pt)
+ case SilentTypeError(err) =>
+ onError({ issue(err); setError(tree) })
}
}
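
A hypothetical sketch of the fallback convertToAssignment provides when a direct application fails to type check: an op-assignment on a variable or getter is rewritten to an ordinary assignment.

object OpAssignDemo {
  def main(args: Array[String]): Unit = {
    var n = 1
    n += 41        // Int has no += method, so the typer rewrites this to n = n + 41
    var xs = List.empty[Int]
    xs ::= 1       // likewise rewritten to xs = 1 :: xs
    println((n, xs))
  }
}
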
- // convert new Array[T](len) to evidence[ClassTag[T]].newArray(len)
+ // convert new Array[T](len) to evidence[ClassTag[T]].newArray(len)
// convert new Array^N[T](len) for N > 1 to evidence[ClassTag[Array[...Array[T]...]]].newArray(len)
// where Array HK gets applied (N-1) times
object ArrayInstantiation {
@@ -4647,7 +4634,7 @@ trait Typers extends Adaptations with Tags {
val owntype = (
if (!mix.isEmpty) findMixinSuper(clazz.tpe)
- else if (mode.inAll(SUPERCONSTRmode)) clazz.info.firstParent
+ else if (context.inSuperInit) clazz.info.firstParent
else intersectionType(clazz.info.parents)
)
treeCopy.Super(tree, qual1, mix) setType SuperType(clazz.thisType, owntype)
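
A hypothetical user-level example of the super selections whose owntype is computed above: an explicit mix picks the designated parent via findMixinSuper, while an unqualified super outside a constructor is typed against the parents' intersection.

trait A { def m = "A" }
trait B { def m = "B" }
class C extends A with B {
  override def m = super[A].m + super[B].m   // explicit mix: one designated parent each
  def viaLinearization = super.m             // unqualified super: resolved against the parents
}
object SuperDemo {
  def main(args: Array[String]): Unit = println((new C).m + " " + (new C).viaLinearization)
}
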
@@ -4691,7 +4678,7 @@ trait Typers extends Adaptations with Tags {
// symbol not found? --> try to convert implicitly to a type that does have the required
// member. Added `| PATTERNmode` to allow enrichment in patterns (so we can add e.g., an
// xml member to StringContext, which in turn has an unapply[Seq] method)
- if (name != nme.CONSTRUCTOR && mode.inExprModeOr(PATTERNmode)) {
+ if (name != nme.CONSTRUCTOR && mode.inAny(EXPRmode | PATTERNmode)) {
val qual1 = adaptToMemberWithArgs(tree, qual, name, mode, reportAmbiguous = true, saveErrors = true)
if ((qual1 ne qual) && !qual1.isErrorTyped)
return typed(treeCopy.Select(tree, qual1, name), mode, pt)
@@ -4741,12 +4728,11 @@ trait Typers extends Adaptations with Tags {
case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
}
val (result, accessibleError) = silent(_.makeAccessible(tree1, sym, qual.tpe, qual)) match {
+ case SilentTypeError(err: AccessTypeError) =>
+ (tree1, Some(err))
case SilentTypeError(err) =>
- if (err.kind != ErrorKinds.Access) {
- context issue err
- return setError(tree)
- }
- else (tree1, Some(err))
+ context issue err
+ return setError(tree)
case SilentResultValue(treeAndPre) =>
(stabilize(treeAndPre._1, treeAndPre._2, mode, pt), None)
}
@@ -4780,56 +4766,55 @@ trait Typers extends Adaptations with Tags {
}
}
- def typedSelectOrSuperCall(tree: Select) = {
- val qual = tree.qualifier
- val name = tree.name
- qual match {
- case _: Super if name == nme.CONSTRUCTOR =>
- val qual1 =
- typed(qual, EXPRmode | QUALmode | POLYmode | SUPERCONSTRmode, WildcardType)
- // the qualifier type of a supercall constructor is its first parent class
- typedSelect(tree, qual1, nme.CONSTRUCTOR)
- case _ =>
- if (Statistics.canEnable) Statistics.incCounter(typedSelectCount)
- var qual1 = checkDead(typedQualifier(qual, mode))
- if (name.isTypeName) qual1 = checkStable(qual1)
-
- val tree1 = // temporarily use `filter` and an alternative for `withFilter`
- if (name == nme.withFilter)
- silent(_ => typedSelect(tree, qual1, name)) orElse { _ =>
- silent(_ => typed1(Select(qual1, nme.filter) setPos tree.pos, mode, pt)) match {
- case SilentResultValue(result2) =>
- unit.deprecationWarning(
- tree.pos, "`withFilter' method does not yet exist on " + qual1.tpe.widen +
- ", using `filter' method instead")
- result2
- case SilentTypeError(err) =>
- WithFilterError(tree, err)
- }
- }
- else
- typedSelect(tree, qual1, name)
-
- if (tree.isInstanceOf[PostfixSelect])
- checkFeature(tree.pos, PostfixOpsFeature, name.decode)
- if (tree1.symbol != null && tree1.symbol.isOnlyRefinementMember)
- checkFeature(tree1.pos, ReflectiveCallsFeature, tree1.symbol.toString)
-
- if (qual1.hasSymbolWhich(_.isRootPackage)) treeCopy.Ident(tree1, name)
- else tree1
+ // temporarily use `filter` as an alternative for `withFilter`
+ def tryWithFilterAndFilter(tree: Select, qual: Tree): Tree = {
+ def warn() = unit.deprecationWarning(tree.pos, s"`withFilter' method does not yet exist on ${qual.tpe.widen}, using `filter' method instead")
+ silent(_ => typedSelect(tree, qual, nme.withFilter)) orElse { _ =>
+ silent(_ => typed1(Select(qual, nme.filter) setPos tree.pos, mode, pt)) match {
+ case SilentResultValue(res) => warn() ; res
+ case SilentTypeError(err) => WithFilterError(tree, err)
+ }
}
}
+ def typedSelectOrSuperCall(tree: Select) = tree match {
+ case Select(qual @ Super(_, _), nme.CONSTRUCTOR) =>
+ // the qualifier type of a supercall constructor is its first parent class
+ typedSelect(tree, typedSelectOrSuperQualifier(qual), nme.CONSTRUCTOR)
+ case Select(qual, name) =>
+ if (Statistics.canEnable) Statistics.incCounter(typedSelectCount)
+ val qualTyped = checkDead(typedQualifier(qual, mode))
+ val qualStableOrError = (
+ if (qualTyped.isErrorTyped || !name.isTypeName || treeInfo.admitsTypeSelection(qualTyped))
+ qualTyped
+ else
+ UnstableTreeError(qualTyped)
+ )
+ val tree1 = name match {
+ case nme.withFilter => tryWithFilterAndFilter(tree, qualStableOrError)
+ case _ => typedSelect(tree, qualStableOrError, name)
+ }
+ def sym = tree1.symbol
+ if (tree.isInstanceOf[PostfixSelect])
+ checkFeature(tree.pos, PostfixOpsFeature, name.decode)
+ if (sym != null && sym.isOnlyRefinementMember)
+ checkFeature(tree1.pos, ReflectiveCallsFeature, sym.toString)
+
+ qualStableOrError.symbol match {
+ case s: Symbol if s.isRootPackage => treeCopy.Ident(tree1, name)
+ case _ => tree1
+ }
+ }
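
A hypothetical user-level case exercised by tryWithFilterAndFilter: a container that defines filter but not withFilter can still carry a guard in a for-comprehension, at the price of a deprecation warning about the filter fallback.

class Box[A](val xs: List[A]) {
  def map[B](f: A => B): Box[B] = new Box(xs map f)
  def filter(p: A => Boolean): Box[A] = new Box(xs filter p)
}
object BoxDemo {
  def main(args: Array[String]): Unit = {
    val b = new Box(List(1, 2, 3))
    val r = for (x <- b if x > 1) yield x * 10   // desugars to b.withFilter(...).map(...), falls back to filter
    println(r.xs)                                // List(20, 30)
  }
}
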
/* A symbol qualifies if:
* - it exists
* - it is not stale (stale symbols are made to disappear here)
- * - if we are in a pattern constructor, method definitions do not qualify
+ * - if we are in a constructor pattern, method definitions do not qualify
* unless they are stable. Otherwise, 'case x :: xs' would find the :: method.
*/
def qualifies(sym: Symbol) = (
sym.hasRawInfo
&& reallyExists(sym)
- && !(inPatternConstructor && sym.isMethod && !sym.isStable)
+ && !(mode.typingConstructorPattern && sym.isMethod && !sym.isStable)
)
/* Attribute an identifier consisting of a simple name or an outer reference.
@@ -4852,7 +4837,7 @@ trait Typers extends Adaptations with Tags {
setError(tree)
}
// ignore current variable scope in patterns to enforce linearity
- val startContext = if (mode.inNone(PATTERNmode | TYPEPATmode)) context else context.outer
+ val startContext = if (mode.typingPatternOrTypePat) context.outer else context
val nameLookup = tree.symbol match {
case NoSymbol => startContext.lookupSymbol(name, qualifies)
case sym => LookupSucceeded(EmptyTree, sym)
@@ -4888,8 +4873,8 @@ trait Typers extends Adaptations with Tags {
def typedIdentOrWildcard(tree: Ident) = {
val name = tree.name
if (Statistics.canEnable) Statistics.incCounter(typedIdentCount)
- if ((name == nme.WILDCARD && mode.inPatternNotFunMode) ||
- (name == tpnme.WILDCARD && mode.inAll(TYPEmode)))
+ if ((name == nme.WILDCARD && mode.typingPatternNotConstructor) ||
+ (name == tpnme.WILDCARD && mode.inTypeMode))
tree setType makeFullyDefined(pt)
else
typedIdent(tree, name)
@@ -4916,40 +4901,65 @@ trait Typers extends Adaptations with Tags {
}
def typedAppliedTypeTree(tree: AppliedTypeTree) = {
- val tpt = tree.tpt
- val args = tree.args
- val tpt1 = typed1(tpt, mode | FUNmode | TAPPmode, WildcardType)
+ val tpt = tree.tpt
+ val args = tree.args
+ val tpt1 = typed1(tpt, mode | FUNmode | TAPPmode, WildcardType)
+ def isPoly = tpt1.tpe.isInstanceOf[PolyType]
+ def isComplete = tpt1.symbol.rawInfo.isComplete
+
if (tpt1.isErrorTyped) {
tpt1
} else if (!tpt1.hasSymbolField) {
AppliedTypeNoParametersError(tree, tpt1.tpe)
} else {
val tparams = tpt1.symbol.typeParams
+
if (sameLength(tparams, args)) {
// @M: kind-arity checking is done here and in adapt, full kind-checking is in checkKindBounds (in Infer)
val args1 =
- if (!tpt1.symbol.rawInfo.isComplete)
+ if (!isComplete)
args mapConserve (typedHigherKindedType(_, mode))
// if symbol hasn't been fully loaded, can't check kind-arity
else map2Conserve(args, tparams) { (arg, tparam) =>
//@M! the polytype denotes the expected kind
- typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyClass.tpe))
+ typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyTpe))
}
val argtypes = args1 map (_.tpe)
- foreach2(args, tparams)((arg, tparam) => arg match {
- // note: can't use args1 in selector, because Bind's got replaced
- case Bind(_, _) =>
- if (arg.symbol.isAbstractType)
- arg.symbol setInfo // XXX, feedback. don't trackSymInfo here!
- TypeBounds(
- lub(List(arg.symbol.info.bounds.lo, tparam.info.bounds.lo.subst(tparams, argtypes))),
- glb(List(arg.symbol.info.bounds.hi, tparam.info.bounds.hi.subst(tparams, argtypes))))
- case _ =>
- })
+ foreach2(args, tparams) { (arg, tparam) =>
+ // note: can't use args1 in selector, because Binds got replaced
+ val asym = arg.symbol
+ def abounds = asym.info.bounds
+ def tbounds = tparam.info.bounds
+ def enhanceBounds(): Unit = {
+ val TypeBounds(lo0, hi0) = abounds
+ val TypeBounds(lo1, hi1) = tbounds.subst(tparams, argtypes)
+ val lo = lub(List(lo0, lo1))
+ val hi = glb(List(hi0, hi1))
+ if (!(lo =:= lo0 && hi =:= hi0))
+ asym setInfo logResult(s"Updating bounds of ${asym.fullLocationString} in $tree from '$abounds' to")(TypeBounds(lo, hi))
+ }
+ if (asym != null && asym.isAbstractType) {
+ // See pos/t1786 to follow what's happening here.
+ def canEnhanceIdent = (
+ asym.hasCompleteInfo
+ && tparam.exists /* sometimes it is NoSymbol */
+ && tparam.hasCompleteInfo /* SI-2940 */
+ && !tparam.isFBounded /* SI-2251 */
+ && !tparam.isHigherOrderTypeParameter
+ && !(abounds.hi <:< tbounds.hi)
+ && asym.isSynthetic /* this limits us to placeholder tparams, excluding named ones */
+ )
+ arg match {
+ case Bind(_, _) => enhanceBounds()
+ case Ident(name) if canEnhanceIdent => enhanceBounds()
+ case _ =>
+ }
+ }
+ }
val original = treeCopy.AppliedTypeTree(tree, tpt1, args1)
val result = TypeTree(appliedType(tpt1.tpe, argtypes)) setOriginal original
- if(tpt1.tpe.isInstanceOf[PolyType]) // did the type application (performed by appliedType) involve an unchecked beta-reduction?
+ if (isPoly) // did the type application (performed by appliedType) involve an unchecked beta-reduction?
TypeTreeWithDeferredRefCheck(){ () =>
// wrap the tree and include the bounds check -- refchecks will perform this check (that the beta reduction was indeed allowed) and unwrap
// we can't simply use original in refchecks because it does not contain types
@@ -4990,52 +5000,72 @@ trait Typers extends Adaptations with Tags {
}
def typedAlternative(alt: Alternative) = {
- val alts1 = alt.trees mapConserve (alt => typed(alt, mode | ALTmode, pt))
- treeCopy.Alternative(tree, alts1) setType pt
+ context withinPatAlternative (
+ treeCopy.Alternative(tree, alt.trees mapConserve (alt => typed(alt, mode, pt))) setType pt
+ )
}
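
A hypothetical example of the restriction enforced through context.inPatAlternative: variables cannot be bound inside a pattern alternative.

object AltDemo {
  def classify(x: Int): String = x match {
    case 1 | 2 | 3 => "small"     // fine: the alternative binds no variables
    // case a | b  => "bound"     // rejected: illegal variable in pattern alternative
    case _         => "other"
  }
  def main(args: Array[String]): Unit = println(classify(2))
}
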
-
def typedStar(tree: Star) = {
- if (mode.inNone(STARmode) && !isPastTyper)
+ if (!context.starPatterns && !isPastTyper)
StarPatternWithVarargParametersError(tree)
+
treeCopy.Star(tree, typed(tree.elem, mode, pt)) setType makeFullyDefined(pt)
}
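
A hypothetical user-level example of the star pattern typedStar checks: `_*` is only allowed where a variable-length argument sequence is expected.

object StarPatDemo {
  def main(args: Array[String]): Unit = List(1, 2, 3) match {
    case List(x, rest @ _*) => println((x, rest))   // rest binds the remaining elements
  }
}
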
def typedUnApply(tree: UnApply) = {
val fun1 = typed(tree.fun)
- val tpes = formalTypes(unapplyTypeList(tree.fun.pos, tree.fun.symbol, fun1.tpe, tree.args.length), tree.args.length)
+ val tpes = formalTypes(unapplyTypeList(tree.fun.pos, tree.fun.symbol, fun1.tpe, tree.args), tree.args.length)
val args1 = map2(tree.args, tpes)(typedPattern)
treeCopy.UnApply(tree, fun1, args1) setType pt
}
- def typedTry(tree: Try) = {
- var block1 = typed(tree.block, pt)
- var catches1 = typedCases(tree.catches, ThrowableClass.tpe, pt)
-
- for (cdef <- catches1 if !isPastTyper && cdef.guard.isEmpty) {
- def warn(name: Name) = context.warning(cdef.pat.pos, s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning.")
+ def issueTryWarnings(tree: Try): Try = {
+ def checkForCatchAll(cdef: CaseDef) {
def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol
- cdef.pat match {
+ def warn(name: Name) = {
+ val msg = s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning."
+ context.warning(cdef.pat.pos, msg)
+ }
+ if (cdef.guard.isEmpty) cdef.pat match {
case Bind(name, i @ Ident(_)) if unbound(i) => warn(name)
- case i @ Ident(name) if unbound(i) => warn(name)
- case _ =>
+ case i @ Ident(name) if unbound(i) => warn(name)
+ case _ =>
}
}
-
- val finalizer1 =
- if (tree.finalizer.isEmpty) tree.finalizer
- else typed(tree.finalizer, UnitClass.tpe)
- val (owntype, needAdapt) = ptOrLub(block1.tpe :: (catches1 map (_.tpe)), pt)
- if (needAdapt) {
- block1 = adapt(block1, mode, owntype)
- catches1 = catches1 map (adaptCase(_, mode, owntype))
+ if (!isPastTyper) tree match {
+ case Try(_, Nil, fin) =>
+ if (fin eq EmptyTree)
+ context.warning(tree.pos, "A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.")
+ case Try(_, catches, _) =>
+ catches foreach checkForCatchAll
}
+ tree
+ }
- treeCopy.Try(tree, block1, catches1, finalizer1) setType owntype
+ def typedTry(tree: Try) = {
+ val Try(block, catches, fin) = tree
+ val block1 = typed(block, pt)
+ val catches1 = typedCases(catches, ThrowableTpe, pt)
+ val fin1 = if (fin.isEmpty) fin else typed(fin, UnitTpe)
+
+ def finish(ownType: Type) = treeCopy.Try(tree, block1, catches1, fin1) setType ownType
+
+ issueTryWarnings(
+ if (isFullyDefined(pt))
+ finish(pt)
+ else block1 :: catches1 map (_.tpe.deconst) match {
+ case tpes if sameWeakLubAsLub(tpes) => finish(lub(tpes))
+ case tpes =>
+ val lub = weakLub(tpes)
+ val block2 = adapt(block1, mode, lub)
+ val catches2 = catches1 map (adaptCase(_, mode, lub))
+ treeCopy.Try(tree, block2, catches2, fin1) setType lub
+ }
+ )
}
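
A hypothetical illustration of what issueTryWarnings and the lub computation above mean for user code.

object TryDemo {
  def parse(s: String): Int =
    try s.toInt
    catch { case _: NumberFormatException => -1 }   // result type: lub of the body and the handlers

  def main(args: Array[String]): Unit = {
    println(parse("42") + parse("oops"))
    // try println("unprotected")                   // would warn: a try without a catch or finally
    // try parse("x") catch { case e => -1 }        // would warn: this catches all Throwables
  }
}
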
def typedThrow(tree: Throw) = {
- val expr1 = typed(tree.expr, EXPRmode | BYVALmode, ThrowableClass.tpe)
- treeCopy.Throw(tree, expr1) setType NothingClass.tpe
+ val expr1 = typedByValueExpr(tree.expr, ThrowableTpe)
+ treeCopy.Throw(tree, expr1) setType NothingTpe
}
def typedTyped(tree: Typed) = {
@@ -5056,8 +5086,8 @@ trait Typers extends Adaptations with Tags {
typedEta(checkDead(exprTyped))
}
- case Ident(tpnme.WILDCARD_STAR) =>
- val exprTyped = typed(expr, mode.onlySticky, WildcardType)
+ case t if treeInfo isWildcardStarType t =>
+ val exprTyped = typed(expr, mode.onlySticky)
def subArrayType(pt: Type) =
if (isPrimitiveValueClass(pt.typeSymbol) || !isFullyDefined(pt)) arrayType(pt)
else {
@@ -5067,7 +5097,7 @@ trait Typers extends Adaptations with Tags {
val (exprAdapted, baseClass) = exprTyped.tpe.typeSymbol match {
case ArrayClass => (adapt(exprTyped, mode.onlySticky, subArrayType(pt)), ArrayClass)
- case _ => (adapt(exprTyped, mode.onlySticky, seqType(pt)), SeqClass)
+ case _ => (adapt(exprTyped, mode.onlySticky, seqType(pt)), SeqClass)
}
exprAdapted.tpe.baseType(baseClass) match {
case TypeRef(_, _, List(elemtp)) =>
@@ -5081,11 +5111,10 @@ trait Typers extends Adaptations with Tags {
val exprTyped = typed(expr, mode.onlySticky, tptTyped.tpe.deconst)
val treeTyped = treeCopy.Typed(tree, exprTyped, tptTyped)
- if (isPatternMode) {
+ if (mode.inPatternMode) {
val uncheckedTypeExtractor = extractorForUncheckedType(tpt.pos, tptTyped.tpe)
-
// make fully defined to avoid bounded wildcard types that may be in pt from calling dropExistential (SI-2038)
- val ptDefined = if (isFullyDefined(pt)) pt else makeFullyDefined(pt)
+          val ptDefined = ensureFullyDefined(pt) // FIXME this is probably redundant now that we don't dropExistential in pattern mode.
val ownType = inferTypedPattern(tptTyped, tptTyped.tpe, ptDefined, canRemedy = uncheckedTypeExtractor.nonEmpty)
treeTyped setType ownType
@@ -5110,7 +5139,7 @@ trait Typers extends Adaptations with Tags {
//val undets = context.undetparams
// @M: fun is typed in TAPPmode because it is being applied to its actual type parameters
- val fun1 = typed(fun, mode.forFunMode | TAPPmode, WildcardType)
+ val fun1 = typed(fun, mode.forFunMode | TAPPmode)
val tparams = fun1.symbol.typeParams
//@M TODO: val undets_fun = context.undetparams ?
@@ -5122,7 +5151,7 @@ trait Typers extends Adaptations with Tags {
// @M maybe the well-kindedness check should be done when checking the type arguments conform to the type parameters' bounds?
val args1 = if (sameLength(args, tparams)) map2Conserve(args, tparams) {
//@M! the polytype denotes the expected kind
- (arg, tparam) => typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyClass.tpe))
+ (arg, tparam) => typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyTpe))
}
else {
//@M this branch is correctly hit for an overloaded polymorphic type. It also has to handle erroneous cases.
@@ -5140,10 +5169,9 @@ trait Typers extends Adaptations with Tags {
def typedApplyDynamic(tree: ApplyDynamic) = {
assert(phase.erasedTypes)
- val reflectiveCalls = !(settings.refinementMethodDispatch.value == "invoke-dynamic")
- val qual1 = typed(tree.qual, AnyRefClass.tpe)
- val args1 = tree.args mapConserve (arg => if (reflectiveCalls) typed(arg, AnyRefClass.tpe) else typed(arg))
- treeCopy.ApplyDynamic(tree, qual1, args1) setType (if (reflectiveCalls) AnyRefClass.tpe else tree.symbol.info.resultType)
+ val qual1 = typed(tree.qual, AnyRefTpe)
+ val args1 = tree.args mapConserve (arg => typed(arg, AnyRefTpe))
+ treeCopy.ApplyDynamic(tree, qual1, args1) setType AnyRefTpe
}
def typedReferenceToBoxed(tree: ReferenceToBoxed) = {
@@ -5155,33 +5183,51 @@ trait Typers extends Adaptations with Tags {
treeCopy.ReferenceToBoxed(tree, id1) setType tpe
}
- def typedLiteral(tree: Literal) = {
- val value = tree.value
- // Warn about likely interpolated strings which are missing their interpolators
- if (settings.lint) value match {
+ // Warn about likely interpolated strings which are missing their interpolators
+ def warnMissingInterpolator(tree: Literal) {
+        // Unfortunately implicit not found strings look for all the world like
+ // missing interpolators.
+ def isArgToImplicitNotFound = context.enclosingApply.tree match {
+ case Apply(fn, _) => fn.symbol.enclClass == ImplicitNotFoundClass
+ case _ => false
+ }
+ tree.value match {
case Constant(s: String) =>
def names = InterpolatorIdentRegex findAllIn s map (n => newTermName(n stripPrefix "$"))
- val shouldWarn = (
+ def suspicious = (
(InterpolatorCodeRegex findFirstIn s).nonEmpty
|| (names exists (n => context.lookupSymbol(n, _ => true).symbol.exists))
)
- if (shouldWarn)
+ val noWarn = (
+ isArgToImplicitNotFound
+ || !(s contains ' ') // another heuristic - e.g. a string with only "$asInstanceOf"
+ )
+ if (!noWarn && suspicious)
unit.warning(tree.pos, "looks like an interpolated String; did you forget the interpolator?")
case _ =>
}
+ }
+
+ def typedLiteral(tree: Literal) = {
+ if (settings.lint)
+ warnMissingInterpolator(tree)
tree setType (
- if (value.tag == UnitTag) UnitClass.tpe
- else ConstantType(value))
+ if (tree.value.tag == UnitTag) UnitTpe
+ else ConstantType(tree.value))
}
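
A hypothetical user-level example of the heuristic refined in warnMissingInterpolator: under -Xlint, a plain string literal that mentions an in-scope name after a '$' looks like a forgotten interpolator.

object LintDemo {
  def main(args: Array[String]): Unit = {
    val name = "world"
    println("hello $name")    // under -Xlint: looks like an interpolated String; did you forget the interpolator?
    println(s"hello $name")   // the intended form
  }
}
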
def typedSingletonTypeTree(tree: SingletonTypeTree) = {
- val ref1 = checkStable(
- context.withImplicitsDisabled(
- typed(tree.ref, EXPRmode | QUALmode | (mode & TYPEPATmode), AnyRefClass.tpe)
- )
- )
- tree setType ref1.tpe.resultType
+ val refTyped =
+ context.withImplicitsDisabled {
+ typed(tree.ref, MonoQualifierModes | mode.onlyTypePat, AnyRefTpe)
+ }
+
+ if (!refTyped.isErrorTyped)
+ tree setType refTyped.tpe.resultType
+
+ if (treeInfo.admitsTypeSelection(refTyped)) tree
+ else UnstableTreeError(refTyped)
}
def typedSelectFromTypeTree(tree: SelectFromTypeTree) = {
@@ -5191,8 +5237,8 @@ trait Typers extends Adaptations with Tags {
}
def typedTypeBoundsTree(tree: TypeBoundsTree) = {
- val lo1 = typedType(tree.lo, mode)
- val hi1 = typedType(tree.hi, mode)
+ val lo1 = if (tree.lo.isEmpty) TypeTree(NothingTpe) else typedType(tree.lo, mode)
+ val hi1 = if (tree.hi.isEmpty) TypeTree(AnyTpe) else typedType(tree.hi, mode)
treeCopy.TypeBoundsTree(tree, lo1, hi1) setType TypeBounds(lo1.tpe, hi1.tpe)
}
@@ -5217,7 +5263,7 @@ trait Typers extends Adaptations with Tags {
// we should get here only when something before failed
// and we try again (@see tryTypedApply). In that case we can assign
// whatever type to tree; we just have to survive until a real error message is issued.
- tree setType AnyClass.tpe
+ tree setType AnyTpe
}
def typedFunction(fun: Function) = {
if (fun.symbol == NoSymbol)
@@ -5302,7 +5348,12 @@ trait Typers extends Adaptations with Tags {
"context.owner" -> context.owner
)
)
- typed1(tree, mode, dropExistential(ptPlugins))
+ val ptWild = if (mode.inPatternMode)
+ ptPlugins // SI-5022 don't widen pt for patterns as types flow from it to the case body.
+ else
+ dropExistential(ptPlugins) // FIXME: document why this is done.
+
+ typed1(tree, mode, ptWild)
}
// Can happen during erroneous compilation - error(s) have been
// reported, but we need to avoid causing an NPE with this tree
@@ -5353,7 +5404,7 @@ trait Typers extends Adaptations with Tags {
}
def atOwner(owner: Symbol): Typer =
- newTyper(context.make(context.tree, owner))
+ newTyper(context.make(owner = owner))
def atOwner(tree: Tree, owner: Symbol): Typer =
newTyper(context.make(tree, owner))
@@ -5361,10 +5412,12 @@ trait Typers extends Adaptations with Tags {
/** Types expression or definition `tree`.
*/
def typed(tree: Tree): Tree = {
- val ret = typed(tree, EXPRmode, WildcardType)
+ val ret = typed(tree, context.defaultModeForTyped, WildcardType)
ret
}
+ def typedByValueExpr(tree: Tree, pt: Type = WildcardType): Tree = typed(tree, EXPRmode | BYVALmode, pt)
+
def typedPos(pos: Position, mode: Mode, pt: Type)(tree: Tree) = typed(atPos(pos)(tree), mode, pt)
def typedPos(pos: Position)(tree: Tree) = typed(atPos(pos)(tree))
// TODO: see if this formulation would impose any penalty, since
@@ -5374,13 +5427,16 @@ trait Typers extends Adaptations with Tags {
/** Types expression `tree` with given prototype `pt`.
*/
def typed(tree: Tree, pt: Type): Tree =
- typed(tree, EXPRmode, pt)
+ typed(tree, context.defaultModeForTyped, pt)
+
+ def typed(tree: Tree, mode: Mode): Tree =
+ typed(tree, mode, WildcardType)
/** Types qualifier `tree` of a select node.
* E.g. if tree occurs in a context like `tree.m`.
*/
def typedQualifier(tree: Tree, mode: Mode, pt: Type): Tree =
- typed(tree, EXPRmode | QUALmode | POLYmode | mode & TYPEPATmode, pt) // TR: don't set BYVALmode, since qualifier might end up as by-name param to an implicit
+ typed(tree, PolyQualifierModes | mode.onlyTypePat, pt) // TR: don't set BYVALmode, since qualifier might end up as by-name param to an implicit
/** Types qualifier `tree` of a select node.
* E.g. if tree occurs in a context like `tree.m`.
@@ -5391,8 +5447,11 @@ trait Typers extends Adaptations with Tags {
def typedQualifier(tree: Tree): Tree = typedQualifier(tree, NOmode, WildcardType)
/** Types function part of an application */
- def typedOperator(tree: Tree): Tree =
- typed(tree, EXPRmode | FUNmode | POLYmode | TAPPmode, WildcardType)
+ def typedOperator(tree: Tree): Tree = typed(tree, OperatorModes)
+
+ // the qualifier type of a supercall constructor is its first parent class
+ private def typedSelectOrSuperQualifier(qual: Tree) =
+ context withinSuperInit typed(qual, PolyQualifierModes)
/** Types a pattern with prototype `pt` */
def typedPattern(tree: Tree, pt: Type): Tree = {
@@ -5414,7 +5473,10 @@ trait Typers extends Adaptations with Tags {
// as a compromise, context.enrichmentEnabled tells adaptToMember to go ahead and enrich,
// but arbitrary conversions (in adapt) are disabled
// TODO: can we achieve the pattern matching bit of the string interpolation SIP without this?
- typingInPattern(context.withImplicitsDisabledAllowEnrichment(typed(tree, PATTERNmode, pt)))
+ typingInPattern(context.withImplicitsDisabledAllowEnrichment(typed(tree, PATTERNmode, pt))) match {
+ case tpt if tpt.isType => PatternMustBeValue(tpt, pt); tpt
+ case pat => pat
+ }
}
/** Types a (fully parameterized) type tree */
@@ -5427,10 +5489,10 @@ trait Typers extends Adaptations with Tags {
/** Types a higher-kinded type tree -- pt denotes the expected kind*/
def typedHigherKindedType(tree: Tree, mode: Mode, pt: Type): Tree =
if (pt.typeParams.isEmpty) typedType(tree, mode) // kind is known and it's *
- else typed(tree, HKmode, pt)
+ else context withinTypeConstructorAllowed typed(tree, NOmode, pt)
def typedHigherKindedType(tree: Tree, mode: Mode): Tree =
- typed(tree, HKmode, WildcardType)
+ context withinTypeConstructorAllowed typed(tree)
/** Types a type constructor tree used in a new or supertype */
def typedTypeConstructor(tree: Tree, mode: Mode): Tree = {
@@ -5481,22 +5543,22 @@ trait Typers extends Adaptations with Tags {
tree1
}
- val isMacroBodyOkay = !tree.symbol.isErroneous && !(tree1 exists (_.isErroneous))
+ val isMacroBodyOkay = !tree.symbol.isErroneous && !(tree1 exists (_.isErroneous)) && tree1 != EmptyTree
val shouldInheritMacroImplReturnType = ddef.tpt.isEmpty
- if (isMacroBodyOkay && shouldInheritMacroImplReturnType) computeMacroDefTypeFromMacroImpl(ddef, tree1.symbol) else AnyClass.tpe
+ if (isMacroBodyOkay && shouldInheritMacroImplReturnType) computeMacroDefTypeFromMacroImplRef(ddef, tree1) else AnyTpe
}
- def transformedOr(tree: Tree, op: => Tree): Tree = transformed.get(tree) match {
- case Some(tree1) => transformed -= tree; tree1
- case None => op
+ def transformedOr(tree: Tree, op: => Tree): Tree = transformed remove tree match {
+ case Some(tree1) => tree1
+ case _ => op
}
- def transformedOrTyped(tree: Tree, mode: Mode, pt: Type): Tree = transformed.get(tree) match {
- case Some(tree1) => transformed -= tree; tree1
- case None => typed(tree, mode, pt)
- }
+ def transformedOrTyped(tree: Tree, mode: Mode, pt: Type): Tree = transformed remove tree match {
+ case Some(tree1) => tree1
+ case _ => typed(tree, mode, pt)
}
}
+}
object TypersStats {
import scala.reflect.internal.TypesStats._
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index d55dce70c7..af3f772f79 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -33,12 +33,13 @@ trait Unapplies extends ast.TreeDSL
/** returns type list for return type of the extraction
* @see extractorFormalTypes
*/
- def unapplyTypeList(pos: Position, ufn: Symbol, ufntpe: Type, nbSubPats: Int) = {
+ def unapplyTypeList(pos: Position, ufn: Symbol, ufntpe: Type, args: List[Tree]) = {
assert(ufn.isMethod, ufn)
+ val nbSubPats = args.length
//Console.println("utl "+ufntpe+" "+ufntpe.typeSymbol)
ufn.name match {
case nme.unapply | nme.unapplySeq =>
- val (formals, _) = extractorFormalTypes(pos, unapplyUnwrap(ufntpe), nbSubPats, ufn)
+ val (formals, _) = extractorFormalTypes(pos, unapplyUnwrap(ufntpe), nbSubPats, ufn, treeInfo.effectivePatternArity(args))
if (formals == null) throw new TypeError(s"$ufn of type $ufntpe cannot extract $nbSubPats sub-patterns")
else formals
case _ => throw new TypeError(ufn+" is not an unapply or unapplySeq")
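
A hypothetical user-level extractor showing the arities unapplyTypeList and effectivePatternArity deal with: unapplySeq supports a variable number of sub-patterns.

object Words {
  def unapplySeq(s: String): Option[Seq[String]] = Some(s.split(" ").toSeq)
}
object ExtractorDemo {
  def main(args: Array[String]): Unit = "never eat soggy waffles" match {
    case Words(first, _*) => println(first)   // one fixed sub-pattern plus a sequence wildcard
  }
}
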
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index aa4128f1a7..7f9b81e1ec 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -281,11 +281,24 @@ class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[Ab
private def traverse() = {
val classBuf = immutable.Vector.newBuilder[ClassRep]
val packageBuf = immutable.Vector.newBuilder[DirectoryClassPath]
- dir foreach { f =>
- if (!f.isDirectory && validClassFile(f.name))
- classBuf += ClassRep(Some(f), None)
- else if (f.isDirectory && validPackage(f.name))
- packageBuf += new DirectoryClassPath(f, context)
+ dir foreach {
+ f =>
+ // Optimization: We assume the file was not changed since `dir` called
+ // `Path.apply` and categorized existent files as `Directory`
+ // or `File`.
+ val isDirectory = f match {
+ case pf: io.PlainFile => pf.givenPath match {
+ case _: io.Directory => true
+ case _: io.File => false
+ case _ => f.isDirectory
+ }
+ case _ =>
+ f.isDirectory
+ }
+ if (!isDirectory && validClassFile(f.name))
+ classBuf += ClassRep(Some(f), None)
+ else if (isDirectory && validPackage(f.name))
+ packageBuf += new DirectoryClassPath(f, context)
}
(packageBuf.result(), classBuf.result())
}
diff --git a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
index 26d19906c2..58a5442465 100644
--- a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
+++ b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package util
import scala.reflect.internal.Chars._
diff --git a/src/compiler/scala/tools/nsc/util/ShowPickled.scala b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
index 76b1394b85..b804bfb842 100644
--- a/src/compiler/scala/tools/nsc/util/ShowPickled.scala
+++ b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools
+package scala
+package tools
package nsc
package util
@@ -107,7 +108,7 @@ object ShowPickled extends Names {
var result = 0L
var b = 0L
do {
- b = data(idx)
+ b = data(idx).toLong
idx += 1
result = (result << 7) + (b & 0x7f)
} while((b & 0x80) != 0L)
diff --git a/src/compiler/scala/tools/nsc/util/TreeSet.scala b/src/compiler/scala/tools/nsc/util/TreeSet.scala
index 3cdbcc5110..d2e9238e8f 100644
--- a/src/compiler/scala/tools/nsc/util/TreeSet.scala
+++ b/src/compiler/scala/tools/nsc/util/TreeSet.scala
@@ -40,12 +40,22 @@ class TreeSet[T >: Null <: AnyRef](less: (T, T) => Boolean) extends Set[T] {
tree = add(tree)
}
- def iterator = {
- def elems(t: Tree): Iterator[T] = {
- if (t eq null) Iterator.empty
- else elems(t.l) ++ (Iterator single t.elem) ++ elems(t.r)
+ def iterator = toList.iterator
+
+ override def foreach[U](f: T => U) {
+ def loop(t: Tree) {
+ if (t ne null) {
+ loop(t.l)
+ f(t.elem)
+ loop(t.r)
+ }
}
- elems(tree)
+ loop(tree)
+ }
+ override def toList = {
+ val xs = scala.collection.mutable.ListBuffer[T]()
+ foreach(xs += _)
+ xs.toList
}
override def toString(): String = {
diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala
index 039fec8605..1e43d18900 100644
--- a/src/compiler/scala/tools/nsc/util/package.scala
+++ b/src/compiler/scala/tools/nsc/util/package.scala
@@ -3,7 +3,9 @@
* @author Paul Phillips
*/
-package scala.tools.nsc
+package scala
+package tools
+package nsc
import java.io.{ OutputStream, PrintStream, ByteArrayOutputStream, PrintWriter, StringWriter }
@@ -131,4 +133,13 @@ package object util {
@deprecated("Moved to scala.reflect.internal.util.BatchSourceFile", "2.10.0")
type BatchSourceFile = scala.reflect.internal.util.BatchSourceFile
+
+ @deprecated("Moved to scala.reflect.internal.util.AbstractFileClassLoader", "2.11.0")
+ type AbstractFileClassLoader = scala.reflect.internal.util.AbstractFileClassLoader
+
+ @deprecated("Moved to scala.reflect.internal.util.ScalaClassLoader", "2.11.0")
+ val ScalaClassLoader = scala.reflect.internal.util.ScalaClassLoader
+
+ @deprecated("Moved to scala.reflect.internal.util.ScalaClassLoader", "2.11.0")
+ type ScalaClassLoader = scala.reflect.internal.util.ScalaClassLoader
}
diff --git a/src/compiler/scala/tools/reflect/FastTrack.scala b/src/compiler/scala/tools/reflect/FastTrack.scala
index aa4ddc8ba8..5a0ff4f6db 100644
--- a/src/compiler/scala/tools/reflect/FastTrack.scala
+++ b/src/compiler/scala/tools/reflect/FastTrack.scala
@@ -5,6 +5,7 @@ import scala.reflect.reify.Taggers
import scala.tools.nsc.typechecker.{ Analyzer, Macros }
import scala.reflect.runtime.Macros.currentMirror
import scala.reflect.api.Universe
+import scala.reflect.macros.compiler.DefaultMacroCompiler
/** Optimizes system macro expansions by hardwiring them directly to their implementations
* bypassing standard reflective load and invoke to avoid the overhead of Java/Scala reflection.
@@ -26,7 +27,7 @@ trait FastTrack {
final class FastTrackEntry(pf: PartialFunction[Applied, MacroContext => Tree]) extends (MacroArgs => Any) {
def validate(tree: Tree) = pf isDefinedAt Applied(tree)
- def apply(margs: MacroArgs) = {
+ def apply(margs: MacroArgs): margs.c.Expr[Nothing] = {
val MacroArgs(c, _) = margs
// Macros validated that the pf is defined here - and there's not much we could do if it weren't.
c.Expr[Nothing](pf(Applied(c.expandee))(c))(c.WeakTypeTag.Nothing)
diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/reflect/MacroImplementations.scala
index 4e8f02084d..8e1bcb5f87 100644
--- a/src/compiler/scala/tools/reflect/MacroImplementations.scala
+++ b/src/compiler/scala/tools/reflect/MacroImplementations.scala
@@ -1,8 +1,9 @@
package scala.tools.reflect
-import scala.reflect.macros.runtime.Context
+import scala.reflect.macros.contexts.Context
import scala.collection.mutable.ListBuffer
import scala.collection.mutable.Stack
+import scala.reflect.internal.util.OffsetPosition
abstract class MacroImplementations {
val c: Context
@@ -25,16 +26,12 @@ abstract class MacroImplementations {
c.abort(args(parts.length-1).pos,
"too many arguments for interpolated string")
}
- val stringParts = parts map {
- case Literal(Constant(s: String)) => s
- case _ => throw new IllegalArgumentException("argument parts must be a list of string literals")
- }
- val pi = stringParts.iterator
+ val pi = parts.iterator
val bldr = new java.lang.StringBuilder
val evals = ListBuffer[ValDef]()
val ids = ListBuffer[Ident]()
- val argsStack = Stack(args : _*)
+ val argStack = Stack(args : _*)
def defval(value: Tree, tpe: Type): Unit = {
val freshName = newTermName(c.freshName("arg$"))
@@ -81,50 +78,73 @@ abstract class MacroImplementations {
}
def copyString(first: Boolean): Unit = {
- val str = StringContext.treatEscapes(pi.next())
+ val strTree = pi.next()
+ val rawStr = strTree match {
+ case Literal(Constant(str: String)) => str
+ case _ => throw new IllegalArgumentException("internal error: argument parts must be a list of string literals")
+ }
+ val str = StringContext.treatEscapes(rawStr)
val strLen = str.length
val strIsEmpty = strLen == 0
- var start = 0
+ def charAtIndexIs(idx: Int, ch: Char) = idx < strLen && str(idx) == ch
+ def isPercent(idx: Int) = charAtIndexIs(idx, '%')
+ def isConversion(idx: Int) = isPercent(idx) && !charAtIndexIs(idx + 1, 'n') && !charAtIndexIs(idx + 1, '%')
var idx = 0
+ def errorAtIndex(idx: Int, msg: String) = c.error(new OffsetPosition(strTree.pos.source, strTree.pos.point + idx), msg)
+ def wrongConversionString(idx: Int) = errorAtIndex(idx, "wrong conversion string")
+ def illegalConversionCharacter(idx: Int) = errorAtIndex(idx, "illegal conversion character")
+ def nonEscapedPercent(idx: Int) = errorAtIndex(idx, "percent signs not directly following splicees must be escaped")
+
+ // STEP 1: handle argument conversion
+ // 1) "...${smth}" => okay, equivalent to "...${smth}%s"
+ // 2) "...${smth}blahblah" => okay, equivalent to "...${smth}%sblahblah"
+ // 3) "...${smth}%" => error
+ // 4) "...${smth}%n" => okay, equivalent to "...${smth}%s%n"
+ // 5) "...${smth}%%" => okay, equivalent to "...${smth}%s%%"
+ // 6) "...${smth}[%legalJavaConversion]" => okay, according to http://docs.oracle.com/javase/1.5.0/docs/api/java/util/Formatter.html
+ // 7) "...${smth}[%illegalJavaConversion]" => error
if (!first) {
- val arg = argsStack.pop()
- if (strIsEmpty || (str charAt 0) != '%') {
- bldr append "%s"
- defval(arg, AnyTpe)
- } else {
+ val arg = argStack.pop()
+ if (isConversion(0)) {
// PRE str is not empty and str(0) == '%'
// argument index parameter is not allowed, thus parse
// [flags][width][.precision]conversion
var pos = 1
- while(pos < strLen && isFlag(str charAt pos)) pos += 1
- while(pos < strLen && Character.isDigit(str charAt pos)) pos += 1
- if(pos < strLen && str.charAt(pos) == '.') { pos += 1
- while(pos < strLen && Character.isDigit(str charAt pos)) pos += 1
+ while (pos < strLen && isFlag(str charAt pos)) pos += 1
+ while (pos < strLen && Character.isDigit(str charAt pos)) pos += 1
+ if (pos < strLen && str.charAt(pos) == '.') {
+ pos += 1
+ while (pos < strLen && Character.isDigit(str charAt pos)) pos += 1
}
- if(pos < strLen) {
+ if (pos < strLen) {
conversionType(str charAt pos, arg) match {
case Some(tpe) => defval(arg, tpe)
- case None => c.error(arg.pos, "illegal conversion character")
+ case None => illegalConversionCharacter(pos)
}
} else {
- // TODO: place error message on conversion string
- c.error(arg.pos, "wrong conversion string")
+ wrongConversionString(pos - 1)
}
+ idx = 1
+ } else {
+ bldr append "%s"
+ defval(arg, AnyTpe)
}
- idx = 1
}
+
+ // STEP 2: handle the rest of the text
+ // 1) %n tokens are left as is
+ // 2) %% tokens are left as is
+ // 3) other usages of percents are reported as errors
if (!strIsEmpty) {
- val len = str.length
- while (idx < len) {
- def notPercentN = str(idx) != '%' || (idx + 1 < len && str(idx + 1) != 'n')
- if (str(idx) == '%' && notPercentN) {
- bldr append (str substring (start, idx)) append "%%"
- start = idx + 1
+ while (idx < strLen) {
+ if (isPercent(idx)) {
+ if (isConversion(idx)) nonEscapedPercent(idx)
+ else idx += 1 // skip n and % in %n and %%
}
idx += 1
}
- bldr append (str substring (start, idx))
+ bldr append (str take idx)
}
}
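
Hypothetical user-level examples of the cases enumerated in the STEP 1 / STEP 2 comments above, as seen through the f interpolator.

object FormatDemo {
  def main(args: Array[String]): Unit = {
    val name = "Ann"
    val height = 1.848
    println(f"$name%s is $height%.2f m tall")   // explicit conversions
    println(f"$name is tall")                   // no conversion given: %s is assumed
    println(f"progress: 100%%")                 // %% (and %n) are passed through
    // println(f"$name%q")                      // rejected: illegal conversion character
  }
}
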
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index f6ed5f8f1c..c53d10bd87 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -1,7 +1,7 @@
-package scala.tools
+package scala
+package tools
package reflect
-import scala.tools.nsc.EXPRmode
import scala.tools.nsc.reporters._
import scala.tools.nsc.CompilerCommand
import scala.tools.nsc.io.VirtualDirectory
@@ -129,7 +129,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
// it inaccessible then please put it somewhere designed for that
// rather than polluting the empty package with synthetics.
val ownerClass = rootMirror.EmptyPackageClass.newClassSymbol(newTypeName("<expression-owner>"))
- build.setTypeSignature(ownerClass, ClassInfoType(List(ObjectClass.tpe), newScope, ownerClass))
+ build.setTypeSignature(ownerClass, ClassInfoType(List(ObjectTpe), newScope, ownerClass))
val owner = ownerClass.newLocalDummy(expr.pos)
val currentTyper = analyzer.newTyper(analyzer.rootContext(NoCompilationUnit, EmptyTree).make(expr, owner))
val wrapper1 = if (!withImplicitViewsDisabled) (currentTyper.context.withImplicitsEnabled[Tree] _) else (currentTyper.context.withImplicitsDisabled[Tree] _)
@@ -165,7 +165,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
transformDuringTyper(expr, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)(
(currentTyper, expr) => {
trace("typing (implicit views = %s, macros = %s): ".format(!withImplicitViewsDisabled, !withMacrosDisabled))(showAttributed(expr, true, true, settings.Yshowsymkinds.value))
- currentTyper.silent(_.typed(expr, EXPRmode, pt)) match {
+ currentTyper.silent(_.typed(expr, pt), reportAmbiguousErrors = false) match {
case analyzer.SilentResultValue(result) =>
trace("success: ")(showAttributed(result, true, true, settings.Yshowsymkinds.value))
result
@@ -180,15 +180,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
transformDuringTyper(tree, withImplicitViewsDisabled = false, withMacrosDisabled = withMacrosDisabled)(
(currentTyper, tree) => {
trace("inferring implicit %s (macros = %s): ".format(if (isView) "view" else "value", !withMacrosDisabled))(showAttributed(pt, true, true, settings.Yshowsymkinds.value))
- val context = currentTyper.context
- analyzer.inferImplicit(tree, pt, reportAmbiguous = true, isView = isView, context = context, saveAmbiguousDivergent = !silent, pos = pos) match {
- case failure if failure.tree.isEmpty =>
- trace("implicit search has failed. to find out the reason, turn on -Xlog-implicits: ")(failure.tree)
- if (context.hasErrors) throw ToolBoxError("reflective implicit search has failed: %s".format(context.errBuffer.head.errMsg))
- EmptyTree
- case success =>
- success.tree
- }
+ analyzer.inferImplicit(tree, pt, isView, currentTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw ToolBoxError(msg))
})
def compile(expr0: Tree): () => Any = {
@@ -204,7 +196,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val (obj, _) = rootMirror.EmptyPackageClass.newModuleAndClassSymbol(
nextWrapperModuleName())
- val minfo = ClassInfoType(List(ObjectClass.tpe), newScope, obj.moduleClass)
+ val minfo = ClassInfoType(List(ObjectTpe), newScope, obj.moduleClass)
obj.moduleClass setInfo minfo
obj setInfo obj.moduleClass.tpe
@@ -214,7 +206,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val (fv, name) = schema
meth.newValueParameter(name, newFlags = if (fv.hasStableFlag) STABLE else 0) setInfo appliedType(definitions.FunctionClass(0).tpe, List(fv.tpe.resultType))
}
- meth setInfo MethodType(freeTerms.map(makeParam).toList, AnyClass.tpe)
+ meth setInfo MethodType(freeTerms.map(makeParam).toList, AnyTpe)
minfo.decls enter meth
def defOwner(tree: Tree): Symbol = tree find (_.isDef) map (_.symbol) match {
case Some(sym) if sym != null && sym != NoSymbol => sym.owner
@@ -226,7 +218,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val moduledef = ModuleDef(
obj,
Template(
- List(TypeTree(ObjectClass.tpe)),
+ List(TypeTree(ObjectTpe)),
emptyValDef,
NoMods,
List(),
diff --git a/src/compiler/scala/tools/reflect/WrappedProperties.scala b/src/compiler/scala/tools/reflect/WrappedProperties.scala
index c34edb8ba0..20567719be 100644
--- a/src/compiler/scala/tools/reflect/WrappedProperties.scala
+++ b/src/compiler/scala/tools/reflect/WrappedProperties.scala
@@ -25,6 +25,7 @@ trait WrappedProperties extends PropertiesTrait {
override def clearProp(name: String) = wrap(super.clearProp(name)).orNull
override def envOrElse(name: String, alt: String) = wrap(super.envOrElse(name, alt)) getOrElse alt
override def envOrNone(name: String) = wrap(super.envOrNone(name)).flatten
+ override def envOrSome(name: String, alt: Option[String]) = wrap(super.envOrNone(name)).flatten orElse alt
def systemProperties: Iterator[(String, String)] = {
import scala.collection.JavaConverters._
diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala
index 8d65c40a01..bdd6a02043 100644
--- a/src/compiler/scala/tools/util/PathResolver.scala
+++ b/src/compiler/scala/tools/util/PathResolver.scala
@@ -3,29 +3,45 @@
* @author Paul Phillips
*/
-package scala.tools
+package scala
+package tools
package util
import scala.tools.reflect.WrappedProperties.AccessControl
-import nsc.{ Settings, GenericRunnerSettings }
-import nsc.util.{ ClassPath, JavaClassPath, ScalaClassLoader }
-import nsc.io.{ File, Directory, Path, AbstractFile }
+import scala.tools.nsc.{ Settings, GenericRunnerSettings }
+import scala.tools.nsc.util.{ ClassPath, JavaClassPath, ScalaClassLoader }
+import scala.reflect.io.{ File, Directory, Path, AbstractFile }
import ClassPath.{ JavaContext, DefaultJavaContext, join, split }
import PartialFunction.condOpt
import scala.language.postfixOps
// Loosely based on the draft specification at:
-// https://wiki.scala-lang.org/display/SW/Classpath
+// https://wiki.scala-lang.org/display/SIW/Classpath
object PathResolver {
// Imports property/environment functions which suppress security exceptions.
import AccessControl._
+ import scala.compat.Platform.EOL
+
+ implicit class MkLines(val t: TraversableOnce[_]) extends AnyVal {
+ def mkLines: String = t.mkString("", EOL, EOL)
+ def mkLines(header: String, indented: Boolean = false, embraced: Boolean = false): String = {
+ val space = "\u0020"
+ val sep = if (indented) EOL + space * 2 else EOL
+ val (lbrace, rbrace) = if (embraced) (space + "{", EOL + "}") else ("", "")
+ t.mkString(header + lbrace + sep, sep, rbrace + EOL)
+ }
+ }
+ implicit class AsLines(val s: String) extends AnyVal {
+ // sm"""...""" could do this in one pass
+ def asLines = s.trim.stripMargin.lines.mkLines
+ }
/** pretty print class path */
def ppcp(s: String) = split(s) match {
case Nil => ""
case Seq(x) => x
- case xs => xs map ("\n" + _) mkString
+ case xs => xs.mkString(EOL, EOL, "")
}
/** Values found solely by inspecting environment or property variables.
@@ -38,7 +54,7 @@ object PathResolver {
/** Environment variables which java pays attention to so it
* seems we do as well.
*/
- def sourcePathEnv = envOrElse("SOURCEPATH", "")
+ def sourcePathEnv = envOrElse("SOURCEPATH", "")
def javaBootClassPath = propOrElse("sun.boot.class.path", searchForBootClasspath)
def javaExtDirs = propOrEmpty("java.ext.dirs")
@@ -49,20 +65,14 @@ object PathResolver {
def javaUserClassPath = propOrElse("java.class.path", "")
def useJavaClassPath = propOrFalse("scala.usejavacp")
- override def toString = """
+ override def toString = s"""
|object Environment {
- | scalaHome = %s (useJavaClassPath = %s)
- | javaBootClassPath = <%d chars>
- | javaExtDirs = %s
- | javaUserClassPath = %s
- | scalaExtDirs = %s
- |}""".trim.stripMargin.format(
- scalaHome, useJavaClassPath,
- javaBootClassPath.length,
- ppcp(javaExtDirs),
- ppcp(javaUserClassPath),
- ppcp(scalaExtDirs)
- )
+ | scalaHome = $scalaHome (useJavaClassPath = $useJavaClassPath)
+ | javaBootClassPath = <${javaBootClassPath.length} chars>
+ | javaExtDirs = ${ppcp(javaExtDirs)}
+ | javaUserClassPath = ${ppcp(javaUserClassPath)}
+ | scalaExtDirs = ${ppcp(scalaExtDirs)}
+ |}""".asLines
}
/** Default values based on those in Environment as interpreted according
@@ -102,23 +112,59 @@ object PathResolver {
def scalaExtDirs = Environment.scalaExtDirs
def scalaPluginPath = (scalaHomeDir / "misc" / "scala-devel" / "plugins").path
- override def toString = """
+ override def toString = s"""
|object Defaults {
- | scalaHome = %s
- | javaBootClassPath = %s
- | scalaLibDirFound = %s
- | scalaLibFound = %s
- | scalaBootClassPath = %s
- | scalaPluginPath = %s
- |}""".trim.stripMargin.format(
- scalaHome,
- ppcp(javaBootClassPath),
- scalaLibDirFound, scalaLibFound,
- ppcp(scalaBootClassPath), ppcp(scalaPluginPath)
- )
+ | scalaHome = $scalaHome
+ | javaBootClassPath = ${ppcp(javaBootClassPath)}
+ | scalaLibDirFound = $scalaLibDirFound
+ | scalaLibFound = $scalaLibFound
+ | scalaBootClassPath = ${ppcp(scalaBootClassPath)}
+ | scalaPluginPath = ${ppcp(scalaPluginPath)}
+ |}""".asLines
+ }
+
+ /** Locations discovered by supplemental heuristics.
+ */
+ object SupplementalLocations {
+
+ /** The platform-specific support jar.
+ *
+ * Usually this is `tools.jar` in the jdk/lib directory of the platform distribution.
+ *
+ * The file location is determined by probing the lib directory under JDK_HOME or JAVA_HOME
+ * (if one of those environment variables is set), then the lib directory under java.home,
+ * and finally the lib directory under the parent of java.home. Or, as a last resort,
+ * search deeply under those locations (except for the parent of java.home, on the notion
+ * that if this is not a canonical installation, then that search would have little
+ * chance of succeeding).
+ */
+ def platformTools: Option[File] = {
+ val jarName = "tools.jar"
+ def jarPath(path: Path) = (path / "lib" / jarName).toFile
+ def jarAt(path: Path) = {
+ val f = jarPath(path)
+ if (f.isFile) Some(f) else None
+ }
+ val jdkDir = {
+ val d = Directory(jdkHome)
+ if (d.isDirectory) Some(d) else None
+ }
+ def deeply(dir: Directory) = dir.deepFiles find (_.name == jarName)
+
+ val home = envOrSome("JDK_HOME", envOrNone("JAVA_HOME")) map (p => Path(p))
+ val install = Some(Path(javaHome))
+
+ (home flatMap jarAt) orElse (install flatMap jarAt) orElse (install map (_.parent) flatMap jarAt) orElse
+ (jdkDir flatMap deeply)
+ }
+ override def toString = s"""
+ |object SupplementalLocations {
+ | platformTools = $platformTools
+ |}""".asLines
}
- def fromPathString(path: String, context: JavaContext = DefaultJavaContext): JavaClassPath = { // called from scalap
+ // called from scalap
+ def fromPathString(path: String, context: JavaContext = DefaultJavaContext): JavaClassPath = {
val s = new Settings()
s.classpath.value = path
new PathResolver(s, context) result
@@ -143,9 +189,10 @@ object PathResolver {
}
}
}
-import PathResolver.{ Defaults, Environment, ppcp }
class PathResolver(settings: Settings, context: JavaContext) {
+ import PathResolver.{ Defaults, Environment, AsLines, MkLines, ppcp }
+
def this(settings: Settings) = this(settings, if (settings.inline) new JavaContext else DefaultJavaContext)
private def cmdLineOrElse(name: String, alt: String) = {
@@ -216,24 +263,18 @@ class PathResolver(settings: Settings, context: JavaContext) {
lazy val containers = basis.flatten.distinct
- override def toString = """
+ override def toString = s"""
|object Calculated {
- | scalaHome = %s
- | javaBootClassPath = %s
- | javaExtDirs = %s
- | javaUserClassPath = %s
- | useJavaClassPath = %s
- | scalaBootClassPath = %s
- | scalaExtDirs = %s
- | userClassPath = %s
- | sourcePath = %s
- |}""".trim.stripMargin.format(
- scalaHome,
- ppcp(javaBootClassPath), ppcp(javaExtDirs), ppcp(javaUserClassPath),
- useJavaClassPath,
- ppcp(scalaBootClassPath), ppcp(scalaExtDirs), ppcp(userClassPath),
- ppcp(sourcePath)
- )
+ | scalaHome = $scalaHome
+ | javaBootClassPath = ${ppcp(javaBootClassPath)}
+ | javaExtDirs = ${ppcp(javaExtDirs)}
+ | javaUserClassPath = ${ppcp(javaUserClassPath)}
+ | useJavaClassPath = $useJavaClassPath
+ | scalaBootClassPath = ${ppcp(scalaBootClassPath)}
+ | scalaExtDirs = ${ppcp(scalaExtDirs)}
+ | userClassPath = ${ppcp(userClassPath)}
+ | sourcePath = ${ppcp(sourcePath)}
+ |}""".asLines
}
def containers = Calculated.containers
@@ -241,13 +282,12 @@ class PathResolver(settings: Settings, context: JavaContext) {
lazy val result = {
val cp = new JavaClassPath(containers.toIndexedSeq, context)
if (settings.Ylogcp) {
- Console.println("Classpath built from " + settings.toConciseString)
- Console.println("Defaults: " + PathResolver.Defaults)
- Console.println("Calculated: " + Calculated)
+ Console print f"Classpath built from ${settings.toConciseString} %n"
+ Console print s"Defaults: ${PathResolver.Defaults}"
+ Console print s"Calculated: $Calculated"
val xs = (Calculated.basis drop 2).flatten.distinct
- println("After java boot/extdirs classpath has %d entries:" format xs.size)
- xs foreach (x => println(" " + x))
+ Console print (xs mkLines (s"After java boot/extdirs classpath has ${xs.size} entries:", indented = true))
}
cp
}
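
The probing order documented for platformTools above can be sketched in isolation. The ToolsJarProbe object below is hypothetical and uses plain java.io.File instead of the compiler's scala.reflect.io wrappers; the last-resort deep search is applied a little more broadly here than in the actual method:

import java.io.File

object ToolsJarProbe {
  private val jarName = "tools.jar"

  // the jar at <dir>/lib/tools.jar, if it exists as a regular file
  private def jarAt(dir: File): Option[File] = {
    val f = new File(new File(dir, "lib"), jarName)
    if (f.isFile) Some(f) else None
  }

  // last resort: walk the directory tree looking for tools.jar
  private def deeply(dir: File): Option[File] = {
    def walk(d: File): Iterator[File] = {
      val children = Option(d.listFiles).map(_.iterator).getOrElse(Iterator.empty)
      children.flatMap(c => if (c.isDirectory) walk(c) else Iterator.single(c))
    }
    if (dir.isDirectory) walk(dir).find(_.getName == jarName) else None
  }

  def probe(): Option[File] = {
    val home    = (sys.env.get("JDK_HOME") orElse sys.env.get("JAVA_HOME")) map (new File(_))
    val install = Some(new File(sys.props("java.home")))

    // orElse is by-name, so later (and more expensive) probes only run when needed
    (home flatMap jarAt) orElse
      (install flatMap jarAt) orElse
      (install flatMap (f => Option(f.getParentFile)) flatMap jarAt) orElse
      (home flatMap deeply) orElse
      (install flatMap deeply)
  }
}
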
diff --git a/src/compiler/scala/tools/util/SocketServer.scala b/src/compiler/scala/tools/util/SocketServer.scala
index 7da9479dab..1d39a59cf4 100644
--- a/src/compiler/scala/tools/util/SocketServer.scala
+++ b/src/compiler/scala/tools/util/SocketServer.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.tools.util
+package scala
+package tools.util
import java.net.{ ServerSocket, SocketException, SocketTimeoutException }
import java.io.{ PrintWriter, BufferedReader }
diff --git a/src/compiler/scala/tools/util/VerifyClass.scala b/src/compiler/scala/tools/util/VerifyClass.scala
index d208a9f9c2..3c203e1cf2 100644
--- a/src/compiler/scala/tools/util/VerifyClass.scala
+++ b/src/compiler/scala/tools/util/VerifyClass.scala
@@ -3,7 +3,7 @@ package scala.tools.util
import scala.tools.nsc.io._
import java.net.URLClassLoader
import scala.collection.JavaConverters._
-
+import scala.language.postfixOps
object VerifyClass {
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
index beab271a3b..f260ee4093 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -95,7 +95,7 @@ abstract class CPSAnnotationChecker extends CPSUtils {
override def adaptBoundsToAnnotations(bounds: List[TypeBounds], tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = {
if (!cpsEnabled) return bounds
- val anyAtCPS = newCpsParamsMarker(NothingClass.tpe, AnyClass.tpe)
+ val anyAtCPS = newCpsParamsMarker(NothingTpe, AnyTpe)
if (isFunctionType(tparams.head.owner.tpe_*) || isPartialFunctionType(tparams.head.owner.tpe_*)) {
vprintln("function bound: " + tparams.head.owner.tpe + "/"+bounds+"/"+targs)
if (hasCpsParamTypes(targs.last))
@@ -138,7 +138,7 @@ abstract class CPSAnnotationChecker extends CPSUtils {
/*
// not precise enough -- still relying on addAnnotations to remove things from ValDef symbols
- if ((mode & TYPEmode) != 0 && (mode & BYVALmode) != 0) {
+ if (mode.inAllModes(TYPEmode | BYVALmode)) {
if (!annots1.isEmpty) {
return true
}
@@ -147,7 +147,7 @@ abstract class CPSAnnotationChecker extends CPSUtils {
/*
this interferes with overloading resolution
- if ((mode & BYVALmode) != 0 && tree.tpe <:< pt) {
+ if (mode.inByValMode && tree.tpe <:< pt) {
vprintln("already compatible, can't adapt further")
return false
}
@@ -169,7 +169,7 @@ abstract class CPSAnnotationChecker extends CPSUtils {
true
//}
} else false
- } else if (!hasPlusMarker(tree.tpe) && annots1.isEmpty && !annots2.isEmpty && mode.inRetMode) {
+ } else if (!hasPlusMarker(tree.tpe) && annots1.isEmpty && !annots2.isEmpty && typer.context.inReturnExpr) {
vprintln("checking enclosing method's result type without annotations")
tree.tpe <:< pt.withoutAnnotations
} else if (!hasMinusMarker(tree.tpe) && !annots1.isEmpty && mode.inByValMode) {
@@ -193,19 +193,15 @@ abstract class CPSAnnotationChecker extends CPSUtils {
vprintln("adapt annotations " + tree + " / " + tree.tpe + " / " + mode + " / " + pt)
- val patMode = mode.inPatternMode
- val exprMode = mode.inExprMode
- val byValMode = mode.inByValMode
- val retMode = mode.inRetMode
-
- val annotsTree = cpsParamAnnotation(tree.tpe)
- val annotsExpected = cpsParamAnnotation(pt)
+ val annotsTree = cpsParamAnnotation(tree.tpe)
+ val annotsExpected = cpsParamAnnotation(pt)
+ def isMissingExpectedAnnots = annotsTree.isEmpty && annotsExpected.nonEmpty
// not sure I rephrased this comment correctly:
- // replacing `patMode` in the condition below by `patMode || ((mode & TYPEmode) != 0 && (mode & BYVALmode))`
+ // replacing `mode.inPatternMode` in the condition below by `mode.inPatternMode || mode.inAllModes(TYPEmode | BYVALmode)`
// doesn't work correctly -- still relying on addAnnotations to remove things from ValDef symbols
- if (patMode && !annotsTree.isEmpty) tree modifyType removeAllCPSAnnotations
- else if (exprMode && !byValMode && !hasPlusMarker(tree.tpe) && annotsTree.isEmpty && annotsExpected.nonEmpty) { // shiftUnit
+ if (mode.inPatternMode && annotsTree.nonEmpty) tree modifyType removeAllCPSAnnotations
+ else if (mode.typingExprNotValue && !hasPlusMarker(tree.tpe) && isMissingExpectedAnnots) { // shiftUnit
// add a marker annotation that will make tree.tpe behave as pt, subtyping wise
// tree will look like having any possible annotation
//println("adapt annotations " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
@@ -217,13 +213,13 @@ abstract class CPSAnnotationChecker extends CPSUtils {
val res = tree modifyType (_ withAnnotations newPlusMarker() :: annotsExpected) // needed for #1807
vprintln("adapted annotations (not by val) of " + tree + " to " + res.tpe)
res
- } else if (exprMode && byValMode && !hasMinusMarker(tree.tpe) && annotsTree.nonEmpty) { // dropping annotation
+ } else if (mode.typingExprByValue && !hasMinusMarker(tree.tpe) && annotsTree.nonEmpty) { // dropping annotation
// add a marker annotation that will make tree.tpe behave as pt, subtyping wise
// tree will look like having no annotation
val res = tree modifyType addMinusMarker
vprintln("adapted annotations (by val) of " + tree + " to " + res.tpe)
res
- } else if (retMode && !hasPlusMarker(tree.tpe) && annotsTree.isEmpty && annotsExpected.nonEmpty) {
+ } else if (typer.context.inReturnExpr && !hasPlusMarker(tree.tpe) && isMissingExpectedAnnots) {
// add a marker annotation that will make tree.tpe behave as pt, subtyping wise
// tree will look like having any possible annotation
@@ -231,14 +227,14 @@ abstract class CPSAnnotationChecker extends CPSUtils {
// (annotsExpected.nonEmpty == cpsParamAnnotation(pt).nonEmpty)
// note 2: we are not adding the expected cps annotations, since they will be added
// by adaptTypeOfReturn (see below).
- val res = tree modifyType (_ withAnnotations List(newPlusMarker()))
+ val res = tree modifyType (_ withAnnotation newPlusMarker())
vprintln("adapted annotations (return) of " + tree + " to " + res.tpe)
res
} else tree
}
/** Returns an adapted type for a return expression if the method's result type (pt) is a CPS type.
- * Otherwise, it returns the `default` type (`typedReturn` passes `NothingClass.tpe`).
+ * Otherwise, it returns the `default` type (`typedReturn` passes `NothingTpe`).
*
* A return expression in a method that has a CPS result type is an error unless the return
* is in tail position. Therefore, we are making sure that only the types of return expressions
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
index 846ce01953..908ffb3713 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
@@ -189,7 +189,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
val targettp = transformCPSType(tree.tpe)
val pos = catches.head.pos
- val funSym = currentOwner.newValueParameter(cpsNames.catches, pos).setInfo(appliedType(PartialFunctionClass, ThrowableClass.tpe, targettp))
+ val funSym = currentOwner.newValueParameter(cpsNames.catches, pos).setInfo(appliedType(PartialFunctionClass, ThrowableTpe, targettp))
val funDef = localTyper.typedPos(pos) {
ValDef(funSym, Match(EmptyTree, catches1))
}
@@ -197,7 +197,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
Apply(Select(expr1, expr1.tpe.member(cpsNames.flatMapCatch)), List(Ident(funSym)))
}
- val exSym = currentOwner.newValueParameter(cpsNames.ex, pos).setInfo(ThrowableClass.tpe)
+ val exSym = currentOwner.newValueParameter(cpsNames.ex, pos).setInfo(ThrowableTpe)
import CODE._
// generate a case that is supported directly by the back-end
@@ -207,8 +207,8 @@ abstract class SelectiveCPSTransform extends PluginComponent with
IF ((REF(funSym) DOT nme.isDefinedAt)(REF(exSym))) THEN (REF(funSym) APPLY (REF(exSym))) ELSE Throw(REF(exSym))
)
- val catch2 = localTyper.typedCases(List(catchIfDefined), ThrowableClass.tpe, targettp)
- //typedCases(tree, catches, ThrowableClass.tpe, pt)
+ val catch2 = localTyper.typedCases(List(catchIfDefined), ThrowableTpe, targettp)
+ //typedCases(tree, catches, ThrowableTpe, pt)
patmatTransformer.transform(localTyper.typed(Block(List(funDef), treeCopy.Try(tree, treeCopy.Block(block1, stms, expr2), catch2, finalizer1))))
diff --git a/src/eclipse/README.md b/src/eclipse/README.md
index 73aa270b77..2bb1a4da8f 100644
--- a/src/eclipse/README.md
+++ b/src/eclipse/README.md
@@ -11,6 +11,9 @@ IMPORTANT
Preferences/General/Workspace/Linked Resources. The value should be the absolute
path to your scala checkout. All paths in project files are relative to this one,
so nothing will work before you do so.
+Additionally, we have started using Maven dependencies (e.g. junit), so you need to define
+a classpath variable inside Eclipse. Define `M2_REPO` in Java/Build Path/Classpath Variables
+to point to your local Maven repository (e.g. $HOME/.m2/repository).
2. The Eclipse Java compiler does not allow certain calls to restricted APIs in the
JDK. The Scala library uses such APIs, so you'd see this error:
diff --git a/src/eclipse/test-junit/.classpath b/src/eclipse/test-junit/.classpath
new file mode 100644
index 0000000000..718f7b6ece
--- /dev/null
+++ b/src/eclipse/test-junit/.classpath
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="test-junit"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="lib" path="lib/ant/ant.jar"/>
+ <classpathentry kind="lib" path="lib/jline.jar"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
+ <classpathentry kind="var" path="M2_REPO/junit/junit/4.10/junit-4.10.jar"/>
+ <classpathentry kind="output" path="build-test-junit"/>
+</classpath>
diff --git a/src/eclipse/test-junit/.project b/src/eclipse/test-junit/.project
new file mode 100644
index 0000000000..052b6c1b6f
--- /dev/null
+++ b/src/eclipse/test-junit/.project
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>test-junit</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-test-junit</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/junit/classes</locationURI>
+ </link>
+ <link>
+ <name>lib</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/lib</locationURI>
+ </link>
+ <link>
+ <name>test-junit</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/test/junit</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
index 8dbca6da4b..6578504155 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
@@ -5,12 +5,12 @@
*/
package scala.concurrent.forkjoin;
+
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
-import java.util.Random;
import java.util.concurrent.AbstractExecutorService;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
@@ -18,10 +18,357 @@ import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.RunnableFuture;
import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicLong;
-import java.util.concurrent.locks.AbstractQueuedSynchronizer;
-import java.util.concurrent.locks.Condition;
+
+/**
+ * @since 1.8
+ * @author Doug Lea
+ */
+/*public*/ abstract class CountedCompleter<T> extends ForkJoinTask<T> {
+ private static final long serialVersionUID = 5232453752276485070L;
+
+ /** This task's completer, or null if none */
+ final CountedCompleter<?> completer;
+ /** The number of pending tasks until completion */
+ volatile int pending;
+
+ /**
+ * Creates a new CountedCompleter with the given completer
+ * and initial pending count.
+ *
+ * @param completer this task's completer, or {@code null} if none
+ * @param initialPendingCount the initial pending count
+ */
+ protected CountedCompleter(CountedCompleter<?> completer,
+ int initialPendingCount) {
+ this.completer = completer;
+ this.pending = initialPendingCount;
+ }
+
+ /**
+ * Creates a new CountedCompleter with the given completer
+ * and an initial pending count of zero.
+ *
+ * @param completer this task's completer, or {@code null} if none
+ */
+ protected CountedCompleter(CountedCompleter<?> completer) {
+ this.completer = completer;
+ }
+
+ /**
+ * Creates a new CountedCompleter with no completer
+ * and an initial pending count of zero.
+ */
+ protected CountedCompleter() {
+ this.completer = null;
+ }
+
+ /**
+ * The main computation performed by this task.
+ */
+ public abstract void compute();
+
+ /**
+ * Performs an action when method {@link #tryComplete} is invoked
+ * and the pending count is zero, or when the unconditional
+ * method {@link #complete} is invoked. By default, this method
+ * does nothing. You can distinguish cases by checking the
+ * identity of the given caller argument. If not equal to {@code
+ * this}, then it is typically a subtask that may contain results
+ * (and/or links to other results) to combine.
+ *
+ * @param caller the task invoking this method (which may
+ * be this task itself)
+ */
+ public void onCompletion(CountedCompleter<?> caller) {
+ }
+
+ /**
+ * Performs an action when method {@link #completeExceptionally}
+ * is invoked or method {@link #compute} throws an exception, and
+ * this task has not otherwise already completed normally. On
+ * entry to this method, this task {@link
+ * ForkJoinTask#isCompletedAbnormally}. The return value of this
+ * method controls further propagation: If {@code true} and this
+ * task has a completer, then this completer is also completed
+ * exceptionally. The default implementation of this method does
+ * nothing except return {@code true}.
+ *
+ * @param ex the exception
+ * @param caller the task invoking this method (which may
+ * be this task itself)
+ * @return true if this exception should be propagated to this
+ * task's completer, if one exists
+ */
+ public boolean onExceptionalCompletion(Throwable ex, CountedCompleter<?> caller) {
+ return true;
+ }
+
+ /**
+ * Returns the completer established in this task's constructor,
+ * or {@code null} if none.
+ *
+ * @return the completer
+ */
+ public final CountedCompleter<?> getCompleter() {
+ return completer;
+ }
+
+ /**
+ * Returns the current pending count.
+ *
+ * @return the current pending count
+ */
+ public final int getPendingCount() {
+ return pending;
+ }
+
+ /**
+ * Sets the pending count to the given value.
+ *
+ * @param count the count
+ */
+ public final void setPendingCount(int count) {
+ pending = count;
+ }
+
+ /**
+ * Adds (atomically) the given value to the pending count.
+ *
+ * @param delta the value to add
+ */
+ public final void addToPendingCount(int delta) {
+ int c; // note: can replace with intrinsic in jdk8
+ do {} while (!U.compareAndSwapInt(this, PENDING, c = pending, c+delta));
+ }
+
+ /**
+ * Sets (atomically) the pending count to the given count only if
+ * it currently holds the given expected value.
+ *
+ * @param expected the expected value
+ * @param count the new value
+ * @return true if successful
+ */
+ public final boolean compareAndSetPendingCount(int expected, int count) {
+ return U.compareAndSwapInt(this, PENDING, expected, count);
+ }
+
+ /**
+ * If the pending count is nonzero, (atomically) decrements it.
+ *
+ * @return the initial (undecremented) pending count holding on entry
+ * to this method
+ */
+ public final int decrementPendingCountUnlessZero() {
+ int c;
+ do {} while ((c = pending) != 0 &&
+ !U.compareAndSwapInt(this, PENDING, c, c - 1));
+ return c;
+ }
+
+ /**
+ * Returns the root of the current computation; i.e., this
+ * task if it has no completer, else its completer's root.
+ *
+ * @return the root of the current computation
+ */
+ public final CountedCompleter<?> getRoot() {
+ CountedCompleter<?> a = this, p;
+ while ((p = a.completer) != null)
+ a = p;
+ return a;
+ }
+
+ /**
+ * If the pending count is nonzero, decrements the count;
+ * otherwise invokes {@link #onCompletion} and then similarly
+ * tries to complete this task's completer, if one exists,
+ * else marks this task as complete.
+ */
+ public final void tryComplete() {
+ CountedCompleter<?> a = this, s = a;
+ for (int c;;) {
+ if ((c = a.pending) == 0) {
+ a.onCompletion(s);
+ if ((a = (s = a).completer) == null) {
+ s.quietlyComplete();
+ return;
+ }
+ }
+ else if (U.compareAndSwapInt(a, PENDING, c, c - 1))
+ return;
+ }
+ }
+
+ /**
+ * Equivalent to {@link #tryComplete} but does not invoke {@link
+ * #onCompletion} along the completion path: If the pending count
+ * is nonzero, decrements the count; otherwise, similarly tries to
+ * complete this task's completer, if one exists, else marks this
+ * task as complete. This method may be useful in cases where
+ * {@code onCompletion} should not, or need not, be invoked for
+ * each completer in a computation.
+ */
+ public final void propagateCompletion() {
+ CountedCompleter<?> a = this, s = a;
+ for (int c;;) {
+ if ((c = a.pending) == 0) {
+ if ((a = (s = a).completer) == null) {
+ s.quietlyComplete();
+ return;
+ }
+ }
+ else if (U.compareAndSwapInt(a, PENDING, c, c - 1))
+ return;
+ }
+ }
+
+ /**
+ * Regardless of pending count, invokes {@link #onCompletion},
+ * marks this task as complete and further triggers {@link
+ * #tryComplete} on this task's completer, if one exists. The
+ * given rawResult is used as an argument to {@link #setRawResult}
+ * before invoking {@link #onCompletion} or marking this task as
+ * complete; its value is meaningful only for classes overriding
+ * {@code setRawResult}.
+ *
+ * <p>This method may be useful when forcing completion as soon as
+ * any one (versus all) of several subtask results are obtained.
+ * However, in the common (and recommended) case in which {@code
+ * setRawResult} is not overridden, this effect can be obtained
+ * more simply using {@code quietlyCompleteRoot();}.
+ *
+ * @param rawResult the raw result
+ */
+ public void complete(T rawResult) {
+ CountedCompleter<?> p;
+ setRawResult(rawResult);
+ onCompletion(this);
+ quietlyComplete();
+ if ((p = completer) != null)
+ p.tryComplete();
+ }
+
+
+ /**
+ * If this task's pending count is zero, returns this task;
+ * otherwise decrements its pending count and returns {@code
+ * null}. This method is designed to be used with {@link
+ * #nextComplete} in completion traversal loops.
+ *
+ * @return this task, if pending count was zero, else {@code null}
+ */
+ public final CountedCompleter<?> firstComplete() {
+ for (int c;;) {
+ if ((c = pending) == 0)
+ return this;
+ else if (U.compareAndSwapInt(this, PENDING, c, c - 1))
+ return null;
+ }
+ }
+
+ /**
+ * If this task does not have a completer, invokes {@link
+ * ForkJoinTask#quietlyComplete} and returns {@code null}. Or, if
+ * this task's pending count is non-zero, decrements its pending
+ * count and returns {@code null}. Otherwise, returns the
+ * completer. This method can be used as part of a completion
+ * traversal loop for homogeneous task hierarchies:
+ *
+ * <pre> {@code
+ * for (CountedCompleter<?> c = firstComplete();
+ * c != null;
+ * c = c.nextComplete()) {
+ * // ... process c ...
+ * }}</pre>
+ *
+ * @return the completer, or {@code null} if none
+ */
+ public final CountedCompleter<?> nextComplete() {
+ CountedCompleter<?> p;
+ if ((p = completer) != null)
+ return p.firstComplete();
+ else {
+ quietlyComplete();
+ return null;
+ }
+ }
+
+ /**
+ * Equivalent to {@code getRoot().quietlyComplete()}.
+ */
+ public final void quietlyCompleteRoot() {
+ for (CountedCompleter<?> a = this, p;;) {
+ if ((p = a.completer) == null) {
+ a.quietlyComplete();
+ return;
+ }
+ a = p;
+ }
+ }
+
+ /**
+ * Supports ForkJoinTask exception propagation.
+ */
+ void internalPropagateException(Throwable ex) {
+ CountedCompleter<?> a = this, s = a;
+ while (a.onExceptionalCompletion(ex, s) &&
+ (a = (s = a).completer) != null && a.status >= 0)
+ a.recordExceptionalCompletion(ex);
+ }
+
+ /**
+ * Implements execution conventions for CountedCompleters.
+ */
+ protected final boolean exec() {
+ compute();
+ return false;
+ }
+
+ /**
+ * Returns the result of the computation. By default
+ * returns {@code null}, which is appropriate for {@code Void}
+ * actions, but in other cases should be overridden, almost
+ * always to return a field or function of a field that
+ * holds the result upon completion.
+ *
+ * @return the result of the computation
+ */
+ public T getRawResult() { return null; }
+
+ /**
+ * A method that result-bearing CountedCompleters may optionally
+ * use to help maintain result data. By default, does nothing.
+ * Overrides are not recommended. However, if this method is
+ * overridden to update existing objects or fields, then it must
+ * in general be defined to be thread-safe.
+ */
+ protected void setRawResult(T t) { }
+
+ // Unsafe mechanics
+ private static final sun.misc.Unsafe U;
+ private static final long PENDING;
+ static {
+ try {
+ U = getUnsafe();
+ PENDING = U.objectFieldOffset
+ (CountedCompleter.class.getDeclaredField("pending"));
+ } catch (Exception e) {
+ throw new Error(e);
+ }
+ }
+
+ /**
+ * Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package.
+ * Replace with a simple call to Unsafe.getUnsafe when integrating
+ * into a jdk.
+ *
+ * @return a sun.misc.Unsafe
+ */
+ private static sun.misc.Unsafe getUnsafe() {
+ return scala.concurrent.util.Unsafe.instance;
+ }
+}
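
The completion protocol described in the comments above (pending counts, forked children, tryComplete propagating up the completer chain) can be illustrated with a small parallel forEach. This sketch targets the public java.util.concurrent.CountedCompleter of JDK 8 rather than the package-private port added here, and the ForEach name is hypothetical:

import java.util.concurrent.{CountedCompleter, ForkJoinPool}

class ForEach[A](parent: ForEach[A], xs: Array[A], lo: Int, hi: Int, op: A => Unit)
    extends CountedCompleter[Void](parent) {

  def compute(): Unit = {
    var l = lo
    var h = hi
    // Repeatedly split the right half off as a forked child while keeping the left half.
    while (h - l >= 2) {
      val mid = (l + h) >>> 1
      addToPendingCount(1)                       // one more child must complete first
      new ForEach(this, xs, mid, h, op).fork()   // process [mid, h) asynchronously
      h = mid                                    // continue with [l, mid)
    }
    if (h > l) op(xs(l))                         // leaf: exactly one element remains
    tryComplete()                                // decrement pending counts up the chain
  }
}

object ForEach {
  def apply[A](xs: Array[A])(op: A => Unit): Unit =
    ForkJoinPool.commonPool().invoke(new ForEach[A](null, xs, 0, xs.length, op))
}

// e.g. ForEach(Array(1, 2, 3, 4, 5))(n => println(n * n))
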
/**
* An {@link ExecutorService} for running {@link ForkJoinTask}s.
@@ -41,14 +388,22 @@ import java.util.concurrent.locks.Condition;
* ForkJoinPool}s may also be appropriate for use with event-style
* tasks that are never joined.
*
- * <p>A {@code ForkJoinPool} is constructed with a given target
- * parallelism level; by default, equal to the number of available
- * processors. The pool attempts to maintain enough active (or
- * available) threads by dynamically adding, suspending, or resuming
- * internal worker threads, even if some tasks are stalled waiting to
- * join others. However, no such adjustments are guaranteed in the
- * face of blocked IO or other unmanaged synchronization. The nested
- * {@link ManagedBlocker} interface enables extension of the kinds of
+ * <p>A static {@link #commonPool()} is available and appropriate for
+ * most applications. The common pool is used by any ForkJoinTask that
+ * is not explicitly submitted to a specified pool. Using the common
+ * pool normally reduces resource usage (its threads are slowly
+ * reclaimed during periods of non-use, and reinstated upon subsequent
+ * use).
+ *
+ * <p>For applications that require separate or custom pools, a {@code
+ * ForkJoinPool} may be constructed with a given target parallelism
+ * level; by default, equal to the number of available processors. The
+ * pool attempts to maintain enough active (or available) threads by
+ * dynamically adding, suspending, or resuming internal worker
+ * threads, even if some tasks are stalled waiting to join
+ * others. However, no such adjustments are guaranteed in the face of
+ * blocked I/O or other unmanaged synchronization. The nested {@link
+ * ManagedBlocker} interface enables extension of the kinds of
* synchronization accommodated.
*
* <p>In addition to execution and lifecycle control methods, this
@@ -58,7 +413,7 @@ import java.util.concurrent.locks.Condition;
* {@link #toString} returns indications of pool state in a
* convenient form for informal monitoring.
*
- * <p> As is the case with other ExecutorServices, there are three
+ * <p>As is the case with other ExecutorServices, there are three
* main task execution methods summarized in the following table.
* These are designed to be used primarily by clients not already
* engaged in fork/join computations in the current pool. The main
@@ -93,22 +448,16 @@ import java.util.concurrent.locks.Condition;
* </tr>
* </table>
*
- * <p><b>Sample Usage.</b> Normally a single {@code ForkJoinPool} is
- * used for all parallel task execution in a program or subsystem.
- * Otherwise, use would not usually outweigh the construction and
- * bookkeeping overhead of creating a large set of threads. For
- * example, a common pool could be used for the {@code SortTasks}
- * illustrated in {@link RecursiveAction}. Because {@code
- * ForkJoinPool} uses threads in {@linkplain java.lang.Thread#isDaemon
- * daemon} mode, there is typically no need to explicitly {@link
- * #shutdown} such a pool upon program exit.
- *
- * <pre> {@code
- * static final ForkJoinPool mainPool = new ForkJoinPool();
- * ...
- * public void sort(long[] array) {
- * mainPool.invoke(new SortTask(array, 0, array.length));
- * }}</pre>
+ * <p>The common pool is by default constructed with default
+ * parameters, but these may be controlled by setting three {@link
+ * System#getProperty system properties} with prefix {@code
+ * java.util.concurrent.ForkJoinPool.common}: {@code parallelism} --
+ * an integer greater than zero, {@code threadFactory} -- the class
+ * name of a {@link ForkJoinWorkerThreadFactory}, and {@code
+ * exceptionHandler} -- the class name of a {@link
+ * java.lang.Thread.UncaughtExceptionHandler
+ * Thread.UncaughtExceptionHandler}. Upon any error in establishing
+ * these settings, default parameters are used.
*
* <p><b>Implementation notes</b>: This implementation restricts the
* maximum number of running threads to 32767. Attempts to create
@@ -196,21 +545,24 @@ public class ForkJoinPool extends AbstractExecutorService {
* WorkQueues are also used in a similar way for tasks submitted
* to the pool. We cannot mix these tasks in the same queues used
* for work-stealing (this would contaminate lifo/fifo
- * processing). Instead, we loosely associate submission queues
+ * processing). Instead, we randomly associate submission queues
* with submitting threads, using a form of hashing. The
* ThreadLocal Submitter class contains a value initially used as
* a hash code for choosing existing queues, but may be randomly
* repositioned upon contention with other submitters. In
- * essence, submitters act like workers except that they never
- * take tasks, and they are multiplexed on to a finite number of
- * shared work queues. However, classes are set up so that future
- * extensions could allow submitters to optionally help perform
- * tasks as well. Insertion of tasks in shared mode requires a
- * lock (mainly to protect in the case of resizing) but we use
- * only a simple spinlock (using bits in field runState), because
- * submitters encountering a busy queue move on to try or create
- * other queues -- they block only when creating and registering
- * new queues.
+ * essence, submitters act like workers except that they are
+ * restricted to executing local tasks that they submitted (or in
+ * the case of CountedCompleters, others with the same root task).
+ * However, because most shared/external queue operations are more
+ * expensive than internal, and because, at steady state, external
+ * submitters will compete for CPU with workers, ForkJoinTask.join
+ * and related methods disable them from repeatedly helping to
+ * process tasks if all workers are active. Insertion of tasks in
+ * shared mode requires a lock (mainly to protect in the case of
+ * resizing) but we use only a simple spinlock (using bits in
+ * field qlock), because submitters encountering a busy queue move
+ * on to try or create other queues -- they block only when
+ * creating and registering new queues.
*
* Management
* ==========
@@ -232,23 +584,26 @@ public class ForkJoinPool extends AbstractExecutorService {
* and their negations (used for thresholding) to fit into 16bit
* fields.
*
- * Field "runState" contains 32 bits needed to register and
- * deregister WorkQueues, as well as to enable shutdown. It is
- * only modified under a lock (normally briefly held, but
- * occasionally protecting allocations and resizings) but even
- * when locked remains available to check consistency.
+ * Field "plock" is a form of sequence lock with a saturating
+ * shutdown bit (similarly for per-queue "qlocks"), mainly
+ * protecting updates to the workQueues array, as well as to
+ * enable shutdown. When used as a lock, it is normally only very
+ * briefly held, so is nearly always available after at most a
+ * brief spin, but we use a monitor-based backup strategy to
+ * block when needed.
*
* Recording WorkQueues. WorkQueues are recorded in the
- * "workQueues" array that is created upon pool construction and
- * expanded if necessary. Updates to the array while recording
- * new workers and unrecording terminated ones are protected from
- * each other by a lock but the array is otherwise concurrently
- * readable, and accessed directly. To simplify index-based
- * operations, the array size is always a power of two, and all
- * readers must tolerate null slots. Shared (submission) queues
- * are at even indices, worker queues at odd indices. Grouping
- * them together in this way simplifies and speeds up task
- * scanning.
+ * "workQueues" array that is created upon first use and expanded
+ * if necessary. Updates to the array while recording new workers
+ * and unrecording terminated ones are protected from each other
+ * by a lock but the array is otherwise concurrently readable, and
+ * accessed directly. To simplify index-based operations, the
+ * array size is always a power of two, and all readers must
+ * tolerate null slots. Worker queues are at odd indices. Shared
+ * (submission) queues are at even indices, up to a maximum of 64
+ * slots, to limit growth even if array needs to expand to add
+ * more workers. Grouping them together in this way simplifies and
+ * speeds up task scanning.
*
* All worker thread creation is on-demand, triggered by task
* submissions, replacement of terminated workers, and/or
@@ -309,24 +664,33 @@ public class ForkJoinPool extends AbstractExecutorService {
*
* Signalling. We create or wake up workers only when there
* appears to be at least one task they might be able to find and
- * execute. When a submission is added or another worker adds a
- * task to a queue that previously had fewer than two tasks, they
- * signal waiting workers (or trigger creation of new ones if
- * fewer than the given parallelism level -- see signalWork).
- * These primary signals are buttressed by signals during rescans;
- * together these cover the signals needed in cases when more
- * tasks are pushed but untaken, and improve performance compared
- * to having one thread wake up all workers.
+ * execute. However, many other threads may notice the same task
+ * and each signal to wake up a thread that might take it. So in
+ * general, pools will be over-signalled. When a submission is
+ * added or another worker adds a task to a queue that has fewer
+ * than two tasks, they signal waiting workers (or trigger
+ * creation of new ones if fewer than the given parallelism level
+ * -- signalWork), and may leave a hint to the unparked worker to
+ * help signal others upon wakeup. These primary signals are
+ * buttressed by others (see method helpSignal) whenever other
+ * threads scan for work or do not have a task to process. On
+ * most platforms, signalling (unpark) overhead time is noticeably
+ * long, and the time between signalling a thread and it actually
+ * making progress can be very noticeably long, so it is worth
+ * offloading these delays from critical paths as much as
+ * possible.
*
* Trimming workers. To release resources after periods of lack of
* use, a worker starting to wait when the pool is quiescent will
- * time out and terminate if the pool has remained quiescent for
- * SHRINK_RATE nanosecs. This will slowly propagate, eventually
- * terminating all workers after long periods of non-use.
+ * time out and terminate if the pool has remained quiescent for a
+ * given period -- a short period if there are more threads than
+ * parallelism, longer as the number of threads decreases. This
+ * will slowly propagate, eventually terminating all workers after
+ * periods of non-use.
*
* Shutdown and Termination. A call to shutdownNow atomically sets
- * a runState bit and then (non-atomically) sets each worker's
- * runState status, cancels all unprocessed tasks, and wakes up
+ * a plock bit and then (non-atomically) sets each worker's
+ * qlock status, cancels all unprocessed tasks, and wakes up
* all waiting workers. Detecting whether termination should
* commence after a non-abrupt shutdown() call requires more work
* and bookkeeping. We need consensus about quiescence (i.e., that
@@ -354,13 +718,13 @@ public class ForkJoinPool extends AbstractExecutorService {
* method tryCompensate() may create or re-activate a spare
* thread to compensate for blocked joiners until they unblock.
*
- * A third form (implemented in tryRemoveAndExec and
- * tryPollForAndExec) amounts to helping a hypothetical
- * compensator: If we can readily tell that a possible action of a
- * compensator is to steal and execute the task being joined, the
- * joining thread can do so directly, without the need for a
- * compensation thread (although at the expense of larger run-time
- * stacks, but the tradeoff is typically worthwhile).
+ * A third form (implemented in tryRemoveAndExec) amounts to
+ * helping a hypothetical compensator: If we can readily tell that
+ * a possible action of a compensator is to steal and execute the
+ * task being joined, the joining thread can do so directly,
+ * without the need for a compensation thread (although at the
+ * expense of larger run-time stacks, but the tradeoff is
+ * typically worthwhile).
*
* The ManagedBlocker extension API can't use helping so relies
* only on compensation in method awaitBlocker.
@@ -382,12 +746,12 @@ public class ForkJoinPool extends AbstractExecutorService {
* steals, rather than use per-task bookkeeping. This sometimes
* requires a linear scan of workQueues array to locate stealers,
* but often doesn't because stealers leave hints (that may become
- * stale/wrong) of where to locate them. A stealHint is only a
- * hint because a worker might have had multiple steals and the
- * hint records only one of them (usually the most current).
- * Hinting isolates cost to when it is needed, rather than adding
- * to per-task overhead. (2) It is "shallow", ignoring nesting
- * and potentially cyclic mutual steals. (3) It is intentionally
+ * stale/wrong) of where to locate them. It is only a hint
+ * because a worker might have had multiple steals and the hint
+ * records only one of them (usually the most current). Hinting
+ * isolates cost to when it is needed, rather than adding to
+ * per-task overhead. (2) It is "shallow", ignoring nesting and
+ * potentially cyclic mutual steals. (3) It is intentionally
* racy: field currentJoin is updated only while actively joining,
* which means that we miss links in the chain during long-lived
* tasks, GC stalls etc (which is OK since blocking in such cases
@@ -395,6 +759,12 @@ public class ForkJoinPool extends AbstractExecutorService {
* to find work (see MAX_HELP) and fall back to suspending the
* worker and if necessary replacing it with another.
*
+ * Helping actions for CountedCompleters are much simpler: Method
+ * helpComplete can take and execute any task with the same root
+ * as the task being waited on. However, this still entails some
+ * traversal of completer chains, so is less efficient than using
+ * CountedCompleters without explicit joins.
+ *
* It is impossible to keep exactly the target parallelism number
* of threads running at any given time. Determining the
* existence of conservatively safe helping targets, the
@@ -416,30 +786,41 @@ public class ForkJoinPool extends AbstractExecutorService {
* intractable) game with an opponent that may choose the worst
* (for us) active thread to stall at any time. We take several
* precautions to bound losses (and thus bound gains), mainly in
- * methods tryCompensate and awaitJoin: (1) We only try
- * compensation after attempting enough helping steps (measured
- * via counting and timing) that we have already consumed the
- * estimated cost of creating and activating a new thread. (2) We
- * allow up to 50% of threads to be blocked before initially
- * adding any others, and unless completely saturated, check that
- * some work is available for a new worker before adding. Also, we
- * create up to only 50% more threads until entering a mode that
- * only adds a thread if all others are possibly blocked. All
- * together, this means that we might be half as fast to react,
- * and create half as many threads as possible in the ideal case,
- * but present vastly fewer anomalies in all other cases compared
- * to both more aggressive and more conservative alternatives.
- *
- * Style notes: There is a lot of representation-level coupling
- * among classes ForkJoinPool, ForkJoinWorkerThread, and
- * ForkJoinTask. The fields of WorkQueue maintain data structures
- * managed by ForkJoinPool, so are directly accessed. There is
- * little point trying to reduce this, since any associated future
- * changes in representations will need to be accompanied by
- * algorithmic changes anyway. Several methods intrinsically
- * sprawl because they must accumulate sets of consistent reads of
- * volatiles held in local variables. Methods signalWork() and
- * scan() are the main bottlenecks, so are especially heavily
+ * methods tryCompensate and awaitJoin.
+ *
+ * Common Pool
+ * ===========
+ *
+ * The static common Pool always exists after static
+ * initialization. Since it (or any other created pool) need
+ * never be used, we minimize initial construction overhead and
+ * footprint to the setup of about a dozen fields, with no nested
+ * allocation. Most bootstrapping occurs within method
+ * fullExternalPush during the first submission to the pool.
+ *
+ * When external threads submit to the common pool, they can
+ * perform some subtask processing (see externalHelpJoin and
+ * related methods). We do not need to record whether these
+ * submissions are to the common pool -- if not, externalHelpJoin
+ * returns quickly (at the most helping to signal some common pool
+ * workers). These submitters would otherwise be blocked waiting
+ * for completion, so the extra effort (with liberally sprinkled
+ * task status checks) in inapplicable cases amounts to an odd
+ * form of limited spin-wait before blocking in ForkJoinTask.join.
+ *
+ * Style notes
+ * ===========
+ *
+ * There is a lot of representation-level coupling among classes
+ * ForkJoinPool, ForkJoinWorkerThread, and ForkJoinTask. The
+ * fields of WorkQueue maintain data structures managed by
+ * ForkJoinPool, so are directly accessed. There is little point
+ * trying to reduce this, since any associated future changes in
+ * representations will need to be accompanied by algorithmic
+ * changes anyway. Several methods intrinsically sprawl because
+ * they must accumulate sets of consistent reads of volatiles held
+ * in local variables. Methods signalWork() and scan() are the
+ * main bottlenecks, so are especially heavily
* micro-optimized/mangled. There are lots of inline assignments
* (of form "while ((local = field) != 0)") which are usually the
* simplest way to ensure the required read orderings (which are
@@ -447,7 +828,8 @@ public class ForkJoinPool extends AbstractExecutorService {
* declarations of these locals at the heads of methods or blocks.
* There are several occurrences of the unusual "do {} while
* (!cas...)" which is the simplest way to force an update of a
- * CAS'ed variable. There are also other coding oddities that help
+ * CAS'ed variable. There are also other coding oddities (including
+ * several unnecessary-looking hoisted null checks) that help
* some methods perform reasonably even when interpreted (not
* compiled).
*
@@ -496,34 +878,31 @@ public class ForkJoinPool extends AbstractExecutorService {
* Default ForkJoinWorkerThreadFactory implementation; creates a
* new ForkJoinWorkerThread.
*/
- static class DefaultForkJoinWorkerThreadFactory
+ static final class DefaultForkJoinWorkerThreadFactory
implements ForkJoinWorkerThreadFactory {
- public ForkJoinWorkerThread newThread(ForkJoinPool pool) {
+ public final ForkJoinWorkerThread newThread(ForkJoinPool pool) {
return new ForkJoinWorkerThread(pool);
}
}
/**
- * A simple non-reentrant lock used for exclusion when managing
- * queues and workers. We use a custom lock so that we can readily
- * probe lock state in constructions that check among alternative
- * actions. The lock is normally only very briefly held, and
- * sometimes treated as a spinlock, but other usages block to
- * reduce overall contention in those cases where locked code
- * bodies perform allocation/resizing.
+ * Per-thread records for threads that submit to pools. Currently
+ * holds only pseudo-random seed / index that is used to choose
+ * submission queues in method externalPush. In the future, this may
+ * also incorporate a means to implement different task rejection
+ * and resubmission policies.
+ *
+ * Seeds for submitters and workers/workQueues work in basically
+ * the same way but are initialized and updated using slightly
+ * different mechanics. Both are initialized using the same
+ * approach as in class ThreadLocal, where successive values are
+ * unlikely to collide with previous values. Seeds are then
+ * randomly modified upon collisions using xorshifts, which
+ * requires a non-zero seed.
*/
- static final class Mutex extends AbstractQueuedSynchronizer {
- public final boolean tryAcquire(int ignore) {
- return compareAndSetState(0, 1);
- }
- public final boolean tryRelease(int ignore) {
- setState(0);
- return true;
- }
- public final void lock() { acquire(0); }
- public final void unlock() { release(0); }
- public final boolean isHeldExclusively() { return getState() == 1; }
- public final Condition newCondition() { return new ConditionObject(); }
+ static final class Submitter {
+ int seed;
+ Submitter(int s) { seed = s; }
}
/**
@@ -533,6 +912,7 @@ public class ForkJoinPool extends AbstractExecutorService {
* actually do anything beyond having a unique identity.
*/
static final class EmptyTask extends ForkJoinTask<Void> {
+ private static final long serialVersionUID = -7721805057305804111L;
EmptyTask() { status = ForkJoinTask.NORMAL; } // force done
public final Void getRawResult() { return null; }
public final void setRawResult(Void x) {}
@@ -553,27 +933,31 @@ public class ForkJoinPool extends AbstractExecutorService {
*
* Field "top" is the index (mod array.length) of the next queue
* slot to push to or pop from. It is written only by owner thread
- * for push, or under lock for trySharedPush, and accessed by
- * other threads only after reading (volatile) base. Both top and
- * base are allowed to wrap around on overflow, but (top - base)
- * (or more commonly -(base - top) to force volatile read of base
- * before top) still estimates size.
+ * for push, or under lock for external/shared push, and accessed
+ * by other threads only after reading (volatile) base. Both top
+ * and base are allowed to wrap around on overflow, but (top -
+ * base) (or more commonly -(base - top) to force volatile read of
+ * base before top) still estimates size. The lock ("qlock") is
+ * forced to -1 on termination, causing all further lock attempts
+ * to fail. (Note: we don't need CAS for termination state because
+ * upon pool shutdown, all shared-queues will stop being used
+ * anyway.) Nearly all lock bodies are set up so that exceptions
+ * within lock bodies are "impossible" (modulo JVM errors that
+ * would cause failure anyway.)
*
* The array slots are read and written using the emulation of
* volatiles/atomics provided by Unsafe. Insertions must in
* general use putOrderedObject as a form of releasing store to
* ensure that all writes to the task object are ordered before
- * its publication in the queue. (Although we can avoid one case
- * of this when locked in trySharedPush.) All removals entail a
- * CAS to null. The array is always a power of two. To ensure
- * safety of Unsafe array operations, all accesses perform
- * explicit null checks and implicit bounds checks via
- * power-of-two masking.
+ * its publication in the queue. All removals entail a CAS to
+ * null. The array is always a power of two. To ensure safety of
+ * Unsafe array operations, all accesses perform explicit null
+ * checks and implicit bounds checks via power-of-two masking.
*
* In addition to basic queuing support, this class contains
* fields described elsewhere to control execution. It turns out
- * to work better memory-layout-wise to include them in this
- * class rather than a separate class.
+ * to work better memory-layout-wise to include them in this class
+ * rather than a separate class.
*
* Performance on most platforms is very sensitive to placement of
* instances of both WorkQueues and their arrays -- we absolutely
@@ -587,10 +971,10 @@ public class ForkJoinPool extends AbstractExecutorService {
* trades off slightly slower average field access for the sake of
* avoiding really bad worst-case access. (Until better JVM
* support is in place, this padding is dependent on transient
- * properties of JVM field layout rules.) We also take care in
+ * properties of JVM field layout rules.) We also take care in
* allocating, sizing and resizing the array. Non-shared queue
- * arrays are initialized (via method growArray) by workers before
- * use. Others are allocated on first use.
+ * arrays are initialized by workers before use. Others are
+ * allocated on first use.
*/
static final class WorkQueue {
/**
@@ -613,16 +997,17 @@ public class ForkJoinPool extends AbstractExecutorService {
*/
static final int MAXIMUM_QUEUE_CAPACITY = 1 << 26; // 64M
- volatile long totalSteals; // cumulative number of steals
+ // Heuristic padding to ameliorate unfortunate memory placements
+ volatile long pad00, pad01, pad02, pad03, pad04, pad05, pad06;
+
int seed; // for random scanning; initialize nonzero
volatile int eventCount; // encoded inactivation count; < 0 if inactive
int nextWait; // encoded record of next event waiter
- int rescans; // remaining scans until block
- int nsteals; // top-level task executions since last idle
- final int mode; // lifo, fifo, or shared
+ int hint; // steal or signal hint (index)
int poolIndex; // index of this queue in pool (or 0)
- int stealHint; // index of most recent known stealer
- volatile int runState; // 1: locked, -1: terminate; else 0
+ final int mode; // 0: lifo, > 0: fifo, < 0: shared
+ int nsteals; // number of steals
+ volatile int qlock; // 1: locked, -1: terminate; else 0
volatile int base; // index of next slot for poll
int top; // index of next slot for push
ForkJoinTask<?>[] array; // the elements (initially unallocated)
@@ -631,14 +1016,16 @@ public class ForkJoinPool extends AbstractExecutorService {
volatile Thread parker; // == owner during call to park; else null
volatile ForkJoinTask<?> currentJoin; // task being joined in awaitJoin
ForkJoinTask<?> currentSteal; // current non-local task being executed
- // Heuristic padding to ameliorate unfortunate memory placements
- Object p00, p01, p02, p03, p04, p05, p06, p07;
- Object p08, p09, p0a, p0b, p0c, p0d, p0e;
- WorkQueue(ForkJoinPool pool, ForkJoinWorkerThread owner, int mode) {
- this.mode = mode;
+ volatile Object pad10, pad11, pad12, pad13, pad14, pad15, pad16, pad17;
+ volatile Object pad18, pad19, pad1a, pad1b, pad1c, pad1d;
+
+ WorkQueue(ForkJoinPool pool, ForkJoinWorkerThread owner, int mode,
+ int seed) {
this.pool = pool;
this.owner = owner;
+ this.mode = mode;
+ this.seed = seed;
// Place indices in the center of array (that is not yet allocated)
base = top = INITIAL_QUEUE_CAPACITY >>> 1;
}
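The pad fields above implement the padding heuristic discussed in the class comment. A stripped-down sketch of the idea, with invented field names and the usual caveat that the JVM ultimately controls layout:

// Sketch: surround a heavily written volatile with unused fields so it is
// unlikely to share a cache line with other hot data. This is heuristic,
// exactly as the comment above warns.
class PaddedHotField {
    volatile long p00, p01, p02, p03, p04, p05, p06;   // leading pad
    volatile long hot;                                  // frequently CASed field
    volatile long p10, p11, p12, p13, p14, p15, p16;   // trailing pad
}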
@@ -651,7 +1038,7 @@ public class ForkJoinPool extends AbstractExecutorService {
return (n >= 0) ? 0 : -n; // ignore transient negative
}
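To make the queueSize arithmetic and the power-of-two masking described above concrete, a small stand-alone sketch (not pool code):

// Sketch of the bookkeeping described in the WorkQueue comment:
// (top - base) estimates the queue size, and indices map to slots by
// masking with (length - 1), relying on length being a power of two.
final class QueueIndexDemo {
    static int estimateSize(int base, int top) {
        int n = base - top;          // read base first, as the comment notes
        return (n >= 0) ? 0 : -n;    // ignore transient negative values
    }

    static int slot(int index, int length) {
        return index & (length - 1); // implicit bounds check via masking
    }

    public static void main(String[] args) {
        System.out.println(estimateSize(4, 7));  // 3 tasks
        System.out.println(slot(9, 8));          // wraps to slot 1
    }
}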
- /**
+ /**
* Provides a more accurate estimate of whether this queue has
* any tasks than does queueSize, by checking whether a
* near-empty queue has at least one unclaimed task.
@@ -663,62 +1050,63 @@ public class ForkJoinPool extends AbstractExecutorService {
(n == -1 &&
((a = array) == null ||
(m = a.length - 1) < 0 ||
- U.getObjectVolatile
- (a, ((m & (s - 1)) << ASHIFT) + ABASE) == null)));
+ U.getObject
+ (a, (long)((m & (s - 1)) << ASHIFT) + ABASE) == null)));
}
/**
- * Pushes a task. Call only by owner in unshared queues.
+ * Pushes a task. Call only by owner in unshared queues. (The
+ * shared-queue version is embedded in method externalPush.)
*
* @param task the task. Caller must ensure non-null.
- * @throw RejectedExecutionException if array cannot be resized
+ * @throws RejectedExecutionException if array cannot be resized
*/
final void push(ForkJoinTask<?> task) {
ForkJoinTask<?>[] a; ForkJoinPool p;
int s = top, m, n;
if ((a = array) != null) { // ignore if queue removed
- U.putOrderedObject
- (a, (((m = a.length - 1) & s) << ASHIFT) + ABASE, task);
+ int j = (((m = a.length - 1) & s) << ASHIFT) + ABASE;
+ U.putOrderedObject(a, j, task);
if ((n = (top = s + 1) - base) <= 2) {
if ((p = pool) != null)
- p.signalWork();
+ p.signalWork(this);
}
else if (n >= m)
- growArray(true);
+ growArray();
}
}
- /**
- * Pushes a task if lock is free and array is either big
- * enough or can be resized to be big enough.
- *
- * @param task the task. Caller must ensure non-null.
- * @return true if submitted
+ /**
+ * Initializes or doubles the capacity of array. Call either
+ * by owner or with lock held -- it is OK for base, but not
+ * top, to move while resizings are in progress.
*/
- final boolean trySharedPush(ForkJoinTask<?> task) {
- boolean submitted = false;
- if (runState == 0 && U.compareAndSwapInt(this, RUNSTATE, 0, 1)) {
- ForkJoinTask<?>[] a = array;
- int s = top;
- try {
- if ((a != null && a.length > s + 1 - base) ||
- (a = growArray(false)) != null) { // must presize
- int j = (((a.length - 1) & s) << ASHIFT) + ABASE;
- U.putObject(a, (long)j, task); // don't need "ordered"
- top = s + 1;
- submitted = true;
- }
- } finally {
- runState = 0; // unlock
- }
+ final ForkJoinTask<?>[] growArray() {
+ ForkJoinTask<?>[] oldA = array;
+ int size = oldA != null ? oldA.length << 1 : INITIAL_QUEUE_CAPACITY;
+ if (size > MAXIMUM_QUEUE_CAPACITY)
+ throw new RejectedExecutionException("Queue capacity exceeded");
+ int oldMask, t, b;
+ ForkJoinTask<?>[] a = array = new ForkJoinTask<?>[size];
+ if (oldA != null && (oldMask = oldA.length - 1) >= 0 &&
+ (t = top) - (b = base) > 0) {
+ int mask = size - 1;
+ do {
+ ForkJoinTask<?> x;
+ int oldj = ((b & oldMask) << ASHIFT) + ABASE;
+ int j = ((b & mask) << ASHIFT) + ABASE;
+ x = (ForkJoinTask<?>)U.getObjectVolatile(oldA, oldj);
+ if (x != null &&
+ U.compareAndSwapObject(oldA, oldj, x, null))
+ U.putObjectVolatile(a, j, x);
+ } while (++b != t);
}
- return submitted;
+ return a;
}
/**
* Takes next task, if one exists, in LIFO order. Call only
- * by owner in unshared queues. (We do not have a shared
- * version of this method because it is never needed.)
+ * by owner in unshared queues.
*/
final ForkJoinTask<?> pop() {
ForkJoinTask<?>[] a; ForkJoinTask<?> t; int m;
@@ -773,7 +1161,7 @@ public class ForkJoinPool extends AbstractExecutorService {
else if (base == b) {
if (b + 1 == top)
break;
- Thread.yield(); // wait for lagging update
+ Thread.yield(); // wait for lagging update (very rare)
}
}
return null;
@@ -800,6 +1188,7 @@ public class ForkJoinPool extends AbstractExecutorService {
/**
* Pops the given task only if it is at the current top.
+ * (A shared version is available only via FJP.tryExternalUnpush)
*/
final boolean tryUnpush(ForkJoinTask<?> t) {
ForkJoinTask<?>[] a; int s;
@@ -813,57 +1202,6 @@ public class ForkJoinPool extends AbstractExecutorService {
}
/**
- * Polls the given task only if it is at the current base.
- */
- final boolean pollFor(ForkJoinTask<?> task) {
- ForkJoinTask<?>[] a; int b;
- if ((b = base) - top < 0 && (a = array) != null) {
- int j = (((a.length - 1) & b) << ASHIFT) + ABASE;
- if (U.getObjectVolatile(a, j) == task && base == b &&
- U.compareAndSwapObject(a, j, task, null)) {
- base = b + 1;
- return true;
- }
- }
- return false;
- }
-
- /**
- * Initializes or doubles the capacity of array. Call either
- * by owner or with lock held -- it is OK for base, but not
- * top, to move while resizings are in progress.
- *
- * @param rejectOnFailure if true, throw exception if capacity
- * exceeded (relayed ultimately to user); else return null.
- */
- final ForkJoinTask<?>[] growArray(boolean rejectOnFailure) {
- ForkJoinTask<?>[] oldA = array;
- int size = oldA != null ? oldA.length << 1 : INITIAL_QUEUE_CAPACITY;
- if (size <= MAXIMUM_QUEUE_CAPACITY) {
- int oldMask, t, b;
- ForkJoinTask<?>[] a = array = new ForkJoinTask<?>[size];
- if (oldA != null && (oldMask = oldA.length - 1) >= 0 &&
- (t = top) - (b = base) > 0) {
- int mask = size - 1;
- do {
- ForkJoinTask<?> x;
- int oldj = ((b & oldMask) << ASHIFT) + ABASE;
- int j = ((b & mask) << ASHIFT) + ABASE;
- x = (ForkJoinTask<?>)U.getObjectVolatile(oldA, oldj);
- if (x != null &&
- U.compareAndSwapObject(oldA, oldj, x, null))
- U.putObjectVolatile(a, j, x);
- } while (++b != t);
- }
- return a;
- }
- else if (!rejectOnFailure)
- return null;
- else
- throw new RejectedExecutionException("Queue capacity exceeded");
- }
-
- /**
* Removes and cancels all known tasks, ignoring any exceptions.
*/
final void cancelAll() {
@@ -887,7 +1225,7 @@ public class ForkJoinPool extends AbstractExecutorService {
return seed = r ^= r << 5;
}
- // Execution methods
+ // Specialized execution methods
/**
* Pops and runs tasks until empty.
@@ -916,16 +1254,14 @@ public class ForkJoinPool extends AbstractExecutorService {
}
/**
- * If present, removes from queue and executes the given task, or
- * any other cancelled task. Returns (true) immediately on any CAS
+ * If present, removes from queue and executes the given task,
+ * or any other cancelled task. Returns (true) on any CAS
* or consistency check failure so caller can retry.
*
- * @return 0 if no progress can be made, else positive
- * (this unusual convention simplifies use with tryHelpStealer.)
+ * @return false if no progress can be made, else true
*/
- final int tryRemoveAndExec(ForkJoinTask<?> task) {
- int stat = 1;
- boolean removed = false, empty = true;
+ final boolean tryRemoveAndExec(ForkJoinTask<?> task) {
+ boolean stat = true, removed = false, empty = true;
ForkJoinTask<?>[] a; int m, s, b, n;
if ((a = array) != null && (m = a.length - 1) >= 0 &&
(n = (s = top) - (b = base)) > 0) {
@@ -955,7 +1291,7 @@ public class ForkJoinPool extends AbstractExecutorService {
}
if (--n == 0) {
if (!empty && base == b)
- stat = 0;
+ stat = false;
break;
}
}
@@ -966,21 +1302,49 @@ public class ForkJoinPool extends AbstractExecutorService {
}
/**
+ * Polls for and executes the given task or any other task in
+ * its CountedCompleter computation.
+ */
+ final boolean pollAndExecCC(ForkJoinTask<?> root) {
+ ForkJoinTask<?>[] a; int b; Object o;
+ outer: while ((b = base) - top < 0 && (a = array) != null) {
+ long j = (((a.length - 1) & b) << ASHIFT) + ABASE;
+ if ((o = U.getObject(a, j)) == null ||
+ !(o instanceof CountedCompleter))
+ break;
+ for (CountedCompleter<?> t = (CountedCompleter<?>)o, r = t;;) {
+ if (r == root) {
+ if (base == b &&
+ U.compareAndSwapObject(a, j, t, null)) {
+ base = b + 1;
+ t.doExec();
+ return true;
+ }
+ else
+ break; // restart
+ }
+ if ((r = r.completer) == null)
+ break outer; // not part of root computation
+ }
+ }
+ return false;
+ }
+
+ /**
* Executes a top-level task and any local tasks remaining
* after execution.
*/
final void runTask(ForkJoinTask<?> t) {
if (t != null) {
- currentSteal = t;
- t.doExec();
- if (top != base) { // process remaining local tasks
+ (currentSteal = t).doExec();
+ currentSteal = null;
+ ++nsteals;
+ if (base - top < 0) { // process remaining local tasks
if (mode == 0)
popAndExecAll();
else
pollAndExecAll();
}
- ++nsteals;
- currentSteal = null;
}
}
@@ -990,8 +1354,7 @@ public class ForkJoinPool extends AbstractExecutorService {
final void runSubtask(ForkJoinTask<?> t) {
if (t != null) {
ForkJoinTask<?> ps = currentSteal;
- currentSteal = t;
- t.doExec();
+ (currentSteal = t).doExec();
currentSteal = ps;
}
}
@@ -1008,74 +1371,29 @@ public class ForkJoinPool extends AbstractExecutorService {
s != Thread.State.TIMED_WAITING);
}
- /**
- * If this owned and is not already interrupted, try to
- * interrupt and/or unpark, ignoring exceptions.
- */
- final void interruptOwner() {
- Thread wt, p;
- if ((wt = owner) != null && !wt.isInterrupted()) {
- try {
- wt.interrupt();
- } catch (SecurityException ignore) {
- }
- }
- if ((p = parker) != null)
- U.unpark(p);
- }
-
// Unsafe mechanics
private static final sun.misc.Unsafe U;
- private static final long RUNSTATE;
+ private static final long QLOCK;
private static final int ABASE;
private static final int ASHIFT;
static {
- int s;
try {
U = getUnsafe();
Class<?> k = WorkQueue.class;
Class<?> ak = ForkJoinTask[].class;
- RUNSTATE = U.objectFieldOffset
- (k.getDeclaredField("runState"));
+ QLOCK = U.objectFieldOffset
+ (k.getDeclaredField("qlock"));
ABASE = U.arrayBaseOffset(ak);
- s = U.arrayIndexScale(ak);
+ int scale = U.arrayIndexScale(ak);
+ if ((scale & (scale - 1)) != 0)
+ throw new Error("data type scale not a power of two");
+ ASHIFT = 31 - Integer.numberOfLeadingZeros(scale);
} catch (Exception e) {
throw new Error(e);
}
- if ((s & (s-1)) != 0)
- throw new Error("data type scale not a power of two");
- ASHIFT = 31 - Integer.numberOfLeadingZeros(s);
- }
- }
- /**
- * Per-thread records for threads that submit to pools. Currently
- * holds only pseudo-random seed / index that is used to choose
- * submission queues in method doSubmit. In the future, this may
- * also incorporate a means to implement different task rejection
- * and resubmission policies.
- *
- * Seeds for submitters and workers/workQueues work in basically
- * the same way but are initialized and updated using slightly
- * different mechanics. Both are initialized using the same
- * approach as in class ThreadLocal, where successive values are
- * unlikely to collide with previous values. This is done during
- * registration for workers, but requires a separate AtomicInteger
- * for submitters. Seeds are then randomly modified upon
- * collisions using xorshifts, which requires a non-zero seed.
- */
- static final class Submitter {
- int seed;
- Submitter() {
- int s = nextSubmitterSeed.getAndAdd(SEED_INCREMENT);
- seed = (s == 0) ? 1 : s; // ensure non-zero
}
}
- /** ThreadLocal class for Submitters */
- static final class ThreadSubmitter extends ThreadLocal<Submitter> {
- public Submitter initialValue() { return new Submitter(); }
- }
-
// static fields (initialized in static initializer below)
/**
@@ -1086,15 +1404,13 @@ public class ForkJoinPool extends AbstractExecutorService {
defaultForkJoinWorkerThreadFactory;
/**
- * Generator for assigning sequence numbers as pool names.
- */
- private static final AtomicInteger poolNumberGenerator;
-
- /**
- * Generator for initial hashes/seeds for submitters. Accessed by
- * Submitter class constructor.
+ * Per-thread submission bookkeeping. Shared across all pools
+ * to reduce ThreadLocal pollution and because random motion
+ * to avoid contention in one pool is likely to hold for others.
+ * Lazily initialized on first submission (but null-checked
+ * in other contexts to avoid unnecessary initialization).
*/
- static final AtomicInteger nextSubmitterSeed;
+ static final ThreadLocal<Submitter> submitters;
/**
* Permission required for callers of methods that may start or
@@ -1103,37 +1419,56 @@ public class ForkJoinPool extends AbstractExecutorService {
private static final RuntimePermission modifyThreadPermission;
/**
- * Per-thread submission bookeeping. Shared across all pools
- * to reduce ThreadLocal pollution and because random motion
- * to avoid contention in one pool is likely to hold for others.
+ * Common (static) pool. Non-null for public use unless a static
+ * construction exception, but internal usages null-check on use
+ * to paranoically avoid potential initialization circularities
+ * as well as to simplify generated code.
*/
- private static final ThreadSubmitter submitters;
+ static final ForkJoinPool common;
+
+ /**
+ * Common pool parallelism. Must equal common.parallelism.
+ */
+ static final int commonParallelism;
+
+ /**
+ * Sequence number for creating workerNamePrefix.
+ */
+ private static int poolNumberSequence;
+
+ /**
+ * Returns the next sequence number. We don't expect this to
+ * ever contend, so use simple builtin sync.
+ */
+ private static final synchronized int nextPoolId() {
+ return ++poolNumberSequence;
+ }
// static constants
/**
- * The wakeup interval (in nanoseconds) for a worker waiting for a
- * task when the pool is quiescent to instead try to shrink the
- * number of workers. The exact value does not matter too
- * much. It must be short enough to release resources during
- * sustained periods of idleness, but not so short that threads
- * are continually re-created.
+ * Initial timeout value (in nanoseconds) for the thread
+ * triggering quiescence to park waiting for new work. On timeout,
+ * the thread will instead try to shrink the number of
+ * workers. The value should be large enough to avoid overly
+ * aggressive shrinkage during most transient stalls (long GCs
+ * etc).
+ */
+ private static final long IDLE_TIMEOUT = 2000L * 1000L * 1000L; // 2sec
+
+ /**
+ * Timeout value when there are more threads than parallelism level
*/
- private static final long SHRINK_RATE =
- 4L * 1000L * 1000L * 1000L; // 4 seconds
+ private static final long FAST_IDLE_TIMEOUT = 200L * 1000L * 1000L;
/**
- * The timeout value for attempted shrinkage, includes
- * some slop to cope with system timer imprecision.
+ * Tolerance for idle timeouts, to cope with timer undershoots
*/
- private static final long SHRINK_TIMEOUT = SHRINK_RATE - (SHRINK_RATE / 10);
+ private static final long TIMEOUT_SLOP = 2000000L;
/**
* The maximum stolen->joining link depth allowed in method
- * tryHelpStealer. Must be a power of two. This value also
- * controls the maximum number of times to try to help join a task
- * without any apparent progress or change in pool state before
- * giving up and blocking (see awaitJoin). Depths for legitimate
+ * tryHelpStealer. Must be a power of two. Depths for legitimate
* chains are unbounded, but we use a fixed constant to avoid
* (otherwise unchecked) cycles and to bound staleness of
* traversal parameters at the expense of sometimes blocking when
@@ -1142,22 +1477,12 @@ public class ForkJoinPool extends AbstractExecutorService {
private static final int MAX_HELP = 64;
/**
- * Secondary time-based bound (in nanosecs) for helping attempts
- * before trying compensated blocking in awaitJoin. Used in
- * conjunction with MAX_HELP to reduce variance due to different
- * polling rates associated with different helping options. The
- * value should roughly approximate the time required to create
- * and/or activate a worker thread.
- */
- private static final long COMPENSATION_DELAY = 1L << 18; // ~0.25 millisec
-
- /**
* Increment for seed generators. See class ThreadLocal for
* explanation.
*/
private static final int SEED_INCREMENT = 0x61c88647;
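As a rough illustration of why this increment is used (a sketch, not pool code): successive seeds spaced by 0x61c88647 land on well-scattered slots of a small power-of-two table rather than clustering.

// Sketch: print the slots that successive SEED_INCREMENT-spaced seeds
// occupy in a 16-slot table; they cover the table without clustering.
final class SeedIncrementDemo {
    public static void main(String[] args) {
        int s = 0;
        for (int i = 0; i < 8; i++) {
            s += 0x61c88647;
            System.out.print((s & 15) + " ");
        }
        System.out.println();
    }
}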
- /**
+ /*
* Bits and masks for control variables
*
* Field ctl is a long packed with:
@@ -1185,14 +1510,14 @@ public class ForkJoinPool extends AbstractExecutorService {
* scan for them to avoid queuing races. Note however that
* eventCount updates lag releases so usage requires care.
*
- * Field runState is an int packed with:
+ * Field plock is an int packed with:
* SHUTDOWN: true if shutdown is enabled (1 bit)
- * SEQ: a sequence number updated upon (de)registering workers (30 bits)
- * INIT: set true after workQueues array construction (1 bit)
+ * SEQ: a sequence lock, with PL_LOCK bit set if locked (30 bits)
+ * SIGNAL: set when threads may be waiting on the lock (1 bit)
*
* The sequence number enables simple consistency checks:
* Staleness of read-only operations on the workQueues array can
- * be checked by comparing runState before vs after the reads.
+ * be checked by comparing plock before vs after the reads.
*/
// bit positions/shifts for fields
@@ -1204,7 +1529,8 @@ public class ForkJoinPool extends AbstractExecutorService {
// bounds
private static final int SMASK = 0xffff; // short bits
private static final int MAX_CAP = 0x7fff; // max #workers - 1
- private static final int SQMASK = 0xfffe; // even short bits
+ private static final int EVENMASK = 0xfffe; // even short bits
+ private static final int SQMASK = 0x007e; // max 64 (even) slots
private static final int SHORT_SIGN = 1 << 15;
private static final int INT_SIGN = 1 << 31;
@@ -1229,91 +1555,164 @@ public class ForkJoinPool extends AbstractExecutorService {
private static final int E_MASK = 0x7fffffff; // no STOP_BIT
private static final int E_SEQ = 1 << EC_SHIFT;
- // runState bits
+ // plock bits
private static final int SHUTDOWN = 1 << 31;
+ private static final int PL_LOCK = 2;
+ private static final int PL_SIGNAL = 1;
+ private static final int PL_SPINS = 1 << 8;
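A small stand-alone sketch (illustrative names only, not the pool's code) of how a plock-style word with a lock bit, signal bit, shutdown bit, and sequence field behaves across one lock/unlock cycle:

// Sketch of a packed lock/sequence word: bit 1 = lock, bit 0 = signal,
// sign bit = shutdown, remaining bits a sequence advanced per lock cycle.
final class PackedLockDemo {
    static final int LOCK = 2, SIGNAL = 1, SHUTDOWN = 1 << 31;

    static boolean locked(int p)  { return (p & LOCK) != 0; }
    static int acquire(int p)     { return p + LOCK; }   // from an unlocked word
    static int release(int p)     { return (p & SHUTDOWN) | ((p + LOCK) & ~SHUTDOWN); }

    public static void main(String[] args) {
        int p = 0;
        p = acquire(p);   // locked
        p = release(p);   // unlocked, sequence advanced, shutdown bit preserved
        System.out.println(locked(p) + " " + p);   // prints "false 4"
    }
}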
// access mode for WorkQueue
static final int LIFO_QUEUE = 0;
static final int FIFO_QUEUE = 1;
static final int SHARED_QUEUE = -1;
+ // bounds for #steps in scan loop -- must be a power of 2 minus 1
+ private static final int MIN_SCAN = 0x1ff; // cover estimation slop
+ private static final int MAX_SCAN = 0x1ffff; // 4 * max workers
+
// Instance fields
/*
- * Field layout order in this class tends to matter more than one
- * would like. Runtime layout order is only loosely related to
+ * Field layout of this class tends to matter more than one would
+ * like. Runtime layout order is only loosely related to
* declaration order and may differ across JVMs, but the following
* empirically works OK on current JVMs.
*/
+ // Heuristic padding to ameliorate unfortunate memory placements
+ volatile long pad00, pad01, pad02, pad03, pad04, pad05, pad06;
+
+ volatile long stealCount; // collects worker counts
volatile long ctl; // main pool control
- final int parallelism; // parallelism level
- final int localMode; // per-worker scheduling mode
- final int submitMask; // submit queue index bound
- int nextSeed; // for initializing worker seeds
- volatile int runState; // shutdown status and seq
+ volatile int plock; // shutdown status and seqLock
+ volatile int indexSeed; // worker/submitter index seed
+ final int config; // mode and parallelism level
WorkQueue[] workQueues; // main registry
- final Mutex lock; // for registration
- final Condition termination; // for awaitTermination
- final ForkJoinWorkerThreadFactory factory; // factory for new workers
+ final ForkJoinWorkerThreadFactory factory;
final Thread.UncaughtExceptionHandler ueh; // per-worker UEH
- final AtomicLong stealCount; // collect counts when terminated
- final AtomicInteger nextWorkerNumber; // to create worker name string
final String workerNamePrefix; // to create worker name string
- // Creating, registering, and deregistering workers
+ volatile Object pad10, pad11, pad12, pad13, pad14, pad15, pad16, pad17;
+ volatile Object pad18, pad19, pad1a, pad1b;
+
+ /**
+ * Acquires the plock lock to protect worker array and related
+ * updates. This method is called only if an initial CAS on plock
+ * fails. This acts as a spinlock for normal cases, but falls back
+ * to builtin monitor to block when (rarely) needed. This would be
+ * a terrible idea for a highly contended lock, but works fine as
+ * a more conservative alternative to a pure spinlock.
+ */
+ private int acquirePlock() {
+ int spins = PL_SPINS, r = 0, ps, nps;
+ for (;;) {
+ if (((ps = plock) & PL_LOCK) == 0 &&
+ U.compareAndSwapInt(this, PLOCK, ps, nps = ps + PL_LOCK))
+ return nps;
+ else if (r == 0) { // randomize spins if possible
+ Thread t = Thread.currentThread(); WorkQueue w; Submitter z;
+ if ((t instanceof ForkJoinWorkerThread) &&
+ (w = ((ForkJoinWorkerThread)t).workQueue) != null)
+ r = w.seed;
+ else if ((z = submitters.get()) != null)
+ r = z.seed;
+ else
+ r = 1;
+ }
+ else if (spins >= 0) {
+ r ^= r << 1; r ^= r >>> 3; r ^= r << 10; // xorshift
+ if (r >= 0)
+ --spins;
+ }
+ else if (U.compareAndSwapInt(this, PLOCK, ps, ps | PL_SIGNAL)) {
+ synchronized (this) {
+ if ((plock & PL_SIGNAL) != 0) {
+ try {
+ wait();
+ } catch (InterruptedException ie) {
+ try {
+ Thread.currentThread().interrupt();
+ } catch (SecurityException ignore) {
+ }
+ }
+ }
+ else
+ notifyAll();
+ }
+ }
+ }
+ }
/**
- * Tries to create and start a worker
+ * Unlocks and signals any thread waiting for plock. Called only
+ * when CAS of seq value for unlock fails.
*/
- private void addWorker() {
- Throwable ex = null;
- ForkJoinWorkerThread wt = null;
- try {
- if ((wt = factory.newThread(this)) != null) {
- wt.start();
- return;
- }
- } catch (Throwable e) {
- ex = e;
- }
- deregisterWorker(wt, ex); // adjust counts etc on failure
+ private void releasePlock(int ps) {
+ plock = ps;
+ synchronized (this) { notifyAll(); }
}
/**
- * Callback from ForkJoinWorkerThread constructor to assign a
- * public name. This must be separate from registerWorker because
- * it is called during the "super" constructor call in
- * ForkJoinWorkerThread.
+ * Tries to create and start one worker if fewer than the target
+ * parallelism level exist. Adjusts counts etc on failure.
*/
- final String nextWorkerName() {
- return workerNamePrefix.concat
- (Integer.toString(nextWorkerNumber.addAndGet(1)));
+ private void tryAddWorker() {
+ long c; int u;
+ while ((u = (int)((c = ctl) >>> 32)) < 0 &&
+ (u & SHORT_SIGN) != 0 && (int)c == 0) {
+ long nc = (long)(((u + UTC_UNIT) & UTC_MASK) |
+ ((u + UAC_UNIT) & UAC_MASK)) << 32;
+ if (U.compareAndSwapLong(this, CTL, c, nc)) {
+ ForkJoinWorkerThreadFactory fac;
+ Throwable ex = null;
+ ForkJoinWorkerThread wt = null;
+ try {
+ if ((fac = factory) != null &&
+ (wt = fac.newThread(this)) != null) {
+ wt.start();
+ break;
+ }
+ } catch (Throwable e) {
+ ex = e;
+ }
+ deregisterWorker(wt, ex);
+ break;
+ }
+ }
}
+ // Registering and deregistering workers
+
/**
- * Callback from ForkJoinWorkerThread constructor to establish its
- * poolIndex and record its WorkQueue. To avoid scanning bias due
- * to packing entries in front of the workQueues array, we treat
- * the array as a simple power-of-two hash table using per-thread
- * seed as hash, expanding as needed.
+ * Callback from ForkJoinWorkerThread to establish and record its
+ * WorkQueue. To avoid scanning bias due to packing entries in
+ * front of the workQueues array, we treat the array as a simple
+ * power-of-two hash table using per-thread seed as hash,
+ * expanding as needed.
*
- * @param w the worker's queue
+ * @param wt the worker thread
+ * @return the worker's queue
*/
-
- final void registerWorker(WorkQueue w) {
- Mutex lock = this.lock;
- lock.lock();
+ final WorkQueue registerWorker(ForkJoinWorkerThread wt) {
+ Thread.UncaughtExceptionHandler handler; WorkQueue[] ws; int s, ps;
+ wt.setDaemon(true);
+ if ((handler = ueh) != null)
+ wt.setUncaughtExceptionHandler(handler);
+ do {} while (!U.compareAndSwapInt(this, INDEXSEED, s = indexSeed,
+ s += SEED_INCREMENT) ||
+ s == 0); // skip 0
+ WorkQueue w = new WorkQueue(this, wt, config >>> 16, s);
+ if (((ps = plock) & PL_LOCK) != 0 ||
+ !U.compareAndSwapInt(this, PLOCK, ps, ps += PL_LOCK))
+ ps = acquirePlock();
+ int nps = (ps & SHUTDOWN) | ((ps + PL_LOCK) & ~SHUTDOWN);
try {
- WorkQueue[] ws = workQueues;
- if (w != null && ws != null) { // skip on shutdown/failure
- int rs, n = ws.length, m = n - 1;
- int s = nextSeed += SEED_INCREMENT; // rarely-colliding sequence
- w.seed = (s == 0) ? 1 : s; // ensure non-zero seed
- int r = (s << 1) | 1; // use odd-numbered indices
- if (ws[r &= m] != null) { // collision
- int probes = 0; // step by approx half size
- int step = (n <= 4) ? 2 : ((n >>> 1) & SQMASK) + 2;
+ if ((ws = workQueues) != null) { // skip if shutting down
+ int n = ws.length, m = n - 1;
+ int r = (s << 1) | 1; // use odd-numbered indices
+ if (ws[r &= m] != null) { // collision
+ int probes = 0; // step by approx half size
+ int step = (n <= 4) ? 2 : ((n >>> 1) & EVENMASK) + 2;
while (ws[r = (r + step) & m] != null) {
if (++probes >= n) {
workQueues = ws = Arrays.copyOf(ws, n <<= 1);
@@ -1322,104 +1721,206 @@ public class ForkJoinPool extends AbstractExecutorService {
}
}
}
- w.eventCount = w.poolIndex = r; // establish before recording
- ws[r] = w; // also update seq
- runState = ((rs = runState) & SHUTDOWN) | ((rs + 2) & ~SHUTDOWN);
+ w.eventCount = w.poolIndex = r; // volatile write orders
+ ws[r] = w;
}
} finally {
- lock.unlock();
+ if (!U.compareAndSwapInt(this, PLOCK, ps, nps))
+ releasePlock(nps);
}
+ wt.setName(workerNamePrefix.concat(Integer.toString(w.poolIndex)));
+ return w;
}
/**
* Final callback from terminating worker, as well as upon failure
- * to construct or start a worker in addWorker. Removes record of
- * worker from array, and adjusts counts. If pool is shutting
- * down, tries to complete termination.
+ * to construct or start a worker. Removes record of worker from
+ * array, and adjusts counts. If pool is shutting down, tries to
+ * complete termination.
*
- * @param wt the worker thread or null if addWorker failed
+ * @param wt the worker thread or null if construction failed
* @param ex the exception causing failure, or null if none
*/
final void deregisterWorker(ForkJoinWorkerThread wt, Throwable ex) {
- Mutex lock = this.lock;
WorkQueue w = null;
if (wt != null && (w = wt.workQueue) != null) {
- w.runState = -1; // ensure runState is set
- stealCount.getAndAdd(w.totalSteals + w.nsteals);
- int idx = w.poolIndex;
- lock.lock();
- try { // remove record from array
+ int ps;
+ w.qlock = -1; // ensure set
+ long ns = w.nsteals, sc; // collect steal count
+ do {} while (!U.compareAndSwapLong(this, STEALCOUNT,
+ sc = stealCount, sc + ns));
+ if (((ps = plock) & PL_LOCK) != 0 ||
+ !U.compareAndSwapInt(this, PLOCK, ps, ps += PL_LOCK))
+ ps = acquirePlock();
+ int nps = (ps & SHUTDOWN) | ((ps + PL_LOCK) & ~SHUTDOWN);
+ try {
+ int idx = w.poolIndex;
WorkQueue[] ws = workQueues;
if (ws != null && idx >= 0 && idx < ws.length && ws[idx] == w)
ws[idx] = null;
} finally {
- lock.unlock();
+ if (!U.compareAndSwapInt(this, PLOCK, ps, nps))
+ releasePlock(nps);
}
}
- long c; // adjust ctl counts
+ long c; // adjust ctl counts
do {} while (!U.compareAndSwapLong
(this, CTL, c = ctl, (((c - AC_UNIT) & AC_MASK) |
((c - TC_UNIT) & TC_MASK) |
(c & ~(AC_MASK|TC_MASK)))));
- if (!tryTerminate(false, false) && w != null) {
- w.cancelAll(); // cancel remaining tasks
- if (w.array != null) // suppress signal if never ran
- signalWork(); // wake up or create replacement
- if (ex == null) // help clean refs on way out
- ForkJoinTask.helpExpungeStaleExceptions();
+ if (!tryTerminate(false, false) && w != null && w.array != null) {
+ w.cancelAll(); // cancel remaining tasks
+ WorkQueue[] ws; WorkQueue v; Thread p; int u, i, e;
+ while ((u = (int)((c = ctl) >>> 32)) < 0 && (e = (int)c) >= 0) {
+ if (e > 0) { // activate or create replacement
+ if ((ws = workQueues) == null ||
+ (i = e & SMASK) >= ws.length ||
+ (v = ws[i]) == null)
+ break;
+ long nc = (((long)(v.nextWait & E_MASK)) |
+ ((long)(u + UAC_UNIT) << 32));
+ if (v.eventCount != (e | INT_SIGN))
+ break;
+ if (U.compareAndSwapLong(this, CTL, c, nc)) {
+ v.eventCount = (e + E_SEQ) & E_MASK;
+ if ((p = v.parker) != null)
+ U.unpark(p);
+ break;
+ }
+ }
+ else {
+ if ((short)u < 0)
+ tryAddWorker();
+ break;
+ }
+ }
}
-
- if (ex != null) // rethrow
+ if (ex == null) // help clean refs on way out
+ ForkJoinTask.helpExpungeStaleExceptions();
+ else // rethrow
ForkJoinTask.rethrow(ex);
}
-
// Submissions
/**
* Unless shutting down, adds the given task to a submission queue
* at submitter's current queue index (modulo submission
- * range). If no queue exists at the index, one is created. If
- * the queue is busy, another index is randomly chosen. The
- * submitMask bounds the effective number of queues to the
- * (nearest power of two for) parallelism level.
+ * range). Only the most common path is directly handled in this
+ * method. All others are relayed to fullExternalPush.
*
* @param task the task. Caller must ensure non-null.
*/
- private void doSubmit(ForkJoinTask<?> task) {
- Submitter s = submitters.get();
- for (int r = s.seed, m = submitMask;;) {
- WorkQueue[] ws; WorkQueue q;
- int k = r & m & SQMASK; // use only even indices
- if (runState < 0 || (ws = workQueues) == null || ws.length <= k)
- throw new RejectedExecutionException(); // shutting down
- else if ((q = ws[k]) == null) { // create new queue
- WorkQueue nq = new WorkQueue(this, null, SHARED_QUEUE);
- Mutex lock = this.lock; // construct outside lock
- lock.lock();
- try { // recheck under lock
- int rs = runState; // to update seq
- if (ws == workQueues && ws[k] == null) {
- ws[k] = nq;
- runState = ((rs & SHUTDOWN) | ((rs + 2) & ~SHUTDOWN));
- }
- } finally {
- lock.unlock();
- }
- }
- else if (q.trySharedPush(task)) {
- signalWork();
+ final void externalPush(ForkJoinTask<?> task) {
+ WorkQueue[] ws; WorkQueue q; Submitter z; int m; ForkJoinTask<?>[] a;
+ if ((z = submitters.get()) != null && plock > 0 &&
+ (ws = workQueues) != null && (m = (ws.length - 1)) >= 0 &&
+ (q = ws[m & z.seed & SQMASK]) != null &&
+ U.compareAndSwapInt(q, QLOCK, 0, 1)) { // lock
+ int b = q.base, s = q.top, n, an;
+ if ((a = q.array) != null && (an = a.length) > (n = s + 1 - b)) {
+ int j = (((an - 1) & s) << ASHIFT) + ABASE;
+ U.putOrderedObject(a, j, task);
+ q.top = s + 1; // push on to deque
+ q.qlock = 0;
+ if (n <= 2)
+ signalWork(q);
return;
}
- else if (m > 1) { // move to a different index
- r ^= r << 13; // same xorshift as WorkQueues
+ q.qlock = 0;
+ }
+ fullExternalPush(task);
+ }
+
+ /**
+ * Full version of externalPush. This method is called, among
+ * other times, upon the first submission of the first task to the
+ * pool, so must perform secondary initialization. It also
+ * detects first submission by an external thread by looking up
+ * its ThreadLocal, and creates a new shared queue if the one at
+ * index if empty or contended. The plock lock body must be
+ * exception-free (so no try/finally) so we optimistically
+ * allocate new queues outside the lock and throw them away if
+ * (very rarely) not needed.
+ *
+ * Secondary initialization occurs when plock is zero, to create
+ * workQueue array and set plock to a valid value. This lock body
+ * must also be exception-free. Because the plock seq value can
+ * eventually wrap around zero, this method harmlessly fails to
+ * reinitialize if workQueues exists, while still advancing plock.
+ */
+ private void fullExternalPush(ForkJoinTask<?> task) {
+ int r = 0; // random index seed
+ for (Submitter z = submitters.get();;) {
+ WorkQueue[] ws; WorkQueue q; int ps, m, k;
+ if (z == null) {
+ if (U.compareAndSwapInt(this, INDEXSEED, r = indexSeed,
+ r += SEED_INCREMENT) && r != 0)
+ submitters.set(z = new Submitter(r));
+ }
+ else if (r == 0) { // move to a different index
+ r = z.seed;
+ r ^= r << 13; // same xorshift as WorkQueues
r ^= r >>> 17;
- s.seed = r ^= r << 5;
+ z.seed = r ^ (r << 5);
+ }
+ else if ((ps = plock) < 0)
+ throw new RejectedExecutionException();
+ else if (ps == 0 || (ws = workQueues) == null ||
+ (m = ws.length - 1) < 0) { // initialize workQueues
+ int p = config & SMASK; // find power of two table size
+ int n = (p > 1) ? p - 1 : 1; // ensure at least 2 slots
+ n |= n >>> 1; n |= n >>> 2; n |= n >>> 4;
+ n |= n >>> 8; n |= n >>> 16; n = (n + 1) << 1;
+ WorkQueue[] nws = ((ws = workQueues) == null || ws.length == 0 ?
+ new WorkQueue[n] : null);
+ if (((ps = plock) & PL_LOCK) != 0 ||
+ !U.compareAndSwapInt(this, PLOCK, ps, ps += PL_LOCK))
+ ps = acquirePlock();
+ if (((ws = workQueues) == null || ws.length == 0) && nws != null)
+ workQueues = nws;
+ int nps = (ps & SHUTDOWN) | ((ps + PL_LOCK) & ~SHUTDOWN);
+ if (!U.compareAndSwapInt(this, PLOCK, ps, nps))
+ releasePlock(nps);
+ }
+ else if ((q = ws[k = r & m & SQMASK]) != null) {
+ if (q.qlock == 0 && U.compareAndSwapInt(q, QLOCK, 0, 1)) {
+ ForkJoinTask<?>[] a = q.array;
+ int s = q.top;
+ boolean submitted = false;
+ try { // locked version of push
+ if ((a != null && a.length > s + 1 - q.base) ||
+ (a = q.growArray()) != null) { // must presize
+ int j = (((a.length - 1) & s) << ASHIFT) + ABASE;
+ U.putOrderedObject(a, j, task);
+ q.top = s + 1;
+ submitted = true;
+ }
+ } finally {
+ q.qlock = 0; // unlock
+ }
+ if (submitted) {
+ signalWork(q);
+ return;
+ }
+ }
+ r = 0; // move on failure
+ }
+ else if (((ps = plock) & PL_LOCK) == 0) { // create new queue
+ q = new WorkQueue(this, null, SHARED_QUEUE, r);
+ if (((ps = plock) & PL_LOCK) != 0 ||
+ !U.compareAndSwapInt(this, PLOCK, ps, ps += PL_LOCK))
+ ps = acquirePlock();
+ if ((ws = workQueues) != null && k < ws.length && ws[k] == null)
+ ws[k] = q;
+ int nps = (ps & SHUTDOWN) | ((ps + PL_LOCK) & ~SHUTDOWN);
+ if (!U.compareAndSwapInt(this, PLOCK, ps, nps))
+ releasePlock(nps);
}
else
- Thread.yield(); // yield if no alternatives
+ r = 0; // try elsewhere while lock held
}
}
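The workQueues sizing step above rounds the parallelism level up to a power of two with the classic bit-smearing trick; a stand-alone sketch (not pool code):

// Sketch of the bit-smearing round-up used when sizing workQueues: smear
// the highest set bit downward, then add one to reach the next power of two
// (the pool then doubles it once more to leave room for submission slots).
final class TableSizeDemo {
    static int roundUpPowerOfTwo(int p) {
        int n = (p > 1) ? p - 1 : 1;
        n |= n >>> 1; n |= n >>> 2; n |= n >>> 4;
        n |= n >>> 8; n |= n >>> 16;
        return n + 1;
    }

    public static void main(String[] args) {
        System.out.println(roundUpPowerOfTwo(6));   // 8
        System.out.println(roundUpPowerOfTwo(16));  // 16
    }
}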
@@ -1434,37 +1935,37 @@ public class ForkJoinPool extends AbstractExecutorService {
}
/**
- * Tries to activate or create a worker if too few are active.
+ * Tries to create or activate a worker if too few are active.
+ *
+ * @param q the (non-null) queue holding tasks to be signalled
*/
- final void signalWork() {
- long c; int u;
- while ((u = (int)((c = ctl) >>> 32)) < 0) { // too few active
- WorkQueue[] ws = workQueues; int e, i; WorkQueue w; Thread p;
- if ((e = (int)c) > 0) { // at least one waiting
- if (ws != null && (i = e & SMASK) < ws.length &&
+ final void signalWork(WorkQueue q) {
+ int hint = q.poolIndex;
+ long c; int e, u, i, n; WorkQueue[] ws; WorkQueue w; Thread p;
+ while ((u = (int)((c = ctl) >>> 32)) < 0) {
+ if ((e = (int)c) > 0) {
+ if ((ws = workQueues) != null && ws.length > (i = e & SMASK) &&
(w = ws[i]) != null && w.eventCount == (e | INT_SIGN)) {
long nc = (((long)(w.nextWait & E_MASK)) |
((long)(u + UAC_UNIT) << 32));
if (U.compareAndSwapLong(this, CTL, c, nc)) {
+ w.hint = hint;
w.eventCount = (e + E_SEQ) & E_MASK;
if ((p = w.parker) != null)
- U.unpark(p); // activate and release
+ U.unpark(p);
break;
}
+ if (q.top - q.base <= 0)
+ break;
}
else
break;
}
- else if (e == 0 && (u & SHORT_SIGN) != 0) { // too few total
- long nc = (long)(((u + UTC_UNIT) & UTC_MASK) |
- ((u + UAC_UNIT) & UAC_MASK)) << 32;
- if (U.compareAndSwapLong(this, CTL, c, nc)) {
- addWorker();
- break;
- }
- }
- else
+ else {
+ if ((short)u < 0)
+ tryAddWorker();
break;
+ }
}
}
@@ -1474,8 +1975,8 @@ public class ForkJoinPool extends AbstractExecutorService {
* Top-level runloop for workers, called by ForkJoinWorkerThread.run.
*/
final void runWorker(WorkQueue w) {
- w.growArray(false); // initialize queue array in this thread
- do { w.runTask(scan(w)); } while (w.runState >= 0);
+ w.growArray(); // allocate queue
+ do { w.runTask(scan(w)); } while (w.qlock >= 0);
}
/**
@@ -1486,116 +1987,122 @@ public class ForkJoinPool extends AbstractExecutorService {
* contention, or state changes that indicate possible success on
* re-invocation.
*
- * The scan searches for tasks across a random permutation of
- * queues (starting at a random index and stepping by a random
- * relative prime, checking each at least once). The scan
- * terminates upon either finding a non-empty queue, or completing
- * the sweep. If the worker is not inactivated, it takes and
- * returns a task from this queue. On failure to find a task, we
- * take one of the following actions, after which the caller will
- * retry calling this method unless terminated.
+ * The scan searches for tasks across queues (starting at a random
+ * index, and relying on registerWorker to irregularly scatter
+ * them within array to avoid bias), checking each at least twice.
+ * The scan terminates upon either finding a non-empty queue, or
+ * completing the sweep. If the worker is not inactivated, it
+ * takes and returns a task from this queue. Otherwise, if not
+ * activated, it signals workers (that may include itself) and
+ * returns so caller can retry. Also returns if the
+ * worker array may have changed during an empty scan. On failure
+ * to find a task, we take one of the following actions, after
+ * which the caller will retry calling this method unless
+ * terminated.
*
* * If pool is terminating, terminate the worker.
*
- * * If not a complete sweep, try to release a waiting worker. If
- * the scan terminated because the worker is inactivated, then the
- * released worker will often be the calling worker, and it can
- * succeed obtaining a task on the next call. Or maybe it is
- * another worker, but with same net effect. Releasing in other
- * cases as well ensures that we have enough workers running.
- *
* * If not already enqueued, try to inactivate and enqueue the
* worker on wait queue. Or, if inactivating has caused the pool
- * to be quiescent, relay to idleAwaitWork to check for
- * termination and possibly shrink pool.
+ * to be quiescent, relay to idleAwaitWork to possibly shrink
+ * pool.
*
- * * If already inactive, and the caller has run a task since the
- * last empty scan, return (to allow rescan) unless others are
- * also inactivated. Field WorkQueue.rescans counts down on each
- * scan to ensure eventual inactivation and blocking.
+ * * If already enqueued and none of the above apply, possibly
+ * park awaiting signal, else lingering to help scan and signal.
*
- * * If already enqueued and none of the above apply, park
- * awaiting signal,
+ * * If a non-empty queue is discovered or left as a hint,
+ * help wake up other workers before return.
*
* @param w the worker (via its WorkQueue)
- * @return a task or null of none found
+ * @return a task or null if none found
*/
private final ForkJoinTask<?> scan(WorkQueue w) {
- WorkQueue[] ws; // first update random seed
- int r = w.seed; r ^= r << 13; r ^= r >>> 17; w.seed = r ^= r << 5;
- int rs = runState, m; // volatile read order matters
- if ((ws = workQueues) != null && (m = ws.length - 1) > 0) {
- int ec = w.eventCount; // ec is negative if inactive
- int step = (r >>> 16) | 1; // relative prime
- for (int j = (m + 1) << 2; ; r += step) {
- WorkQueue q; ForkJoinTask<?> t; ForkJoinTask<?>[] a; int b;
- if ((q = ws[r & m]) != null && (b = q.base) - q.top < 0 &&
- (a = q.array) != null) { // probably nonempty
+ WorkQueue[] ws; int m;
+ int ps = plock; // read plock before ws
+ if (w != null && (ws = workQueues) != null && (m = ws.length - 1) >= 0) {
+ int ec = w.eventCount; // ec is negative if inactive
+ int r = w.seed; r ^= r << 13; r ^= r >>> 17; w.seed = r ^= r << 5;
+ w.hint = -1; // update seed and clear hint
+ int j = ((m + m + 1) | MIN_SCAN) & MAX_SCAN;
+ do {
+ WorkQueue q; ForkJoinTask<?>[] a; int b;
+ if ((q = ws[(r + j) & m]) != null && (b = q.base) - q.top < 0 &&
+ (a = q.array) != null) { // probably nonempty
int i = (((a.length - 1) & b) << ASHIFT) + ABASE;
- t = (ForkJoinTask<?>)U.getObjectVolatile(a, i);
+ ForkJoinTask<?> t = (ForkJoinTask<?>)
+ U.getObjectVolatile(a, i);
if (q.base == b && ec >= 0 && t != null &&
U.compareAndSwapObject(a, i, t, null)) {
- q.base = b + 1; // specialization of pollAt
- return t;
+ if ((q.base = b + 1) - q.top < 0)
+ signalWork(q);
+ return t; // taken
}
- else if (ec < 0 || j <= m) {
- rs = 0; // mark scan as imcomplete
- break; // caller can retry after release
+ else if ((ec < 0 || j < m) && (int)(ctl >> AC_SHIFT) <= 0) {
+ w.hint = (r + j) & m; // help signal below
+ break; // cannot take
}
}
- if (--j < 0)
- break;
- }
+ } while (--j >= 0);
- long c = ctl; int e = (int)c, a = (int)(c >> AC_SHIFT), nr, ns;
- if (e < 0) // decode ctl on empty scan
- w.runState = -1; // pool is terminating
- else if (rs == 0 || rs != runState) { // incomplete scan
- WorkQueue v; Thread p; // try to release a waiter
- if (e > 0 && a < 0 && w.eventCount == ec &&
- (v = ws[e & m]) != null && v.eventCount == (e | INT_SIGN)) {
- long nc = ((long)(v.nextWait & E_MASK) |
- ((c + AC_UNIT) & (AC_MASK|TC_MASK)));
- if (ctl == c && U.compareAndSwapLong(this, CTL, c, nc)) {
+ int h, e, ns; long c, sc; WorkQueue q;
+ if ((ns = w.nsteals) != 0) {
+ if (U.compareAndSwapLong(this, STEALCOUNT,
+ sc = stealCount, sc + ns))
+ w.nsteals = 0; // collect steals and rescan
+ }
+ else if (plock != ps) // consistency check
+ ; // skip
+ else if ((e = (int)(c = ctl)) < 0)
+ w.qlock = -1; // pool is terminating
+ else {
+ if ((h = w.hint) < 0) {
+ if (ec >= 0) { // try to enqueue/inactivate
+ long nc = (((long)ec |
+ ((c - AC_UNIT) & (AC_MASK|TC_MASK))));
+ w.nextWait = e; // link and mark inactive
+ w.eventCount = ec | INT_SIGN;
+ if (ctl != c || !U.compareAndSwapLong(this, CTL, c, nc))
+ w.eventCount = ec; // unmark on CAS failure
+ else if ((int)(c >> AC_SHIFT) == 1 - (config & SMASK))
+ idleAwaitWork(w, nc, c);
+ }
+ else if (w.eventCount < 0 && ctl == c) {
+ Thread wt = Thread.currentThread();
+ Thread.interrupted(); // clear status
+ U.putObject(wt, PARKBLOCKER, this);
+ w.parker = wt; // emulate LockSupport.park
+ if (w.eventCount < 0) // recheck
+ U.park(false, 0L); // block
+ w.parker = null;
+ U.putObject(wt, PARKBLOCKER, null);
+ }
+ }
+ if ((h >= 0 || (h = w.hint) >= 0) &&
+ (ws = workQueues) != null && h < ws.length &&
+ (q = ws[h]) != null) { // signal others before retry
+ WorkQueue v; Thread p; int u, i, s;
+ for (int n = (config & SMASK) - 1;;) {
+ int idleCount = (w.eventCount < 0) ? 0 : -1;
+ if (((s = idleCount - q.base + q.top) <= n &&
+ (n = s) <= 0) ||
+ (u = (int)((c = ctl) >>> 32)) >= 0 ||
+ (e = (int)c) <= 0 || m < (i = e & SMASK) ||
+ (v = ws[i]) == null)
+ break;
+ long nc = (((long)(v.nextWait & E_MASK)) |
+ ((long)(u + UAC_UNIT) << 32));
+ if (v.eventCount != (e | INT_SIGN) ||
+ !U.compareAndSwapLong(this, CTL, c, nc))
+ break;
+ v.hint = h;
v.eventCount = (e + E_SEQ) & E_MASK;
if ((p = v.parker) != null)
U.unpark(p);
+ if (--n <= 0)
+ break;
}
}
}
- else if (ec >= 0) { // try to enqueue/inactivate
- long nc = (long)ec | ((c - AC_UNIT) & (AC_MASK|TC_MASK));
- w.nextWait = e;
- w.eventCount = ec | INT_SIGN; // mark as inactive
- if (ctl != c || !U.compareAndSwapLong(this, CTL, c, nc))
- w.eventCount = ec; // unmark on CAS failure
- else {
- if ((ns = w.nsteals) != 0) {
- w.nsteals = 0; // set rescans if ran task
- w.rescans = (a > 0) ? 0 : a + parallelism;
- w.totalSteals += ns;
- }
- if (a == 1 - parallelism) // quiescent
- idleAwaitWork(w, nc, c);
- }
- }
- else if (w.eventCount < 0) { // already queued
- if ((nr = w.rescans) > 0) { // continue rescanning
- int ac = a + parallelism;
- if (((w.rescans = (ac < nr) ? ac : nr - 1) & 3) == 0)
- Thread.yield(); // yield before block
- }
- else {
- Thread.interrupted(); // clear status
- Thread wt = Thread.currentThread();
- U.putObject(wt, PARKBLOCKER, this);
- w.parker = wt; // emulate LockSupport.park
- if (w.eventCount < 0) // recheck
- U.park(false, 0L);
- w.parker = null;
- U.putObject(wt, PARKBLOCKER, null);
- }
- }
}
return null;
}
@@ -1603,8 +2110,8 @@ public class ForkJoinPool extends AbstractExecutorService {
/**
* If inactivating worker w has caused the pool to become
* quiescent, checks for pool termination, and, so long as this is
- * not the only worker, waits for event for up to SHRINK_RATE
- * nanosecs. On timeout, if ctl has not changed, terminates the
+ * not the only worker, waits for event for up to a given
+ * duration. On timeout, if ctl has not changed, terminates the
* worker, which will in turn wake up another worker to possibly
* repeat this process.
*
@@ -1613,25 +2120,28 @@ public class ForkJoinPool extends AbstractExecutorService {
* @param prevCtl the ctl value to restore if thread is terminated
*/
private void idleAwaitWork(WorkQueue w, long currentCtl, long prevCtl) {
- if (w.eventCount < 0 && !tryTerminate(false, false) &&
- (int)prevCtl != 0 && !hasQueuedSubmissions() && ctl == currentCtl) {
+ if (w != null && w.eventCount < 0 &&
+ !tryTerminate(false, false) && (int)prevCtl != 0 &&
+ ctl == currentCtl) {
+ int dc = -(short)(currentCtl >>> TC_SHIFT);
+ long parkTime = dc < 0 ? FAST_IDLE_TIMEOUT: (dc + 1) * IDLE_TIMEOUT;
+ long deadline = System.nanoTime() + parkTime - TIMEOUT_SLOP;
Thread wt = Thread.currentThread();
- Thread.yield(); // yield before block
while (ctl == currentCtl) {
- long startTime = System.nanoTime();
Thread.interrupted(); // timed variant of version in scan()
U.putObject(wt, PARKBLOCKER, this);
w.parker = wt;
if (ctl == currentCtl)
- U.park(false, SHRINK_RATE);
+ U.park(false, parkTime);
w.parker = null;
U.putObject(wt, PARKBLOCKER, null);
if (ctl != currentCtl)
break;
- if (System.nanoTime() - startTime >= SHRINK_TIMEOUT &&
+ if (deadline - System.nanoTime() <= 0L &&
U.compareAndSwapLong(this, CTL, currentCtl, prevCtl)) {
w.eventCount = (w.eventCount + E_SEQ) | E_MASK;
- w.runState = -1; // shrink
+ w.hint = -1;
+ w.qlock = -1; // shrink
break;
}
}
@@ -1639,6 +2149,47 @@ public class ForkJoinPool extends AbstractExecutorService {
}
/**
+ * Scans through queues looking for work while joining a task; if
+ * any present, signals. May return early if more signalling is
+ * detectably unneeded.
+ *
+ * @param task return early if done
+ * @param origin an index to start scan
+ */
+ private void helpSignal(ForkJoinTask<?> task, int origin) {
+ WorkQueue[] ws; WorkQueue w; Thread p; long c; int m, u, e, i, s;
+ if (task != null && task.status >= 0 &&
+ (u = (int)(ctl >>> 32)) < 0 && (u >> UAC_SHIFT) < 0 &&
+ (ws = workQueues) != null && (m = ws.length - 1) >= 0) {
+ outer: for (int k = origin, j = m; j >= 0; --j) {
+ WorkQueue q = ws[k++ & m];
+ for (int n = m;;) { // limit to at most m signals
+ if (task.status < 0)
+ break outer;
+ if (q == null ||
+ ((s = -q.base + q.top) <= n && (n = s) <= 0))
+ break;
+ if ((u = (int)((c = ctl) >>> 32)) >= 0 ||
+ (e = (int)c) <= 0 || m < (i = e & SMASK) ||
+ (w = ws[i]) == null)
+ break outer;
+ long nc = (((long)(w.nextWait & E_MASK)) |
+ ((long)(u + UAC_UNIT) << 32));
+ if (w.eventCount != (e | INT_SIGN))
+ break outer;
+ if (U.compareAndSwapLong(this, CTL, c, nc)) {
+ w.eventCount = (e + E_SEQ) & E_MASK;
+ if ((p = w.parker) != null)
+ U.unpark(p);
+ if (--n <= 0)
+ break;
+ }
+ }
+ }
+ }
+ }
+
+ /**
* Tries to locate and execute tasks for a stealer of the given
* task, or in turn one of its stealers. Traces currentSteal ->
* currentJoin links looking for a thread working on a descendant
@@ -1669,7 +2220,7 @@ public class ForkJoinPool extends AbstractExecutorService {
}
if ((ws = workQueues) == null || (m = ws.length - 1) <= 0)
break restart; // shutting down
- if ((v = ws[h = (j.stealHint | 1) & m]) == null ||
+ if ((v = ws[h = (j.hint | 1) & m]) == null ||
v.currentSteal != subtask) {
for (int origin = h;;) { // find stealer
if (((h = (h + 2) & m) & 15) == 1 &&
@@ -1677,7 +2228,7 @@ public class ForkJoinPool extends AbstractExecutorService {
continue restart; // occasional staleness check
if ((v = ws[h]) != null &&
v.currentSteal == subtask) {
- j.stealHint = h; // save hint
+ j.hint = h; // save hint
break;
}
if (h == origin)
@@ -1725,88 +2276,77 @@ public class ForkJoinPool extends AbstractExecutorService {
}
/**
- * If task is at base of some steal queue, steals and executes it.
+ * Analog of tryHelpStealer for CountedCompleters. Tries to steal
+ * and run tasks within the target's computation.
*
- * @param joiner the joining worker
- * @param task the task
- */
- private void tryPollForAndExec(WorkQueue joiner, ForkJoinTask<?> task) {
- WorkQueue[] ws;
- if ((ws = workQueues) != null) {
- for (int j = 1; j < ws.length && task.status >= 0; j += 2) {
- WorkQueue q = ws[j];
- if (q != null && q.pollFor(task)) {
- joiner.runSubtask(task);
- break;
+ * @param task the task to join
+ * @param mode if shared, exit upon completing any task
+ * if all workers are active
+ */
+ private int helpComplete(ForkJoinTask<?> task, int mode) {
+ WorkQueue[] ws; WorkQueue q; int m, n, s, u;
+ if (task != null && (ws = workQueues) != null &&
+ (m = ws.length - 1) >= 0) {
+ for (int j = 1, origin = j;;) {
+ if ((s = task.status) < 0)
+ return s;
+ if ((q = ws[j & m]) != null && q.pollAndExecCC(task)) {
+ origin = j;
+ if (mode == SHARED_QUEUE &&
+ ((u = (int)(ctl >>> 32)) >= 0 || (u >> UAC_SHIFT) >= 0))
+ break;
}
+ else if ((j = (j + 2) & m) == origin)
+ break;
}
}
+ return 0;
}
/**
* Tries to decrement active count (sometimes implicitly) and
* possibly release or create a compensating worker in preparation
* for blocking. Fails on contention or termination. Otherwise,
- * adds a new thread if no idle workers are available and either
- * pool would become completely starved or: (at least half
- * starved, and fewer than 50% spares exist, and there is at least
- * one task apparently available). Even though the availability
- * check requires a full scan, it is worthwhile in reducing false
- * alarms.
- *
- * @param task if non-null, a task being waited for
- * @param blocker if non-null, a blocker being waited for
- * @return true if the caller can block, else should recheck and retry
- */
- final boolean tryCompensate(ForkJoinTask<?> task, ManagedBlocker blocker) {
- int pc = parallelism, e;
- long c = ctl;
- WorkQueue[] ws = workQueues;
- if ((e = (int)c) >= 0 && ws != null) {
- int u, a, ac, hc;
- int tc = (short)((u = (int)(c >>> 32)) >>> UTC_SHIFT) + pc;
- boolean replace = false;
- if ((a = u >> UAC_SHIFT) <= 0) {
- if ((ac = a + pc) <= 1)
- replace = true;
- else if ((e > 0 || (task != null &&
- ac <= (hc = pc >>> 1) && tc < pc + hc))) {
- WorkQueue w;
- for (int j = 0; j < ws.length; ++j) {
- if ((w = ws[j]) != null && !w.isEmpty()) {
- replace = true;
- break; // in compensation range and tasks available
- }
- }
+ * adds a new thread if no idle workers are available and pool
+ * may become starved.
+ */
+ final boolean tryCompensate() {
+ int pc = config & SMASK, e, i, tc; long c;
+ WorkQueue[] ws; WorkQueue w; Thread p;
+ if ((ws = workQueues) != null && (e = (int)(c = ctl)) >= 0) {
+ if (e != 0 && (i = e & SMASK) < ws.length &&
+ (w = ws[i]) != null && w.eventCount == (e | INT_SIGN)) {
+ long nc = ((long)(w.nextWait & E_MASK) |
+ (c & (AC_MASK|TC_MASK)));
+ if (U.compareAndSwapLong(this, CTL, c, nc)) {
+ w.eventCount = (e + E_SEQ) & E_MASK;
+ if ((p = w.parker) != null)
+ U.unpark(p);
+ return true; // replace with idle worker
}
}
- if ((task == null || task.status >= 0) && // recheck need to block
- (blocker == null || !blocker.isReleasable()) && ctl == c) {
- if (!replace) { // no compensation
- long nc = ((c - AC_UNIT) & AC_MASK) | (c & ~AC_MASK);
- if (U.compareAndSwapLong(this, CTL, c, nc))
- return true;
- }
- else if (e != 0) { // release an idle worker
- WorkQueue w; Thread p; int i;
- if ((i = e & SMASK) < ws.length && (w = ws[i]) != null) {
- long nc = ((long)(w.nextWait & E_MASK) |
- (c & (AC_MASK|TC_MASK)));
- if (w.eventCount == (e | INT_SIGN) &&
- U.compareAndSwapLong(this, CTL, c, nc)) {
- w.eventCount = (e + E_SEQ) & E_MASK;
- if ((p = w.parker) != null)
- U.unpark(p);
+ else if ((tc = (short)(c >>> TC_SHIFT)) >= 0 &&
+ (int)(c >> AC_SHIFT) + pc > 1) {
+ long nc = ((c - AC_UNIT) & AC_MASK) | (c & ~AC_MASK);
+ if (U.compareAndSwapLong(this, CTL, c, nc))
+ return true; // no compensation
+ }
+ else if (tc + pc < MAX_CAP) {
+ long nc = ((c + TC_UNIT) & TC_MASK) | (c & ~TC_MASK);
+ if (U.compareAndSwapLong(this, CTL, c, nc)) {
+ ForkJoinWorkerThreadFactory fac;
+ Throwable ex = null;
+ ForkJoinWorkerThread wt = null;
+ try {
+ if ((fac = factory) != null &&
+ (wt = fac.newThread(this)) != null) {
+ wt.start();
return true;
}
+ } catch (Throwable rex) {
+ ex = rex;
}
- }
- else if (tc < MAX_CAP) { // create replacement
- long nc = ((c + TC_UNIT) & TC_MASK) | (c & ~TC_MASK);
- if (U.compareAndSwapLong(this, CTL, c, nc)) {
- addWorker();
- return true;
- }
+ deregisterWorker(wt, ex); // clean up and return false
}
}
}
@@ -1821,25 +2361,25 @@ public class ForkJoinPool extends AbstractExecutorService {
* @return task status on exit
*/
final int awaitJoin(WorkQueue joiner, ForkJoinTask<?> task) {
- int s;
- if ((s = task.status) >= 0) {
+ int s = 0;
+ if (joiner != null && task != null && (s = task.status) >= 0) {
ForkJoinTask<?> prevJoin = joiner.currentJoin;
joiner.currentJoin = task;
- long startTime = 0L;
- for (int k = 0;;) {
- if ((s = (joiner.isEmpty() ? // try to help
- tryHelpStealer(joiner, task) :
- joiner.tryRemoveAndExec(task))) == 0 &&
+ do {} while ((s = task.status) >= 0 && !joiner.isEmpty() &&
+ joiner.tryRemoveAndExec(task)); // process local tasks
+ if (s >= 0 && (s = task.status) >= 0) {
+ helpSignal(task, joiner.poolIndex);
+ if ((s = task.status) >= 0 &&
+ (task instanceof CountedCompleter))
+ s = helpComplete(task, LIFO_QUEUE);
+ }
+ while (s >= 0 && (s = task.status) >= 0) {
+ if ((!joiner.isEmpty() || // try helping
+ (s = tryHelpStealer(joiner, task)) == 0) &&
(s = task.status) >= 0) {
- if (k == 0) {
- startTime = System.nanoTime();
- tryPollForAndExec(joiner, task); // check uncommon case
- }
- else if ((k & (MAX_HELP - 1)) == 0 &&
- System.nanoTime() - startTime >=
- COMPENSATION_DELAY &&
- tryCompensate(task, null)) {
- if (task.trySetSignal()) {
+ helpSignal(task, joiner.poolIndex);
+ if ((s = task.status) >= 0 && tryCompensate()) {
+ if (task.trySetSignal() && (s = task.status) >= 0) {
synchronized (task) {
if (task.status >= 0) {
try { // see ForkJoinTask
@@ -1856,13 +2396,8 @@ public class ForkJoinPool extends AbstractExecutorService {
(this, CTL, c = ctl, c + AC_UNIT));
}
}
- if (s < 0 || (s = task.status) < 0) {
- joiner.currentJoin = prevJoin;
- break;
- }
- else if ((k++ & (MAX_HELP - 1)) == MAX_HELP >>> 1)
- Thread.yield(); // for politeness
}
+ joiner.currentJoin = prevJoin;
}
return s;
}
@@ -1874,46 +2409,49 @@ public class ForkJoinPool extends AbstractExecutorService {
*
* @param joiner the joining worker
* @param task the task
- * @return task status on exit
*/
- final int helpJoinOnce(WorkQueue joiner, ForkJoinTask<?> task) {
+ final void helpJoinOnce(WorkQueue joiner, ForkJoinTask<?> task) {
int s;
- while ((s = task.status) >= 0 &&
- (joiner.isEmpty() ?
- tryHelpStealer(joiner, task) :
- joiner.tryRemoveAndExec(task)) != 0)
- ;
- return s;
+ if (joiner != null && task != null && (s = task.status) >= 0) {
+ ForkJoinTask<?> prevJoin = joiner.currentJoin;
+ joiner.currentJoin = task;
+ do {} while ((s = task.status) >= 0 && !joiner.isEmpty() &&
+ joiner.tryRemoveAndExec(task));
+ if (s >= 0 && (s = task.status) >= 0) {
+ helpSignal(task, joiner.poolIndex);
+ if ((s = task.status) >= 0 &&
+ (task instanceof CountedCompleter))
+ s = helpComplete(task, LIFO_QUEUE);
+ }
+ if (s >= 0 && joiner.isEmpty()) {
+ do {} while (task.status >= 0 &&
+ tryHelpStealer(joiner, task) > 0);
+ }
+ joiner.currentJoin = prevJoin;
+ }
}
/**
* Returns a (probably) non-empty steal queue, if one is found
- * during a random, then cyclic scan, else null. This method must
- * be retried by caller if, by the time it tries to use the queue,
- * it is empty.
- */
- private WorkQueue findNonEmptyStealQueue(WorkQueue w) {
- // Similar to loop in scan(), but ignoring submissions
- int r = w.seed; r ^= r << 13; r ^= r >>> 17; w.seed = r ^= r << 5;
- int step = (r >>> 16) | 1;
- for (WorkQueue[] ws;;) {
- int rs = runState, m;
- if ((ws = workQueues) == null || (m = ws.length - 1) < 1)
- return null;
- for (int j = (m + 1) << 2; ; r += step) {
- WorkQueue q = ws[((r << 1) | 1) & m];
- if (q != null && !q.isEmpty())
- return q;
- else if (--j < 0) {
- if (runState == rs)
- return null;
- break;
+ * during a scan, else null. This method must be retried by
+ * caller if, by the time it tries to use the queue, it is empty.
+ * @param r a (random) seed for scanning
+ */
+ private WorkQueue findNonEmptyStealQueue(int r) {
+ for (;;) {
+ int ps = plock, m; WorkQueue[] ws; WorkQueue q;
+ if ((ws = workQueues) != null && (m = ws.length - 1) >= 0) {
+ for (int j = (m + 1) << 2; j >= 0; --j) {
+ if ((q = ws[(((r + j) << 1) | 1) & m]) != null &&
+ q.base - q.top < 0)
+ return q;
}
}
+ if (plock == ps)
+ return null;
}
}
-
/**
* Runs tasks until {@code isQuiescent()}. We piggyback on
* active count ctl maintenance, but rather than blocking
@@ -1922,36 +2460,34 @@ public class ForkJoinPool extends AbstractExecutorService {
*/
final void helpQuiescePool(WorkQueue w) {
for (boolean active = true;;) {
- ForkJoinTask<?> localTask; // exhaust local queue
- while ((localTask = w.nextLocalTask()) != null)
- localTask.doExec();
- WorkQueue q = findNonEmptyStealQueue(w);
- if (q != null) {
- ForkJoinTask<?> t; int b;
+ long c; WorkQueue q; ForkJoinTask<?> t; int b;
+ while ((t = w.nextLocalTask()) != null) {
+ if (w.base - w.top < 0)
+ signalWork(w);
+ t.doExec();
+ }
+ if ((q = findNonEmptyStealQueue(w.nextSeed())) != null) {
if (!active) { // re-establish active count
- long c;
active = true;
do {} while (!U.compareAndSwapLong
(this, CTL, c = ctl, c + AC_UNIT));
}
- if ((b = q.base) - q.top < 0 && (t = q.pollAt(b)) != null)
+ if ((b = q.base) - q.top < 0 && (t = q.pollAt(b)) != null) {
+ if (q.base - q.top < 0)
+ signalWork(q);
w.runSubtask(t);
+ }
}
- else {
- long c;
- if (active) { // decrement active count without queuing
+ else if (active) { // decrement active count without queuing
+ long nc = (c = ctl) - AC_UNIT;
+ if ((int)(nc >> AC_SHIFT) + (config & SMASK) == 0)
+ return; // bypass decrement-then-increment
+ if (U.compareAndSwapLong(this, CTL, c, nc))
active = false;
- do {} while (!U.compareAndSwapLong
- (this, CTL, c = ctl, c -= AC_UNIT));
- }
- else
- c = ctl; // re-increment on exit
- if ((int)(c >> AC_SHIFT) + parallelism == 0) {
- do {} while (!U.compareAndSwapLong
- (this, CTL, c = ctl, c + AC_UNIT));
- break;
- }
}
+ else if ((int)((c = ctl) >> AC_SHIFT) + (config & SMASK) == 0 &&
+ U.compareAndSwapLong(this, CTL, c, c + AC_UNIT))
+ return;
}
}
@@ -1965,27 +2501,75 @@ public class ForkJoinPool extends AbstractExecutorService {
WorkQueue q; int b;
if ((t = w.nextLocalTask()) != null)
return t;
- if ((q = findNonEmptyStealQueue(w)) == null)
+ if ((q = findNonEmptyStealQueue(w.nextSeed())) == null)
return null;
- if ((b = q.base) - q.top < 0 && (t = q.pollAt(b)) != null)
+ if ((b = q.base) - q.top < 0 && (t = q.pollAt(b)) != null) {
+ if (q.base - q.top < 0)
+ signalWork(q);
return t;
+ }
}
}
/**
- * Returns the approximate (non-atomic) number of idle threads per
- * active thread to offset steal queue size for method
- * ForkJoinTask.getSurplusQueuedTaskCount().
- */
- final int idlePerActive() {
- // Approximate at powers of two for small values, saturate past 4
- int p = parallelism;
- int a = p + (int)(ctl >> AC_SHIFT);
- return (a > (p >>>= 1) ? 0 :
- a > (p >>>= 1) ? 1 :
- a > (p >>>= 1) ? 2 :
- a > (p >>>= 1) ? 4 :
- 8);
+ * Returns a cheap heuristic guide for task partitioning when
+ * programmers, frameworks, tools, or languages have little or no
+ * idea about task granularity. In essence by offering this
+ * method, we ask users only about tradeoffs in overhead vs
+ * expected throughput and its variance, rather than how finely to
+ * partition tasks.
+ *
+ * In a steady state strict (tree-structured) computation, each
+ * thread makes available for stealing enough tasks for other
+ * threads to remain active. Inductively, if all threads play by
+ * the same rules, each thread should make available only a
+ * constant number of tasks.
+ *
+ * The minimum useful constant is just 1. But using a value of 1
+ * would require immediate replenishment upon each steal to
+ * maintain enough tasks, which is infeasible. Further,
+ * partitionings/granularities of offered tasks should minimize
+ * steal rates, which in general means that threads nearer the top
+ * of computation tree should generate more than those nearer the
+ * bottom. In perfect steady state, each thread is at
+ * approximately the same level of computation tree. However,
+ * producing extra tasks amortizes the uncertainty of progress and
+ * diffusion assumptions.
+ *
+ * So, users will want to use values larger (but not much larger)
+ * than 1 to both smooth over transient shortages and hedge
+ * against uneven progress; as traded off against the cost of
+ * extra task overhead. We leave the user to pick a threshold
+ * value to compare with the results of this call to guide
+ * decisions, but recommend values such as 3.
+ *
+ * When all threads are active, it is on average OK to estimate
+ * surplus strictly locally. In steady-state, if one thread is
+ * maintaining say 2 surplus tasks, then so are others. So we can
+ * just use estimated queue length. However, this strategy alone
+ * leads to serious mis-estimates in some non-steady-state
+ * conditions (ramp-up, ramp-down, other stalls). We can detect
+ * many of these by further considering the number of "idle"
+ * threads, which are known to have zero queued tasks, and so
+ * compensate by a factor of (#idle/#active) threads.
+ *
+ * Note: The approximation of #busy workers as #active workers is
+ * not very good under the current signalling scheme, and should be
+ * improved.
+ */
+ static int getSurplusQueuedTaskCount() {
+ Thread t; ForkJoinWorkerThread wt; ForkJoinPool pool; WorkQueue q;
+ if (((t = Thread.currentThread()) instanceof ForkJoinWorkerThread)) {
+ int p = (pool = (wt = (ForkJoinWorkerThread)t).pool).config & SMASK;
+ int n = (q = wt.workQueue).top - q.base;
+ int a = (int)(pool.ctl >> AC_SHIFT) + p;
+ return n - (a > (p >>>= 1) ? 0 :
+ a > (p >>>= 1) ? 1 :
+ a > (p >>>= 1) ? 2 :
+ a > (p >>>= 1) ? 4 :
+ 8);
+ }
+ return 0;
}
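To make the saturation concrete, the same ladder can be written in isolation (the method and parameter names below are illustrative, not part of this file); for parallelism 8 it yields an offset of 0 when 5..8 workers are active, 1 for 3..4, 2 for 2, 4 for 1, and 8 when none are.

    // Illustrative restatement of the offset ladder used above. p is the
    // pool's parallelism and active is the estimated number of active
    // workers, i.e. (int)(ctl >> AC_SHIFT) + p. The caller subtracts the
    // result from its local queue size to estimate the surplus.
    static int idleCompensationOffset(int p, int active) {
        int a = active;
        return (a > (p >>>= 1) ? 0 :
                a > (p >>>= 1) ? 1 :
                a > (p >>>= 1) ? 2 :
                a > (p >>>= 1) ? 4 :
                8);
    }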
// Termination
@@ -2005,56 +2589,71 @@ public class ForkJoinPool extends AbstractExecutorService {
* @return true if now terminating or terminated
*/
private boolean tryTerminate(boolean now, boolean enable) {
- Mutex lock = this.lock;
+ int ps;
+ if (this == common) // cannot shut down
+ return false;
+ if ((ps = plock) >= 0) { // enable by setting plock
+ if (!enable)
+ return false;
+ if ((ps & PL_LOCK) != 0 ||
+ !U.compareAndSwapInt(this, PLOCK, ps, ps += PL_LOCK))
+ ps = acquirePlock();
+ int nps = ((ps + PL_LOCK) & ~SHUTDOWN) | SHUTDOWN;
+ if (!U.compareAndSwapInt(this, PLOCK, ps, nps))
+ releasePlock(nps);
+ }
for (long c;;) {
- if (((c = ctl) & STOP_BIT) != 0) { // already terminating
- if ((short)(c >>> TC_SHIFT) == -parallelism) {
- lock.lock(); // don't need try/finally
- termination.signalAll(); // signal when 0 workers
- lock.unlock();
+ if (((c = ctl) & STOP_BIT) != 0) { // already terminating
+ if ((short)(c >>> TC_SHIFT) == -(config & SMASK)) {
+ synchronized (this) {
+ notifyAll(); // signal when 0 workers
+ }
}
return true;
}
- if (runState >= 0) { // not yet enabled
- if (!enable)
+ if (!now) { // check if idle & no tasks
+ WorkQueue[] ws; WorkQueue w;
+ if ((int)(c >> AC_SHIFT) != -(config & SMASK))
return false;
- lock.lock();
- runState |= SHUTDOWN;
- lock.unlock();
- }
- if (!now) { // check if idle & no tasks
- if ((int)(c >> AC_SHIFT) != -parallelism ||
- hasQueuedSubmissions())
- return false;
- // Check for unqueued inactive workers. One pass suffices.
- WorkQueue[] ws = workQueues; WorkQueue w;
- if (ws != null) {
- for (int i = 1; i < ws.length; i += 2) {
- if ((w = ws[i]) != null && w.eventCount >= 0)
- return false;
+ if ((ws = workQueues) != null) {
+ for (int i = 0; i < ws.length; ++i) {
+ if ((w = ws[i]) != null) {
+ if (!w.isEmpty()) { // signal unprocessed tasks
+ signalWork(w);
+ return false;
+ }
+ if ((i & 1) != 0 && w.eventCount >= 0)
+ return false; // unqueued inactive worker
+ }
}
}
}
if (U.compareAndSwapLong(this, CTL, c, c | STOP_BIT)) {
for (int pass = 0; pass < 3; ++pass) {
- WorkQueue[] ws = workQueues;
- if (ws != null) {
- WorkQueue w;
+ WorkQueue[] ws; WorkQueue w; Thread wt;
+ if ((ws = workQueues) != null) {
int n = ws.length;
for (int i = 0; i < n; ++i) {
if ((w = ws[i]) != null) {
- w.runState = -1;
+ w.qlock = -1;
if (pass > 0) {
w.cancelAll();
- if (pass > 1)
- w.interruptOwner();
+ if (pass > 1 && (wt = w.owner) != null) {
+ if (!wt.isInterrupted()) {
+ try {
+ wt.interrupt();
+ } catch (Throwable ignore) {
+ }
+ }
+ U.unpark(wt);
+ }
}
}
}
// Wake up workers parked on event queue
int i, e; long cc; Thread p;
while ((e = (int)(cc = ctl) & E_MASK) != 0 &&
- (i = e & SMASK) < n &&
+ (i = e & SMASK) < n && i >= 0 &&
(w = ws[i]) != null) {
long nc = ((long)(w.nextWait & E_MASK) |
((cc + AC_UNIT) & AC_MASK) |
@@ -2062,7 +2661,7 @@ public class ForkJoinPool extends AbstractExecutorService {
if (w.eventCount == (e | INT_SIGN) &&
U.compareAndSwapLong(this, CTL, cc, nc)) {
w.eventCount = (e + E_SEQ) & E_MASK;
- w.runState = -1;
+ w.qlock = -1;
if ((p = w.parker) != null)
U.unpark(p);
}
@@ -2073,6 +2672,135 @@ public class ForkJoinPool extends AbstractExecutorService {
}
}
+ // external operations on common pool
+
+ /**
+ * Returns common pool queue for a thread that has submitted at
+ * least one task.
+ */
+ static WorkQueue commonSubmitterQueue() {
+ ForkJoinPool p; WorkQueue[] ws; int m; Submitter z;
+ return ((z = submitters.get()) != null &&
+ (p = common) != null &&
+ (ws = p.workQueues) != null &&
+ (m = ws.length - 1) >= 0) ?
+ ws[m & z.seed & SQMASK] : null;
+ }
+
+ /**
+ * Tries to pop the given task from submitter's queue in common pool.
+ */
+ static boolean tryExternalUnpush(ForkJoinTask<?> t) {
+ ForkJoinPool p; WorkQueue[] ws; WorkQueue q; Submitter z;
+ ForkJoinTask<?>[] a; int m, s;
+ if (t != null &&
+ (z = submitters.get()) != null &&
+ (p = common) != null &&
+ (ws = p.workQueues) != null &&
+ (m = ws.length - 1) >= 0 &&
+ (q = ws[m & z.seed & SQMASK]) != null &&
+ (s = q.top) != q.base &&
+ (a = q.array) != null) {
+ long j = (((a.length - 1) & (s - 1)) << ASHIFT) + ABASE;
+ if (U.getObject(a, j) == t &&
+ U.compareAndSwapInt(q, QLOCK, 0, 1)) {
+ if (q.array == a && q.top == s && // recheck
+ U.compareAndSwapObject(a, j, t, null)) {
+ q.top = s - 1;
+ q.qlock = 0;
+ return true;
+ }
+ q.qlock = 0;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Tries to pop and run local tasks within the same computation
+ * as the given root. On failure, tries to help complete from
+ * other queues via helpComplete.
+ */
+ private void externalHelpComplete(WorkQueue q, ForkJoinTask<?> root) {
+ ForkJoinTask<?>[] a; int m;
+ if (q != null && (a = q.array) != null && (m = (a.length - 1)) >= 0 &&
+ root != null && root.status >= 0) {
+ for (;;) {
+ int s, u; Object o; CountedCompleter<?> task = null;
+ if ((s = q.top) - q.base > 0) {
+ long j = ((m & (s - 1)) << ASHIFT) + ABASE;
+ if ((o = U.getObject(a, j)) != null &&
+ (o instanceof CountedCompleter)) {
+ CountedCompleter<?> t = (CountedCompleter<?>)o, r = t;
+ do {
+ if (r == root) {
+ if (U.compareAndSwapInt(q, QLOCK, 0, 1)) {
+ if (q.array == a && q.top == s &&
+ U.compareAndSwapObject(a, j, t, null)) {
+ q.top = s - 1;
+ task = t;
+ }
+ q.qlock = 0;
+ }
+ break;
+ }
+ } while ((r = r.completer) != null);
+ }
+ }
+ if (task != null)
+ task.doExec();
+ if (root.status < 0 ||
+ (u = (int)(ctl >>> 32)) >= 0 || (u >> UAC_SHIFT) >= 0)
+ break;
+ if (task == null) {
+ helpSignal(root, q.poolIndex);
+ if (root.status >= 0)
+ helpComplete(root, SHARED_QUEUE);
+ break;
+ }
+ }
+ }
+ }
+
+ /**
+ * Tries to help execute or signal availability of the given task
+ * from submitter's queue in common pool.
+ */
+ static void externalHelpJoin(ForkJoinTask<?> t) {
+ // Some hard-to-avoid overlap with tryExternalUnpush
+ ForkJoinPool p; WorkQueue[] ws; WorkQueue q, w; Submitter z;
+ ForkJoinTask<?>[] a; int m, s, n;
+ if (t != null &&
+ (z = submitters.get()) != null &&
+ (p = common) != null &&
+ (ws = p.workQueues) != null &&
+ (m = ws.length - 1) >= 0 &&
+ (q = ws[m & z.seed & SQMASK]) != null &&
+ (a = q.array) != null) {
+ int am = a.length - 1;
+ if ((s = q.top) != q.base) {
+ long j = ((am & (s - 1)) << ASHIFT) + ABASE;
+ if (U.getObject(a, j) == t &&
+ U.compareAndSwapInt(q, QLOCK, 0, 1)) {
+ if (q.array == a && q.top == s &&
+ U.compareAndSwapObject(a, j, t, null)) {
+ q.top = s - 1;
+ q.qlock = 0;
+ t.doExec();
+ }
+ else
+ q.qlock = 0;
+ }
+ }
+ if (t.status >= 0) {
+ if (t instanceof CountedCompleter)
+ p.externalHelpComplete(q, t);
+ else
+ p.helpSignal(t, q.poolIndex);
+ }
+ }
+ }
+
// Exported methods
// Constructors
@@ -2089,7 +2817,7 @@ public class ForkJoinPool extends AbstractExecutorService {
* java.lang.RuntimePermission}{@code ("modifyThread")}
*/
public ForkJoinPool() {
- this(Runtime.getRuntime().availableProcessors(),
+ this(Math.min(MAX_CAP, Runtime.getRuntime().availableProcessors()),
defaultForkJoinWorkerThreadFactory, null, false);
}
@@ -2144,29 +2872,48 @@ public class ForkJoinPool extends AbstractExecutorService {
throw new NullPointerException();
if (parallelism <= 0 || parallelism > MAX_CAP)
throw new IllegalArgumentException();
- this.parallelism = parallelism;
this.factory = factory;
this.ueh = handler;
- this.localMode = asyncMode ? FIFO_QUEUE : LIFO_QUEUE;
+ this.config = parallelism | (asyncMode ? (FIFO_QUEUE << 16) : 0);
long np = (long)(-parallelism); // offset ctl counts
this.ctl = ((np << AC_SHIFT) & AC_MASK) | ((np << TC_SHIFT) & TC_MASK);
- // Use nearest power 2 for workQueues size. See Hackers Delight sec 3.2.
- int n = parallelism - 1;
- n |= n >>> 1; n |= n >>> 2; n |= n >>> 4; n |= n >>> 8; n |= n >>> 16;
- int size = (n + 1) << 1; // #slots = 2*#workers
- this.submitMask = size - 1; // room for max # of submit queues
- this.workQueues = new WorkQueue[size];
- this.termination = (this.lock = new Mutex()).newCondition();
- this.stealCount = new AtomicLong();
- this.nextWorkerNumber = new AtomicInteger();
- int pn = poolNumberGenerator.incrementAndGet();
+ int pn = nextPoolId();
StringBuilder sb = new StringBuilder("ForkJoinPool-");
sb.append(Integer.toString(pn));
sb.append("-worker-");
this.workerNamePrefix = sb.toString();
- lock.lock();
- this.runState = 1; // set init flag
- lock.unlock();
+ }
+
+ /**
+ * Constructor for common pool, suitable only for static initialization.
+ * Basically the same as above, but uses smallest possible initial footprint.
+ */
+ ForkJoinPool(int parallelism, long ctl,
+ ForkJoinWorkerThreadFactory factory,
+ Thread.UncaughtExceptionHandler handler) {
+ this.config = parallelism;
+ this.ctl = ctl;
+ this.factory = factory;
+ this.ueh = handler;
+ this.workerNamePrefix = "ForkJoinPool.commonPool-worker-";
+ }
+
+ /**
+ * Returns the common pool instance. This pool is statically
+ * constructed; its run state is unaffected by attempts to {@link
+ * #shutdown} or {@link #shutdownNow}. However this pool and any
+ * ongoing processing are automatically terminated upon program
+ * {@link System#exit}. Any program that relies on asynchronous
+ * task processing to complete before program termination should
+ * invoke {@code commonPool().}{@link #awaitQuiescence} before
+ * exit.
+ *
+ * @return the common pool instance
+ * @since 1.8
+ */
+ public static ForkJoinPool commonPool() {
+ // assert common != null : "static init error";
+ return common;
}
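A minimal usage sketch (the SumTask and CommonPoolDemo classes below are hypothetical, not part of this patch; they assume the scala.concurrent.forkjoin package, which mirrors java.util.concurrent): a divide-and-conquer task run on the statically constructed common pool, with no explicit pool creation or shutdown.

    import scala.concurrent.forkjoin.ForkJoinPool;
    import scala.concurrent.forkjoin.RecursiveTask;

    class SumTask extends RecursiveTask<Long> {
        final long[] a; final int lo, hi;
        SumTask(long[] a, int lo, int hi) { this.a = a; this.lo = lo; this.hi = hi; }
        protected Long compute() {
            if (hi - lo <= 1024) {                 // small range: sum sequentially
                long s = 0L;
                for (int i = lo; i < hi; ++i) s += a[i];
                return s;
            }
            int mid = (lo + hi) >>> 1;
            SumTask right = new SumTask(a, mid, hi);
            right.fork();                          // make the upper half stealable
            long leftSum = new SumTask(a, lo, mid).compute();
            return leftSum + right.join();
        }
    }

    public class CommonPoolDemo {
        public static void main(String[] args) {
            long[] data = new long[1 << 20];
            java.util.Arrays.fill(data, 1L);
            long total = ForkJoinPool.commonPool().invoke(new SumTask(data, 0, data.length));
            System.out.println(total);             // prints 1048576
        }
    }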
// Execution methods
@@ -2190,7 +2937,7 @@ public class ForkJoinPool extends AbstractExecutorService {
public <T> T invoke(ForkJoinTask<T> task) {
if (task == null)
throw new NullPointerException();
- doSubmit(task);
+ externalPush(task);
return task.join();
}
@@ -2205,7 +2952,7 @@ public class ForkJoinPool extends AbstractExecutorService {
public void execute(ForkJoinTask<?> task) {
if (task == null)
throw new NullPointerException();
- doSubmit(task);
+ externalPush(task);
}
// AbstractExecutorService methods
@@ -2223,7 +2970,7 @@ public class ForkJoinPool extends AbstractExecutorService {
job = (ForkJoinTask<?>) task;
else
job = new ForkJoinTask.AdaptedRunnableAction(task);
- doSubmit(job);
+ externalPush(job);
}
/**
@@ -2238,7 +2985,7 @@ public class ForkJoinPool extends AbstractExecutorService {
public <T> ForkJoinTask<T> submit(ForkJoinTask<T> task) {
if (task == null)
throw new NullPointerException();
- doSubmit(task);
+ externalPush(task);
return task;
}
@@ -2249,7 +2996,7 @@ public class ForkJoinPool extends AbstractExecutorService {
*/
public <T> ForkJoinTask<T> submit(Callable<T> task) {
ForkJoinTask<T> job = new ForkJoinTask.AdaptedCallable<T>(task);
- doSubmit(job);
+ externalPush(job);
return job;
}
@@ -2260,7 +3007,7 @@ public class ForkJoinPool extends AbstractExecutorService {
*/
public <T> ForkJoinTask<T> submit(Runnable task, T result) {
ForkJoinTask<T> job = new ForkJoinTask.AdaptedRunnable<T>(task, result);
- doSubmit(job);
+ externalPush(job);
return job;
}
@@ -2277,7 +3024,7 @@ public class ForkJoinPool extends AbstractExecutorService {
job = (ForkJoinTask<?>) task;
else
job = new ForkJoinTask.AdaptedRunnableAction(task);
- doSubmit(job);
+ externalPush(job);
return job;
}
@@ -2289,27 +3036,23 @@ public class ForkJoinPool extends AbstractExecutorService {
// In previous versions of this class, this method constructed
// a task to run ForkJoinTask.invokeAll, but now external
// invocation of multiple tasks is at least as efficient.
- List<ForkJoinTask<T>> fs = new ArrayList<ForkJoinTask<T>>(tasks.size());
- // Workaround needed because method wasn't declared with
- // wildcards in return type but should have been.
- @SuppressWarnings({"unchecked", "rawtypes"})
- List<Future<T>> futures = (List<Future<T>>) (List) fs;
+ ArrayList<Future<T>> futures = new ArrayList<Future<T>>(tasks.size());
boolean done = false;
try {
for (Callable<T> t : tasks) {
ForkJoinTask<T> f = new ForkJoinTask.AdaptedCallable<T>(t);
- doSubmit(f);
- fs.add(f);
+ futures.add(f);
+ externalPush(f);
}
- for (ForkJoinTask<T> f : fs)
- f.quietlyJoin();
+ for (int i = 0, size = futures.size(); i < size; i++)
+ ((ForkJoinTask<?>)futures.get(i)).quietlyJoin();
done = true;
return futures;
} finally {
if (!done)
- for (ForkJoinTask<T> f : fs)
- f.cancel(false);
+ for (int i = 0, size = futures.size(); i < size; i++)
+ futures.get(i).cancel(false);
}
}
@@ -2338,7 +3081,17 @@ public class ForkJoinPool extends AbstractExecutorService {
* @return the targeted parallelism level of this pool
*/
public int getParallelism() {
- return parallelism;
+ return config & SMASK;
+ }
+
+ /**
+ * Returns the targeted parallelism level of the common pool.
+ *
+ * @return the targeted parallelism level of the common pool
+ * @since 1.8
+ */
+ public static int getCommonPoolParallelism() {
+ return commonParallelism;
}
/**
@@ -2350,7 +3103,7 @@ public class ForkJoinPool extends AbstractExecutorService {
* @return the number of worker threads
*/
public int getPoolSize() {
- return parallelism + (short)(ctl >>> TC_SHIFT);
+ return (config & SMASK) + (short)(ctl >>> TC_SHIFT);
}
/**
@@ -2360,7 +3113,7 @@ public class ForkJoinPool extends AbstractExecutorService {
* @return {@code true} if this pool uses async mode
*/
public boolean getAsyncMode() {
- return localMode != 0;
+ return (config >>> 16) == FIFO_QUEUE;
}
/**
@@ -2391,7 +3144,7 @@ public class ForkJoinPool extends AbstractExecutorService {
* @return the number of active threads
*/
public int getActiveThreadCount() {
- int r = parallelism + (int)(ctl >> AC_SHIFT);
+ int r = (config & SMASK) + (int)(ctl >> AC_SHIFT);
return (r <= 0) ? 0 : r; // suppress momentarily negative values
}
@@ -2407,7 +3160,7 @@ public class ForkJoinPool extends AbstractExecutorService {
* @return {@code true} if all threads are currently idle
*/
public boolean isQuiescent() {
- return (int)(ctl >> AC_SHIFT) + parallelism == 0;
+ return (int)(ctl >> AC_SHIFT) + (config & SMASK) == 0;
}
/**
@@ -2422,12 +3175,12 @@ public class ForkJoinPool extends AbstractExecutorService {
* @return the number of steals
*/
public long getStealCount() {
- long count = stealCount.get();
+ long count = stealCount;
WorkQueue[] ws; WorkQueue w;
if ((ws = workQueues) != null) {
for (int i = 1; i < ws.length; i += 2) {
if ((w = ws[i]) != null)
- count += w.totalSteals;
+ count += w.nsteals;
}
}
return count;
@@ -2552,7 +3305,7 @@ public class ForkJoinPool extends AbstractExecutorService {
public String toString() {
// Use a single pass through workQueues to collect counts
long qt = 0L, qs = 0L; int rc = 0;
- long st = stealCount.get();
+ long st = stealCount;
long c = ctl;
WorkQueue[] ws; WorkQueue w;
if ((ws = workQueues) != null) {
@@ -2563,14 +3316,14 @@ public class ForkJoinPool extends AbstractExecutorService {
qs += size;
else {
qt += size;
- st += w.totalSteals;
+ st += w.nsteals;
if (w.isApparentlyUnblocked())
++rc;
}
}
}
}
- int pc = parallelism;
+ int pc = (config & SMASK);
int tc = pc + (short)(c >>> TC_SHIFT);
int ac = pc + (int)(c >> AC_SHIFT);
if (ac < 0) // ignore transient negative
@@ -2579,7 +3332,7 @@ public class ForkJoinPool extends AbstractExecutorService {
if ((c & STOP_BIT) != 0)
level = (tc == 0) ? "Terminated" : "Terminating";
else
- level = runState < 0 ? "Shutting down" : "Running";
+ level = plock < 0 ? "Shutting down" : "Running";
return super.toString() +
"[" + level +
", parallelism = " + pc +
@@ -2593,11 +3346,13 @@ public class ForkJoinPool extends AbstractExecutorService {
}
/**
- * Initiates an orderly shutdown in which previously submitted
- * tasks are executed, but no new tasks will be accepted.
- * Invocation has no additional effect if already shut down.
- * Tasks that are in the process of being submitted concurrently
- * during the course of this method may or may not be rejected.
+ * Possibly initiates an orderly shutdown in which previously
+ * submitted tasks are executed, but no new tasks will be
+ * accepted. Invocation has no effect on execution state if this
+ * is the {@link #commonPool()}, and no additional effect if
+ * already shut down. Tasks that are in the process of being
+ * submitted concurrently during the course of this method may or
+ * may not be rejected.
*
* @throws SecurityException if a security manager exists and
* the caller is not permitted to modify threads
@@ -2610,14 +3365,16 @@ public class ForkJoinPool extends AbstractExecutorService {
}
/**
- * Attempts to cancel and/or stop all tasks, and reject all
- * subsequently submitted tasks. Tasks that are in the process of
- * being submitted or executed concurrently during the course of
- * this method may or may not be rejected. This method cancels
- * both existing and unexecuted tasks, in order to permit
- * termination in the presence of task dependencies. So the method
- * always returns an empty list (unlike the case for some other
- * Executors).
+ * Possibly attempts to cancel and/or stop all tasks, and reject
+ * all subsequently submitted tasks. Invocation has no effect on
+ * execution state if this is the {@link #commonPool()}, and no
+ * additional effect if already shut down. Otherwise, tasks that
+ * are in the process of being submitted or executed concurrently
+ * during the course of this method may or may not be
+ * rejected. This method cancels both existing and unexecuted
+ * tasks, in order to permit termination in the presence of task
+ * dependencies. So the method always returns an empty list
+ * (unlike the case for some other Executors).
*
* @return an empty list
* @throws SecurityException if a security manager exists and
@@ -2639,7 +3396,7 @@ public class ForkJoinPool extends AbstractExecutorService {
public boolean isTerminated() {
long c = ctl;
return ((c & STOP_BIT) != 0L &&
- (short)(c >>> TC_SHIFT) == -parallelism);
+ (short)(c >>> TC_SHIFT) == -(config & SMASK));
}
/**
@@ -2647,7 +3404,7 @@ public class ForkJoinPool extends AbstractExecutorService {
* commenced but not yet completed. This method may be useful for
* debugging. A return of {@code true} reported a sufficient
* period after shutdown may indicate that submitted tasks have
- * ignored or suppressed interruption, or are waiting for IO,
+ * ignored or suppressed interruption, or are waiting for I/O,
* causing this executor not to properly terminate. (See the
* advisory notes for class {@link ForkJoinTask} stating that
* tasks should not normally entail blocking operations. But if
@@ -2658,7 +3415,7 @@ public class ForkJoinPool extends AbstractExecutorService {
public boolean isTerminating() {
long c = ctl;
return ((c & STOP_BIT) != 0L &&
- (short)(c >>> TC_SHIFT) != -parallelism);
+ (short)(c >>> TC_SHIFT) != -(config & SMASK));
}
/**
@@ -2667,13 +3424,16 @@ public class ForkJoinPool extends AbstractExecutorService {
* @return {@code true} if this pool has been shut down
*/
public boolean isShutdown() {
- return runState < 0;
+ return plock < 0;
}
/**
- * Blocks until all tasks have completed execution after a shutdown
- * request, or the timeout occurs, or the current thread is
- * interrupted, whichever happens first.
+ * Blocks until all tasks have completed execution after a
+ * shutdown request, or the timeout occurs, or the current thread
+ * is interrupted, whichever happens first. Because the {@link
+ * #commonPool()} never terminates until program shutdown, when
+ * applied to the common pool, this method is equivalent to {@link
+ * #awaitQuiescence} but always returns {@code false}.
*
* @param timeout the maximum time to wait
* @param unit the time unit of the timeout argument
@@ -2683,20 +3443,84 @@ public class ForkJoinPool extends AbstractExecutorService {
*/
public boolean awaitTermination(long timeout, TimeUnit unit)
throws InterruptedException {
+ if (Thread.interrupted())
+ throw new InterruptedException();
+ if (this == common) {
+ awaitQuiescence(timeout, unit);
+ return false;
+ }
long nanos = unit.toNanos(timeout);
- final Mutex lock = this.lock;
- lock.lock();
- try {
- for (;;) {
- if (isTerminated())
- return true;
- if (nanos <= 0)
+ if (isTerminated())
+ return true;
+ long startTime = System.nanoTime();
+ boolean terminated = false;
+ synchronized (this) {
+ for (long waitTime = nanos, millis = 0L;;) {
+ if (terminated = isTerminated() ||
+ waitTime <= 0L ||
+ (millis = unit.toMillis(waitTime)) <= 0L)
+ break;
+ wait(millis);
+ waitTime = nanos - (System.nanoTime() - startTime);
+ }
+ }
+ return terminated;
+ }
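For a private (non-common) pool the usual executor shutdown sequence still applies; a brief, illustrative sketch (imports of ForkJoinPool, RecursiveAction, and TimeUnit assumed):

    static void runThenShutDown(ForkJoinPool pool) throws InterruptedException {
        pool.execute(new RecursiveAction() {
            protected void compute() { /* ... application work ... */ }
        });
        pool.shutdown();                                   // stop accepting new tasks
        if (!pool.awaitTermination(60, TimeUnit.SECONDS))  // would return false for commonPool()
            pool.shutdownNow();                            // give up and cancel what remains
    }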
+
+ /**
+ * If called by a ForkJoinTask operating in this pool, equivalent
+ * in effect to {@link ForkJoinTask#helpQuiesce}. Otherwise,
+ * waits and/or attempts to assist performing tasks until this
+ * pool {@link #isQuiescent} or the indicated timeout elapses.
+ *
+ * @param timeout the maximum time to wait
+ * @param unit the time unit of the timeout argument
+ * @return {@code true} if quiescent; {@code false} if the
+ * timeout elapsed.
+ */
+ public boolean awaitQuiescence(long timeout, TimeUnit unit) {
+ long nanos = unit.toNanos(timeout);
+ ForkJoinWorkerThread wt;
+ Thread thread = Thread.currentThread();
+ if ((thread instanceof ForkJoinWorkerThread) &&
+ (wt = (ForkJoinWorkerThread)thread).pool == this) {
+ helpQuiescePool(wt.workQueue);
+ return true;
+ }
+ long startTime = System.nanoTime();
+ WorkQueue[] ws;
+ int r = 0, m;
+ boolean found = true;
+ while (!isQuiescent() && (ws = workQueues) != null &&
+ (m = ws.length - 1) >= 0) {
+ if (!found) {
+ if ((System.nanoTime() - startTime) > nanos)
return false;
- nanos = termination.awaitNanos(nanos);
+ Thread.yield(); // cannot block
+ }
+ found = false;
+ for (int j = (m + 1) << 2; j >= 0; --j) {
+ ForkJoinTask<?> t; WorkQueue q; int b;
+ if ((q = ws[r++ & m]) != null && (b = q.base) - q.top < 0) {
+ found = true;
+ if ((t = q.pollAt(b)) != null) {
+ if (q.base - q.top < 0)
+ signalWork(q);
+ t.doExec();
+ }
+ break;
+ }
}
- } finally {
- lock.unlock();
}
+ return true;
+ }
+
+ /**
+ * Waits and/or attempts to assist performing tasks indefinitely
+ * until the {@link #commonPool()} {@link #isQuiescent}.
+ */
+ static void quiesceCommonPool() {
+ common.awaitQuiescence(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
}
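A sketch of the pattern recommended in the commonPool() documentation above: asynchronous actions are handed to the common pool, and the program waits for quiescence (not termination, which the common pool never reaches) before exiting. Names are illustrative; imports of ForkJoinPool and TimeUnit are assumed.

    static void runAllThenQuiesce(Runnable[] jobs) {
        ForkJoinPool cp = ForkJoinPool.commonPool();
        for (Runnable job : jobs)
            cp.execute(job);                       // fire-and-forget asynchronous actions
        cp.awaitQuiescence(5, TimeUnit.MINUTES);   // wait until no tasks remain, or time out
    }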
/**
@@ -2795,19 +3619,37 @@ public class ForkJoinPool extends AbstractExecutorService {
public static void managedBlock(ManagedBlocker blocker)
throws InterruptedException {
Thread t = Thread.currentThread();
- ForkJoinPool p = ((t instanceof ForkJoinWorkerThread) ?
- ((ForkJoinWorkerThread)t).pool : null);
- while (!blocker.isReleasable()) {
- if (p == null || p.tryCompensate(null, blocker)) {
- try {
- do {} while (!blocker.isReleasable() && !blocker.block());
- } finally {
- if (p != null)
+ if (t instanceof ForkJoinWorkerThread) {
+ ForkJoinPool p = ((ForkJoinWorkerThread)t).pool;
+ while (!blocker.isReleasable()) { // variant of helpSignal
+ WorkQueue[] ws; WorkQueue q; int m, u;
+ if ((ws = p.workQueues) != null && (m = ws.length - 1) >= 0) {
+ for (int i = 0; i <= m; ++i) {
+ if (blocker.isReleasable())
+ return;
+ if ((q = ws[i]) != null && q.base - q.top < 0) {
+ p.signalWork(q);
+ if ((u = (int)(p.ctl >>> 32)) >= 0 ||
+ (u >> UAC_SHIFT) >= 0)
+ break;
+ }
+ }
+ }
+ if (p.tryCompensate()) {
+ try {
+ do {} while (!blocker.isReleasable() &&
+ !blocker.block());
+ } finally {
p.incrementActiveCount();
+ }
+ break;
}
- break;
}
}
+ else {
+ do {} while (!blocker.isReleasable() &&
+ !blocker.block());
+ }
}
// AbstractExecutorService overrides. These rely on undocumented
@@ -2828,32 +3670,80 @@ public class ForkJoinPool extends AbstractExecutorService {
private static final long PARKBLOCKER;
private static final int ABASE;
private static final int ASHIFT;
+ private static final long STEALCOUNT;
+ private static final long PLOCK;
+ private static final long INDEXSEED;
+ private static final long QLOCK;
static {
- poolNumberGenerator = new AtomicInteger();
- nextSubmitterSeed = new AtomicInteger(0x55555555);
- modifyThreadPermission = new RuntimePermission("modifyThread");
- defaultForkJoinWorkerThreadFactory =
- new DefaultForkJoinWorkerThreadFactory();
- submitters = new ThreadSubmitter();
- int s;
+ // initialize field offsets for CAS etc
try {
U = getUnsafe();
Class<?> k = ForkJoinPool.class;
- Class<?> ak = ForkJoinTask[].class;
CTL = U.objectFieldOffset
(k.getDeclaredField("ctl"));
+ STEALCOUNT = U.objectFieldOffset
+ (k.getDeclaredField("stealCount"));
+ PLOCK = U.objectFieldOffset
+ (k.getDeclaredField("plock"));
+ INDEXSEED = U.objectFieldOffset
+ (k.getDeclaredField("indexSeed"));
Class<?> tk = Thread.class;
PARKBLOCKER = U.objectFieldOffset
(tk.getDeclaredField("parkBlocker"));
+ Class<?> wk = WorkQueue.class;
+ QLOCK = U.objectFieldOffset
+ (wk.getDeclaredField("qlock"));
+ Class<?> ak = ForkJoinTask[].class;
ABASE = U.arrayBaseOffset(ak);
- s = U.arrayIndexScale(ak);
+ int scale = U.arrayIndexScale(ak);
+ if ((scale & (scale - 1)) != 0)
+ throw new Error("data type scale not a power of two");
+ ASHIFT = 31 - Integer.numberOfLeadingZeros(scale);
} catch (Exception e) {
throw new Error(e);
}
- if ((s & (s-1)) != 0)
- throw new Error("data type scale not a power of two");
- ASHIFT = 31 - Integer.numberOfLeadingZeros(s);
+
+ submitters = new ThreadLocal<Submitter>();
+ ForkJoinWorkerThreadFactory fac = defaultForkJoinWorkerThreadFactory =
+ new DefaultForkJoinWorkerThreadFactory();
+ modifyThreadPermission = new RuntimePermission("modifyThread");
+
+ /*
+ * Establish common pool parameters. For extra caution,
+ * computations to set up common pool state are here; the
+ * constructor just assigns these values to fields.
+ */
+
+ int par = 0;
+ Thread.UncaughtExceptionHandler handler = null;
+ try { // TBD: limit or report ignored exceptions?
+ String pp = System.getProperty
+ ("java.util.concurrent.ForkJoinPool.common.parallelism");
+ String hp = System.getProperty
+ ("java.util.concurrent.ForkJoinPool.common.exceptionHandler");
+ String fp = System.getProperty
+ ("java.util.concurrent.ForkJoinPool.common.threadFactory");
+ if (fp != null)
+ fac = ((ForkJoinWorkerThreadFactory)ClassLoader.
+ getSystemClassLoader().loadClass(fp).newInstance());
+ if (hp != null)
+ handler = ((Thread.UncaughtExceptionHandler)ClassLoader.
+ getSystemClassLoader().loadClass(hp).newInstance());
+ if (pp != null)
+ par = Integer.parseInt(pp);
+ } catch (Exception ignore) {
+ }
+
+ if (par <= 0)
+ par = Runtime.getRuntime().availableProcessors();
+ if (par > MAX_CAP)
+ par = MAX_CAP;
+ commonParallelism = par;
+ long np = (long)(-par); // precompute initial ctl value
+ long ct = ((np << AC_SHIFT) & AC_MASK) | ((np << TC_SHIFT) & TC_MASK);
+
+ common = new ForkJoinPool(par, ct, fac, handler);
}
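The three system properties read above must be set before this class is initialized, normally as JVM flags. A small, hypothetical check (the -D values in the comments are examples, and com.example.MyFactory / com.example.MyHandler are made-up class names):

    // Typical command-line configuration of the common pool:
    //   -Djava.util.concurrent.ForkJoinPool.common.parallelism=4
    //   -Djava.util.concurrent.ForkJoinPool.common.threadFactory=com.example.MyFactory
    //   -Djava.util.concurrent.ForkJoinPool.common.exceptionHandler=com.example.MyHandler
    public static void main(String[] args) {
        // Prints 4 when started with the parallelism flag above; otherwise the value
        // defaults to Runtime.getRuntime().availableProcessors(), capped at MAX_CAP.
        System.out.println(ForkJoinPool.getCommonPoolParallelism());
    }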
/**
@@ -2866,5 +3756,4 @@ public class ForkJoinPool extends AbstractExecutorService {
private static sun.misc.Unsafe getUnsafe() {
return scala.concurrent.util.Unsafe.instance;
}
-
}
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
index 839fd26b39..fd1e132b07 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
@@ -5,6 +5,7 @@
*/
package scala.concurrent.forkjoin;
+
import java.io.Serializable;
import java.util.Collection;
import java.util.List;
@@ -29,15 +30,18 @@ import java.lang.reflect.Constructor;
* subtasks may be hosted by a small number of actual threads in a
* ForkJoinPool, at the price of some usage limitations.
*
- * <p>A "main" {@code ForkJoinTask} begins execution when submitted
- * to a {@link ForkJoinPool}. Once started, it will usually in turn
- * start other subtasks. As indicated by the name of this class,
- * many programs using {@code ForkJoinTask} employ only methods
- * {@link #fork} and {@link #join}, or derivatives such as {@link
+ * <p>A "main" {@code ForkJoinTask} begins execution when it is
+ * explicitly submitted to a {@link ForkJoinPool}, or, if not already
+ * engaged in a ForkJoin computation, commenced in the {@link
+ * ForkJoinPool#commonPool()} via {@link #fork}, {@link #invoke}, or
+ * related methods. Once started, it will usually in turn start other
+ * subtasks. As indicated by the name of this class, many programs
+ * using {@code ForkJoinTask} employ only methods {@link #fork} and
+ * {@link #join}, or derivatives such as {@link
* #invokeAll(ForkJoinTask...) invokeAll}. However, this class also
* provides a number of other methods that can come into play in
- * advanced usages, as well as extension mechanics that allow
- * support of new forms of fork/join processing.
+ * advanced usages, as well as extension mechanics that allow support
+ * of new forms of fork/join processing.
*
* <p>A {@code ForkJoinTask} is a lightweight form of {@link Future}.
* The efficiency of {@code ForkJoinTask}s stems from a set of
@@ -51,7 +55,7 @@ import java.lang.reflect.Constructor;
* minimize other blocking synchronization apart from joining other
* tasks or using synchronizers such as Phasers that are advertised to
* cooperate with fork/join scheduling. Subdividable tasks should also
- * not perform blocking IO, and should ideally access variables that
+ * not perform blocking I/O, and should ideally access variables that
* are completely independent of those accessed by other running
* tasks. These guidelines are loosely enforced by not permitting
* checked exceptions such as {@code IOExceptions} to be
@@ -69,10 +73,11 @@ import java.lang.reflect.Constructor;
* <p>It is possible to define and use ForkJoinTasks that may block,
 * but doing so requires three further considerations: (1) Completion
* of few if any <em>other</em> tasks should be dependent on a task
- * that blocks on external synchronization or IO. Event-style async
- * tasks that are never joined often fall into this category. (2) To
- * minimize resource impact, tasks should be small; ideally performing
- * only the (possibly) blocking action. (3) Unless the {@link
+ * that blocks on external synchronization or I/O. Event-style async
+ * tasks that are never joined (for example, those subclassing {@link
+ * CountedCompleter}) often fall into this category. (2) To minimize
+ * resource impact, tasks should be small; ideally performing only the
+ * (possibly) blocking action. (3) Unless the {@link
* ForkJoinPool.ManagedBlocker} API is used, or the number of possibly
* blocked tasks is known to be less than the pool's {@link
* ForkJoinPool#getParallelism} level, the pool cannot guarantee that
@@ -121,13 +126,7 @@ import java.lang.reflect.Constructor;
* other actions. Normally, a concrete ForkJoinTask subclass declares
* fields comprising its parameters, established in a constructor, and
* then defines a {@code compute} method that somehow uses the control
- * methods supplied by this base class. While these methods have
- * {@code public} access (to allow instances of different task
- * subclasses to call each other's methods), some of them may only be
- * called from within other ForkJoinTasks (as may be determined using
- * method {@link #inForkJoinPool}). Attempts to invoke them in other
- * contexts result in exceptions or errors, possibly including {@code
- * ClassCastException}.
+ * methods supplied by this base class.
*
* <p>Method {@link #join} and its variants are appropriate for use
* only when completion dependencies are acyclic; that is, the
@@ -138,17 +137,16 @@ import java.lang.reflect.Constructor;
* {@link Phaser}, {@link #helpQuiesce}, and {@link #complete}) that
* may be of use in constructing custom subclasses for problems that
* are not statically structured as DAGs. To support such usages a
- * ForkJoinTask may be atomically <em>tagged</em> with a {@code
- * short} value using {@link #setForkJoinTaskTag} or {@link
+ * ForkJoinTask may be atomically <em>tagged</em> with a {@code short}
+ * value using {@link #setForkJoinTaskTag} or {@link
* #compareAndSetForkJoinTaskTag} and checked using {@link
- * #getForkJoinTaskTag}. The ForkJoinTask implementation does not
- * use these {@code protected} methods or tags for any purpose, but
- * they may be of use in the construction of specialized subclasses.
- * For example, parallel graph traversals can use the supplied methods
- * to avoid revisiting nodes/tasks that have already been processed.
- * Also, completion based designs can use them to record that subtasks
- * have completed. (Method names for tagging are bulky in part to
- * encourage definition of methods that reflect their usage patterns.)
+ * #getForkJoinTaskTag}. The ForkJoinTask implementation does not use
+ * these {@code protected} methods or tags for any purpose, but they
+ * may be of use in the construction of specialized subclasses. For
+ * example, parallel graph traversals can use the supplied methods to
+ * avoid revisiting nodes/tasks that have already been processed.
+ * (Method names for tagging are bulky in part to encourage definition
+ * of methods that reflect their usage patterns.)
*
* <p>Most base support methods are {@code final}, to prevent
* overriding of implementations that are intrinsically tied to the
@@ -286,8 +284,9 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
* @return status upon completion
*/
private int externalAwaitDone() {
- boolean interrupted = false;
int s;
+ ForkJoinPool.externalHelpJoin(this);
+ boolean interrupted = false;
while ((s = status) >= 0) {
if (U.compareAndSwapInt(this, STATUS, s, s | SIGNAL)) {
synchronized (this) {
@@ -315,6 +314,7 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
int s;
if (Thread.interrupted())
throw new InterruptedException();
+ ForkJoinPool.externalHelpJoin(this);
while ((s = status) >= 0) {
if (U.compareAndSwapInt(this, STATUS, s, s | SIGNAL)) {
synchronized (this) {
@@ -328,6 +328,7 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
return s;
}
+
/**
* Implementation for join, get, quietlyJoin. Directly handles
* only cases of already-completed, external wait, and
@@ -337,16 +338,12 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
*/
private int doJoin() {
int s; Thread t; ForkJoinWorkerThread wt; ForkJoinPool.WorkQueue w;
- if ((s = status) >= 0) {
- if (((t = Thread.currentThread()) instanceof ForkJoinWorkerThread)) {
- if (!(w = (wt = (ForkJoinWorkerThread)t).workQueue).
- tryUnpush(this) || (s = doExec()) >= 0)
- s = wt.pool.awaitJoin(w, this);
- }
- else
- s = externalAwaitDone();
- }
- return s;
+ return (s = status) < 0 ? s :
+ ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) ?
+ (w = (wt = (ForkJoinWorkerThread)t).workQueue).
+ tryUnpush(this) && (s = doExec()) < 0 ? s :
+ wt.pool.awaitJoin(w, this) :
+ externalAwaitDone();
}
/**
@@ -356,14 +353,10 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
*/
private int doInvoke() {
int s; Thread t; ForkJoinWorkerThread wt;
- if ((s = doExec()) >= 0) {
- if ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread)
- s = (wt = (ForkJoinWorkerThread)t).pool.awaitJoin(wt.workQueue,
- this);
- else
- s = externalAwaitDone();
- }
- return s;
+ return (s = doExec()) < 0 ? s :
+ ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) ?
+ (wt = (ForkJoinWorkerThread)t).pool.awaitJoin(wt.workQueue, this) :
+ externalAwaitDone();
}
// Exception table support
@@ -411,11 +404,11 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
}
/**
- * Records exception and sets exceptional completion.
+ * Records exception and sets status.
*
* @return status on exit
*/
- private int setExceptionalCompletion(Throwable ex) {
+ final int recordExceptionalCompletion(Throwable ex) {
int s;
if ((s = status) >= 0) {
int h = System.identityHashCode(this);
@@ -438,17 +431,25 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
}
s = setCompletion(EXCEPTIONAL);
}
- ForkJoinTask<?> p = internalGetCompleter(); // propagate
- if (p != null && p.status >= 0)
- p.setExceptionalCompletion(ex);
return s;
}
/**
- * Exception propagation support for tasks with completers.
+ * Records exception and possibly propagates.
+ *
+ * @return status on exit
*/
- ForkJoinTask<?> internalGetCompleter() {
- return null;
+ private int setExceptionalCompletion(Throwable ex) {
+ int s = recordExceptionalCompletion(ex);
+ if ((s & DONE_MASK) == EXCEPTIONAL)
+ internalPropagateException(ex);
+ return s;
+ }
+
+ /**
+ * Hook for exception propagation support for tasks with completers.
+ */
+ void internalPropagateException(Throwable ex) {
}
/**
@@ -467,7 +468,7 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
}
/**
- * Removes exception node and clears status
+ * Removes exception node and clears status.
*/
private void clearExceptionalCompletion() {
int h = System.identityHashCode(this);
@@ -595,7 +596,7 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
}
}
}
-
+
/**
* A version of "sneaky throw" to relay exceptions
*/
@@ -624,35 +625,35 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
* Throws exception, if any, associated with the given status.
*/
private void reportException(int s) {
- Throwable ex = ((s == CANCELLED) ? new CancellationException() :
- (s == EXCEPTIONAL) ? getThrowableException() :
- null);
- if (ex != null)
- ForkJoinTask.rethrow(ex);
+ if (s == CANCELLED)
+ throw new CancellationException();
+ if (s == EXCEPTIONAL)
+ rethrow(getThrowableException());
}
// public methods
/**
- * Arranges to asynchronously execute this task. While it is not
- * necessarily enforced, it is a usage error to fork a task more
- * than once unless it has completed and been reinitialized.
- * Subsequent modifications to the state of this task or any data
- * it operates on are not necessarily consistently observable by
- * any thread other than the one executing it unless preceded by a
- * call to {@link #join} or related methods, or a call to {@link
- * #isDone} returning {@code true}.
- *
- * <p>This method may be invoked only from within {@code
- * ForkJoinPool} computations (as may be determined using method
- * {@link #inForkJoinPool}). Attempts to invoke in other contexts
- * result in exceptions or errors, possibly including {@code
- * ClassCastException}.
+ * Arranges to asynchronously execute this task in the pool the
+ * current task is running in, if applicable, or using the {@link
+ * ForkJoinPool#commonPool()} if not {@link #inForkJoinPool}. While
+ * it is not necessarily enforced, it is a usage error to fork a
+ * task more than once unless it has completed and been
+ * reinitialized. Subsequent modifications to the state of this
+ * task or any data it operates on are not necessarily
+ * consistently observable by any thread other than the one
+ * executing it unless preceded by a call to {@link #join} or
+ * related methods, or a call to {@link #isDone} returning {@code
+ * true}.
*
* @return {@code this}, to simplify usage
*/
public final ForkJoinTask<V> fork() {
- ((ForkJoinWorkerThread)Thread.currentThread()).workQueue.push(this);
+ Thread t;
+ if ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread)
+ ((ForkJoinWorkerThread)t).workQueue.push(this);
+ else
+ ForkJoinPool.common.externalPush(this);
return this;
}
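With this change fork() no longer requires the caller to be a pool worker: from an ordinary thread the task is pushed to the common pool. A tiny sketch reusing the hypothetical SumTask (and its assumed long[] data) from the earlier commonPool() example:

    SumTask root = new SumTask(data, 0, data.length);
    root.fork();               // caller is not a worker, so this goes to ForkJoinPool.commonPool()
    // ... other work on this thread ...
    long total = root.join();  // blocks (possibly helping the common pool) until the result is ready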
@@ -702,12 +703,6 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
* cancelled, completed normally or exceptionally, or left
* unprocessed.
*
- * <p>This method may be invoked only from within {@code
- * ForkJoinPool} computations (as may be determined using method
- * {@link #inForkJoinPool}). Attempts to invoke in other contexts
- * result in exceptions or errors, possibly including {@code
- * ClassCastException}.
- *
* @param t1 the first task
* @param t2 the second task
* @throws NullPointerException if any task is null
@@ -733,12 +728,6 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
* related methods to check if they have been cancelled, completed
* normally or exceptionally, or left unprocessed.
*
- * <p>This method may be invoked only from within {@code
- * ForkJoinPool} computations (as may be determined using method
- * {@link #inForkJoinPool}). Attempts to invoke in other contexts
- * result in exceptions or errors, possibly including {@code
- * ClassCastException}.
- *
* @param tasks the tasks
* @throws NullPointerException if any task is null
*/
@@ -766,7 +755,7 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
}
}
if (ex != null)
- ForkJoinTask.rethrow(ex);
+ rethrow(ex);
}
/**
@@ -782,12 +771,6 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
* cancelled, completed normally or exceptionally, or left
* unprocessed.
*
- * <p>This method may be invoked only from within {@code
- * ForkJoinPool} computations (as may be determined using method
- * {@link #inForkJoinPool}). Attempts to invoke in other contexts
- * result in exceptions or errors, possibly including {@code
- * ClassCastException}.
- *
* @param tasks the collection of tasks
* @return the tasks argument, to simplify usage
* @throws NullPointerException if tasks or any element are null
@@ -823,7 +806,7 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
}
}
if (ex != null)
- ForkJoinTask.rethrow(ex);
+ rethrow(ex);
return tasks;
}
@@ -996,8 +979,9 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
if (Thread.interrupted())
throw new InterruptedException();
// Messy in part because we measure in nanosecs, but wait in millisecs
- int s; long ns, ms;
- if ((s = status) >= 0 && (ns = unit.toNanos(timeout)) > 0L) {
+ int s; long ms;
+ long ns = unit.toNanos(timeout);
+ if ((s = status) >= 0 && ns > 0L) {
long deadline = System.nanoTime() + ns;
ForkJoinPool p = null;
ForkJoinPool.WorkQueue w = null;
@@ -1006,16 +990,18 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
ForkJoinWorkerThread wt = (ForkJoinWorkerThread)t;
p = wt.pool;
w = wt.workQueue;
- s = p.helpJoinOnce(w, this); // no retries on failure
+ p.helpJoinOnce(w, this); // no retries on failure
}
+ else
+ ForkJoinPool.externalHelpJoin(this);
boolean canBlock = false;
boolean interrupted = false;
try {
while ((s = status) >= 0) {
- if (w != null && w.runState < 0)
+ if (w != null && w.qlock < 0)
cancelIgnoringExceptions(this);
else if (!canBlock) {
- if (p == null || p.tryCompensate(this, null))
+ if (p == null || p.tryCompensate())
canBlock = true;
}
else {
@@ -1083,17 +1069,15 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
* be of use in designs in which many tasks are forked, but none
* are explicitly joined, instead executing them until all are
* processed.
- *
- * <p>This method may be invoked only from within {@code
- * ForkJoinPool} computations (as may be determined using method
- * {@link #inForkJoinPool}). Attempts to invoke in other contexts
- * result in exceptions or errors, possibly including {@code
- * ClassCastException}.
*/
public static void helpQuiesce() {
- ForkJoinWorkerThread wt =
- (ForkJoinWorkerThread)Thread.currentThread();
- wt.pool.helpQuiescePool(wt.workQueue);
+ Thread t;
+ if ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) {
+ ForkJoinWorkerThread wt = (ForkJoinWorkerThread)t;
+ wt.pool.helpQuiescePool(wt.workQueue);
+ }
+ else
+ ForkJoinPool.quiesceCommonPool();
}
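A sketch of the fork-without-join style this supports (names illustrative): inside a worker, fork many independent subtasks and then help run everything to quiescence instead of joining each one.

    class ProcessAll extends RecursiveAction {
        final Runnable[] items;
        ProcessAll(Runnable[] items) { this.items = items; }
        protected void compute() {
            for (final Runnable r : items) {
                new RecursiveAction() {
                    protected void compute() { r.run(); }
                }.fork();        // fork, but never join
            }
            helpQuiesce();       // run forked and stolen tasks until all are processed
        }
    }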
/**
@@ -1146,23 +1130,19 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
/**
* Tries to unschedule this task for execution. This method will
- * typically succeed if this task is the most recently forked task
- * by the current thread, and has not commenced executing in
- * another thread. This method may be useful when arranging
- * alternative local processing of tasks that could have been, but
- * were not, stolen.
- *
- * <p>This method may be invoked only from within {@code
- * ForkJoinPool} computations (as may be determined using method
- * {@link #inForkJoinPool}). Attempts to invoke in other contexts
- * result in exceptions or errors, possibly including {@code
- * ClassCastException}.
+ * typically (but is not guaranteed to) succeed if this task is
+ * the most recently forked task by the current thread, and has
+ * not commenced executing in another thread. This method may be
+ * useful when arranging alternative local processing of tasks
+ * that could have been, but were not, stolen.
*
* @return {@code true} if unforked
*/
public boolean tryUnfork() {
- return ((ForkJoinWorkerThread)Thread.currentThread())
- .workQueue.tryUnpush(this);
+ Thread t;
+ return (((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) ?
+ ((ForkJoinWorkerThread)t).workQueue.tryUnpush(this) :
+ ForkJoinPool.tryExternalUnpush(this));
}
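A sketch of the fork-then-reclaim pattern this enables (PairSum and SumTask are illustrative, the latter as in the earlier commonPool() sketch): fork one half, run the other, then either take the forked task back and run it locally, or join it if it was stolen.

    class PairSum extends RecursiveTask<Long> {
        final SumTask left, right;
        PairSum(SumTask left, SumTask right) { this.left = left; this.right = right; }
        protected Long compute() {
            right.fork();                              // make the right half stealable
            long sum = left.invoke();                  // run the left half in this thread
            sum += right.tryUnfork() ? right.invoke()  // not stolen: reclaim and run it here
                                     : right.join();   // stolen or running: wait for the thief
            return sum;
        }
    }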
/**
@@ -1171,84 +1151,32 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
* value may be useful for heuristic decisions about whether to
* fork other tasks.
*
- * <p>This method may be invoked only from within {@code
- * ForkJoinPool} computations (as may be determined using method
- * {@link #inForkJoinPool}). Attempts to invoke in other contexts
- * result in exceptions or errors, possibly including {@code
- * ClassCastException}.
- *
* @return the number of tasks
*/
public static int getQueuedTaskCount() {
- return ((ForkJoinWorkerThread) Thread.currentThread())
- .workQueue.queueSize();
+ Thread t; ForkJoinPool.WorkQueue q;
+ if ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread)
+ q = ((ForkJoinWorkerThread)t).workQueue;
+ else
+ q = ForkJoinPool.commonSubmitterQueue();
+ return (q == null) ? 0 : q.queueSize();
}
/**
* Returns an estimate of how many more locally queued tasks are
* held by the current worker thread than there are other worker
- * threads that might steal them. This value may be useful for
+ * threads that might steal them, or zero if this thread is not
+ * operating in a ForkJoinPool. This value may be useful for
* heuristic decisions about whether to fork other tasks. In many
* usages of ForkJoinTasks, at steady state, each worker should
* aim to maintain a small constant surplus (for example, 3) of
* tasks, and to process computations locally if this threshold is
* exceeded.
*
- * <p>This method may be invoked only from within {@code
- * ForkJoinPool} computations (as may be determined using method
- * {@link #inForkJoinPool}). Attempts to invoke in other contexts
- * result in exceptions or errors, possibly including {@code
- * ClassCastException}.
- *
* @return the surplus number of tasks, which may be negative
*/
public static int getSurplusQueuedTaskCount() {
- /*
- * The aim of this method is to return a cheap heuristic guide
- * for task partitioning when programmers, frameworks, tools,
- * or languages have little or no idea about task granularity.
- * In essence by offering this method, we ask users only about
- * tradeoffs in overhead vs expected throughput and its
- * variance, rather than how finely to partition tasks.
- *
- * In a steady state strict (tree-structured) computation,
- * each thread makes available for stealing enough tasks for
- * other threads to remain active. Inductively, if all threads
- * play by the same rules, each thread should make available
- * only a constant number of tasks.
- *
- * The minimum useful constant is just 1. But using a value of
- * 1 would require immediate replenishment upon each steal to
- * maintain enough tasks, which is infeasible. Further,
- * partitionings/granularities of offered tasks should
- * minimize steal rates, which in general means that threads
- * nearer the top of computation tree should generate more
- * than those nearer the bottom. In perfect steady state, each
- * thread is at approximately the same level of computation
- * tree. However, producing extra tasks amortizes the
- * uncertainty of progress and diffusion assumptions.
- *
- * So, users will want to use values larger, but not much
- * larger than 1 to both smooth over transient shortages and
- * hedge against uneven progress; as traded off against the
- * cost of extra task overhead. We leave the user to pick a
- * threshold value to compare with the results of this call to
- * guide decisions, but recommend values such as 3.
- *
- * When all threads are active, it is on average OK to
- * estimate surplus strictly locally. In steady-state, if one
- * thread is maintaining say 2 surplus tasks, then so are
- * others. So we can just use estimated queue length.
- * However, this strategy alone leads to serious mis-estimates
- * in some non-steady-state conditions (ramp-up, ramp-down,
- * other stalls). We can detect many of these by further
- * considering the number of "idle" threads, that are known to
- * have zero queued tasks, so compensate by a factor of
- * (#idle/#active) threads.
- */
- ForkJoinWorkerThread wt =
- (ForkJoinWorkerThread)Thread.currentThread();
- return wt.workQueue.queueSize() - wt.pool.idlePerActive();
+ return ForkJoinPool.getSurplusQueuedTaskCount();
}
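The advice in the deleted comment survives in the API: keep a small constant surplus (around 3) of stealable tasks and switch to sequential processing once that threshold is exceeded. A hedged sketch of the heuristic, where Sum, the 64-element cutoff and the threshold of 3 are illustrative values rather than code from this patch:

    import scala.concurrent.forkjoin.{ForkJoinTask, RecursiveTask}

    class Sum(xs: Array[Long], lo: Int, hi: Int) extends RecursiveTask[Long] {
      def compute(): Long =
        // Stop forking once roughly 3 more tasks are queued locally than there
        // are idle workers; finish this slice sequentially instead.
        if (hi - lo < 64 || ForkJoinTask.getSurplusQueuedTaskCount() >= 3) {
          var s = 0L; var i = lo
          while (i < hi) { s += xs(i); i += 1 }
          s
        } else {
          val mid  = (lo + hi) >>> 1
          val left = new Sum(xs, lo, mid)
          left.fork()                                  // offer one half for stealing
          new Sum(xs, mid, hi).invoke() + left.join()  // recurse locally on the other half
        }
    }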
// Extension methods
@@ -1299,59 +1227,51 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
* primarily to support extensions, and is unlikely to be useful
* otherwise.
*
- * <p>This method may be invoked only from within {@code
- * ForkJoinPool} computations (as may be determined using method
- * {@link #inForkJoinPool}). Attempts to invoke in other contexts
- * result in exceptions or errors, possibly including {@code
- * ClassCastException}.
- *
* @return the next task, or {@code null} if none are available
*/
protected static ForkJoinTask<?> peekNextLocalTask() {
- return ((ForkJoinWorkerThread) Thread.currentThread()).workQueue.peek();
+ Thread t; ForkJoinPool.WorkQueue q;
+ if ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread)
+ q = ((ForkJoinWorkerThread)t).workQueue;
+ else
+ q = ForkJoinPool.commonSubmitterQueue();
+ return (q == null) ? null : q.peek();
}
/**
* Unschedules and returns, without executing, the next task
- * queued by the current thread but not yet executed. This method
- * is designed primarily to support extensions, and is unlikely to
- * be useful otherwise.
- *
- * <p>This method may be invoked only from within {@code
- * ForkJoinPool} computations (as may be determined using method
- * {@link #inForkJoinPool}). Attempts to invoke in other contexts
- * result in exceptions or errors, possibly including {@code
- * ClassCastException}.
+ * queued by the current thread but not yet executed, if the
+ * current thread is operating in a ForkJoinPool. This method is
+ * designed primarily to support extensions, and is unlikely to be
+ * useful otherwise.
*
* @return the next task, or {@code null} if none are available
*/
protected static ForkJoinTask<?> pollNextLocalTask() {
- return ((ForkJoinWorkerThread) Thread.currentThread())
- .workQueue.nextLocalTask();
+ Thread t;
+ return ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) ?
+ ((ForkJoinWorkerThread)t).workQueue.nextLocalTask() :
+ null;
}
/**
- * Unschedules and returns, without executing, the next task
+ * If the current thread is operating in a ForkJoinPool,
+ * unschedules and returns, without executing, the next task
* queued by the current thread but not yet executed, if one is
* available, or if not available, a task that was forked by some
* other thread, if available. Availability may be transient, so a
- * {@code null} result does not necessarily imply quiescence
- * of the pool this task is operating in. This method is designed
+ * {@code null} result does not necessarily imply quiescence of
+ * the pool this task is operating in. This method is designed
* primarily to support extensions, and is unlikely to be useful
* otherwise.
*
- * <p>This method may be invoked only from within {@code
- * ForkJoinPool} computations (as may be determined using method
- * {@link #inForkJoinPool}). Attempts to invoke in other contexts
- * result in exceptions or errors, possibly including {@code
- * ClassCastException}.
- *
* @return a task, or {@code null} if none are available
*/
protected static ForkJoinTask<?> pollTask() {
- ForkJoinWorkerThread wt =
- (ForkJoinWorkerThread)Thread.currentThread();
- return wt.pool.nextTaskFor(wt.workQueue);
+ Thread t; ForkJoinWorkerThread wt;
+ return ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) ?
+ (wt = (ForkJoinWorkerThread)t).pool.nextTaskFor(wt.workQueue) :
+ null;
}
// tag operations
@@ -1540,14 +1460,16 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
// Unsafe mechanics
private static final sun.misc.Unsafe U;
private static final long STATUS;
+
static {
exceptionTableLock = new ReentrantLock();
exceptionTableRefQueue = new ReferenceQueue<Object>();
exceptionTable = new ExceptionNode[EXCEPTION_MAP_CAPACITY];
try {
U = getUnsafe();
+ Class<?> k = ForkJoinTask.class;
STATUS = U.objectFieldOffset
- (ForkJoinTask.class.getDeclaredField("status"));
+ (k.getDeclaredField("status"));
} catch (Exception e) {
throw new Error(e);
}
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java
index 90a0af5723..e62fc6eb71 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java
@@ -25,10 +25,17 @@ public class ForkJoinWorkerThread extends Thread {
* ForkJoinWorkerThreads are managed by ForkJoinPools and perform
* ForkJoinTasks. For explanation, see the internal documentation
* of class ForkJoinPool.
+ *
+ * This class just maintains links to its pool and WorkQueue. The
+ * pool field is set immediately upon construction, but the
+ * workQueue field is not set until a call to registerWorker
+ * completes. This leads to a visibility race, that is tolerated
+ * by requiring that the workQueue field is only accessed by the
+ * owning thread.
*/
- final ForkJoinPool.WorkQueue workQueue; // Work-stealing mechanics
final ForkJoinPool pool; // the pool this thread works in
+ final ForkJoinPool.WorkQueue workQueue; // work-stealing mechanics
/**
* Creates a ForkJoinWorkerThread operating in the given pool.
@@ -37,14 +44,10 @@ public class ForkJoinWorkerThread extends Thread {
* @throws NullPointerException if pool is null
*/
protected ForkJoinWorkerThread(ForkJoinPool pool) {
- super(pool.nextWorkerName());
- setDaemon(true);
- Thread.UncaughtExceptionHandler ueh = pool.ueh;
- if (ueh != null)
- setUncaughtExceptionHandler(ueh);
+ // Use a placeholder until a useful name can be set in registerWorker
+ super("aForkJoinWorkerThread");
this.pool = pool;
- pool.registerWorker(this.workQueue = new ForkJoinPool.WorkQueue
- (pool, this, pool.localMode));
+ this.workQueue = pool.registerWorker(this);
}
/**
@@ -116,4 +119,3 @@ public class ForkJoinWorkerThread extends Thread {
}
}
}
-
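User code normally meets this constructor through a ForkJoinWorkerThreadFactory, whose subclass body runs only after the superclass constructor (and thus registerWorker) has completed, so a custom name can safely replace the placeholder there. A hedged sketch; NamedWorker and the pool parameters are illustrative assumptions, not taken from this patch:

    import scala.concurrent.forkjoin.{ForkJoinPool, ForkJoinWorkerThread}

    class NamedWorker(pool: ForkJoinPool) extends ForkJoinWorkerThread(pool) {
      setName("fjp-worker-" + getId)  // replaces the placeholder name set by the superclass
    }

    val factory = new ForkJoinPool.ForkJoinWorkerThreadFactory {
      def newThread(pool: ForkJoinPool): ForkJoinWorkerThread = new NamedWorker(pool)
    }
    val pool = new ForkJoinPool(Runtime.getRuntime.availableProcessors, factory, null, false)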
diff --git a/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
index c5136c752b..d036a6e853 100644
--- a/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
@@ -43,8 +43,6 @@ import scala.tools.nsc.util.InterruptReq
*/
trait CompilerControl { self: Global =>
- import syntaxAnalyzer.UnitParser
-
type Response[T] = scala.tools.nsc.interactive.Response[T]
/** The scheduler by which client and compiler communicate
@@ -297,6 +295,14 @@ trait CompilerControl { self: Global =>
val tpe: Type
val accessible: Boolean
def implicitlyAdded = false
+
+ private def accessible_s = if (accessible) "" else "[inaccessible] "
+ def forceInfoString = {
+ definitions.fullyInitializeSymbol(sym)
+ definitions.fullyInitializeType(tpe)
+ infoString
+ }
+ def infoString = s"$accessible_s${sym.defStringSeenAs(tpe)}"
}
case class TypeMember(
diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala
index dbdb2d02b6..99f2cd4056 100644
--- a/src/interactive/scala/tools/nsc/interactive/Global.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Global.scala
@@ -15,14 +15,13 @@ import scala.reflect.internal.util.{ SourceFile, BatchSourceFile, Position, NoPo
import scala.tools.nsc.reporters._
import scala.tools.nsc.symtab._
import scala.tools.nsc.doc.ScaladocAnalyzer
-import scala.tools.nsc.typechecker.{ Analyzer, DivergentImplicit }
+import scala.tools.nsc.typechecker.Analyzer
import symtab.Flags.{ACCESSOR, PARAMACCESSOR}
import scala.annotation.{ elidable, tailrec }
import scala.language.implicitConversions
trait InteractiveScaladocAnalyzer extends InteractiveAnalyzer with ScaladocAnalyzer {
val global : Global
- import global._
override def newTyper(context: Context) = new Typer(context) with InteractiveTyper with ScaladocTyper {
override def canAdaptConstantTypeToLiteral = false
}
@@ -906,9 +905,9 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
/** Implements CompilerControl.askDocComment */
private[interactive] def getDocComment(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)],
response: Response[(String, String, Position)]) {
- informIDE(s"getDocComment $sym at $source site $site")
+ informIDE(s"getDocComment $sym at $source, site $site")
respond(response) {
- withTempUnits(fragments.toList.unzip._2){ units =>
+ withTempUnits(fragments.unzip._2){ units =>
for((sym, src) <- fragments) {
val mirror = findMirrorSymbol(sym, units(src))
if (mirror ne NoSymbol) forceDocComment(mirror, units(src))
@@ -921,17 +920,19 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
}
}
+ // New typer run to remove temp units and drop per-run caches that might refer to symbols entered from temp units.
+ newTyperRun()
}
def stabilizedType(tree: Tree): Type = tree match {
- case Ident(_) if tree.symbol.isStable =>
+ case Ident(_) if treeInfo.admitsTypeSelection(tree) =>
singleType(NoPrefix, tree.symbol)
- case Select(qual, _) if qual.tpe != null && tree.symbol.isStable =>
+ case Select(qual, _) if treeInfo.admitsTypeSelection(tree) =>
singleType(qual.tpe, tree.symbol)
case Import(expr, selectors) =>
tree.symbol.info match {
case analyzer.ImportType(expr) => expr match {
- case s@Select(qual, name) => singleType(qual.tpe, s.symbol)
+ case s@Select(qual, name) if treeInfo.admitsTypeSelection(expr) => singleType(qual.tpe, s.symbol)
case i : Ident => i.tpe
case _ => tree.tpe
}
@@ -1034,23 +1035,21 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
private def typeMembers(pos: Position): Stream[List[TypeMember]] = {
- var tree = typedTreeAt(pos)
-
- // if tree consists of just x. or x.fo where fo is not yet a full member name
- // ignore the selection and look in just x.
- tree match {
- case Select(qual, name) if tree.tpe == ErrorType => tree = qual
- case _ =>
+ // Choosing which tree will tell us the type members at the given position:
+ // If pos leads to an Import, type the expr
+ // If pos leads to a Select, type the qualifier as long as it is not erroneous
+ // (this implies discarding the possibly incomplete name in the Select node)
+ // Otherwise, type the tree found at 'pos' directly.
+ val tree0 = typedTreeAt(pos) match {
+ case sel @ Select(qual, _) if sel.tpe == ErrorType => qual
+ case Import(expr, _) => expr
+ case t => t
}
-
val context = doLocateContext(pos)
-
- if (tree.tpe == null)
- // TODO: guard with try/catch to deal with ill-typed qualifiers.
- tree = analyzer.newTyper(context).typedQualifier(tree)
+ // TODO: guard with try/catch to deal with ill-typed qualifiers.
+ val tree = if (tree0.tpe eq null) analyzer newTyper context typedQualifier tree0 else tree0
debugLog("typeMembers at "+tree+" "+tree.tpe)
-
val superAccess = tree.isInstanceOf[Super]
val members = new Members[TypeMember]
@@ -1090,7 +1089,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
val applicableViews: List[SearchResult] =
if (ownerTpe.isErroneous) List()
else new ImplicitSearch(
- tree, functionType(List(ownerTpe), AnyClass.tpe), isView = true,
+ tree, functionType(List(ownerTpe), AnyTpe), isView = true,
context0 = context.makeImplicit(reportAmbiguousErrors = false)).allImplicits
for (view <- applicableViews) {
val vtree = viewApply(view)
@@ -1211,9 +1210,6 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
case ex: TypeError =>
debugLog("type error caught: "+ex)
alt
- case ex: DivergentImplicit =>
- debugLog("divergent implicit caught: "+ex)
- alt
}
}
diff --git a/src/interactive/scala/tools/nsc/interactive/Picklers.scala b/src/interactive/scala/tools/nsc/interactive/Picklers.scala
index b184afd0f5..900a06333d 100644
--- a/src/interactive/scala/tools/nsc/interactive/Picklers.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Picklers.scala
@@ -96,7 +96,7 @@ trait Picklers { self: Global =>
if (!sym.isRoot) {
ownerNames(sym.owner, buf)
buf += (if (sym.isModuleClass) sym.sourceModule else sym).name
- if (!sym.isType && !sym.isStable) {
+ if (!sym.isType && !sym.isStable) { // TODO: what's the reasoning behind this condition!?
val sym1 = sym.owner.info.decl(sym.name)
if (sym1.isOverloaded) {
val index = sym1.alternatives.indexOf(sym)
diff --git a/src/interactive/scala/tools/nsc/interactive/REPL.scala b/src/interactive/scala/tools/nsc/interactive/REPL.scala
index 432400ecd2..33981771ec 100644
--- a/src/interactive/scala/tools/nsc/interactive/REPL.scala
+++ b/src/interactive/scala/tools/nsc/interactive/REPL.scala
@@ -2,7 +2,8 @@
* Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package interactive
import scala.reflect.internal.util._
@@ -164,7 +165,7 @@ object REPL {
show(reloadResult)
case "reloadAndAskType" :: file :: millis :: Nil =>
comp.askReload(List(toSourceFile(file)), reloadResult)
- Thread.sleep(millis.toInt)
+ Thread.sleep(millis.toLong)
println("ask type now")
comp.askLoadedTyped(toSourceFile(file), typedResult)
typedResult.get
diff --git a/src/interactive/scala/tools/nsc/interactive/RangePositions.scala b/src/interactive/scala/tools/nsc/interactive/RangePositions.scala
index a24be50b33..410f919daa 100644
--- a/src/interactive/scala/tools/nsc/interactive/RangePositions.scala
+++ b/src/interactive/scala/tools/nsc/interactive/RangePositions.scala
@@ -7,9 +7,8 @@ package scala.tools.nsc
package interactive
@deprecated("Use scala.reflect.internal.Positions", "2.11.0")
-trait RangePositions extends {
- override val useOffsetPositions = false
-} with scala.reflect.internal.Positions with ast.Trees with ast.Positions {
+trait RangePositions extends scala.reflect.internal.Positions with ast.Trees with ast.Positions {
self: scala.tools.nsc.Global =>
+ override val useOffsetPositions = false
}
diff --git a/src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala b/src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala
index 7af9174704..ae70a74b60 100644
--- a/src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala
+++ b/src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala
@@ -1,4 +1,5 @@
-package scala.tools.nsc
+package scala
+package tools.nsc
package interactive
import scala.reflect.internal.util.{SourceFile, BatchSourceFile, RangePosition}
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala b/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala
index 8a47c1df37..4390d5dc54 100644
--- a/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala
@@ -2,7 +2,8 @@
* Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package interactive
package tests
@@ -25,7 +26,7 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) {
while (!res.isComplete && !res.isCancelled) {
if (System.currentTimeMillis() > limit) {
print("c"); res.cancel()
- } else res.get(TIMEOUT) match {
+ } else res.get(TIMEOUT.toLong) match {
case Some(Left(t)) =>
/**/
if (settings.verbose) println(t)
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
index c0ad245091..e28bf20745 100644
--- a/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
@@ -16,21 +16,18 @@ private[tests] trait CoreTestDefs
extends PresentationCompilerTestDef
with AskCompletionAt {
- def memberPrinter(member: compiler.Member): String =
- "[accessible: %5s] ".format(member.accessible) + "`" + (member.sym.toString.trim + member.tpe.toString()).trim + "`"
-
override def runTest() {
askAllSources(CompletionMarker) { pos =>
askCompletionAt(pos)
} { (pos, members) =>
withResponseDelimiter {
- reporter.println("[response] aksTypeCompletion at " + format(pos))
+ reporter.println("[response] askCompletionAt " + format(pos))
// we skip getClass because it changed signature between 1.5 and 1.6, so there is no
// universal check file that we can provide for this to work
reporter.println("retrieved %d members".format(members.size))
compiler ask { () =>
val filtered = members.filterNot(member => (member.sym.name string_== "getClass") || member.sym.isConstructor)
- reporter.println(filtered.map(memberPrinter).sortBy(_.toString()).mkString("\n"))
+ reporter println (filtered.map(_.forceInfoString).sorted mkString "\n")
}
}
}
@@ -48,7 +45,7 @@ private[tests] trait CoreTestDefs
askTypeAt(pos)
} { (pos, tree) =>
withResponseDelimiter {
- reporter.println("[response] askTypeAt at " + format(pos))
+ reporter.println("[response] askTypeAt " + format(pos))
compiler.ask(() => reporter.println(tree))
}
}
diff --git a/src/jline/build.sbt b/src/jline/build.sbt
index 4fc3bab28a..873f7574f1 100644
--- a/src/jline/build.sbt
+++ b/src/jline/build.sbt
@@ -4,9 +4,9 @@ name := "jline"
organization := "org.scala-lang"
-version := "2.10.0-SNAPSHOT"
+version := "2.11.0-SNAPSHOT"
-scalaVersion := "2.9.0-1"
+scalaVersion := "2.10.1"
// Only need these because of weird testing jline issues.
retrieveManaged := true
@@ -14,11 +14,11 @@ retrieveManaged := true
parallelExecution in Test := false
libraryDependencies ++= Seq(
- "org.fusesource.jansi" % "jansi" % "1.4",
- "com.novocode" % "junit-interface" % "0.7" % "test->default"
+ "org.fusesource.jansi" % "jansi" % "1.10",
+ "com.novocode" % "junit-interface" % "0.9" % "test->default"
)
-javacOptions ++= Seq("-target", "1.5")
+javacOptions ++= Seq("-source", "1.5", "-target", "1.5")
proguardOptions ++= Seq(
"-dontshrink",
diff --git a/src/jline/manual-test.sh b/src/jline/manual-test.sh
index aa5131c903..744e1756e8 100755
--- a/src/jline/manual-test.sh
+++ b/src/jline/manual-test.sh
@@ -3,6 +3,7 @@
# Apparently the jline bundled with sbt interferes with testing some
# changes: for instance after changing the keybindings I kept seeing
# failures until I realized what was happening and bypassed sbt, like this.
+CP=lib_managed/jars/com.novocode/junit-interface/junit-interface-0.9.jar:lib_managed/jars/junit/junit-dep/junit-dep-4.8.2.jar:lib_managed/jars/org.fusesource.jansi/jansi/jansi-1.10.jar:lib_managed/jars/org.hamcrest/hamcrest-core/hamcrest-core-1.1.jar:lib_managed/jars/org.scala-tools.testing/test-interface/test-interface-0.5.jar:target/scala-2.10/test-classes:target/scala-2.10/jline_2.10-2.11.0-SNAPSHOT.min.jar
-java -cp lib_managed/jar/com.novocode/junit-interface/junit-interface-0.5.jar:lib_managed/jar/junit/junit/junit-4.8.1.jar:lib_managed/jar/org.fusesource.jansi/jansi/jansi-1.4.jar:lib_managed/jar/org.scala-tools.testing/test-interface/test-interface-0.5.jar:target/scala-2.9.0.1/test-classes:target/scala-2.9.0.1/jline_2.9.0-1-2.10.0-SNAPSHOT.jar \
-org.junit.runner.JUnitCore scala.tools.jline.console.EditLineTest
+sbt proguard
+java -cp $CP org.junit.runner.JUnitCore scala.tools.jline.console.EditLineTest
diff --git a/src/jline/project/build.properties b/src/jline/project/build.properties
new file mode 100644
index 0000000000..9b860e23c5
--- /dev/null
+++ b/src/jline/project/build.properties
@@ -0,0 +1 @@
+sbt.version=0.12.3
diff --git a/src/jline/project/plugins.sbt b/src/jline/project/plugins.sbt
new file mode 100644
index 0000000000..9c13de92d8
--- /dev/null
+++ b/src/jline/project/plugins.sbt
@@ -0,0 +1,3 @@
+resolvers += Resolver.url("sbt-plugin-releases-scalasbt", url("http://repo.scala-sbt.org/scalasbt/sbt-plugin-releases/"))(Resolver.ivyStylePatterns)
+
+addSbtPlugin("org.scala-sbt" % "xsbt-proguard-plugin" % "0.1.3")
diff --git a/src/jline/project/plugins/build.sbt b/src/jline/project/plugins/build.sbt
deleted file mode 100644
index 0e0f27b1eb..0000000000
--- a/src/jline/project/plugins/build.sbt
+++ /dev/null
@@ -1,5 +0,0 @@
-resolvers += "Proguard plugin repo" at "http://siasia.github.com/maven2"
-
-libraryDependencies <<= (libraryDependencies, appConfiguration) { (deps, app) =>
- deps :+ "com.github.siasia" %% "xsbt-proguard-plugin" % app.provider.id.version
-}
diff --git a/src/library/scala/Boolean.scala b/src/library/scala/Boolean.scala
index e43b7d0a82..ddd11257c6 100644
--- a/src/library/scala/Boolean.scala
+++ b/src/library/scala/Boolean.scala
@@ -10,6 +10,8 @@
package scala
+import scala.language.implicitConversions
+
/** `Boolean` (equivalent to Java's `boolean` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Boolean` are not
* represented by an object in the underlying runtime system.
@@ -114,6 +116,8 @@ object Boolean extends AnyValCompanion {
/** Transform a value type into a boxed reference type.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToBoolean`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the Boolean to be boxed
* @return a java.lang.Boolean offering `x` as its underlying value.
*/
@@ -123,6 +127,8 @@ object Boolean extends AnyValCompanion {
* method is not typesafe: it accepts any Object, but will throw
* an exception if the argument is not a java.lang.Boolean.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToBoolean`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the java.lang.Boolean to be unboxed.
* @throws ClassCastException if the argument is not a java.lang.Boolean
* @return the Boolean resulting from calling booleanValue() on `x`
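The same BoxesRunTime pointer is added to every AnyVal companion below (Byte, Char, Double, Float, Int, Long, Short). What the documented entry points do, shown as a small illustrative snippet rather than anything from this patch:

    val boxed: java.lang.Boolean = Boolean.box(true)     // BoxesRunTime.boxToBoolean at runtime
    val back: Boolean            = Boolean.unbox(boxed)  // BoxesRunTime.unboxToBoolean at runtime
    // Boolean.unbox("no")  // compiles (parameter type is Object) but throws ClassCastException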
diff --git a/src/library/scala/Byte.scala b/src/library/scala/Byte.scala
index d1979236d3..2510e859c0 100644
--- a/src/library/scala/Byte.scala
+++ b/src/library/scala/Byte.scala
@@ -606,6 +606,8 @@ object Byte extends AnyValCompanion {
/** Transform a value type into a boxed reference type.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToByte`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the Byte to be boxed
* @return a java.lang.Byte offering `x` as its underlying value.
*/
@@ -615,6 +617,8 @@ object Byte extends AnyValCompanion {
* method is not typesafe: it accepts any Object, but will throw
* an exception if the argument is not a java.lang.Byte.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToByte`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the java.lang.Byte to be unboxed.
* @throws ClassCastException if the argument is not a java.lang.Byte
* @return the Byte resulting from calling byteValue() on `x`
diff --git a/src/library/scala/Char.scala b/src/library/scala/Char.scala
index 00ddff5b3b..1c9a2ba44f 100644
--- a/src/library/scala/Char.scala
+++ b/src/library/scala/Char.scala
@@ -606,6 +606,8 @@ object Char extends AnyValCompanion {
/** Transform a value type into a boxed reference type.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToCharacter`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the Char to be boxed
* @return a java.lang.Character offering `x` as its underlying value.
*/
@@ -615,6 +617,8 @@ object Char extends AnyValCompanion {
* method is not typesafe: it accepts any Object, but will throw
* an exception if the argument is not a java.lang.Character.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToChar`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the java.lang.Character to be unboxed.
* @throws ClassCastException if the argument is not a java.lang.Character
* @return the Char resulting from calling charValue() on `x`
diff --git a/src/library/scala/Double.scala b/src/library/scala/Double.scala
index 85bf9fe5c5..ce081bbec1 100644
--- a/src/library/scala/Double.scala
+++ b/src/library/scala/Double.scala
@@ -10,6 +10,8 @@
package scala
+import scala.language.implicitConversions
+
/** `Double`, a 64-bit IEEE-754 floating point number (equivalent to Java's `double` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Double` are not
* represented by an object in the underlying runtime system.
@@ -380,6 +382,8 @@ object Double extends AnyValCompanion {
/** Transform a value type into a boxed reference type.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToDouble`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the Double to be boxed
* @return a java.lang.Double offering `x` as its underlying value.
*/
@@ -389,6 +393,8 @@ object Double extends AnyValCompanion {
* method is not typesafe: it accepts any Object, but will throw
* an exception if the argument is not a java.lang.Double.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToDouble`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the java.lang.Double to be unboxed.
* @throws ClassCastException if the argument is not a java.lang.Double
* @return the Double resulting from calling doubleValue() on `x`
diff --git a/src/library/scala/Float.scala b/src/library/scala/Float.scala
index f67f45897f..4ff2d509b8 100644
--- a/src/library/scala/Float.scala
+++ b/src/library/scala/Float.scala
@@ -382,6 +382,8 @@ object Float extends AnyValCompanion {
/** Transform a value type into a boxed reference type.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToFloat`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the Float to be boxed
* @return a java.lang.Float offering `x` as its underlying value.
*/
@@ -391,6 +393,8 @@ object Float extends AnyValCompanion {
* method is not typesafe: it accepts any Object, but will throw
* an exception if the argument is not a java.lang.Float.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToFloat`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the java.lang.Float to be unboxed.
* @throws ClassCastException if the argument is not a java.lang.Float
* @return the Float resulting from calling floatValue() on `x`
diff --git a/src/library/scala/Int.scala b/src/library/scala/Int.scala
index 1bacdbcee9..6a27195b10 100644
--- a/src/library/scala/Int.scala
+++ b/src/library/scala/Int.scala
@@ -606,6 +606,8 @@ object Int extends AnyValCompanion {
/** Transform a value type into a boxed reference type.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToInteger`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the Int to be boxed
* @return a java.lang.Integer offering `x` as its underlying value.
*/
@@ -615,6 +617,8 @@ object Int extends AnyValCompanion {
* method is not typesafe: it accepts any Object, but will throw
* an exception if the argument is not a java.lang.Integer.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToInt`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the java.lang.Integer to be unboxed.
* @throws ClassCastException if the argument is not a java.lang.Integer
* @return the Int resulting from calling intValue() on `x`
diff --git a/src/library/scala/Long.scala b/src/library/scala/Long.scala
index 83adcda819..4d369ae010 100644
--- a/src/library/scala/Long.scala
+++ b/src/library/scala/Long.scala
@@ -606,6 +606,8 @@ object Long extends AnyValCompanion {
/** Transform a value type into a boxed reference type.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToLong`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the Long to be boxed
* @return a java.lang.Long offering `x` as its underlying value.
*/
@@ -615,6 +617,8 @@ object Long extends AnyValCompanion {
* method is not typesafe: it accepts any Object, but will throw
* an exception if the argument is not a java.lang.Long.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToLong`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the java.lang.Long to be unboxed.
* @throws ClassCastException if the argument is not a java.lang.Long
* @return the Long resulting from calling longValue() on `x`
diff --git a/src/library/scala/LowPriorityImplicits.scala b/src/library/scala/LowPriorityImplicits.scala
deleted file mode 100644
index 535f1ac699..0000000000
--- a/src/library/scala/LowPriorityImplicits.scala
+++ /dev/null
@@ -1,95 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-
-import scala.collection.{ mutable, immutable, generic }
-import mutable.WrappedArray
-import immutable.WrappedString
-import generic.CanBuildFrom
-import scala.language.implicitConversions
-
-/** The `LowPriorityImplicits` class provides implicit values that
- * are valid in all Scala compilation units without explicit qualification,
- * but that are partially overridden by higher-priority conversions in object
- * `Predef`.
- *
- * @author Martin Odersky
- * @since 2.8
- */
-private[scala] abstract class LowPriorityImplicits {
- /** We prefer the java.lang.* boxed types to these wrappers in
- * any potential conflicts. Conflicts do exist because the wrappers
- * need to implement ScalaNumber in order to have a symmetric equals
- * method, but that implies implementing java.lang.Number as well.
- *
- * Note - these are inlined because they are value classes, but
- * the call to xxxWrapper is not eliminated even though it does nothing.
- * Even inlined, every call site does a no-op retrieval of Predef's MODULE$
- * because maybe loading Predef has side effects!
- */
- @inline implicit def byteWrapper(x: Byte) = new runtime.RichByte(x)
- @inline implicit def shortWrapper(x: Short) = new runtime.RichShort(x)
- @inline implicit def intWrapper(x: Int) = new runtime.RichInt(x)
- @inline implicit def charWrapper(c: Char) = new runtime.RichChar(c)
- @inline implicit def longWrapper(x: Long) = new runtime.RichLong(x)
- @inline implicit def floatWrapper(x: Float) = new runtime.RichFloat(x)
- @inline implicit def doubleWrapper(x: Double) = new runtime.RichDouble(x)
- @inline implicit def booleanWrapper(x: Boolean) = new runtime.RichBoolean(x)
-
- // These eight implicits exist solely to exclude Null from the domain of
- // the boxed types, so that e.g. "var x: Int = null" is a compile time
- // error rather than a delayed null pointer exception by way of the
- // conversion from java.lang.Integer. If defined in the same file as
- // Integer2int, they would have higher priority because Null is a subtype
- // of Integer. We balance that out and create conflict by moving the
- // definition into the superclass.
- //
- // Caution: do not adjust tightrope tension without safety goggles in place.
- implicit def Byte2byteNullConflict(x: Null): Byte = sys.error("value error")
- implicit def Short2shortNullConflict(x: Null): Short = sys.error("value error")
- implicit def Character2charNullConflict(x: Null): Char = sys.error("value error")
- implicit def Integer2intNullConflict(x: Null): Int = sys.error("value error")
- implicit def Long2longNullConflict(x: Null): Long = sys.error("value error")
- implicit def Float2floatNullConflict(x: Null): Float = sys.error("value error")
- implicit def Double2doubleNullConflict(x: Null): Double = sys.error("value error")
- implicit def Boolean2booleanNullConflict(x: Null): Boolean = sys.error("value error")
-
- implicit def genericWrapArray[T](xs: Array[T]): WrappedArray[T] =
- if (xs eq null) null
- else WrappedArray.make(xs)
-
- // Since the JVM thinks arrays are covariant, one 0-length Array[AnyRef]
- // is as good as another for all T <: AnyRef. Instead of creating 100,000,000
- // unique ones by way of this implicit, let's share one.
- implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): WrappedArray[T] = {
- if (xs eq null) null
- else if (xs.length == 0) WrappedArray.empty[T]
- else new WrappedArray.ofRef[T](xs)
- }
-
- implicit def wrapIntArray(xs: Array[Int]): WrappedArray[Int] = if (xs ne null) new WrappedArray.ofInt(xs) else null
- implicit def wrapDoubleArray(xs: Array[Double]): WrappedArray[Double] = if (xs ne null) new WrappedArray.ofDouble(xs) else null
- implicit def wrapLongArray(xs: Array[Long]): WrappedArray[Long] = if (xs ne null) new WrappedArray.ofLong(xs) else null
- implicit def wrapFloatArray(xs: Array[Float]): WrappedArray[Float] = if (xs ne null) new WrappedArray.ofFloat(xs) else null
- implicit def wrapCharArray(xs: Array[Char]): WrappedArray[Char] = if (xs ne null) new WrappedArray.ofChar(xs) else null
- implicit def wrapByteArray(xs: Array[Byte]): WrappedArray[Byte] = if (xs ne null) new WrappedArray.ofByte(xs) else null
- implicit def wrapShortArray(xs: Array[Short]): WrappedArray[Short] = if (xs ne null) new WrappedArray.ofShort(xs) else null
- implicit def wrapBooleanArray(xs: Array[Boolean]): WrappedArray[Boolean] = if (xs ne null) new WrappedArray.ofBoolean(xs) else null
- implicit def wrapUnitArray(xs: Array[Unit]): WrappedArray[Unit] = if (xs ne null) new WrappedArray.ofUnit(xs) else null
-
- implicit def wrapString(s: String): WrappedString = if (s ne null) new WrappedString(s) else null
- implicit def unwrapString(ws: WrappedString): String = if (ws ne null) ws.self else null
-
- implicit def fallbackStringCanBuildFrom[T]: CanBuildFrom[String, T, immutable.IndexedSeq[T]] =
- new CanBuildFrom[String, T, immutable.IndexedSeq[T]] {
- def apply(from: String) = immutable.IndexedSeq.newBuilder[T]
- def apply() = immutable.IndexedSeq.newBuilder[T]
- }
-}
-
diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala
index 4b071166c7..905e925f57 100644
--- a/src/library/scala/Option.scala
+++ b/src/library/scala/Option.scala
@@ -128,7 +128,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
 * val textField = new JComponent(initialText.orNull,20)
* }}}
*/
- @inline final def orNull[A1 >: A](implicit ev: Null <:< A1): A1 = this getOrElse null
+ @inline final def orNull[A1 >: A](implicit ev: Null <:< A1): A1 = this getOrElse ev(null)
/** Returns a $some containing the result of applying $f to this $option's
* value if this $option is nonempty.
@@ -210,7 +210,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
}
/** Tests whether the option contains a given value as an element.
- *
+ *
* @param elem the element to test.
* @return `true` if the option has an element that is equal (as
* determined by `==`) to `elem`, `false` otherwise.
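The change to orNull above routes the default value through the `Null <:< A1` evidence instead of a bare `null` literal; observable behaviour is unchanged, as in this illustrative snippet:

    val present: String = Some("hi").orNull               // "hi"
    val absent:  String = (None: Option[String]).orNull   // null
    // Some(1).orNull  // does not compile: there is no evidence that Null <:< Int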
diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala
index 7ff5a33586..9ff648a05a 100644
--- a/src/library/scala/PartialFunction.scala
+++ b/src/library/scala/PartialFunction.scala
@@ -81,7 +81,7 @@ trait PartialFunction[-A, +B] extends (A => B) { self =>
override def andThen[C](k: B => C): PartialFunction[A, C] =
new AndThen[A, B, C] (this, k)
- /** Turns this partial function into an plain function returning an `Option` result.
+ /** Turns this partial function into a plain function returning an `Option` result.
* @see Function.unlift
* @return a function that takes an argument `x` to `Some(this(x))` if `this`
* is defined for `x`, and to `None` otherwise.
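The scaladoc touched here belongs to `lift`; a brief illustrative usage:

    val digitName: PartialFunction[Int, String] = {
      case 1 => "one"
      case 2 => "two"
    }
    val asOption: Int => Option[String] = digitName.lift
    asOption(1)  // Some("one")
    asOption(7)  // None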
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index 9a468489a2..5ba38600b6 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -346,19 +346,6 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
implicit def double2Double(x: Double) = java.lang.Double.valueOf(x)
implicit def boolean2Boolean(x: Boolean) = java.lang.Boolean.valueOf(x)
- // These next eight implicits exist solely to exclude AnyRef methods from the
- // eight implicits above so that primitives are not coerced to AnyRefs. They
- // only create such conflict for AnyRef methods, so the methods on the java.lang
- // boxed types are unambiguously reachable.
- implicit def byte2ByteConflict(x: Byte) = new AnyRef
- implicit def short2ShortConflict(x: Short) = new AnyRef
- implicit def char2CharacterConflict(x: Char) = new AnyRef
- implicit def int2IntegerConflict(x: Int) = new AnyRef
- implicit def long2LongConflict(x: Long) = new AnyRef
- implicit def float2FloatConflict(x: Float) = new AnyRef
- implicit def double2DoubleConflict(x: Double) = new AnyRef
- implicit def boolean2BooleanConflict(x: Boolean) = new AnyRef
-
implicit def Byte2byte(x: java.lang.Byte): Byte = x.byteValue
implicit def Short2short(x: java.lang.Short): Short = x.shortValue
implicit def Character2char(x: java.lang.Character): Char = x.charValue
@@ -446,3 +433,70 @@ private[scala] trait DeprecatedPredef {
@deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readf2(format: String) = ReadStdin.readf2(format)
@deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readf3(format: String) = ReadStdin.readf3(format)
}
+
+/** The `LowPriorityImplicits` class provides implicit values that
+* are valid in all Scala compilation units without explicit qualification,
+* but that are partially overridden by higher-priority conversions in object
+* `Predef`.
+*
+* @author Martin Odersky
+* @since 2.8
+*/
+// SI-7335 Parents of Predef are defined in the same compilation unit to avoid
+// cyclic reference errors compiling the standard library *without* a previously
+// compiled copy on the classpath.
+private[scala] abstract class LowPriorityImplicits {
+ import mutable.WrappedArray
+ import immutable.WrappedString
+
+ /** We prefer the java.lang.* boxed types to these wrappers in
+ * any potential conflicts. Conflicts do exist because the wrappers
+ * need to implement ScalaNumber in order to have a symmetric equals
+ * method, but that implies implementing java.lang.Number as well.
+ *
+ * Note - these are inlined because they are value classes, but
+ * the call to xxxWrapper is not eliminated even though it does nothing.
+ * Even inlined, every call site does a no-op retrieval of Predef's MODULE$
+ * because maybe loading Predef has side effects!
+ */
+ @inline implicit def byteWrapper(x: Byte) = new runtime.RichByte(x)
+ @inline implicit def shortWrapper(x: Short) = new runtime.RichShort(x)
+ @inline implicit def intWrapper(x: Int) = new runtime.RichInt(x)
+ @inline implicit def charWrapper(c: Char) = new runtime.RichChar(c)
+ @inline implicit def longWrapper(x: Long) = new runtime.RichLong(x)
+ @inline implicit def floatWrapper(x: Float) = new runtime.RichFloat(x)
+ @inline implicit def doubleWrapper(x: Double) = new runtime.RichDouble(x)
+ @inline implicit def booleanWrapper(x: Boolean) = new runtime.RichBoolean(x)
+
+ implicit def genericWrapArray[T](xs: Array[T]): WrappedArray[T] =
+ if (xs eq null) null
+ else WrappedArray.make(xs)
+
+ // Since the JVM thinks arrays are covariant, one 0-length Array[AnyRef]
+ // is as good as another for all T <: AnyRef. Instead of creating 100,000,000
+ // unique ones by way of this implicit, let's share one.
+ implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): WrappedArray[T] = {
+ if (xs eq null) null
+ else if (xs.length == 0) WrappedArray.empty[T]
+ else new WrappedArray.ofRef[T](xs)
+ }
+
+ implicit def wrapIntArray(xs: Array[Int]): WrappedArray[Int] = if (xs ne null) new WrappedArray.ofInt(xs) else null
+ implicit def wrapDoubleArray(xs: Array[Double]): WrappedArray[Double] = if (xs ne null) new WrappedArray.ofDouble(xs) else null
+ implicit def wrapLongArray(xs: Array[Long]): WrappedArray[Long] = if (xs ne null) new WrappedArray.ofLong(xs) else null
+ implicit def wrapFloatArray(xs: Array[Float]): WrappedArray[Float] = if (xs ne null) new WrappedArray.ofFloat(xs) else null
+ implicit def wrapCharArray(xs: Array[Char]): WrappedArray[Char] = if (xs ne null) new WrappedArray.ofChar(xs) else null
+ implicit def wrapByteArray(xs: Array[Byte]): WrappedArray[Byte] = if (xs ne null) new WrappedArray.ofByte(xs) else null
+ implicit def wrapShortArray(xs: Array[Short]): WrappedArray[Short] = if (xs ne null) new WrappedArray.ofShort(xs) else null
+ implicit def wrapBooleanArray(xs: Array[Boolean]): WrappedArray[Boolean] = if (xs ne null) new WrappedArray.ofBoolean(xs) else null
+ implicit def wrapUnitArray(xs: Array[Unit]): WrappedArray[Unit] = if (xs ne null) new WrappedArray.ofUnit(xs) else null
+
+ implicit def wrapString(s: String): WrappedString = if (s ne null) new WrappedString(s) else null
+ implicit def unwrapString(ws: WrappedString): String = if (ws ne null) ws.self else null
+
+ implicit def fallbackStringCanBuildFrom[T]: CanBuildFrom[String, T, immutable.IndexedSeq[T]] =
+ new CanBuildFrom[String, T, immutable.IndexedSeq[T]] {
+ def apply(from: String) = immutable.IndexedSeq.newBuilder[T]
+ def apply() = immutable.IndexedSeq.newBuilder[T]
+ }
+}
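A quick illustration of the priority layering that motivates this placement (snippet assumed, not part of the patch): the low-priority wrappers and the String fallback apply only when nothing in Predef itself does.

    val asSeq: Seq[Int]        = Array(1, 2, 3)       // wrapIntArray, low priority
    val upper: String          = "abc".map(_.toUpper) // Predef's StringCanBuildFrom wins
    val codes: IndexedSeq[Int] = "abc".map(_.toInt)   // fallbackStringCanBuildFrom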
diff --git a/src/library/scala/Short.scala b/src/library/scala/Short.scala
index cdd298e542..4f91c51550 100644
--- a/src/library/scala/Short.scala
+++ b/src/library/scala/Short.scala
@@ -606,6 +606,8 @@ object Short extends AnyValCompanion {
/** Transform a value type into a boxed reference type.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToShort`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the Short to be boxed
* @return a java.lang.Short offering `x` as its underlying value.
*/
@@ -615,6 +617,8 @@ object Short extends AnyValCompanion {
* method is not typesafe: it accepts any Object, but will throw
* an exception if the argument is not a java.lang.Short.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToShort`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the java.lang.Short to be unboxed.
* @throws ClassCastException if the argument is not a java.lang.Short
* @return the Short resulting from calling shortValue() on `x`
diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala
index 1b5fd6c8d3..42fb2f36e8 100644
--- a/src/library/scala/StringContext.scala
+++ b/src/library/scala/StringContext.scala
@@ -162,7 +162,7 @@ case class StringContext(parts: String*) {
*/
// The implementation is hardwired to `scala.tools.reflect.MacroImplementations.macro_StringInterpolation_f`
// Using the mechanism implemented in `scala.tools.reflect.FastTrack`
- def f(args: Any*): String = ??? // macro
+ def f(args: Any*): String = macro ???
}
object StringContext {
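Rewriting the stub as `macro ???` does not change how the interpolator is used; for example (illustrative):

    val name   = "James"
    val height = 1.9d
    f"$name%s is $height%2.2f meters tall"  // "James is 1.90 meters tall"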
diff --git a/src/library/scala/Unit.scala b/src/library/scala/Unit.scala
index 01e592ec3c..0e59a184d1 100644
--- a/src/library/scala/Unit.scala
+++ b/src/library/scala/Unit.scala
@@ -10,6 +10,9 @@
package scala
+import scala.language.implicitConversions
+
+
/** `Unit` is a subtype of [[scala.AnyVal]]. There is only one value of type
* `Unit`, `()`, and it is not represented by any object in the underlying
* runtime system. A method with return type `Unit` is analogous to a Java
diff --git a/src/library/scala/annotation/cloneable.scala b/src/library/scala/annotation/cloneable.scala
deleted file mode 100644
index 4fb62b698f..0000000000
--- a/src/library/scala/annotation/cloneable.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.annotation
-
-/**
- * An annotation that designates the class to which it is applied as cloneable
- */
-@deprecated("instead of `@cloneable class C`, use `class C extends Cloneable`", "2.10.0")
-class cloneable extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/collection/BitSet.scala b/src/library/scala/collection/BitSet.scala
index 6985563da2..e255e96140 100644
--- a/src/library/scala/collection/BitSet.scala
+++ b/src/library/scala/collection/BitSet.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala
index 034ed2b24a..f11f3757a6 100644
--- a/src/library/scala/collection/BitSetLike.scala
+++ b/src/library/scala/collection/BitSetLike.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
import BitSetLike._
import mutable.StringBuilder
@@ -106,8 +107,8 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
private var current = start
private val end = nwords * WordLength
def hasNext: Boolean = {
- while (current < end && !self.contains(current)) current += 1
- current < end
+ while (current != end && !self.contains(current)) current += 1
+ current != end
}
def next(): Int =
if (hasNext) { val r = current; current += 1; r }
@@ -117,7 +118,10 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
override def foreach[B](f: Int => B) {
for (i <- 0 until nwords) {
val w = word(i)
- for (j <- i * WordLength until (i + 1) * WordLength) {
+ /* NOTE: `until` instead of `to` will not work here because
+ the maximum value of `(i + 1) * WordLength` could be
+ `Int.MaxValue + 1` (i.e. `Int.MinValue`). */
+ for (j <- i * WordLength to (i + 1) * WordLength - 1) {
if ((w & (1L << j)) != 0L) f(j)
}
}
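The NOTE above can be checked directly: at the largest word index of a full-size bit set the exclusive bound overflows Int, while the inclusive form wraps back to exactly Int.MaxValue (illustrative REPL arithmetic, not part of the patch).

    val WordLength = 64
    val i = Int.MaxValue >> 6                            // 33554431, the last possible word index
    (i + 1) * WordLength                                 // -2147483648: Int.MaxValue + 1, wrapped
    (i + 1) * WordLength - 1                             // 2147483647: wraps back to Int.MaxValue
    (i * WordLength to (i + 1) * WordLength - 1).size    // 64, so the `to` form stays correct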
@@ -197,11 +201,15 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
override def addString(sb: StringBuilder, start: String, sep: String, end: String) = {
sb append start
var pre = ""
- for (i <- 0 until nwords * WordLength)
+ val max = nwords * WordLength
+ var i = 0
+ while(i != max) {
if (contains(i)) {
sb append pre append i
pre = sep
}
+ i += 1
+ }
sb append end
}
@@ -212,6 +220,7 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
object BitSetLike {
private[collection] val LogWL = 6
private val WordLength = 64
+ private[collection] val MaxSize = (Int.MaxValue >> LogWL) + 1
private[collection] def updateArray(elems: Array[Long], idx: Int, w: Long): Array[Long] = {
var len = elems.length
diff --git a/src/library/scala/collection/BufferedIterator.scala b/src/library/scala/collection/BufferedIterator.scala
index 741bca4e46..e6e97d584c 100644
--- a/src/library/scala/collection/BufferedIterator.scala
+++ b/src/library/scala/collection/BufferedIterator.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
/** Buffered iterators are iterators which provide a method `head`
* that inspects the next element without discarding it.
diff --git a/src/library/scala/collection/CustomParallelizable.scala b/src/library/scala/collection/CustomParallelizable.scala
index 53fe32b89f..cbeb28d643 100644
--- a/src/library/scala/collection/CustomParallelizable.scala
+++ b/src/library/scala/collection/CustomParallelizable.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import parallel.Combiner
diff --git a/src/library/scala/collection/DefaultMap.scala b/src/library/scala/collection/DefaultMap.scala
index bbd6b2c2fc..8afda7cfcf 100644
--- a/src/library/scala/collection/DefaultMap.scala
+++ b/src/library/scala/collection/DefaultMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
/** A default map which implements the `+` and `-` methods of maps.
*
diff --git a/src/library/scala/collection/GenIterable.scala b/src/library/scala/collection/GenIterable.scala
index b4e7a14ade..6fd4158726 100644
--- a/src/library/scala/collection/GenIterable.scala
+++ b/src/library/scala/collection/GenIterable.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/GenIterableLike.scala b/src/library/scala/collection/GenIterableLike.scala
index ceb97707e1..1dbb54ddc7 100644
--- a/src/library/scala/collection/GenIterableLike.scala
+++ b/src/library/scala/collection/GenIterableLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic.{ CanBuildFrom => CBF }
diff --git a/src/library/scala/collection/GenIterableView.scala b/src/library/scala/collection/GenIterableView.scala
index 5ab48efdf3..cd052ddf79 100644
--- a/src/library/scala/collection/GenIterableView.scala
+++ b/src/library/scala/collection/GenIterableView.scala
@@ -6,6 +6,7 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
trait GenIterableView[+A, +Coll] extends GenIterableViewLike[A, Coll, GenIterableView[A, Coll]] { }
diff --git a/src/library/scala/collection/GenIterableViewLike.scala b/src/library/scala/collection/GenIterableViewLike.scala
index e8d264cdd4..b519e99ae5 100644
--- a/src/library/scala/collection/GenIterableViewLike.scala
+++ b/src/library/scala/collection/GenIterableViewLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
trait GenIterableViewLike[+A,
+Coll,
diff --git a/src/library/scala/collection/GenMap.scala b/src/library/scala/collection/GenMap.scala
index f7b2ae4d70..3d7427981d 100644
--- a/src/library/scala/collection/GenMap.scala
+++ b/src/library/scala/collection/GenMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala
index 367377a59c..4e7d359251 100644
--- a/src/library/scala/collection/GenMapLike.scala
+++ b/src/library/scala/collection/GenMapLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
/** A trait for all maps upon which operations may be
* implemented in parallel.
diff --git a/src/library/scala/collection/GenSeq.scala b/src/library/scala/collection/GenSeq.scala
index 4c5488d7e2..480562cab5 100644
--- a/src/library/scala/collection/GenSeq.scala
+++ b/src/library/scala/collection/GenSeq.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala
index 78d63348c0..27b75c0491 100644
--- a/src/library/scala/collection/GenSeqLike.scala
+++ b/src/library/scala/collection/GenSeqLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/GenSeqView.scala b/src/library/scala/collection/GenSeqView.scala
index 423f8e305e..0a214832ad 100644
--- a/src/library/scala/collection/GenSeqView.scala
+++ b/src/library/scala/collection/GenSeqView.scala
@@ -6,6 +6,7 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
trait GenSeqView[+A, +Coll] extends GenSeqViewLike[A, Coll, GenSeqView[A, Coll]] { }
diff --git a/src/library/scala/collection/GenSeqViewLike.scala b/src/library/scala/collection/GenSeqViewLike.scala
index 51600218ad..d3af953f72 100644
--- a/src/library/scala/collection/GenSeqViewLike.scala
+++ b/src/library/scala/collection/GenSeqViewLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
diff --git a/src/library/scala/collection/GenSet.scala b/src/library/scala/collection/GenSet.scala
index 832177b128..2467860095 100644
--- a/src/library/scala/collection/GenSet.scala
+++ b/src/library/scala/collection/GenSet.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/GenSetLike.scala b/src/library/scala/collection/GenSetLike.scala
index f22a7c8f09..c5355e58ec 100644
--- a/src/library/scala/collection/GenSetLike.scala
+++ b/src/library/scala/collection/GenSetLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
/** A template trait for sets which may possibly
diff --git a/src/library/scala/collection/GenTraversable.scala b/src/library/scala/collection/GenTraversable.scala
index 3db2dd77a9..b700f49cf6 100644
--- a/src/library/scala/collection/GenTraversable.scala
+++ b/src/library/scala/collection/GenTraversable.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index 1080c54325..f4aa063d8a 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index a05ee0fb54..d966c7324b 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import scala.reflect.ClassTag
import scala.collection.generic.CanBuildFrom
diff --git a/src/library/scala/collection/GenTraversableView.scala b/src/library/scala/collection/GenTraversableView.scala
index 1d98eff8c1..7d9a6e9777 100644
--- a/src/library/scala/collection/GenTraversableView.scala
+++ b/src/library/scala/collection/GenTraversableView.scala
@@ -6,6 +6,7 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
trait GenTraversableView[+A, +Coll] extends GenTraversableViewLike[A, Coll, GenTraversableView[A, Coll]] { }
diff --git a/src/library/scala/collection/GenTraversableViewLike.scala b/src/library/scala/collection/GenTraversableViewLike.scala
index 8c9607663b..dde18a7a32 100644
--- a/src/library/scala/collection/GenTraversableViewLike.scala
+++ b/src/library/scala/collection/GenTraversableViewLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/IndexedSeq.scala b/src/library/scala/collection/IndexedSeq.scala
index 0b6e640537..1a33026101 100644
--- a/src/library/scala/collection/IndexedSeq.scala
+++ b/src/library/scala/collection/IndexedSeq.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
import mutable.Builder
diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala
index 473202a8eb..18c9175ee1 100644
--- a/src/library/scala/collection/IndexedSeqLike.scala
+++ b/src/library/scala/collection/IndexedSeqLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import mutable.ArrayBuffer
import scala.annotation.tailrec
diff --git a/src/library/scala/collection/Iterable.scala b/src/library/scala/collection/Iterable.scala
index 09c9ce122c..973efc447e 100644
--- a/src/library/scala/collection/Iterable.scala
+++ b/src/library/scala/collection/Iterable.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
import generic._
import mutable.Builder
diff --git a/src/library/scala/collection/IterableProxy.scala b/src/library/scala/collection/IterableProxy.scala
index ddb2502965..3a0e2ab115 100644
--- a/src/library/scala/collection/IterableProxy.scala
+++ b/src/library/scala/collection/IterableProxy.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
/** This trait implements a proxy for iterable objects. It forwards all calls
* to a different iterable object.
diff --git a/src/library/scala/collection/IterableProxyLike.scala b/src/library/scala/collection/IterableProxyLike.scala
index 6968a54399..9b8f6f3742 100644
--- a/src/library/scala/collection/IterableProxyLike.scala
+++ b/src/library/scala/collection/IterableProxyLike.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
import generic._
import mutable.Buffer
diff --git a/src/library/scala/collection/IterableView.scala b/src/library/scala/collection/IterableView.scala
index 985556e0d4..1d631739aa 100644
--- a/src/library/scala/collection/IterableView.scala
+++ b/src/library/scala/collection/IterableView.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
import generic._
import TraversableView.NoBuilder
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index b195ae4bc7..236bfd154c 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
import immutable.Stream
diff --git a/src/library/scala/collection/JavaConversions.scala b/src/library/scala/collection/JavaConversions.scala
index 3cb7edacd6..7bfa60771f 100644
--- a/src/library/scala/collection/JavaConversions.scala
+++ b/src/library/scala/collection/JavaConversions.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import convert._
@@ -48,47 +49,4 @@ import convert._
* @author Martin Odersky
* @since 2.8
*/
-object JavaConversions extends WrapAsScala with WrapAsJava {
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type ConcurrentMapWrapper[A, B] = Wrappers.ConcurrentMapWrapper[A, B]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type DictionaryWrapper[A, B] = Wrappers.DictionaryWrapper[A, B]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type IterableWrapper[A] = Wrappers.IterableWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type IteratorWrapper[A] = Wrappers.IteratorWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JCollectionWrapper[A] = Wrappers.JCollectionWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JConcurrentMapWrapper[A, B] = Wrappers.JConcurrentMapWrapper[A, B]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JDictionaryWrapper[A, B] = Wrappers.JDictionaryWrapper[A, B]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JEnumerationWrapper[A] = Wrappers.JEnumerationWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JIterableWrapper[A] = Wrappers.JIterableWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JIteratorWrapper[A] = Wrappers.JIteratorWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JListWrapper[A] = Wrappers.JListWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JMapWrapper[A, B] = Wrappers.JMapWrapper[A, B]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JPropertiesWrapper = Wrappers.JPropertiesWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JSetWrapper[A] = Wrappers.JSetWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MapWrapper[A, B] = Wrappers.MapWrapper[A, B]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableBufferWrapper[A] = Wrappers.MutableBufferWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableMapWrapper[A, B] = Wrappers.MutableMapWrapper[A, B]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableSeqWrapper[A] = Wrappers.MutableSeqWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableSetWrapper[A] = Wrappers.MutableSetWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type SeqWrapper[A] = Wrappers.SeqWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type SetWrapper[A] = Wrappers.SetWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type ToIteratorWrapper[A] = Wrappers.ToIteratorWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val DictionaryWrapper = Wrappers.DictionaryWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val IterableWrapper = Wrappers.IterableWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val IteratorWrapper = Wrappers.IteratorWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JCollectionWrapper = Wrappers.JCollectionWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JConcurrentMapWrapper = Wrappers.JConcurrentMapWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JDictionaryWrapper = Wrappers.JDictionaryWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JEnumerationWrapper = Wrappers.JEnumerationWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JIterableWrapper = Wrappers.JIterableWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JIteratorWrapper = Wrappers.JIteratorWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JListWrapper = Wrappers.JListWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JMapWrapper = Wrappers.JMapWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JPropertiesWrapper = Wrappers.JPropertiesWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JSetWrapper = Wrappers.JSetWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableBufferWrapper = Wrappers.MutableBufferWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableMapWrapper = Wrappers.MutableMapWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableSeqWrapper = Wrappers.MutableSeqWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableSetWrapper = Wrappers.MutableSetWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val SeqWrapper = Wrappers.SeqWrapper
-}
-
-
+object JavaConversions extends WrapAsScala with WrapAsJava
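For illustration (not part of the patch): with the deprecated wrapper aliases gone, JavaConversions still mixes in WrapAsScala and WrapAsJava, so the implicit views themselves are unchanged. A minimal sketch against the 2.10-era API:

    import scala.collection.JavaConversions._
    import java.{ util => ju }

    val jlist: ju.List[String] = ju.Arrays.asList("a", "b", "c")
    val upper = jlist.map(_.toUpperCase)              // implicit view ju.List => mutable.Buffer

    val jmap: ju.Map[String, Int] = Map("one" -> 1)   // implicit view Map => ju.Map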
diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala
index 7700d90560..a4fa58b13c 100755
--- a/src/library/scala/collection/JavaConverters.scala
+++ b/src/library/scala/collection/JavaConverters.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import convert._
@@ -54,15 +55,4 @@ import convert._
* @author Martin Odersky
* @since 2.8.1
*/
-object JavaConverters extends DecorateAsJava with DecorateAsScala {
- @deprecated("Don't access these decorators directly.", "2.10.0")
- type AsJava[A] = Decorators.AsJava[A]
- @deprecated("Don't access these decorators directly.", "2.10.0")
- type AsScala[A] = Decorators.AsScala[A]
- @deprecated("Don't access these decorators directly.", "2.10.0")
- type AsJavaCollection[A] = Decorators.AsJavaCollection[A]
- @deprecated("Don't access these decorators directly.", "2.10.0")
- type AsJavaEnumeration[A] = Decorators.AsJavaEnumeration[A]
- @deprecated("Don't access these decorators directly.", "2.10.0")
- type AsJavaDictionary[A, B] = Decorators.AsJavaDictionary[A, B]
-}
+object JavaConverters extends DecorateAsJava with DecorateAsScala
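Similarly for JavaConverters: the user-facing asScala/asJava decorators come from DecorateAsJava and DecorateAsScala, so only the deprecated Decorators type aliases disappear. A quick sketch:

    import scala.collection.JavaConverters._

    val jmap = new java.util.HashMap[String, Int]()
    jmap.put("one", 1)

    val smap = jmap.asScala            // scala.collection.mutable.Map[String, Int]
    val jlist = List(1, 2, 3).asJava   // java.util.List[Int]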
diff --git a/src/library/scala/collection/LinearSeq.scala b/src/library/scala/collection/LinearSeq.scala
index e52a1936fe..1e4975a0a7 100644
--- a/src/library/scala/collection/LinearSeq.scala
+++ b/src/library/scala/collection/LinearSeq.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
import generic._
import mutable.Builder
diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala
index a4bb194f8a..ff7985bf0d 100644
--- a/src/library/scala/collection/LinearSeqLike.scala
+++ b/src/library/scala/collection/LinearSeqLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import immutable.List
import scala.annotation.tailrec
diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala
index 9cf37981f4..21bfedf5de 100755
--- a/src/library/scala/collection/LinearSeqOptimized.scala
+++ b/src/library/scala/collection/LinearSeqOptimized.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import mutable.ListBuffer
import immutable.List
diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala
index 18ad20a855..f37c0993d4 100644
--- a/src/library/scala/collection/Map.scala
+++ b/src/library/scala/collection/Map.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index cc0129202f..5ec7d5c615 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -6,8 +6,8 @@
** |/ **
\* */
-
-package scala.collection
+package scala
+package collection
import generic._
import mutable.{ Builder, MapBuilder }
diff --git a/src/library/scala/collection/MapProxy.scala b/src/library/scala/collection/MapProxy.scala
index e85d306e6f..941c1f5a4a 100644
--- a/src/library/scala/collection/MapProxy.scala
+++ b/src/library/scala/collection/MapProxy.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
/** This is a simple wrapper class for [[scala.collection.Map]].
* It is most useful for assembling customized map abstractions
diff --git a/src/library/scala/collection/MapProxyLike.scala b/src/library/scala/collection/MapProxyLike.scala
index ad09f7b970..44481131aa 100644
--- a/src/library/scala/collection/MapProxyLike.scala
+++ b/src/library/scala/collection/MapProxyLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
// Methods could be printed by cat MapLike.scala | egrep '^ (override )?def'
diff --git a/src/library/scala/collection/Parallel.scala b/src/library/scala/collection/Parallel.scala
index 6731f74bea..174e3ab75e 100644
--- a/src/library/scala/collection/Parallel.scala
+++ b/src/library/scala/collection/Parallel.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
/** A marker trait for collections which have their operations parallelised.
*
diff --git a/src/library/scala/collection/Parallelizable.scala b/src/library/scala/collection/Parallelizable.scala
index 626dfa4032..b737752458 100644
--- a/src/library/scala/collection/Parallelizable.scala
+++ b/src/library/scala/collection/Parallelizable.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import parallel.Combiner
diff --git a/src/library/scala/collection/Searching.scala b/src/library/scala/collection/Searching.scala
index 03eb4283ad..fec4bbf502 100644
--- a/src/library/scala/collection/Searching.scala
+++ b/src/library/scala/collection/Searching.scala
@@ -6,8 +6,10 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
+import scala.language.implicitConversions
import scala.annotation.tailrec
import scala.collection.generic.IsSeqLike
import scala.math.Ordering
@@ -50,7 +52,7 @@ object Searching {
* sequence, or the `InsertionPoint` where the element would be inserted if
* the element is not in the sequence.
*/
- final def search[B >: A](elem: B)(implicit ord: Ordering[B]): SearchResult =
+ final def search[B >: A](elem: B)(implicit ord: Ordering[B]): SearchResult =
coll match {
case _: IndexedSeq[A] => binarySearch(elem, -1, coll.length)(ord)
case _ => linearSearch(coll.view, elem, 0)(ord)
@@ -77,7 +79,7 @@ object Searching {
* the element is not in the sequence.
*/
final def search[B >: A](elem: B, from: Int, to: Int)
- (implicit ord: Ordering[B]): SearchResult =
+ (implicit ord: Ordering[B]): SearchResult =
coll match {
case _: IndexedSeq[A] => binarySearch(elem, from-1, to)(ord)
case _ => linearSearch(coll.view(from, to), elem, from)(ord)
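The changes above add the implicitConversions import and trim trailing whitespace; the Searching API itself is unchanged. For reference, a usage sketch (the input must already be sorted, per the scaladoc): search picks a binary search for an IndexedSeq and a linear scan otherwise.

    import scala.collection.Searching._

    val xs = Vector(1, 3, 5, 7, 9)
    xs.search(5)              // Found(2): binary search, IndexedSeq
    xs.search(4)              // InsertionPoint(2)
    List(1, 3, 5).search(5)   // Found(2): linear search fallback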
diff --git a/src/library/scala/collection/Seq.scala b/src/library/scala/collection/Seq.scala
index 33e66c0874..b21acdd9b7 100644
--- a/src/library/scala/collection/Seq.scala
+++ b/src/library/scala/collection/Seq.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
import mutable.Builder
diff --git a/src/library/scala/collection/SeqExtractors.scala b/src/library/scala/collection/SeqExtractors.scala
index 20ea7f54b7..2398313c77 100644
--- a/src/library/scala/collection/SeqExtractors.scala
+++ b/src/library/scala/collection/SeqExtractors.scala
@@ -1,4 +1,5 @@
-package scala.collection
+package scala
+package collection
/** An extractor used to head/tail deconstruct sequences. */
object +: {
diff --git a/src/library/scala/collection/SeqProxy.scala b/src/library/scala/collection/SeqProxy.scala
index 1f8dc4aad1..9c5424a3a6 100644
--- a/src/library/scala/collection/SeqProxy.scala
+++ b/src/library/scala/collection/SeqProxy.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
/** This trait implements a proxy for sequence objects. It forwards
* all calls to a different sequence object.
diff --git a/src/library/scala/collection/SeqProxyLike.scala b/src/library/scala/collection/SeqProxyLike.scala
index ee88ee3da3..161b23d3f8 100644
--- a/src/library/scala/collection/SeqProxyLike.scala
+++ b/src/library/scala/collection/SeqProxyLike.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/SeqView.scala b/src/library/scala/collection/SeqView.scala
index c26124cf6f..40dfd50212 100644
--- a/src/library/scala/collection/SeqView.scala
+++ b/src/library/scala/collection/SeqView.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
import generic._
import TraversableView.NoBuilder
diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala
index 27536791a2..1194cd7199 100644
--- a/src/library/scala/collection/SeqViewLike.scala
+++ b/src/library/scala/collection/SeqViewLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
import Seq.fill
diff --git a/src/library/scala/collection/Set.scala b/src/library/scala/collection/Set.scala
index c304323d28..46d5dfa056 100644
--- a/src/library/scala/collection/Set.scala
+++ b/src/library/scala/collection/Set.scala
@@ -6,8 +6,8 @@
** |/ **
\* */
-
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index 9fd24317f2..0c5c7e0b29 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -6,8 +6,8 @@
** |/ **
\* */
-
-package scala.collection
+package scala
+package collection
import generic._
import mutable.{ Builder, SetBuilder }
diff --git a/src/library/scala/collection/SetProxy.scala b/src/library/scala/collection/SetProxy.scala
index 08075a7121..f9f38f148a 100644
--- a/src/library/scala/collection/SetProxy.scala
+++ b/src/library/scala/collection/SetProxy.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
/** This is a simple wrapper class for [[scala.collection.Set]].
* It is most useful for assembling customized set abstractions
diff --git a/src/library/scala/collection/SetProxyLike.scala b/src/library/scala/collection/SetProxyLike.scala
index 265d1c4806..ac3d34dbab 100644
--- a/src/library/scala/collection/SetProxyLike.scala
+++ b/src/library/scala/collection/SetProxyLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
// Methods could be printed by cat SetLike.scala | egrep '^ (override )?def'
diff --git a/src/library/scala/collection/SortedMap.scala b/src/library/scala/collection/SortedMap.scala
index 86fcfac94d..0705a1e9e0 100644
--- a/src/library/scala/collection/SortedMap.scala
+++ b/src/library/scala/collection/SortedMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
import mutable.Builder
diff --git a/src/library/scala/collection/SortedMapLike.scala b/src/library/scala/collection/SortedMapLike.scala
index 934ed831f5..3fc8b0dadc 100644
--- a/src/library/scala/collection/SortedMapLike.scala
+++ b/src/library/scala/collection/SortedMapLike.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/SortedSet.scala b/src/library/scala/collection/SortedSet.scala
index 2d5d4fb55e..43189d2e8c 100644
--- a/src/library/scala/collection/SortedSet.scala
+++ b/src/library/scala/collection/SortedSet.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
import generic._
/** A sorted set.
diff --git a/src/library/scala/collection/SortedSetLike.scala b/src/library/scala/collection/SortedSetLike.scala
index 6d1d1ac111..eb2ac38c59 100644
--- a/src/library/scala/collection/SortedSetLike.scala
+++ b/src/library/scala/collection/SortedSetLike.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
import generic._
/** A template for sets which are sorted.
diff --git a/src/library/scala/collection/Traversable.scala b/src/library/scala/collection/Traversable.scala
index 4ca2095f4c..61d9a42f04 100644
--- a/src/library/scala/collection/Traversable.scala
+++ b/src/library/scala/collection/Traversable.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
import mutable.Builder
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index fdbc5e9857..00f4de82cd 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
import mutable.{ Builder }
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index fcca2da437..526c36dda7 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import mutable.{ Buffer, Builder, ListBuffer, ArrayBuffer }
import generic.CanBuildFrom
diff --git a/src/library/scala/collection/TraversableProxy.scala b/src/library/scala/collection/TraversableProxy.scala
index 568298a9d9..65936da0e4 100644
--- a/src/library/scala/collection/TraversableProxy.scala
+++ b/src/library/scala/collection/TraversableProxy.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
// Methods could be printed by cat TraversableLike.scala | egrep '^ (override )?def'
diff --git a/src/library/scala/collection/TraversableProxyLike.scala b/src/library/scala/collection/TraversableProxyLike.scala
index 8896cd1b0f..77d651c5f2 100644
--- a/src/library/scala/collection/TraversableProxyLike.scala
+++ b/src/library/scala/collection/TraversableProxyLike.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
import generic._
import mutable.{Buffer, StringBuilder}
diff --git a/src/library/scala/collection/TraversableView.scala b/src/library/scala/collection/TraversableView.scala
index af219084b8..bbb5bde464 100644
--- a/src/library/scala/collection/TraversableView.scala
+++ b/src/library/scala/collection/TraversableView.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
import mutable.Builder
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index 36f6210ef4..c507e000ee 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
import mutable.{ Builder, ArrayBuffer }
diff --git a/src/library/scala/collection/concurrent/BasicNode.java b/src/library/scala/collection/concurrent/BasicNode.java
index a65d84bbf8..97b8870036 100644
--- a/src/library/scala/collection/concurrent/BasicNode.java
+++ b/src/library/scala/collection/concurrent/BasicNode.java
@@ -8,13 +8,8 @@
package scala.collection.concurrent;
-
-
-
-
-
public abstract class BasicNode {
-
+
public abstract String string(int lev);
-
-} \ No newline at end of file
+
+}
diff --git a/src/library/scala/collection/concurrent/CNodeBase.java b/src/library/scala/collection/concurrent/CNodeBase.java
index d6eb29c8df..2fce971b2b 100644
--- a/src/library/scala/collection/concurrent/CNodeBase.java
+++ b/src/library/scala/collection/concurrent/CNodeBase.java
@@ -8,28 +8,26 @@
package scala.collection.concurrent;
-
-
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
+abstract class CNodeBase<K, V> extends MainNode<K, V> {
+ @SuppressWarnings("rawtypes")
+ public static final AtomicIntegerFieldUpdater<CNodeBase> updater =
+ AtomicIntegerFieldUpdater.newUpdater(CNodeBase.class, "csize");
-abstract class CNodeBase<K, V> extends MainNode<K, V> {
-
- public static final AtomicIntegerFieldUpdater<CNodeBase> updater = AtomicIntegerFieldUpdater.newUpdater(CNodeBase.class, "csize");
-
public volatile int csize = -1;
-
+
public boolean CAS_SIZE(int oldval, int nval) {
return updater.compareAndSet(this, oldval, nval);
}
-
+
public void WRITE_SIZE(int nval) {
updater.set(this, nval);
}
-
+
public int READ_SIZE() {
return updater.get(this);
}
-
+
} \ No newline at end of file
diff --git a/src/library/scala/collection/concurrent/Gen.java b/src/library/scala/collection/concurrent/Gen.java
index 331eeca16b..6019884683 100644
--- a/src/library/scala/collection/concurrent/Gen.java
+++ b/src/library/scala/collection/concurrent/Gen.java
@@ -8,11 +8,4 @@
package scala.collection.concurrent;
-
-
-
-
-
-final class Gen {
-}
-
+final class Gen {}
diff --git a/src/library/scala/collection/concurrent/INodeBase.java b/src/library/scala/collection/concurrent/INodeBase.java
index cbe404edf6..2f2d203287 100644
--- a/src/library/scala/collection/concurrent/INodeBase.java
+++ b/src/library/scala/collection/concurrent/INodeBase.java
@@ -8,28 +8,26 @@
package scala.collection.concurrent;
-
-
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
+abstract class INodeBase<K, V> extends BasicNode {
+ @SuppressWarnings("rawtypes")
+ public static final AtomicReferenceFieldUpdater<INodeBase, MainNode> updater =
+ AtomicReferenceFieldUpdater.newUpdater(INodeBase.class, MainNode.class, "mainnode");
-abstract class INodeBase<K, V> extends BasicNode {
-
- public static final AtomicReferenceFieldUpdater<INodeBase, MainNode> updater = AtomicReferenceFieldUpdater.newUpdater(INodeBase.class, MainNode.class, "mainnode");
-
public static final Object RESTART = new Object();
-
+
public volatile MainNode<K, V> mainnode = null;
-
+
public final Gen gen;
-
+
public INodeBase(Gen generation) {
gen = generation;
}
-
+
public BasicNode prev() {
return null;
}
-
+
} \ No newline at end of file
diff --git a/src/library/scala/collection/concurrent/MainNode.java b/src/library/scala/collection/concurrent/MainNode.java
index ffe535742e..adb9b59a3d 100644
--- a/src/library/scala/collection/concurrent/MainNode.java
+++ b/src/library/scala/collection/concurrent/MainNode.java
@@ -8,33 +8,32 @@
package scala.collection.concurrent;
-
-
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
+abstract class MainNode<K, V> extends BasicNode {
+ @SuppressWarnings("rawtypes")
+ public static final AtomicReferenceFieldUpdater<MainNode, MainNode> updater =
+ AtomicReferenceFieldUpdater.newUpdater(MainNode.class, MainNode.class, "prev");
-abstract class MainNode<K, V> extends BasicNode {
-
- public static final AtomicReferenceFieldUpdater<MainNode, MainNode> updater = AtomicReferenceFieldUpdater.newUpdater(MainNode.class, MainNode.class, "prev");
-
public volatile MainNode<K, V> prev = null;
-
+
public abstract int cachedSize(Object ct);
-
+
public boolean CAS_PREV(MainNode<K, V> oldval, MainNode<K, V> nval) {
return updater.compareAndSet(this, oldval, nval);
}
-
+
public void WRITE_PREV(MainNode<K, V> nval) {
updater.set(this, nval);
}
-
+
// do we need this? unclear in the javadocs...
// apparently not - volatile reads are supposed to be safe
// irregardless of whether there are concurrent ARFU updates
+ @Deprecated @SuppressWarnings("unchecked")
public MainNode<K, V> READ_PREV() {
return updater.get(this);
}
-
+
} \ No newline at end of file
diff --git a/src/library/scala/collection/concurrent/Map.scala b/src/library/scala/collection/concurrent/Map.scala
index b2276ce5aa..02e5dd01f5 100644
--- a/src/library/scala/collection/concurrent/Map.scala
+++ b/src/library/scala/collection/concurrent/Map.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.concurrent
+package scala
+package collection.concurrent
/** A template trait for mutable maps that allow concurrent access.
*
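For context, a minimal sketch of the atomic operations this trait specifies, shown with TrieMap, the lock-free implementation in this package:

    import scala.collection.concurrent.TrieMap

    val m = TrieMap.empty[String, Int]
    m.putIfAbsent("a", 1)   // None: inserted
    m.putIfAbsent("a", 2)   // Some(1): already present, left untouched
    m.replace("a", 1, 10)   // true: compare-and-set style update
    m.remove("a", 10)       // true: removed only because it was bound to 10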
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
index 4eeacd7377..6632f30e51 100644
--- a/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package concurrent
import java.util.concurrent.atomic._
@@ -428,10 +429,10 @@ extends MainNode[K, V] with KVNode[K, V] {
}
-private[collection] final class LNode[K, V](final val listmap: ImmutableListMap[K, V])
+private[collection] final class LNode[K, V](final val listmap: immutable.ListMap[K, V])
extends MainNode[K, V] {
- def this(k: K, v: V) = this(ImmutableListMap(k -> v))
- def this(k1: K, v1: V, k2: K, v2: V) = this(ImmutableListMap(k1 -> v1, k2 -> v2))
+ def this(k: K, v: V) = this(immutable.ListMap(k -> v))
+ def this(k1: K, v1: V, k2: K, v2: V) = this(immutable.ListMap(k1 -> v1, k2 -> v2))
def inserted(k: K, v: V) = new LNode(listmap + ((k, v)))
def removed(k: K, ct: TrieMap[K, V]): MainNode[K, V] = {
val updmap = listmap - k
@@ -1011,8 +1012,11 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct:
*/
protected def subdivide(): Seq[Iterator[(K, V)]] = if (subiter ne null) {
// the case where an LNode is being iterated
- val it = subiter
- subiter = null
+ val it = newIterator(level + 1, ct, _mustInit = false)
+ it.depth = -1
+ it.subiter = this.subiter
+ it.current = null
+ this.subiter = null
advance()
this.level += 1
Seq(it, this)
diff --git a/src/library/scala/collection/convert/DecorateAsJava.scala b/src/library/scala/collection/convert/DecorateAsJava.scala
index 7447c1bbaf..498bdc5943 100644
--- a/src/library/scala/collection/convert/DecorateAsJava.scala
+++ b/src/library/scala/collection/convert/DecorateAsJava.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
diff --git a/src/library/scala/collection/convert/DecorateAsScala.scala b/src/library/scala/collection/convert/DecorateAsScala.scala
index 90e8dded6e..c724831c54 100644
--- a/src/library/scala/collection/convert/DecorateAsScala.scala
+++ b/src/library/scala/collection/convert/DecorateAsScala.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
diff --git a/src/library/scala/collection/convert/Decorators.scala b/src/library/scala/collection/convert/Decorators.scala
index f004e4712b..d232fa04e1 100644
--- a/src/library/scala/collection/convert/Decorators.scala
+++ b/src/library/scala/collection/convert/Decorators.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package convert
import java.{ util => ju }
diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala
index 9665ffa045..e75a0e2981 100644
--- a/src/library/scala/collection/convert/WrapAsJava.scala
+++ b/src/library/scala/collection/convert/WrapAsJava.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala
index f43eae10d6..d4ab451b0d 100644
--- a/src/library/scala/collection/convert/WrapAsScala.scala
+++ b/src/library/scala/collection/convert/WrapAsScala.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala
index 69e9a8fff4..4410ddc7d8 100644
--- a/src/library/scala/collection/convert/Wrappers.scala
+++ b/src/library/scala/collection/convert/Wrappers.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
diff --git a/src/library/scala/collection/convert/package.scala b/src/library/scala/collection/convert/package.scala
index ea66101aca..13970f9a3e 100644
--- a/src/library/scala/collection/convert/package.scala
+++ b/src/library/scala/collection/convert/package.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package object convert {
val decorateAsJava = new DecorateAsJava { }
diff --git a/src/library/scala/collection/generic/BitOperations.scala b/src/library/scala/collection/generic/BitOperations.scala
index c45ebcf982..d430ece2f5 100644
--- a/src/library/scala/collection/generic/BitOperations.scala
+++ b/src/library/scala/collection/generic/BitOperations.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
/** Some bit operations.
diff --git a/src/library/scala/collection/generic/BitSetFactory.scala b/src/library/scala/collection/generic/BitSetFactory.scala
index 46e2d29612..2e3aae31ac 100644
--- a/src/library/scala/collection/generic/BitSetFactory.scala
+++ b/src/library/scala/collection/generic/BitSetFactory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import scala.collection._
diff --git a/src/library/scala/collection/generic/CanBuildFrom.scala b/src/library/scala/collection/generic/CanBuildFrom.scala
index 73fd4fc026..24e5b2a1dd 100644
--- a/src/library/scala/collection/generic/CanBuildFrom.scala
+++ b/src/library/scala/collection/generic/CanBuildFrom.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
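A sketch of the builder-factory pattern CanBuildFrom encodes, using a hypothetical helper (not part of this patch): the implicit chooses the result type That and supplies the builder.

    import scala.collection.TraversableLike
    import scala.collection.generic.CanBuildFrom

    // duplicate every element, preserving the source collection type
    def duplicate[A, Repr, That](xs: TraversableLike[A, Repr])
                                (implicit bf: CanBuildFrom[Repr, A, That]): That = {
      val b = bf(xs.repr)
      xs.foreach { x => b += x; b += x }
      b.result()
    }

    duplicate(List(1, 2))     // List(1, 1, 2, 2)
    duplicate(Vector(1, 2))   // Vector(1, 1, 2, 2)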
diff --git a/src/library/scala/collection/generic/CanCombineFrom.scala b/src/library/scala/collection/generic/CanCombineFrom.scala
index 9ca3332ba0..7f70b4580a 100644
--- a/src/library/scala/collection/generic/CanCombineFrom.scala
+++ b/src/library/scala/collection/generic/CanCombineFrom.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import scala.collection.parallel._
diff --git a/src/library/scala/collection/generic/ClassTagTraversableFactory.scala b/src/library/scala/collection/generic/ClassTagTraversableFactory.scala
index 85cdbd7276..e3db40123d 100644
--- a/src/library/scala/collection/generic/ClassTagTraversableFactory.scala
+++ b/src/library/scala/collection/generic/ClassTagTraversableFactory.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/Clearable.scala b/src/library/scala/collection/generic/Clearable.scala
index a04ecb2a68..3c496051c4 100644
--- a/src/library/scala/collection/generic/Clearable.scala
+++ b/src/library/scala/collection/generic/Clearable.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
/** This trait forms part of collections that can be cleared
diff --git a/src/library/scala/collection/generic/FilterMonadic.scala b/src/library/scala/collection/generic/FilterMonadic.scala
index e21f0be898..8aefbdb926 100755
--- a/src/library/scala/collection/generic/FilterMonadic.scala
+++ b/src/library/scala/collection/generic/FilterMonadic.scala
@@ -6,8 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
-
+package scala
+package collection
+package generic
/** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods
* of trait `TraversableLike`.
diff --git a/src/library/scala/collection/generic/GenMapFactory.scala b/src/library/scala/collection/generic/GenMapFactory.scala
index 5a183c307b..ae3150115f 100644
--- a/src/library/scala/collection/generic/GenMapFactory.scala
+++ b/src/library/scala/collection/generic/GenMapFactory.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import mutable.{Builder, MapBuilder}
diff --git a/src/library/scala/collection/generic/GenSeqFactory.scala b/src/library/scala/collection/generic/GenSeqFactory.scala
index dd375c567c..6afbb2e2fb 100644
--- a/src/library/scala/collection/generic/GenSeqFactory.scala
+++ b/src/library/scala/collection/generic/GenSeqFactory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/GenSetFactory.scala b/src/library/scala/collection/generic/GenSetFactory.scala
index 9774805cf8..800f66eb53 100644
--- a/src/library/scala/collection/generic/GenSetFactory.scala
+++ b/src/library/scala/collection/generic/GenSetFactory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
diff --git a/src/library/scala/collection/generic/GenTraversableFactory.scala b/src/library/scala/collection/generic/GenTraversableFactory.scala
index 0e1a5534c0..2092c0c5f5 100644
--- a/src/library/scala/collection/generic/GenTraversableFactory.scala
+++ b/src/library/scala/collection/generic/GenTraversableFactory.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/GenericClassTagCompanion.scala b/src/library/scala/collection/generic/GenericClassTagCompanion.scala
index cdfee5252f..a8ac2bf738 100644
--- a/src/library/scala/collection/generic/GenericClassTagCompanion.scala
+++ b/src/library/scala/collection/generic/GenericClassTagCompanion.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
diff --git a/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala b/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala
index f327710848..090cd729a4 100644
--- a/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
diff --git a/src/library/scala/collection/generic/GenericCompanion.scala b/src/library/scala/collection/generic/GenericCompanion.scala
index 66052d0e6f..67d0a9c7f7 100644
--- a/src/library/scala/collection/generic/GenericCompanion.scala
+++ b/src/library/scala/collection/generic/GenericCompanion.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
diff --git a/src/library/scala/collection/generic/GenericOrderedCompanion.scala b/src/library/scala/collection/generic/GenericOrderedCompanion.scala
index 7a0c0a63e8..5b328bff6c 100644
--- a/src/library/scala/collection/generic/GenericOrderedCompanion.scala
+++ b/src/library/scala/collection/generic/GenericOrderedCompanion.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
diff --git a/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala b/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala
index a624e8ca93..c1a41ce7c4 100644
--- a/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
diff --git a/src/library/scala/collection/generic/GenericParCompanion.scala b/src/library/scala/collection/generic/GenericParCompanion.scala
index bb39461e7e..432b9135f8 100644
--- a/src/library/scala/collection/generic/GenericParCompanion.scala
+++ b/src/library/scala/collection/generic/GenericParCompanion.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
+package scala
+package collection
+package generic
import scala.collection.parallel.Combiner
import scala.collection.parallel.ParIterable
diff --git a/src/library/scala/collection/generic/GenericParTemplate.scala b/src/library/scala/collection/generic/GenericParTemplate.scala
index 94c76630d6..b9b7043270 100644
--- a/src/library/scala/collection/generic/GenericParTemplate.scala
+++ b/src/library/scala/collection/generic/GenericParTemplate.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
+package scala
+package collection
+package generic
import scala.collection.parallel.Combiner
import scala.collection.parallel.ParIterable
diff --git a/src/library/scala/collection/generic/GenericSeqCompanion.scala b/src/library/scala/collection/generic/GenericSeqCompanion.scala
index 8b2f8a0fcb..34b20f23a2 100644
--- a/src/library/scala/collection/generic/GenericSeqCompanion.scala
+++ b/src/library/scala/collection/generic/GenericSeqCompanion.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/GenericSetTemplate.scala b/src/library/scala/collection/generic/GenericSetTemplate.scala
index ecfdcffbc5..2cadd14948 100644
--- a/src/library/scala/collection/generic/GenericSetTemplate.scala
+++ b/src/library/scala/collection/generic/GenericSetTemplate.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
/**
diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
index 908aa5b126..cd48cd23f4 100644
--- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
@@ -73,11 +74,20 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
/** Converts this $coll of pairs into two collections of the first and second
* half of each pair.
*
+ * {{{
+ * val xs = $Coll(
+ * (1, "one"),
+ * (2, "two"),
+ * (3, "three")).unzip
+ * // xs == ($Coll(1, 2, 3),
+ * // $Coll(one, two, three))
+ * }}}
+ *
* @tparam A1 the type of the first half of the element pairs
* @tparam A2 the type of the second half of the element pairs
* @param asPair an implicit conversion which asserts that the element type
* of this $coll is a pair.
- * @return a pair ${coll}s, containing the first, respectively second
+ * @return a pair of ${coll}s, containing the first, respectively second
* half of each element pair of this $coll.
*/
def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = {
@@ -94,12 +104,22 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
/** Converts this $coll of triples into three collections of the first, second,
* and third element of each triple.
*
+ * {{{
+ * val xs = $Coll(
+ * (1, "one", '1'),
+ * (2, "two", '2'),
+ * (3, "three", '3')).unzip3
+ * // xs == ($Coll(1, 2, 3),
+ * // $Coll(one, two, three),
+ * // $Coll(1, 2, 3))
+ * }}}
+ *
* @tparam A1 the type of the first member of the element triples
* @tparam A2 the type of the second member of the element triples
* @tparam A3 the type of the third member of the element triples
* @param asTriple an implicit conversion which asserts that the element type
* of this $coll is a triple.
- * @return a triple ${coll}s, containing the first, second, respectively
+ * @return a triple of ${coll}s, containing the first, second, respectively
* third member of each element triple of this $coll.
*/
def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = {
@@ -133,10 +153,16 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
* static type of $coll. For example:
*
* {{{
- * val xs = List(Set(1, 2, 3), Set(1, 2, 3))
+ * val xs = List(
+ * Set(1, 2, 3),
+ * Set(1, 2, 3)
+ * ).flatten
* // xs == List(1, 2, 3, 1, 2, 3)
*
- * val ys = Set(List(1, 2, 3), List(3, 2, 1))
+ * val ys = Set(
+ * List(1, 2, 3),
+ * List(3, 2, 1)
+ * ).flatten
* // ys == Set(1, 2, 3)
* }}}
*/
@@ -150,6 +176,27 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
/** Transposes this $coll of traversable collections into
* a $coll of ${coll}s.
*
+ * The resulting collection's type will be guided by the
+ * static type of $coll. For example:
+ *
+ * {{{
+ * val xs = List(
+ * Set(1, 2, 3),
+ * Set(4, 5, 6)).transpose
+ * // xs == List(
+ * // List(1, 4),
+ * // List(2, 5),
+ * // List(3, 6))
+ *
+ * val ys = Vector(
+ * List(1, 2, 3),
+ * List(4, 5, 6)).transpose
+ * // ys == Vector(
+ * // Vector(1, 4),
+ * // Vector(2, 5),
+ * // Vector(3, 6))
+ * }}}
+ *
* @tparam B the type of the elements of each traversable collection.
* @param asTraversable an implicit conversion which asserts that the
* element type of this $coll is a `Traversable`.
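With concrete types in place of the $Coll placeholders, the examples added above read as follows (easily checked in the REPL):

    val (nums, words) = List((1, "one"), (2, "two"), (3, "three")).unzip
    // nums == List(1, 2, 3), words == List(one, two, three)

    List(Set(1, 2, 3), Set(1, 2, 3)).flatten    // List(1, 2, 3, 1, 2, 3)
    Set(List(1, 2, 3), List(3, 2, 1)).flatten   // Set(1, 2, 3)

    List(Set(1, 2, 3), Set(4, 5, 6)).transpose  // List(List(1, 4), List(2, 5), List(3, 6))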
diff --git a/src/library/scala/collection/generic/Growable.scala b/src/library/scala/collection/generic/Growable.scala
index 52a0d32de1..254d4566be 100644
--- a/src/library/scala/collection/generic/Growable.scala
+++ b/src/library/scala/collection/generic/Growable.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import scala.annotation.tailrec
@@ -47,15 +48,15 @@ trait Growable[-A] extends Clearable {
* @return the $coll itself.
*/
def ++=(xs: TraversableOnce[A]): this.type = {
- @tailrec def loop(xs: collection.LinearSeq[A]) {
+ @tailrec def loop(xs: scala.collection.LinearSeq[A]) {
if (xs.nonEmpty) {
this += xs.head
loop(xs.tail)
}
}
xs.seq match {
- case xs: collection.LinearSeq[_] => loop(xs)
- case xs => xs foreach +=
+ case xs: scala.collection.LinearSeq[_] => loop(xs)
+ case xs => xs foreach +=
}
this
}
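The ++= change above only fully qualifies the LinearSeq match; behaviour is unchanged. A quick sketch with ArrayBuffer, which mixes in Growable:

    import scala.collection.mutable.ArrayBuffer

    val buf = ArrayBuffer(1, 2)
    buf += 3               // Growable: add a single element
    buf ++= List(4, 5)     // hits the @tailrec LinearSeq branch
    buf ++= Vector(6, 7)   // hits the generic foreach branch
    // buf == ArrayBuffer(1, 2, 3, 4, 5, 6, 7)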
diff --git a/src/library/scala/collection/generic/HasNewBuilder.scala b/src/library/scala/collection/generic/HasNewBuilder.scala
index 1a981b487f..aa0ce6698d 100755
--- a/src/library/scala/collection/generic/HasNewBuilder.scala
+++ b/src/library/scala/collection/generic/HasNewBuilder.scala
@@ -5,7 +5,8 @@
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
diff --git a/src/library/scala/collection/generic/HasNewCombiner.scala b/src/library/scala/collection/generic/HasNewCombiner.scala
index 1ecfba19af..99a0722c3d 100644
--- a/src/library/scala/collection/generic/HasNewCombiner.scala
+++ b/src/library/scala/collection/generic/HasNewCombiner.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
+package scala
+package collection
+package generic
import scala.collection.parallel.Combiner
diff --git a/src/library/scala/collection/generic/ImmutableMapFactory.scala b/src/library/scala/collection/generic/ImmutableMapFactory.scala
index 4ce50a31f9..7d857bf1b4 100644
--- a/src/library/scala/collection/generic/ImmutableMapFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableMapFactory.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/ImmutableSetFactory.scala b/src/library/scala/collection/generic/ImmutableSetFactory.scala
index 2e960e670d..f4d4e061bb 100644
--- a/src/library/scala/collection/generic/ImmutableSetFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSetFactory.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import mutable.{ Builder, SetBuilder }
diff --git a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala b/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
index 7743fc2281..730e58a527 100644
--- a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala b/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
index 9914557b51..1fd4a8c99d 100644
--- a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/IndexedSeqFactory.scala b/src/library/scala/collection/generic/IndexedSeqFactory.scala
index e86d163b3c..ddc0141aa9 100644
--- a/src/library/scala/collection/generic/IndexedSeqFactory.scala
+++ b/src/library/scala/collection/generic/IndexedSeqFactory.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import language.higherKinds
diff --git a/src/library/scala/collection/generic/IsSeqLike.scala b/src/library/scala/collection/generic/IsSeqLike.scala
index d1dffdf8cf..189aea4632 100644
--- a/src/library/scala/collection/generic/IsSeqLike.scala
+++ b/src/library/scala/collection/generic/IsSeqLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
/** Type class witnessing that a collection representation type `Repr` has
@@ -41,7 +42,7 @@ trait IsSeqLike[Repr] {
}
object IsSeqLike {
- import language.higherKinds
+ import scala.language.higherKinds
implicit val stringRepr: IsSeqLike[String] { type A = Char } =
new IsSeqLike[String] {
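For context, a sketch of the kind of extension point IsSeqLike enables, following the pattern in this file's scaladoc (the helper below is hypothetical): one method that works both for String, via the stringRepr instance above, and for ordinary Seqs.

    import scala.collection.generic.IsSeqLike

    def firstAndLast[Repr](coll: Repr)(implicit fr: IsSeqLike[Repr]): (fr.A, fr.A) = {
      val seq = fr.conversion(coll)   // Repr => SeqLike[fr.A, Repr]
      (seq.head, seq.last)
    }

    firstAndLast("abc")           // ('a', 'c'), via stringRepr
    firstAndLast(List(1, 2, 3))   // (1, 3)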
diff --git a/src/library/scala/collection/generic/IsTraversableLike.scala b/src/library/scala/collection/generic/IsTraversableLike.scala
index c70772d8f9..22cef555cc 100644
--- a/src/library/scala/collection/generic/IsTraversableLike.scala
+++ b/src/library/scala/collection/generic/IsTraversableLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
/** A trait which can be used to avoid code duplication when defining extension
diff --git a/src/library/scala/collection/generic/IsTraversableOnce.scala b/src/library/scala/collection/generic/IsTraversableOnce.scala
index bb5404c92d..3ee586ae63 100644
--- a/src/library/scala/collection/generic/IsTraversableOnce.scala
+++ b/src/library/scala/collection/generic/IsTraversableOnce.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
/** Type class witnessing that a collection representation type `Repr` has
diff --git a/src/library/scala/collection/generic/IterableForwarder.scala b/src/library/scala/collection/generic/IterableForwarder.scala
index 8feace3f8b..5a54ed7f78 100644
--- a/src/library/scala/collection/generic/IterableForwarder.scala
+++ b/src/library/scala/collection/generic/IterableForwarder.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
+package scala
+package collection
+package generic
import scala.collection._
diff --git a/src/library/scala/collection/generic/MapFactory.scala b/src/library/scala/collection/generic/MapFactory.scala
index 565850bee2..b9f3d4b010 100644
--- a/src/library/scala/collection/generic/MapFactory.scala
+++ b/src/library/scala/collection/generic/MapFactory.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
diff --git a/src/library/scala/collection/generic/MutableMapFactory.scala b/src/library/scala/collection/generic/MutableMapFactory.scala
index ac139cc80c..14c5b6bac3 100644
--- a/src/library/scala/collection/generic/MutableMapFactory.scala
+++ b/src/library/scala/collection/generic/MutableMapFactory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
diff --git a/src/library/scala/collection/generic/MutableSetFactory.scala b/src/library/scala/collection/generic/MutableSetFactory.scala
index 9c69d53608..63944657fc 100644
--- a/src/library/scala/collection/generic/MutableSetFactory.scala
+++ b/src/library/scala/collection/generic/MutableSetFactory.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import mutable.{ Builder, GrowingBuilder }
diff --git a/src/library/scala/collection/generic/MutableSortedSetFactory.scala b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
index b9be83c3c4..0339a523e9 100644
--- a/src/library/scala/collection/generic/MutableSortedSetFactory.scala
+++ b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import scala.collection.mutable.{ Builder, GrowingBuilder }
diff --git a/src/library/scala/collection/generic/OrderedTraversableFactory.scala b/src/library/scala/collection/generic/OrderedTraversableFactory.scala
index a2de108721..7657aff2aa 100644
--- a/src/library/scala/collection/generic/OrderedTraversableFactory.scala
+++ b/src/library/scala/collection/generic/OrderedTraversableFactory.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/ParFactory.scala b/src/library/scala/collection/generic/ParFactory.scala
index bb88d26dec..486e2a115e 100644
--- a/src/library/scala/collection/generic/ParFactory.scala
+++ b/src/library/scala/collection/generic/ParFactory.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
+package scala
+package collection
+package generic
import scala.collection.parallel.ParIterable
import scala.collection.parallel.Combiner
diff --git a/src/library/scala/collection/generic/ParMapFactory.scala b/src/library/scala/collection/generic/ParMapFactory.scala
index 0a6b08ae34..70797c83e2 100644
--- a/src/library/scala/collection/generic/ParMapFactory.scala
+++ b/src/library/scala/collection/generic/ParMapFactory.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
+package scala
+package collection
+package generic
import scala.collection.parallel.ParMap
import scala.collection.parallel.ParMapLike
diff --git a/src/library/scala/collection/generic/ParSetFactory.scala b/src/library/scala/collection/generic/ParSetFactory.scala
index 3727ab89f7..4320635ae6 100644
--- a/src/library/scala/collection/generic/ParSetFactory.scala
+++ b/src/library/scala/collection/generic/ParSetFactory.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
+package scala
+package collection
+package generic
import scala.collection.mutable.Builder
import scala.collection.parallel.Combiner
diff --git a/src/library/scala/collection/generic/SeqFactory.scala b/src/library/scala/collection/generic/SeqFactory.scala
index a66074741a..35cce11a79 100644
--- a/src/library/scala/collection/generic/SeqFactory.scala
+++ b/src/library/scala/collection/generic/SeqFactory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/SeqForwarder.scala b/src/library/scala/collection/generic/SeqForwarder.scala
index aafaffc159..c23b818c00 100644
--- a/src/library/scala/collection/generic/SeqForwarder.scala
+++ b/src/library/scala/collection/generic/SeqForwarder.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
+package scala
+package collection
+package generic
import scala.collection._
import scala.collection.immutable.Range
diff --git a/src/library/scala/collection/generic/SetFactory.scala b/src/library/scala/collection/generic/SetFactory.scala
index e9bbde92f3..fcd8d00c18 100644
--- a/src/library/scala/collection/generic/SetFactory.scala
+++ b/src/library/scala/collection/generic/SetFactory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
diff --git a/src/library/scala/collection/generic/Shrinkable.scala b/src/library/scala/collection/generic/Shrinkable.scala
index b00048fd5d..b7412afde0 100644
--- a/src/library/scala/collection/generic/Shrinkable.scala
+++ b/src/library/scala/collection/generic/Shrinkable.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package generic
/** This trait forms part of collections that can be reduced
diff --git a/src/library/scala/collection/generic/Signalling.scala b/src/library/scala/collection/generic/Signalling.scala
index 442a7c126e..e62eb6ff09 100644
--- a/src/library/scala/collection/generic/Signalling.scala
+++ b/src/library/scala/collection/generic/Signalling.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
+package scala
+package collection
+package generic
import java.util.concurrent.atomic.AtomicInteger
diff --git a/src/library/scala/collection/generic/Sizing.scala b/src/library/scala/collection/generic/Sizing.scala
index 1191259b3a..73584ce82e 100644
--- a/src/library/scala/collection/generic/Sizing.scala
+++ b/src/library/scala/collection/generic/Sizing.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
+package scala
+package collection
+package generic
/** A trait for objects which have a size.
*/
diff --git a/src/library/scala/collection/generic/SliceInterval.scala b/src/library/scala/collection/generic/SliceInterval.scala
index 244e960454..82acdd1371 100644
--- a/src/library/scala/collection/generic/SliceInterval.scala
+++ b/src/library/scala/collection/generic/SliceInterval.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
/** A container for the endpoints of a collection slice.
diff --git a/src/library/scala/collection/generic/Sorted.scala b/src/library/scala/collection/generic/Sorted.scala
index 2c3d200d01..3876da3275 100644
--- a/src/library/scala/collection/generic/Sorted.scala
+++ b/src/library/scala/collection/generic/Sorted.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
/** Any collection (including maps) whose keys (or elements) are ordered.
diff --git a/src/library/scala/collection/generic/SortedMapFactory.scala b/src/library/scala/collection/generic/SortedMapFactory.scala
index fb3fe0fcb6..afa11e9ab1 100644
--- a/src/library/scala/collection/generic/SortedMapFactory.scala
+++ b/src/library/scala/collection/generic/SortedMapFactory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import mutable.{Builder, MapBuilder}
diff --git a/src/library/scala/collection/generic/SortedSetFactory.scala b/src/library/scala/collection/generic/SortedSetFactory.scala
index f48e1c69e1..c734830e0b 100644
--- a/src/library/scala/collection/generic/SortedSetFactory.scala
+++ b/src/library/scala/collection/generic/SortedSetFactory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import mutable.{Builder, SetBuilder}
diff --git a/src/library/scala/collection/generic/Subtractable.scala b/src/library/scala/collection/generic/Subtractable.scala
index e0fe07a0a4..32a9000296 100644
--- a/src/library/scala/collection/generic/Subtractable.scala
+++ b/src/library/scala/collection/generic/Subtractable.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package generic
diff --git a/src/library/scala/collection/generic/TraversableFactory.scala b/src/library/scala/collection/generic/TraversableFactory.scala
index 5d1c9d17e2..ad6d8fd198 100644
--- a/src/library/scala/collection/generic/TraversableFactory.scala
+++ b/src/library/scala/collection/generic/TraversableFactory.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/TraversableForwarder.scala b/src/library/scala/collection/generic/TraversableForwarder.scala
index 2662018feb..c71368bba0 100644
--- a/src/library/scala/collection/generic/TraversableForwarder.scala
+++ b/src/library/scala/collection/generic/TraversableForwarder.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
+package scala
+package collection
+package generic
import scala.collection._
import mutable.{ Buffer, StringBuilder }
diff --git a/src/library/scala/collection/generic/package.scala b/src/library/scala/collection/generic/package.scala
index dd47b7ace6..1beb4a8599 100644
--- a/src/library/scala/collection/generic/package.scala
+++ b/src/library/scala/collection/generic/package.scala
@@ -1,4 +1,5 @@
-package scala.collection
+package scala
+package collection
import generic.CanBuildFrom
import scala.language.higherKinds
diff --git a/src/library/scala/collection/immutable/BitSet.scala b/src/library/scala/collection/immutable/BitSet.scala
index 1da6edd740..70543aa3a6 100644
--- a/src/library/scala/collection/immutable/BitSet.scala
+++ b/src/library/scala/collection/immutable/BitSet.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/DefaultMap.scala b/src/library/scala/collection/immutable/DefaultMap.scala
index 5ae5ef66fb..42a03e90ee 100755
--- a/src/library/scala/collection/immutable/DefaultMap.scala
+++ b/src/library/scala/collection/immutable/DefaultMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
/** A default map which implements the `+` and `-`
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index 44e5304e09..e74a19ef5b 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -247,7 +247,9 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] =
if (hash == this.hash) {
val kvs1 = kvs - key
- if (kvs1.isEmpty)
+ if (kvs1 eq kvs)
+ this
+ else if (kvs1.isEmpty)
HashMap.empty[A,B]
else if(kvs1.tail.isEmpty) {
val kv = kvs1.head
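
The new `kvs1 eq kvs` branch makes removing an absent key from a hash-collision bucket hand back the existing map instead of allocating a copy. A REPL-style sketch of the behaviour this should give after the change (my own example; the overridden hashCode just forces a collision node):

    // Two distinct keys with the same hashCode end up in one collision bucket.
    final case class K(name: String) { override def hashCode = 1 }

    val m  = scala.collection.immutable.HashMap(K("a") -> 1, K("b") -> 2)
    val m2 = m - K("missing")   // key is absent from the bucket
    assert(m2 eq m)             // no fresh map allocated once this branch exists
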
diff --git a/src/library/scala/collection/immutable/IndexedSeq.scala b/src/library/scala/collection/immutable/IndexedSeq.scala
index 9316ff5e72..33170fdd59 100644
--- a/src/library/scala/collection/immutable/IndexedSeq.scala
+++ b/src/library/scala/collection/immutable/IndexedSeq.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/Iterable.scala b/src/library/scala/collection/immutable/Iterable.scala
index cc64d8ff3c..6e4eb1e45f 100644
--- a/src/library/scala/collection/immutable/Iterable.scala
+++ b/src/library/scala/collection/immutable/Iterable.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/LinearSeq.scala b/src/library/scala/collection/immutable/LinearSeq.scala
index 5ede6d90d0..2109bd5211 100644
--- a/src/library/scala/collection/immutable/LinearSeq.scala
+++ b/src/library/scala/collection/immutable/LinearSeq.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index be233d06cb..c01694960c 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
@@ -85,7 +86,8 @@ sealed abstract class List[+A] extends AbstractSeq[A]
with LinearSeq[A]
with Product
with GenericTraversableTemplate[A, List]
- with LinearSeqOptimized[A, List[A]] {
+ with LinearSeqOptimized[A, List[A]]
+ with Serializable {
override def companion: GenericCompanion[List] = List
import scala.collection.{Iterable, Traversable, Seq, IndexedSeq}
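
Adding `Serializable` to the sealed class means the static type `List[A]` itself now conforms to `Serializable`, not only the concrete `::` and `Nil` subclasses. A small REPL-style check (the `persist` sink is hypothetical):

    // Hypothetical sink that demands a Serializable value.
    def persist(x: Serializable): Unit = ()

    persist(List(1, 2, 3))   // compiles once List[A] <: Serializable
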
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index 75817350e5..49295d92dd 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
@@ -180,7 +181,7 @@ extends AbstractMap[A, B]
* it will be overridden by this function.
*/
override def updated [B2 >: B1](k: A, v: B2): ListMap[A, B2] = {
- val m = if (contains(k)) this - k else this
+ val m = this - k
new m.Node[B2](k, v)
}
@@ -188,32 +189,17 @@ extends AbstractMap[A, B]
* If the map does not contain a mapping for the given key, the
* method returns the same map.
*/
- override def - (k: A): ListMap[A, B1] = {
- // This definition used to result in stack overflows
- // if (k == key)
- // next
- // else {
- // val tail = next - k
- // if (tail eq next) this
- // else new tail.Node(key, value)
- // }
- // we use an imperative one instead (and use an auxiliary list to preserve order!):
- var cur: ListMap[A, B1] = this
- var lst: List[(A, B1)] = Nil
- while (cur.nonEmpty) {
- if (k != cur.key) lst ::= ((cur.key, cur.value))
- cur = cur.tail
- }
- var acc = ListMap[A, B1]()
- while (lst != Nil) {
- val elem = lst.head
- val stbl = acc
- acc = new stbl.Node(elem._1, elem._2)
- lst = lst.tail
- }
- acc
- }
-
+ override def - (k: A): ListMap[A, B1] = remove0(k, this, Nil)
+
+ @tailrec private def remove0(k: A, cur: ListMap[A, B1], acc: List[ListMap[A, B1]]): ListMap[A, B1] =
+ if (cur.isEmpty)
+ acc.last
+ else if (k == cur.key)
+ (cur.tail /: acc) {
+ case (t, h) => val tt = t; new tt.Node(h.key, h.value) // SI-7459
+ }
+ else
+ remove0(k, cur.tail, cur::acc)
override def tail: ListMap[A, B1] = ListMap.this
}
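
The rewritten removal is tail-recursive and, per the SI-7459 note above, only rebuilds the prefix when the key is actually present; an absent key yields the original map unchanged. A standalone analogue over a plain association list, assuming nothing beyond the standard library (my own sketch, not the library code):

    import scala.annotation.tailrec

    // Order-preserving, structure-sharing removal from an association list.
    def removeKey[K, V](k: K, entries: List[(K, V)]): List[(K, V)] = {
      @tailrec
      def loop(cur: List[(K, V)], acc: List[(K, V)]): List[(K, V)] = cur match {
        case Nil              => entries               // key absent: reuse the original list
        case (`k`, _) :: rest => acc reverse_::: rest  // key found: splice the kept prefix back on
        case kv :: rest       => loop(rest, kv :: acc)
      }
      loop(entries, Nil)
    }
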
diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala
index def3d7eb23..5836321010 100644
--- a/src/library/scala/collection/immutable/ListSet.scala
+++ b/src/library/scala/collection/immutable/ListSet.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala
index 2ebf5035e1..a3f5c85961 100644
--- a/src/library/scala/collection/immutable/Map.scala
+++ b/src/library/scala/collection/immutable/Map.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala
index 1c2ab1c662..f6041464e7 100644
--- a/src/library/scala/collection/immutable/MapLike.scala
+++ b/src/library/scala/collection/immutable/MapLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/MapProxy.scala b/src/library/scala/collection/immutable/MapProxy.scala
index f3f04ec346..f9f19c021d 100644
--- a/src/library/scala/collection/immutable/MapProxy.scala
+++ b/src/library/scala/collection/immutable/MapProxy.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
/**
diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala
index 5842ff30e6..486c2b6c8f 100644
--- a/src/library/scala/collection/immutable/NumericRange.scala
+++ b/src/library/scala/collection/immutable/NumericRange.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import mutable.{ Builder, ListBuffer }
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index 4069f6f0e4..e190d022d7 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import java.io._
diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala
index 7d2ff95792..c55b46bf05 100644
--- a/src/library/scala/collection/immutable/Queue.scala
+++ b/src/library/scala/collection/immutable/Queue.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index 243e3fcb91..72c40e889f 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection.immutable
+package scala
+package collection.immutable
import scala.collection.parallel.immutable.ParRange
@@ -64,6 +65,7 @@ extends scala.collection.AbstractSeq[Int]
|| (start < end && step < 0)
|| (start == end && !isInclusive)
)
+ @deprecated("This method will be made private, use `length` instead.", "2.11")
final val numRangeElements: Int = {
if (step == 0) throw new IllegalArgumentException("step cannot be 0.")
else if (isEmpty) 0
@@ -73,7 +75,9 @@ extends scala.collection.AbstractSeq[Int]
else len.toInt
}
}
+ @deprecated("This method will be made private, use `last` instead.", "2.11")
final val lastElement = start + (numRangeElements - 1) * step
+ @deprecated("This method will be made private.", "2.11")
final val terminalElement = start + numRangeElements * step
override def last = if (isEmpty) Nil.last else lastElement
@@ -81,14 +85,14 @@ extends scala.collection.AbstractSeq[Int]
override def min[A1 >: Int](implicit ord: Ordering[A1]): Int =
if (ord eq Ordering.Int) {
- if (step > 0) start
+ if (step > 0) head
else last
} else super.min(ord)
override def max[A1 >: Int](implicit ord: Ordering[A1]): Int =
if (ord eq Ordering.Int) {
if (step > 0) last
- else start
+ else head
} else super.max(ord)
protected def copy(start: Int, end: Int, step: Int): Range = new Range(start, end, step)
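
Using `head` instead of the raw `start` field means an empty range now fails fast in `min`/`max` like other collections, rather than silently answering with `start`. A REPL-style sanity check of the non-empty cases (my own example, not from the patch):

    val up   = 1 to 10 by 3     // 1, 4, 7, 10
    val down = 10 to 1 by -3    // 10, 7, 4, 1
    assert(up.min   == up.head   && up.max   == up.last)
    assert(down.min == down.last && down.max == down.head)
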
diff --git a/src/library/scala/collection/immutable/Seq.scala b/src/library/scala/collection/immutable/Seq.scala
index 14610aea33..38855ca6b0 100644
--- a/src/library/scala/collection/immutable/Seq.scala
+++ b/src/library/scala/collection/immutable/Seq.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala
index 8433c2b002..2c96f4c194 100644
--- a/src/library/scala/collection/immutable/Set.scala
+++ b/src/library/scala/collection/immutable/Set.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/SetProxy.scala b/src/library/scala/collection/immutable/SetProxy.scala
index 06c6843181..9e25678435 100644
--- a/src/library/scala/collection/immutable/SetProxy.scala
+++ b/src/library/scala/collection/immutable/SetProxy.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
/** This is a simple wrapper class for <a href="Set.html"
diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala
index 73cc55df00..4b9fa81a8c 100644
--- a/src/library/scala/collection/immutable/SortedMap.scala
+++ b/src/library/scala/collection/immutable/SortedMap.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/SortedSet.scala b/src/library/scala/collection/immutable/SortedSet.scala
index 3f75d50555..4a8859a7ab 100644
--- a/src/library/scala/collection/immutable/SortedSet.scala
+++ b/src/library/scala/collection/immutable/SortedSet.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/Stack.scala b/src/library/scala/collection/immutable/Stack.scala
index 357e9a123c..65a34e2310 100644
--- a/src/library/scala/collection/immutable/Stack.scala
+++ b/src/library/scala/collection/immutable/Stack.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 0770bd3175..5e1de44749 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import generic._
@@ -183,10 +184,12 @@ import scala.language.implicitConversions
* @define orderDependentFold
* @define willTerminateInf Note: lazily evaluated; will terminate for infinite-sized collections.
*/
+@deprecatedInheritance("This class will be sealed.", "2.11.0")
abstract class Stream[+A] extends AbstractSeq[A]
with LinearSeq[A]
with GenericTraversableTemplate[A, Stream]
- with LinearSeqOptimized[A, Stream[A]] {
+ with LinearSeqOptimized[A, Stream[A]]
+ with Serializable {
self =>
override def companion: GenericCompanion[Stream] = Stream
@@ -1048,7 +1051,7 @@ object Stream extends SeqFactory[Stream] {
def result: Stream[A] = parts.toStream flatMap (_.toStream)
}
- object Empty extends Stream[Nothing] with Serializable {
+ object Empty extends Stream[Nothing] {
override def isEmpty = true
override def head = throw new NoSuchElementException("head of empty stream")
override def tail = throw new UnsupportedOperationException("tail of empty stream")
@@ -1099,7 +1102,7 @@ object Stream extends SeqFactory[Stream] {
/** A lazy cons cell, from which streams are built. */
@SerialVersionUID(-602202424901551803L)
- final class Cons[+A](hd: A, tl: => Stream[A]) extends Stream[A] with Serializable {
+ final class Cons[+A](hd: A, tl: => Stream[A]) extends Stream[A] {
override def isEmpty = false
override def head = hd
@volatile private[this] var tlVal: Stream[A] = _
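
Here `@deprecatedInheritance` warns user subclasses ahead of sealing the hierarchy, and `Serializable` moves from `Empty`/`Cons` up to the abstract class, so a value statically typed as `Stream[A]` conforms to `Serializable`, mirroring the `List` change above. REPL-style sketch:

    val s: Stream[Int] = Stream(1, 2, 3)
    val ser: Serializable = s    // compiles once Stream[A] <: Serializable
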
diff --git a/src/library/scala/collection/immutable/StreamView.scala b/src/library/scala/collection/immutable/StreamView.scala
index 5a24b77eb3..127ed76eb5 100644
--- a/src/library/scala/collection/immutable/StreamView.scala
+++ b/src/library/scala/collection/immutable/StreamView.scala
@@ -1,4 +1,5 @@
-package scala.collection
+package scala
+package collection
package immutable
trait StreamView[+A, +Coll] extends StreamViewLike[A, Coll, StreamView[A, Coll]] { }
diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala
index 236308da2e..ccab032cfd 100644
--- a/src/library/scala/collection/immutable/StreamViewLike.scala
+++ b/src/library/scala/collection/immutable/StreamViewLike.scala
@@ -1,4 +1,5 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index 389e1579f2..5a0d24ddd2 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import mutable.Builder
@@ -221,12 +222,33 @@ self =>
*/
def r(groupNames: String*): Regex = new Regex(toString, groupNames: _*)
+ /**
+ * @throws `java.lang.IllegalArgumentException` - If the string does not contain a parsable boolean.
+ */
def toBoolean: Boolean = parseBoolean(toString)
+ /**
+ * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable byte.
+ */
def toByte: Byte = java.lang.Byte.parseByte(toString)
+ /**
+ * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable short.
+ */
def toShort: Short = java.lang.Short.parseShort(toString)
+ /**
+ * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable int.
+ */
def toInt: Int = java.lang.Integer.parseInt(toString)
+ /**
+ * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable long.
+ */
def toLong: Long = java.lang.Long.parseLong(toString)
+ /**
+ * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable float.
+ */
def toFloat: Float = java.lang.Float.parseFloat(toString)
+ /**
+ * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable double.
+ */
def toDouble: Double = java.lang.Double.parseDouble(toString)
private def parseBoolean(s: String): Boolean =
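
The added `@throws` notes document existing behaviour rather than change it. A REPL-style illustration of the documented failure modes (my own example):

    import scala.util.Try

    assert("42".toInt == 42)
    assert(Try("forty-two".toInt).isFailure)   // NumberFormatException
    assert(Try("maybe".toBoolean).isFailure)   // IllegalArgumentException
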
diff --git a/src/library/scala/collection/immutable/StringOps.scala b/src/library/scala/collection/immutable/StringOps.scala
index 16c1f96cc2..6737692fb1 100644
--- a/src/library/scala/collection/immutable/StringOps.scala
+++ b/src/library/scala/collection/immutable/StringOps.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import mutable.StringBuilder
diff --git a/src/library/scala/collection/immutable/Traversable.scala b/src/library/scala/collection/immutable/Traversable.scala
index 5188343011..775d635fae 100644
--- a/src/library/scala/collection/immutable/Traversable.scala
+++ b/src/library/scala/collection/immutable/Traversable.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala
index 5085039da5..4d2ec579db 100644
--- a/src/library/scala/collection/immutable/TreeMap.scala
+++ b/src/library/scala/collection/immutable/TreeMap.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala
index e25d16408a..dfe1a833ef 100644
--- a/src/library/scala/collection/immutable/TreeSet.scala
+++ b/src/library/scala/collection/immutable/TreeSet.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/TrieIterator.scala b/src/library/scala/collection/immutable/TrieIterator.scala
index dbe013d6e8..d7335e80f1 100644
--- a/src/library/scala/collection/immutable/TrieIterator.scala
+++ b/src/library/scala/collection/immutable/TrieIterator.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import HashMap.{ HashTrieMap, HashMapCollision1, HashMap1 }
diff --git a/src/library/scala/collection/immutable/WrappedString.scala b/src/library/scala/collection/immutable/WrappedString.scala
index edcab31f33..d6bebf9ef5 100644
--- a/src/library/scala/collection/immutable/WrappedString.scala
+++ b/src/library/scala/collection/immutable/WrappedString.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala
index dd7a94d677..d2205f9994 100644
--- a/src/library/scala/collection/mutable/AVLTree.scala
+++ b/src/library/scala/collection/mutable/AVLTree.scala
@@ -10,7 +10,6 @@ package scala
package collection
package mutable
-
/**
* An immutable AVL Tree implementation used by mutable.TreeSet
*
diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala
index f1cfd2d69a..2d43b352c5 100644
--- a/src/library/scala/collection/mutable/ArrayBuffer.scala
+++ b/src/library/scala/collection/mutable/ArrayBuffer.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/ArrayBuilder.scala b/src/library/scala/collection/mutable/ArrayBuilder.scala
index 2fe3e91d68..65b4d52a60 100644
--- a/src/library/scala/collection/mutable/ArrayBuilder.scala
+++ b/src/library/scala/collection/mutable/ArrayBuilder.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import scala.reflect.ClassTag
diff --git a/src/library/scala/collection/mutable/ArrayLike.scala b/src/library/scala/collection/mutable/ArrayLike.scala
index 40017aa08e..4a6820856d 100644
--- a/src/library/scala/collection/mutable/ArrayLike.scala
+++ b/src/library/scala/collection/mutable/ArrayLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/** A common supertrait of `ArrayOps` and `WrappedArray` that factors out most
diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala
index 334b26ae03..577a838315 100644
--- a/src/library/scala/collection/mutable/ArraySeq.scala
+++ b/src/library/scala/collection/mutable/ArraySeq.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/BitSet.scala b/src/library/scala/collection/mutable/BitSet.scala
index 397f8099eb..f252a0bfeb 100644
--- a/src/library/scala/collection/mutable/BitSet.scala
+++ b/src/library/scala/collection/mutable/BitSet.scala
@@ -8,11 +8,12 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
-import BitSetLike.{LogWL, updateArray}
+import BitSetLike.{LogWL, MaxSize, updateArray}
/** A class for mutable bitsets.
*
@@ -63,9 +64,10 @@ class BitSet(protected var elems: Array[Long]) extends AbstractSet[Int]
}
private def ensureCapacity(idx: Int) {
+ require(idx < MaxSize)
if (idx >= nwords) {
var newlen = nwords
- while (idx >= newlen) newlen = newlen * 2
+ while (idx >= newlen) newlen = (newlen * 2) min MaxSize
val elems1 = new Array[Long](newlen)
Array.copy(elems, 0, elems1, 0, nwords)
elems = elems1
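
`ensureCapacity` now rejects indices at or beyond `BitSetLike.MaxSize` and caps the doubling of the word array at that bound. A standalone sketch of the growth rule; the `MaxSize` value here is illustrative, not the library's constant:

    val MaxSize = 1 << 24                        // illustrative bound only
    def grownLength(nwords: Int, idx: Int): Int = {
      require(idx < MaxSize)
      var newlen = nwords max 1                  // assume at least one word
      while (idx >= newlen) newlen = (newlen * 2) min MaxSize
      newlen
    }
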
diff --git a/src/library/scala/collection/mutable/Buffer.scala b/src/library/scala/collection/mutable/Buffer.scala
index 230799c6f3..d2e33badbe 100644
--- a/src/library/scala/collection/mutable/Buffer.scala
+++ b/src/library/scala/collection/mutable/Buffer.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index 322522fdd2..4b3d3bc1cf 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala
index d3f96f69ad..14946cdfd8 100644
--- a/src/library/scala/collection/mutable/BufferProxy.scala
+++ b/src/library/scala/collection/mutable/BufferProxy.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import script._
diff --git a/src/library/scala/collection/mutable/Cloneable.scala b/src/library/scala/collection/mutable/Cloneable.scala
index dadcd36257..8b2f3f70de 100644
--- a/src/library/scala/collection/mutable/Cloneable.scala
+++ b/src/library/scala/collection/mutable/Cloneable.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
/** A trait for cloneable collections.
diff --git a/src/library/scala/collection/mutable/DefaultEntry.scala b/src/library/scala/collection/mutable/DefaultEntry.scala
index f14cb4ac2b..66db45866c 100644
--- a/src/library/scala/collection/mutable/DefaultEntry.scala
+++ b/src/library/scala/collection/mutable/DefaultEntry.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/** Class used internally for default map model.
diff --git a/src/library/scala/collection/mutable/DefaultMapModel.scala b/src/library/scala/collection/mutable/DefaultMapModel.scala
index 903f117466..4dfae047c3 100644
--- a/src/library/scala/collection/mutable/DefaultMapModel.scala
+++ b/src/library/scala/collection/mutable/DefaultMapModel.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
/** This class is used internally. It implements the mutable `Map`
diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala
index a106794912..8654a91cf1 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedList.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
index 3f223f30ec..776ac76c45 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import scala.annotation.migration
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index 49a9427588..1cdf150cb8 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -6,11 +6,10 @@
** |/ **
\* */
-
-package scala.collection
+package scala
+package collection
package mutable
-
/** An implementation class backing a `HashSet`.
*
* This trait is used internally. It can be mixed in with various collections relying on
diff --git a/src/library/scala/collection/mutable/GrowingBuilder.scala b/src/library/scala/collection/mutable/GrowingBuilder.scala
index ba7ea60df1..c4b5e546aa 100644
--- a/src/library/scala/collection/mutable/GrowingBuilder.scala
+++ b/src/library/scala/collection/mutable/GrowingBuilder.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/HashEntry.scala b/src/library/scala/collection/mutable/HashEntry.scala
index 5cd976eb47..4c0f6a93e8 100644
--- a/src/library/scala/collection/mutable/HashEntry.scala
+++ b/src/library/scala/collection/mutable/HashEntry.scala
@@ -5,7 +5,8 @@
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/** Class used internally.
diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala
index 692d6b8d6a..6fca75ffea 100644
--- a/src/library/scala/collection/mutable/HashMap.scala
+++ b/src/library/scala/collection/mutable/HashMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
@@ -93,7 +94,7 @@ extends AbstractMap[A, B]
def -=(key: A): this.type = { removeEntry(key); this }
- def iterator = entriesIterator map {e => (e.key, e.value)}
+ def iterator = entriesIterator map (e => ((e.key, e.value)))
override def foreach[C](f: ((A, B)) => C): Unit = foreachEntry(e => f((e.key, e.value)))
diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala
index 753f7f8d01..886fee5a59 100644
--- a/src/library/scala/collection/mutable/HashSet.scala
+++ b/src/library/scala/collection/mutable/HashSet.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index 37d2b51a91..0479b51830 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
/** This class can be used to construct data structures that are based
diff --git a/src/library/scala/collection/mutable/History.scala b/src/library/scala/collection/mutable/History.scala
index 34e8f7d5b8..19148c0ac2 100644
--- a/src/library/scala/collection/mutable/History.scala
+++ b/src/library/scala/collection/mutable/History.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
diff --git a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
index 755eea831f..88e9e9db8f 100644
--- a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import scala.annotation.migration
diff --git a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
index 42c757d3bf..3e64747832 100644
--- a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
diff --git a/src/library/scala/collection/mutable/IndexedSeq.scala b/src/library/scala/collection/mutable/IndexedSeq.scala
index 4d094e697b..3d9630eea7 100644
--- a/src/library/scala/collection/mutable/IndexedSeq.scala
+++ b/src/library/scala/collection/mutable/IndexedSeq.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/IndexedSeqLike.scala b/src/library/scala/collection/mutable/IndexedSeqLike.scala
index 7b582eb5cb..4cf794c32f 100644
--- a/src/library/scala/collection/mutable/IndexedSeqLike.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/** A subtrait of scala.collection.IndexedSeq which represents sequences
diff --git a/src/library/scala/collection/mutable/IndexedSeqOptimized.scala b/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
index 80b527a7b9..09f0712862 100755
--- a/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/** A subtrait of scala.collection.IndexedSeq which represents sequences
diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala
index a88ed8f123..36c1076b87 100644
--- a/src/library/scala/collection/mutable/IndexedSeqView.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqView.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/Iterable.scala b/src/library/scala/collection/mutable/Iterable.scala
index b79453e4e9..f7a794e357 100644
--- a/src/library/scala/collection/mutable/Iterable.scala
+++ b/src/library/scala/collection/mutable/Iterable.scala
@@ -5,7 +5,8 @@
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/LazyBuilder.scala b/src/library/scala/collection/mutable/LazyBuilder.scala
index 0b56c86ac4..ebee38b77f 100644
--- a/src/library/scala/collection/mutable/LazyBuilder.scala
+++ b/src/library/scala/collection/mutable/LazyBuilder.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/** A builder that constructs its result lazily. Iterators or iterables to
diff --git a/src/library/scala/collection/mutable/LinearSeq.scala b/src/library/scala/collection/mutable/LinearSeq.scala
index f241a2f6d4..3fa10042ef 100644
--- a/src/library/scala/collection/mutable/LinearSeq.scala
+++ b/src/library/scala/collection/mutable/LinearSeq.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/LinkedEntry.scala b/src/library/scala/collection/mutable/LinkedEntry.scala
index e4e29122d8..296e7fde18 100644
--- a/src/library/scala/collection/mutable/LinkedEntry.scala
+++ b/src/library/scala/collection/mutable/LinkedEntry.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
/** Class for the linked hash map entry, used internally.
diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala
index 14f30d74e8..536f320402 100644
--- a/src/library/scala/collection/mutable/LinkedHashMap.scala
+++ b/src/library/scala/collection/mutable/LinkedHashMap.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala
index 5641a78d46..d89566793f 100644
--- a/src/library/scala/collection/mutable/LinkedHashSet.scala
+++ b/src/library/scala/collection/mutable/LinkedHashSet.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/LinkedList.scala b/src/library/scala/collection/mutable/LinkedList.scala
index 29e6fdd375..a772f86d78 100644
--- a/src/library/scala/collection/mutable/LinkedList.scala
+++ b/src/library/scala/collection/mutable/LinkedList.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala
index 3003080060..6d4565026f 100644
--- a/src/library/scala/collection/mutable/LinkedListLike.scala
+++ b/src/library/scala/collection/mutable/LinkedListLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import scala.annotation.tailrec
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index af1d7e4183..7f54692c8b 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
@@ -261,7 +262,7 @@ final class ListBuffer[A]
* @param n the index which refers to the first element to remove.
* @param count the number of elements to remove.
*/
- @annotation.migration("Invalid input values will be rejected in future releases.", "2.11")
+ @scala.annotation.migration("Invalid input values will be rejected in future releases.", "2.11")
override def remove(n: Int, count: Int) {
if (n >= len)
return
diff --git a/src/library/scala/collection/mutable/ListMap.scala b/src/library/scala/collection/mutable/ListMap.scala
index 7f05deffc8..0b5d34cd81 100644
--- a/src/library/scala/collection/mutable/ListMap.scala
+++ b/src/library/scala/collection/mutable/ListMap.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/Map.scala b/src/library/scala/collection/mutable/Map.scala
index f72e1fc4e7..7280aaec25 100644
--- a/src/library/scala/collection/mutable/Map.scala
+++ b/src/library/scala/collection/mutable/Map.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
@@ -47,7 +48,7 @@ trait Map[A, B]
*/
def withDefaultValue(d: B): mutable.Map[A, B] = new Map.WithDefault[A, B](this, x => d)
- /** Return a read-only projection of this map. !!! or just use an (immutable) MapProxy?
+ /* Return a read-only projection of this map. !!! or just use an (immutable) MapProxy?
def readOnly : scala.collection.Map[A, B] = new scala.collection.Map[A, B] {
override def size = self.size
override def update(key: A, value: B) = self.update(key, value)
diff --git a/src/library/scala/collection/mutable/MapBuilder.scala b/src/library/scala/collection/mutable/MapBuilder.scala
index 8468e09e4d..a5a6b12ea9 100644
--- a/src/library/scala/collection/mutable/MapBuilder.scala
+++ b/src/library/scala/collection/mutable/MapBuilder.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package mutable
/** The canonical builder for immutable maps, working with the map's `+` method
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index 49d1e039f0..dbe32fdb79 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -7,12 +7,13 @@
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
import scala.annotation.migration
-import parallel.mutable.ParMap
+import scala.collection.parallel.mutable.ParMap
/** A template trait for mutable maps.
* $mapNote
diff --git a/src/library/scala/collection/mutable/MapProxy.scala b/src/library/scala/collection/mutable/MapProxy.scala
index c730e2b7c8..e4f106731c 100644
--- a/src/library/scala/collection/mutable/MapProxy.scala
+++ b/src/library/scala/collection/mutable/MapProxy.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/**
diff --git a/src/library/scala/collection/mutable/MultiMap.scala b/src/library/scala/collection/mutable/MultiMap.scala
index 4635bfbe64..78dfc35268 100644
--- a/src/library/scala/collection/mutable/MultiMap.scala
+++ b/src/library/scala/collection/mutable/MultiMap.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala
index 03110569c4..5727b12975 100644
--- a/src/library/scala/collection/mutable/MutableList.scala
+++ b/src/library/scala/collection/mutable/MutableList.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/ObservableBuffer.scala b/src/library/scala/collection/mutable/ObservableBuffer.scala
index 7a2fce9128..7fe794caaf 100644
--- a/src/library/scala/collection/mutable/ObservableBuffer.scala
+++ b/src/library/scala/collection/mutable/ObservableBuffer.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import script._
diff --git a/src/library/scala/collection/mutable/ObservableMap.scala b/src/library/scala/collection/mutable/ObservableMap.scala
index 3544275300..0dec6fa516 100644
--- a/src/library/scala/collection/mutable/ObservableMap.scala
+++ b/src/library/scala/collection/mutable/ObservableMap.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import script._
diff --git a/src/library/scala/collection/mutable/ObservableSet.scala b/src/library/scala/collection/mutable/ObservableSet.scala
index 81580316ff..acb6d92c8c 100644
--- a/src/library/scala/collection/mutable/ObservableSet.scala
+++ b/src/library/scala/collection/mutable/ObservableSet.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import script._
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index 4e8b923155..b5b1c1d006 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/PriorityQueueProxy.scala b/src/library/scala/collection/mutable/PriorityQueueProxy.scala
index ee54370731..4cb49be9b8 100644
--- a/src/library/scala/collection/mutable/PriorityQueueProxy.scala
+++ b/src/library/scala/collection/mutable/PriorityQueueProxy.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package mutable
/** This class servers as a proxy for priority queues. The
diff --git a/src/library/scala/collection/mutable/Publisher.scala b/src/library/scala/collection/mutable/Publisher.scala
index 8c2ef0d3a3..22bbea16ef 100644
--- a/src/library/scala/collection/mutable/Publisher.scala
+++ b/src/library/scala/collection/mutable/Publisher.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala
index f1a5723818..e80aae2171 100644
--- a/src/library/scala/collection/mutable/Queue.scala
+++ b/src/library/scala/collection/mutable/Queue.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/QueueProxy.scala b/src/library/scala/collection/mutable/QueueProxy.scala
index 051b1219cd..bfc5edbeff 100644
--- a/src/library/scala/collection/mutable/QueueProxy.scala
+++ b/src/library/scala/collection/mutable/QueueProxy.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
/** `Queue` objects implement data structures that allow to
diff --git a/src/library/scala/collection/mutable/RevertibleHistory.scala b/src/library/scala/collection/mutable/RevertibleHistory.scala
index 9b8554669b..725a8113ec 100644
--- a/src/library/scala/collection/mutable/RevertibleHistory.scala
+++ b/src/library/scala/collection/mutable/RevertibleHistory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
diff --git a/src/library/scala/collection/mutable/Seq.scala b/src/library/scala/collection/mutable/Seq.scala
index 9d9399ebb4..11fbdd13f3 100644
--- a/src/library/scala/collection/mutable/Seq.scala
+++ b/src/library/scala/collection/mutable/Seq.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/SeqLike.scala b/src/library/scala/collection/mutable/SeqLike.scala
index ddfde536c9..6987066f2b 100644
--- a/src/library/scala/collection/mutable/SeqLike.scala
+++ b/src/library/scala/collection/mutable/SeqLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import parallel.mutable.ParSeq
diff --git a/src/library/scala/collection/mutable/Set.scala b/src/library/scala/collection/mutable/Set.scala
index 023ff63056..4439880976 100644
--- a/src/library/scala/collection/mutable/Set.scala
+++ b/src/library/scala/collection/mutable/Set.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/SetBuilder.scala b/src/library/scala/collection/mutable/SetBuilder.scala
index 40f0b8932c..01bfdc96ed 100644
--- a/src/library/scala/collection/mutable/SetBuilder.scala
+++ b/src/library/scala/collection/mutable/SetBuilder.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/** The canonical builder for mutable Sets.
diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala
index 8dfcde16ce..073bc59f61 100644
--- a/src/library/scala/collection/mutable/SetLike.scala
+++ b/src/library/scala/collection/mutable/SetLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/SetProxy.scala b/src/library/scala/collection/mutable/SetProxy.scala
index c9f297509f..9568a54276 100644
--- a/src/library/scala/collection/mutable/SetProxy.scala
+++ b/src/library/scala/collection/mutable/SetProxy.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/** This is a simple wrapper class for [[scala.collection.mutable.Set]].
diff --git a/src/library/scala/collection/mutable/SortedSet.scala b/src/library/scala/collection/mutable/SortedSet.scala
index 41f2c6e39f..0f2fa75abd 100644
--- a/src/library/scala/collection/mutable/SortedSet.scala
+++ b/src/library/scala/collection/mutable/SortedSet.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala
index 6eef250f9d..34c6d1fbb9 100644
--- a/src/library/scala/collection/mutable/Stack.scala
+++ b/src/library/scala/collection/mutable/Stack.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/StackProxy.scala b/src/library/scala/collection/mutable/StackProxy.scala
index 8792738339..7d776b99c3 100644
--- a/src/library/scala/collection/mutable/StackProxy.scala
+++ b/src/library/scala/collection/mutable/StackProxy.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/** A stack implements a data structure which allows to store and retrieve
diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala
index 4d269a95b1..498e9e461e 100644
--- a/src/library/scala/collection/mutable/StringBuilder.scala
+++ b/src/library/scala/collection/mutable/StringBuilder.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import java.lang.{ StringBuilder => JavaStringBuilder }
@@ -255,8 +256,8 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
* @return This StringBuilder.
*/
def append(x: Boolean): StringBuilder = { underlying append x ; this }
- def append(x: Byte): StringBuilder = { underlying append x ; this }
- def append(x: Short): StringBuilder = { underlying append x ; this }
+ def append(x: Byte): StringBuilder = append(x.toInt)
+ def append(x: Short): StringBuilder = append(x.toInt)
def append(x: Int): StringBuilder = { underlying append x ; this }
def append(x: Long): StringBuilder = { underlying append x ; this }
def append(x: Float): StringBuilder = { underlying append x ; this }
@@ -359,8 +360,8 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
* @return this StringBuilder.
*/
def insert(index: Int, x: Boolean): StringBuilder = insert(index, String.valueOf(x))
- def insert(index: Int, x: Byte): StringBuilder = insert(index, String.valueOf(x))
- def insert(index: Int, x: Short): StringBuilder = insert(index, String.valueOf(x))
+ def insert(index: Int, x: Byte): StringBuilder = insert(index, x.toInt)
+ def insert(index: Int, x: Short): StringBuilder = insert(index, x.toInt)
def insert(index: Int, x: Int): StringBuilder = insert(index, String.valueOf(x))
def insert(index: Int, x: Long): StringBuilder = insert(index, String.valueOf(x))
def insert(index: Int, x: Float): StringBuilder = insert(index, String.valueOf(x))
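
Routing the `Byte` and `Short` overloads through the `Int` versions keeps the observable result the same: the numeric value is appended or inserted, never a character. REPL-style check (my own example):

    val sb = new StringBuilder
    sb.append(65.toByte)            // appends the number, not 'A'
    assert(sb.toString == "65")
    sb.insert(0, 7.toShort)
    assert(sb.toString == "765")
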
diff --git a/src/library/scala/collection/mutable/Subscriber.scala b/src/library/scala/collection/mutable/Subscriber.scala
index 35d31d7316..c2aa9be72d 100644
--- a/src/library/scala/collection/mutable/Subscriber.scala
+++ b/src/library/scala/collection/mutable/Subscriber.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/** `Subscriber[A, B]` objects may subscribe to events of type `A`
diff --git a/src/library/scala/collection/mutable/SynchronizedBuffer.scala b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
index 14ec85b906..b97191137f 100644
--- a/src/library/scala/collection/mutable/SynchronizedBuffer.scala
+++ b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import script._
diff --git a/src/library/scala/collection/mutable/SynchronizedMap.scala b/src/library/scala/collection/mutable/SynchronizedMap.scala
index 5a3562cb22..643701eda9 100644
--- a/src/library/scala/collection/mutable/SynchronizedMap.scala
+++ b/src/library/scala/collection/mutable/SynchronizedMap.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
package mutable
import scala.annotation.migration
diff --git a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
index 52e55677bd..05676597db 100644
--- a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
+++ b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
/** This class implements synchronized priority queues using a binary heap.
diff --git a/src/library/scala/collection/mutable/SynchronizedQueue.scala b/src/library/scala/collection/mutable/SynchronizedQueue.scala
index 57beab39b6..263c06b439 100644
--- a/src/library/scala/collection/mutable/SynchronizedQueue.scala
+++ b/src/library/scala/collection/mutable/SynchronizedQueue.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
diff --git a/src/library/scala/collection/mutable/SynchronizedSet.scala b/src/library/scala/collection/mutable/SynchronizedSet.scala
index 27a696895d..441efea5fb 100644
--- a/src/library/scala/collection/mutable/SynchronizedSet.scala
+++ b/src/library/scala/collection/mutable/SynchronizedSet.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package mutable
import script._
diff --git a/src/library/scala/collection/mutable/SynchronizedStack.scala b/src/library/scala/collection/mutable/SynchronizedStack.scala
index 09cdcca99e..c92a68ffde 100644
--- a/src/library/scala/collection/mutable/SynchronizedStack.scala
+++ b/src/library/scala/collection/mutable/SynchronizedStack.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
diff --git a/src/library/scala/collection/mutable/Traversable.scala b/src/library/scala/collection/mutable/Traversable.scala
index e36ffc847f..d7ea376d28 100644
--- a/src/library/scala/collection/mutable/Traversable.scala
+++ b/src/library/scala/collection/mutable/Traversable.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala
index 147bc85383..ea5b859367 100644
--- a/src/library/scala/collection/mutable/TreeSet.scala
+++ b/src/library/scala/collection/mutable/TreeSet.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/Undoable.scala b/src/library/scala/collection/mutable/Undoable.scala
index 0c0e8fed3e..482d618165 100644
--- a/src/library/scala/collection/mutable/Undoable.scala
+++ b/src/library/scala/collection/mutable/Undoable.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
diff --git a/src/library/scala/collection/mutable/UnrolledBuffer.scala b/src/library/scala/collection/mutable/UnrolledBuffer.scala
index ac634f43aa..292705c02a 100644
--- a/src/library/scala/collection/mutable/UnrolledBuffer.scala
+++ b/src/library/scala/collection/mutable/UnrolledBuffer.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.mutable
+package scala
+package collection.mutable
import scala.collection.AbstractIterator
import scala.collection.Iterator
diff --git a/src/library/scala/collection/mutable/WeakHashMap.scala b/src/library/scala/collection/mutable/WeakHashMap.scala
index 70e428c9b6..433d054bfc 100644
--- a/src/library/scala/collection/mutable/WeakHashMap.scala
+++ b/src/library/scala/collection/mutable/WeakHashMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/WrappedArray.scala b/src/library/scala/collection/mutable/WrappedArray.scala
index b83724090c..53fca9f779 100644
--- a/src/library/scala/collection/mutable/WrappedArray.scala
+++ b/src/library/scala/collection/mutable/WrappedArray.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import scala.reflect.ClassTag
diff --git a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
index 55328a5d3d..bfe95a11ab 100644
--- a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
+++ b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import scala.reflect.ClassTag
diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala
index 00e20e7616..68df572517 100644
--- a/src/library/scala/collection/parallel/Combiner.scala
+++ b/src/library/scala/collection/parallel/Combiner.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
import scala.collection.Parallel
@@ -33,11 +34,11 @@ import scala.collection.generic.Sizing
* @since 2.9
*/
trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
-
+
@transient
@volatile
var _combinerTaskSupport = defaultTaskSupport
-
+
def combinerTaskSupport = {
val cts = _combinerTaskSupport
if (cts eq null) {
@@ -45,9 +46,9 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
defaultTaskSupport
} else cts
}
-
+
def combinerTaskSupport_=(cts: TaskSupport) = _combinerTaskSupport = cts
-
+
/** Combines the contents of the receiver builder and the `other` builder,
* producing a new builder containing both their elements.
*
@@ -81,7 +82,7 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
* By default, this method returns `false`.
*/
def canBeShared: Boolean = false
-
+
/** Constructs the result and sets the appropriate tasksupport object to the resulting collection
* if this is applicable.
*/
@@ -89,7 +90,7 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
val res = result()
setTaskSupport(res, combinerTaskSupport)
}
-
+
}
diff --git a/src/library/scala/collection/parallel/ParIterable.scala b/src/library/scala/collection/parallel/ParIterable.scala
index f170b944eb..2ceeb18eef 100644
--- a/src/library/scala/collection/parallel/ParIterable.scala
+++ b/src/library/scala/collection/parallel/ParIterable.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
import scala.collection.GenIterable
import scala.collection.generic._
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 961556faff..a0b42f248e 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -6,11 +6,8 @@
** |/ **
\* */
-
-package scala.collection.parallel
-
-
-
+package scala
+package collection.parallel
import scala.collection.mutable.Builder
import scala.collection.mutable.ArrayBuffer
diff --git a/src/library/scala/collection/parallel/ParIterableView.scala b/src/library/scala/collection/parallel/ParIterableView.scala
index 7644e1bd12..6dce19db19 100644
--- a/src/library/scala/collection/parallel/ParIterableView.scala
+++ b/src/library/scala/collection/parallel/ParIterableView.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
import scala.collection.{ Parallel, IterableView, GenIterableView, Iterator }
import scala.collection.generic.CanCombineFrom
diff --git a/src/library/scala/collection/parallel/ParIterableViewLike.scala b/src/library/scala/collection/parallel/ParIterableViewLike.scala
index 0567e7b396..1ec0ff9c32 100644
--- a/src/library/scala/collection/parallel/ParIterableViewLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableViewLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
import scala.collection.Parallel
import scala.collection.{ IterableView, IterableViewLike }
diff --git a/src/library/scala/collection/parallel/ParMap.scala b/src/library/scala/collection/parallel/ParMap.scala
index 1f27ae830a..9f92e6c1e8 100644
--- a/src/library/scala/collection/parallel/ParMap.scala
+++ b/src/library/scala/collection/parallel/ParMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
import scala.collection.Map
import scala.collection.GenMap
diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala
index 798ba71b95..ee14324c41 100644
--- a/src/library/scala/collection/parallel/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/ParMapLike.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
diff --git a/src/library/scala/collection/parallel/ParSeq.scala b/src/library/scala/collection/parallel/ParSeq.scala
index dee523ad89..b4a30e5dc2 100644
--- a/src/library/scala/collection/parallel/ParSeq.scala
+++ b/src/library/scala/collection/parallel/ParSeq.scala
@@ -6,10 +6,8 @@
** |/ **
\* */
-
-package scala.collection.parallel
-
-
+package scala
+package collection.parallel
import scala.collection.generic.GenericCompanion
import scala.collection.generic.GenericParCompanion
@@ -53,26 +51,3 @@ object ParSeq extends ParFactory[ParSeq] {
def newCombiner[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T]
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala
index 68bc1bc12c..6693e30fcd 100644
--- a/src/library/scala/collection/parallel/ParSeqLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
import scala.collection.{ Parallel, SeqLike, GenSeqLike, GenSeq, GenIterable, Iterator }
import scala.collection.generic.DefaultSignalling
diff --git a/src/library/scala/collection/parallel/ParSeqView.scala b/src/library/scala/collection/parallel/ParSeqView.scala
index 9acc4b0b73..b80994514b 100644
--- a/src/library/scala/collection/parallel/ParSeqView.scala
+++ b/src/library/scala/collection/parallel/ParSeqView.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
import scala.collection.{ SeqView, Parallel, Iterator }
import scala.collection.generic.CanCombineFrom
diff --git a/src/library/scala/collection/parallel/ParSeqViewLike.scala b/src/library/scala/collection/parallel/ParSeqViewLike.scala
index f3dbe20e67..9d30a052de 100644
--- a/src/library/scala/collection/parallel/ParSeqViewLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqViewLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
import scala.collection.{ Parallel, SeqView, SeqViewLike, GenSeqView, GenSeqViewLike, GenSeq }
import scala.collection.{ GenIterable, GenTraversable, GenTraversableOnce, Iterator }
diff --git a/src/library/scala/collection/parallel/ParSet.scala b/src/library/scala/collection/parallel/ParSet.scala
index bc6d5c6245..ba3d23f0e4 100644
--- a/src/library/scala/collection/parallel/ParSet.scala
+++ b/src/library/scala/collection/parallel/ParSet.scala
@@ -6,17 +6,10 @@
** |/ **
\* */
+package scala
+package collection
+package parallel
-package scala.collection.parallel
-
-
-
-
-
-
-
-import scala.collection.Set
-import scala.collection.GenSet
import scala.collection.generic._
/** A template trait for parallel sets.
@@ -29,12 +22,12 @@ import scala.collection.generic._
* @since 2.9
*/
trait ParSet[T]
-extends GenSet[T]
+ extends GenSet[T]
with GenericParTemplate[T, ParSet]
with ParIterable[T]
with ParSetLike[T, ParSet[T], Set[T]]
-{
-self =>
+{ self =>
+
override def empty: ParSet[T] = mutable.ParHashSet[T]()
//protected[this] override def newCombiner: Combiner[T, ParSet[T]] = ParSet.newCombiner[T]
@@ -44,39 +37,8 @@ self =>
override def stringPrefix = "ParSet"
}
-
-
object ParSet extends ParSetFactory[ParSet] {
def newCombiner[T]: Combiner[T, ParSet[T]] = mutable.ParHashSetCombiner[T]
implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSet[T]] = new GenericCanCombineFrom[T]
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParSetLike.scala b/src/library/scala/collection/parallel/ParSetLike.scala
index 20a5f693ce..a50d2ae430 100644
--- a/src/library/scala/collection/parallel/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/ParSetLike.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
diff --git a/src/library/scala/collection/parallel/PreciseSplitter.scala b/src/library/scala/collection/parallel/PreciseSplitter.scala
index 42563f4dc9..2eb202ce05 100644
--- a/src/library/scala/collection/parallel/PreciseSplitter.scala
+++ b/src/library/scala/collection/parallel/PreciseSplitter.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
import scala.collection.Seq
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index a3a47e2e40..5f2ceac0e0 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -6,10 +6,8 @@
** |/ **
\* */
-
-package scala.collection.parallel
-
-
+package scala
+package collection.parallel
import scala.collection.Parallel
import scala.collection.generic.Signalling
@@ -21,8 +19,6 @@ import scala.collection.Iterator.empty
import scala.collection.GenTraversableOnce
import scala.collection.parallel.immutable.repetition
-
-
private[collection] trait RemainsIterator[+T] extends Iterator[T] {
/** The number of elements this iterator has yet to iterate.
* This method doesn't change the state of the iterator.
@@ -35,7 +31,6 @@ private[collection] trait RemainsIterator[+T] extends Iterator[T] {
def isRemainingCheap = true
}
-
/** Augments iterators with additional methods, mostly transformers,
* assuming they iterate an iterable collection.
*
@@ -532,10 +527,8 @@ self =>
}
def zipAllParSeq[S, U >: T, R >: S](that: SeqSplitter[S], thisElem: U, thatElem: R) = new ZippedAll[U, R](that, thisElem, thatElem)
-
}
-
/** Parallel sequence iterators allow splitting into arbitrary subsets.
*
* @tparam T type of the elements iterated.
@@ -676,37 +669,3 @@ self =>
def patchParSeq[U >: T](from: Int, patchElems: SeqSplitter[U], replaced: Int) = new Patched(from, patchElems, replaced)
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/Splitter.scala b/src/library/scala/collection/parallel/Splitter.scala
index 458742df96..8329f15d88 100644
--- a/src/library/scala/collection/parallel/Splitter.scala
+++ b/src/library/scala/collection/parallel/Splitter.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
import scala.collection.{ Seq, Iterator }
diff --git a/src/library/scala/collection/parallel/TaskSupport.scala b/src/library/scala/collection/parallel/TaskSupport.scala
index 1ab41bd296..84bb5e425b 100644
--- a/src/library/scala/collection/parallel/TaskSupport.scala
+++ b/src/library/scala/collection/parallel/TaskSupport.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index 487b3d46d3..4aa11b25da 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
@@ -71,11 +72,13 @@ trait Task[R, +Tp] {
}
private[parallel] def mergeThrowables(that: Task[_, _]) {
- if (this.throwable != null && that.throwable != null) {
- // merge exceptions, since there were multiple exceptions
- this.throwable = this.throwable alongWith that.throwable
- } else if (that.throwable != null) this.throwable = that.throwable
- else this.throwable = this.throwable
+ // TODO: As soon as we target Java >= 7, use Throwable#addSuppressed
+ // to pass additional Throwables to the caller, e. g.
+ // if (this.throwable != null && that.throwable != null)
+ // this.throwable.addSuppressed(that.throwable)
+ // For now, we just use whatever Throwable comes across “first”.
+ if (this.throwable == null && that.throwable != null)
+ this.throwable = that.throwable
}
// override in concrete task implementations to signal abort to other tasks
diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
index f3be47ea03..854d0ba918 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel.immutable
+package scala
+package collection.parallel.immutable
diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
index 4f34993b85..65a632470e 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel.immutable
+package scala
+package collection.parallel.immutable
diff --git a/src/library/scala/collection/parallel/immutable/ParIterable.scala b/src/library/scala/collection/parallel/immutable/ParIterable.scala
index ec07e44c4d..417622facc 100644
--- a/src/library/scala/collection/parallel/immutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/immutable/ParIterable.scala
@@ -6,13 +6,11 @@
** |/ **
\* */
-
-package scala.collection
+package scala
+package collection
package parallel.immutable
-
import scala.collection.generic._
-
import scala.collection.parallel.ParIterableLike
import scala.collection.parallel.Combiner
@@ -28,22 +26,18 @@ import scala.collection.parallel.Combiner
* @since 2.9
*/
trait ParIterable[+T]
-extends scala.collection/*.immutable*/.GenIterable[T]
+extends scala.collection.GenIterable[T]
with scala.collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
with ParIterableLike[T, ParIterable[T], scala.collection.immutable.Iterable[T]]
with Immutable
{
override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
-
// if `immutable.ParIterableLike` is introduced, please move these 4 methods there
override def toIterable: ParIterable[T] = this
-
override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T])
-
}
-
/** $factoryInfo
*/
object ParIterable extends ParFactory[ParIterable] {
@@ -51,21 +45,5 @@ object ParIterable extends ParFactory[ParIterable] {
new GenericCanCombineFrom[T]
def newBuilder[T]: Combiner[T, ParIterable[T]] = ParVector.newBuilder[T]
-
def newCombiner[T]: Combiner[T, ParIterable[T]] = ParVector.newCombiner[T]
-
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/immutable/ParMap.scala b/src/library/scala/collection/parallel/immutable/ParMap.scala
index e904a7616b..4d99006d0f 100644
--- a/src/library/scala/collection/parallel/immutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package parallel.immutable
import scala.collection.generic.ParMapFactory
diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala
index 78bbad5933..ec90de3a7d 100644
--- a/src/library/scala/collection/parallel/immutable/ParRange.scala
+++ b/src/library/scala/collection/parallel/immutable/ParRange.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel.immutable
+package scala
+package collection.parallel.immutable
import scala.collection.immutable.Range
import scala.collection.parallel.Combiner
@@ -106,7 +107,7 @@ self =>
cb
}
}
-
+
}
object ParRange {
diff --git a/src/library/scala/collection/parallel/immutable/ParSeq.scala b/src/library/scala/collection/parallel/immutable/ParSeq.scala
index b54a5f0205..6e98b3102d 100644
--- a/src/library/scala/collection/parallel/immutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSeq.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package parallel.immutable
diff --git a/src/library/scala/collection/parallel/immutable/ParSet.scala b/src/library/scala/collection/parallel/immutable/ParSet.scala
index aba8486ab5..7837d6f264 100644
--- a/src/library/scala/collection/parallel/immutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSet.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package parallel.immutable
import scala.collection.generic._
diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala
index 1ee7f4ae69..548e7112c7 100644
--- a/src/library/scala/collection/parallel/immutable/ParVector.scala
+++ b/src/library/scala/collection/parallel/immutable/ParVector.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package parallel.immutable
diff --git a/src/library/scala/collection/parallel/immutable/package.scala b/src/library/scala/collection/parallel/immutable/package.scala
index 5ca0724ffc..8fd84eaf4d 100644
--- a/src/library/scala/collection/parallel/immutable/package.scala
+++ b/src/library/scala/collection/parallel/immutable/package.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
package immutable {
/** A (parallel) sequence consisting of `length` elements `elem`. Used in the `padTo` method.
diff --git a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
index 12b2bc5008..cc25b5b4b2 100644
--- a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel.mutable
+package scala
+package collection.parallel.mutable
import scala.collection.generic.Growable
import scala.collection.generic.Sizing
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index f9563cacc7..d0d022db4b 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -29,8 +29,6 @@ import scala.collection.mutable.Builder
import scala.collection.GenTraversableOnce
import scala.reflect.ClassTag
-
-
/** Parallel sequence holding elements in a linear array.
*
* `ParArray` is a parallel sequence with a predefined size. The size of the array
@@ -702,7 +700,7 @@ object ParArray extends ParFactory[ParArray] {
private def wrapOrRebuild[T](arr: AnyRef, sz: Int) = arr match {
case arr: Array[AnyRef] => new ParArray[T](new ExposedArraySeq[T](arr, sz))
- case _ => new ParArray[T](new ExposedArraySeq[T](runtime.ScalaRunTime.toObjectArray(arr), sz))
+ case _ => new ParArray[T](new ExposedArraySeq[T](scala.runtime.ScalaRunTime.toObjectArray(arr), sz))
}
def createFromCopy[T <: AnyRef : ClassTag](arr: Array[T]): ParArray[T] = {
@@ -720,27 +718,3 @@ object ParArray extends ParFactory[ParArray] {
}
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
index aa790dd548..afc2d6e987 100644
--- a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package parallel.mutable
import scala.collection.parallel.IterableSplitter
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index e94db89865..42a3302c91 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
package mutable
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 0287171369..1e3d57e0e5 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel.mutable
+package scala
+package collection.parallel.mutable
diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
index 5aa1dba17c..a6fada3d42 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package parallel.mutable
diff --git a/src/library/scala/collection/parallel/mutable/ParIterable.scala b/src/library/scala/collection/parallel/mutable/ParIterable.scala
index d76e4b1745..4659149106 100644
--- a/src/library/scala/collection/parallel/mutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParIterable.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-package scala.collection.parallel.mutable
-
+package scala
+package collection
+package parallel.mutable
import scala.collection.generic._
-import scala.collection.parallel.ParIterableLike
-import scala.collection.parallel.Combiner
+import scala.collection.parallel.{ ParIterableLike, Combiner }
/** A template trait for mutable parallel iterable collections.
*
@@ -24,7 +24,7 @@ import scala.collection.parallel.Combiner
* @author Aleksandar Prokopec
* @since 2.9
*/
-trait ParIterable[T] extends scala.collection/*.mutable*/.GenIterable[T]
+trait ParIterable[T] extends scala.collection.GenIterable[T]
with scala.collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
with ParIterableLike[T, ParIterable[T], Iterable[T]]
@@ -43,24 +43,8 @@ trait ParIterable[T] extends scala.collection/*.mutable*/.GenIterable[T]
/** $factoryInfo
*/
object ParIterable extends ParFactory[ParIterable] {
- implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterable[T]] =
- new GenericCanCombineFrom[T]
+ implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterable[T]] = new GenericCanCombineFrom[T]
def newBuilder[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T]
-
def newCombiner[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T]
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/ParMap.scala b/src/library/scala/collection/parallel/mutable/ParMap.scala
index 2250a38466..e43e72e559 100644
--- a/src/library/scala/collection/parallel/mutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMap.scala
@@ -6,17 +6,13 @@
** |/ **
\* */
-
-package scala.collection.parallel.mutable
-
-
-
+package scala
+package collection
+package parallel.mutable
import scala.collection.generic._
import scala.collection.parallel.Combiner
-
-
/** A template trait for mutable parallel maps.
*
* $sideeffects
@@ -28,11 +24,11 @@ import scala.collection.parallel.Combiner
* @since 2.9
*/
trait ParMap[K, V]
-extends scala.collection/*.mutable*/.GenMap[K, V]
- with scala.collection.parallel.ParMap[K, V]
- with /* mutable */ ParIterable[(K, V)]
+extends GenMap[K, V]
+ with parallel.ParMap[K, V]
+ with ParIterable[(K, V)]
with GenericParMapTemplate[K, V, ParMap]
- with /* mutable */ ParMapLike[K, V, ParMap[K, V], scala.collection.mutable.Map[K, V]]
+ with ParMapLike[K, V, ParMap[K, V], mutable.Map[K, V]]
{
protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V]
@@ -63,11 +59,8 @@ extends scala.collection/*.mutable*/.GenMap[K, V]
* @return a wrapper of the map with a default value
*/
def withDefaultValue(d: V): scala.collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, x => d)
-
}
-
-
object ParMap extends ParMapFactory[ParMap] {
def empty[K, V]: ParMap[K, V] = new ParHashMap[K, V]
@@ -94,22 +87,3 @@ object ParMap extends ParMapFactory[ParMap] {
override def withDefaultValue(d: V): ParMap[K, V] = new WithDefault[K, V](underlying, x => d)
}
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/ParMapLike.scala b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
index 08bc706c8a..d96b5482fe 100644
--- a/src/library/scala/collection/parallel/mutable/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
package mutable
diff --git a/src/library/scala/collection/parallel/mutable/ParSeq.scala b/src/library/scala/collection/parallel/mutable/ParSeq.scala
index 8a55ab83f1..35be2669f8 100644
--- a/src/library/scala/collection/parallel/mutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSeq.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-
-package scala.collection.parallel.mutable
-
+package scala
+package collection.parallel.mutable
import scala.collection.generic.GenericParTemplate
import scala.collection.generic.GenericCompanion
@@ -51,18 +50,3 @@ object ParSeq extends ParFactory[ParSeq] {
def newCombiner[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T]
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/ParSet.scala b/src/library/scala/collection/parallel/mutable/ParSet.scala
index ca41852512..9367f1424d 100644
--- a/src/library/scala/collection/parallel/mutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSet.scala
@@ -6,10 +6,8 @@
** |/ **
\* */
-
-package scala.collection.parallel.mutable
-
-
+package scala
+package collection.parallel.mutable
import scala.collection.generic._
import scala.collection.parallel.Combiner
@@ -46,4 +44,3 @@ object ParSet extends ParSetFactory[ParSet] {
override def newCombiner[T]: Combiner[T, ParSet[T]] = ParHashSet.newCombiner
}
-
diff --git a/src/library/scala/collection/parallel/mutable/ParSetLike.scala b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
index 0941229124..1cfc14b094 100644
--- a/src/library/scala/collection/parallel/mutable/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package parallel.mutable
import scala.collection.mutable.Cloneable
diff --git a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
index 60f4709a8c..82f2717132 100644
--- a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel.mutable
+package scala
+package collection.parallel.mutable
diff --git a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
index 0b9b51bc5b..79322c85b1 100644
--- a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel.mutable
+package scala
+package collection.parallel.mutable
diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
index 7766f07e23..6b6ea03a6d 100644
--- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel.mutable
+package scala
+package collection.parallel.mutable
import scala.collection.generic.Sizing
import scala.collection.mutable.ArraySeq
diff --git a/src/library/scala/collection/parallel/mutable/package.scala b/src/library/scala/collection/parallel/mutable/package.scala
index 2494d0907e..81121d9398 100644
--- a/src/library/scala/collection/parallel/mutable/package.scala
+++ b/src/library/scala/collection/parallel/mutable/package.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.ArraySeq
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index 66fbad7643..b25553d2c8 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -114,7 +114,9 @@ package parallel {
def toParArray: ParArray[T]
}
+ @deprecated("This trait will be removed.", "2.11.0")
trait ThrowableOps {
+ @deprecated("This method will be removed.", "2.11.0")
def alongWith(that: Throwable): Throwable
}
@@ -133,9 +135,8 @@ package parallel {
}
/** Composite throwable - thrown when multiple exceptions are thrown at the same time. */
- final case class CompositeThrowable(
- throwables: Set[Throwable]
- ) extends Exception(
+ @deprecated("This class will be removed.", "2.11.0")
+ final case class CompositeThrowable(throwables: Set[Throwable]) extends Exception(
"Multiple exceptions thrown during a parallel computation: " +
throwables.map(t => t + "\n" + t.getStackTrace.take(10).++("...").mkString("\n")).mkString("\n\n")
)
diff --git a/src/library/scala/collection/script/Location.scala b/src/library/scala/collection/script/Location.scala
index cd64fa2d73..e485737972 100644
--- a/src/library/scala/collection/script/Location.scala
+++ b/src/library/scala/collection/script/Location.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package script
/** Class `Location` describes locations in messages implemented by
diff --git a/src/library/scala/collection/script/Message.scala b/src/library/scala/collection/script/Message.scala
index 7428cd2b81..dc3e74e170 100644
--- a/src/library/scala/collection/script/Message.scala
+++ b/src/library/scala/collection/script/Message.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package script
import mutable.ArrayBuffer
diff --git a/src/library/scala/collection/script/Scriptable.scala b/src/library/scala/collection/script/Scriptable.scala
index ceaf19a464..110a0f4d82 100644
--- a/src/library/scala/collection/script/Scriptable.scala
+++ b/src/library/scala/collection/script/Scriptable.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package script
/** Classes that mix in the `Scriptable` class allow messages to be sent to
diff --git a/src/library/scala/compat/Platform.scala b/src/library/scala/compat/Platform.scala
index 88cb1506ae..875d811b9b 100644
--- a/src/library/scala/compat/Platform.scala
+++ b/src/library/scala/compat/Platform.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-
-
-package scala.compat
+package scala
+package compat
import java.lang.System
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index 8928724b34..68513f9c80 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -25,7 +25,7 @@ trait ExecutionContext {
/** Reports that an asynchronous computation failed.
*/
- def reportFailure(t: Throwable): Unit
+ def reportFailure(@deprecatedName('t) cause: Throwable): Unit
/** Prepares for the execution of a task. Returns the prepared
* execution context. A valid implementation of `prepare` is one
@@ -83,7 +83,7 @@ object ExecutionContext {
/** The default reporter simply prints the stack trace of the `Throwable` to System.err.
*/
- def defaultReporter: Throwable => Unit = (t: Throwable) => t.printStackTrace()
+ def defaultReporter: Throwable => Unit = _.printStackTrace()
}
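The @deprecatedName annotations added above let a parameter be renamed without breaking callers that pass it by its old name. A minimal sketch of the pattern (the object and method below are hypothetical, not from this diff):

object Example {
  // `count` used to be called `n`; callers writing `n = ...` still compile,
  // but the compiler emits a deprecation warning for the old name.
  def times(@deprecatedName('n) count: Int): Seq[Int] = Seq.fill(count)(0)

  def demo(): Seq[Int] = times(n = 3) // still compiles, with a deprecation warning
}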
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index 95b393dd0e..2e4c72cd71 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -71,7 +71,7 @@ import scala.reflect.ClassTag
* val g = future { 3 }
* val h = for {
* x: Int <- f // returns Future(5)
- * y: Int <- g // returns Future(5)
+ * y: Int <- g // returns Future(3)
* } yield x + y
* }}}
*
@@ -133,8 +133,8 @@ trait Future[+T] extends Awaitable[T] {
* $multipleCallbacks
* $callbackInContext
*/
- def onFailure[U](callback: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = onComplete {
- case Failure(t) if NonFatal(t) && callback.isDefinedAt(t) => callback(t)
+ def onFailure[U](@deprecatedName('callback) pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = onComplete {
+ case Failure(t) if NonFatal(t) && pf.isDefinedAt(t) => pf(t)
case _ =>
}(executor)
@@ -147,7 +147,7 @@ trait Future[+T] extends Awaitable[T] {
* $multipleCallbacks
* $callbackInContext
*/
- def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit
+ def onComplete[U](@deprecatedName('func) f: Try[T] => U)(implicit executor: ExecutionContext): Unit
/* Miscellaneous */
@@ -303,21 +303,21 @@ trait Future[+T] extends Awaitable[T] {
* Await.result(h, Duration.Zero) // throw a NoSuchElementException
* }}}
*/
- def filter(pred: T => Boolean)(implicit executor: ExecutionContext): Future[T] = {
- val p = Promise[T]()
+ def filter(@deprecatedName('pred) p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = {
+ val promise = Promise[T]()
onComplete {
- case f: Failure[_] => p complete f.asInstanceOf[Failure[T]]
+ case f: Failure[_] => promise complete f.asInstanceOf[Failure[T]]
case Success(v) =>
try {
- if (pred(v)) p success v
- else p failure new NoSuchElementException("Future.filter predicate is not satisfied")
+ if (p(v)) promise success v
+ else promise failure new NoSuchElementException("Future.filter predicate is not satisfied")
} catch {
- case NonFatal(t) => p failure t
+ case NonFatal(t) => promise failure t
}
}(executor)
- p.future
+ promise.future
}
/** Used by for-comprehensions.
@@ -565,16 +565,16 @@ object Future {
*
* @tparam T the type of the result
* @param body the asychronous computation
- * @param execctx the execution context on which the future is run
+ * @param executor the execution context on which the future is run
* @return the `Future` holding the result of the computation
*/
- def apply[T](body: =>T)(implicit execctx: ExecutionContext): Future[T] = impl.Future(body)
+ def apply[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] = impl.Future(body)
/** Simple version of `Futures.traverse`. Transforms a `TraversableOnce[Future[A]]` into a `Future[TraversableOnce[A]]`.
* Useful for reducing many `Future`s into a single `Future`.
*/
def sequence[A, M[_] <: TraversableOnce[_]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] = {
- in.foldLeft(Promise.successful(cbf(in)).future) {
+ in.foldLeft(successful(cbf(in))) {
(fr, fa) => for (r <- fr; a <- fa.asInstanceOf[Future[A]]) yield (r += a)
} map (_.result())
}
@@ -592,15 +592,15 @@ object Future {
/** Returns a `Future` that will hold the optional result of the first `Future` with a result that matches the predicate.
*/
- def find[T](futurestravonce: TraversableOnce[Future[T]])(predicate: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = {
- val futures = futurestravonce.toBuffer
- if (futures.isEmpty) Promise.successful[Option[T]](None).future
+ def find[T](@deprecatedName('futurestravonce) futures: TraversableOnce[Future[T]])(@deprecatedName('predicate) p: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = {
+ val futuresBuffer = futures.toBuffer
+ if (futuresBuffer.isEmpty) successful[Option[T]](None)
else {
val result = Promise[Option[T]]()
- val ref = new AtomicInteger(futures.size)
+ val ref = new AtomicInteger(futuresBuffer.size)
val search: Try[T] => Unit = v => try {
v match {
- case Success(r) => if (predicate(r)) result tryComplete Success(Some(r))
+ case Success(r) if p(r) => result tryComplete Success(Some(r))
case _ =>
}
} finally {
@@ -609,7 +609,7 @@ object Future {
}
}
- futures.foreach(_ onComplete search)
+ futuresBuffer.foreach(_ onComplete search)
result.future
}
@@ -625,9 +625,9 @@ object Future {
* val result = Await.result(Future.fold(futures)(0)(_ + _), 5 seconds)
* }}}
*/
- def fold[T, R](futures: TraversableOnce[Future[T]])(zero: R)(foldFun: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
- if (futures.isEmpty) Promise.successful(zero).future
- else sequence(futures).map(_.foldLeft(zero)(foldFun))
+ def fold[T, R](futures: TraversableOnce[Future[T]])(zero: R)(@deprecatedName('foldFun) op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
+ if (futures.isEmpty) successful(zero)
+ else sequence(futures).map(_.foldLeft(zero)(op))
}
/** Initiates a fold over the supplied futures where the fold-zero is the result value of the `Future` that's completed first.
@@ -638,7 +638,7 @@ object Future {
* }}}
*/
def reduce[T, R >: T](futures: TraversableOnce[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
- if (futures.isEmpty) Promise[R]().failure(new NoSuchElementException("reduce attempted on empty collection")).future
+ if (futures.isEmpty) failed(new NoSuchElementException("reduce attempted on empty collection"))
else sequence(futures).map(_ reduceLeft op)
}
@@ -651,7 +651,7 @@ object Future {
* }}}
*/
def traverse[A, B, M[_] <: TraversableOnce[_]](in: M[A])(fn: A => Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]], executor: ExecutionContext): Future[M[B]] =
- in.foldLeft(Promise.successful(cbf(in)).future) { (fr, a) =>
+ in.foldLeft(successful(cbf(in))) { (fr, a) =>
val fb = fn(a.asInstanceOf[A])
for (r <- fr; b <- fb) yield (r += b)
}.map(_.result())
diff --git a/src/library/scala/concurrent/FutureTaskRunner.scala b/src/library/scala/concurrent/FutureTaskRunner.scala
index 9e27ce65b9..089e67cedd 100644
--- a/src/library/scala/concurrent/FutureTaskRunner.scala
+++ b/src/library/scala/concurrent/FutureTaskRunner.scala
@@ -16,7 +16,7 @@ import scala.language.{implicitConversions, higherKinds}
* @author Philipp Haller
*/
@deprecated("Use `ExecutionContext` instead.", "2.10.0")
-trait FutureTaskRunner extends TaskRunner {
+private[scala] trait FutureTaskRunner extends TaskRunner {
/** The type of the futures that the underlying task runner supports.
*/
diff --git a/src/library/scala/concurrent/ManagedBlocker.scala b/src/library/scala/concurrent/ManagedBlocker.scala
index 7b2966c663..b5a6e21893 100644
--- a/src/library/scala/concurrent/ManagedBlocker.scala
+++ b/src/library/scala/concurrent/ManagedBlocker.scala
@@ -13,7 +13,7 @@ package scala.concurrent
* @author Philipp Haller
*/
@deprecated("Use `blocking` instead.", "2.10.0")
-trait ManagedBlocker {
+private[scala] trait ManagedBlocker {
/**
* Possibly blocks the current thread, for example waiting for
diff --git a/src/library/scala/concurrent/Promise.scala b/src/library/scala/concurrent/Promise.scala
index 8355a73a1f..f950b13b78 100644
--- a/src/library/scala/concurrent/Promise.scala
+++ b/src/library/scala/concurrent/Promise.scala
@@ -86,7 +86,7 @@ trait Promise[T] {
*
* $promiseCompletion
*/
- def success(v: T): this.type = complete(Success(v))
+ def success(@deprecatedName('v) value: T): this.type = complete(Success(value))
/** Tries to complete the promise with a value.
*
@@ -104,7 +104,7 @@ trait Promise[T] {
*
* $promiseCompletion
*/
- def failure(t: Throwable): this.type = complete(Failure(t))
+ def failure(@deprecatedName('t) cause: Throwable): this.type = complete(Failure(cause))
/** Tries to complete the promise with an exception.
*
@@ -112,7 +112,7 @@ trait Promise[T] {
*
* @return If the promise has already been completed returns `false`, or `true` otherwise.
*/
- def tryFailure(t: Throwable): Boolean = tryComplete(Failure(t))
+ def tryFailure(@deprecatedName('t) cause: Throwable): Boolean = tryComplete(Failure(cause))
}
diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala
index 6d25ffe19e..76d21c3dbf 100644
--- a/src/library/scala/concurrent/SyncVar.scala
+++ b/src/library/scala/concurrent/SyncVar.scala
@@ -79,6 +79,7 @@ class SyncVar[A] {
// whether or not the SyncVar is already defined. So, set has been
// deprecated in order to eventually be able to make "setting" private
@deprecated("Use `put` instead, as `set` is potentionally error-prone", "2.10.0")
+ // NOTE: Used by SBT 0.13.0-M2 and below
def set(x: A): Unit = setVal(x)
/** Places a value in the SyncVar. If the SyncVar already has a stored value,
@@ -98,6 +99,7 @@ class SyncVar[A] {
// whether or not the SyncVar is already defined. So, unset has been
// deprecated in order to eventually be able to make "unsetting" private
@deprecated("Use `take` instead, as `unset` is potentionally error-prone", "2.10.0")
+ // NOTE: Used by SBT 0.13.0-M2 and below
def unset(): Unit = synchronized {
isDefined = false
value = None
diff --git a/src/library/scala/concurrent/TaskRunner.scala b/src/library/scala/concurrent/TaskRunner.scala
index a939a3f070..98c212d9fa 100644
--- a/src/library/scala/concurrent/TaskRunner.scala
+++ b/src/library/scala/concurrent/TaskRunner.scala
@@ -15,7 +15,7 @@ import scala.language.{higherKinds, implicitConversions}
* @author Philipp Haller
*/
@deprecated("Use `ExecutionContext` instead.", "2.10.0")
-trait TaskRunner {
+private[scala] trait TaskRunner {
type Task[T]
diff --git a/src/library/scala/concurrent/ThreadPoolRunner.scala b/src/library/scala/concurrent/ThreadPoolRunner.scala
index afa14ed2fa..7784681f71 100644
--- a/src/library/scala/concurrent/ThreadPoolRunner.scala
+++ b/src/library/scala/concurrent/ThreadPoolRunner.scala
@@ -17,7 +17,7 @@ import scala.language.implicitConversions
* @author Philipp Haller
*/
@deprecated("Use `ExecutionContext` instead.", "2.10.0")
-trait ThreadPoolRunner extends FutureTaskRunner {
+private[scala] trait ThreadPoolRunner extends FutureTaskRunner {
type Task[T] = Callable[T] with Runnable
type Future[T] = java.util.concurrent.Future[T]
diff --git a/src/library/scala/concurrent/duration/Duration.scala b/src/library/scala/concurrent/duration/Duration.scala
index 6c6155279d..9a8844b489 100644
--- a/src/library/scala/concurrent/duration/Duration.scala
+++ b/src/library/scala/concurrent/duration/Duration.scala
@@ -10,6 +10,7 @@ package scala.concurrent.duration
import java.lang.{ Double => JDouble, Long => JLong }
import scala.language.implicitConversions
+import scala.language.postfixOps
object Duration {
@@ -220,6 +221,8 @@ object Duration {
final def toMinutes: Long = fail("toMinutes")
final def toHours: Long = fail("toHours")
final def toDays: Long = fail("toDays")
+
+ final def toCoarsest: Duration = this
}
/**
@@ -519,6 +522,18 @@ sealed abstract class Duration extends Serializable with Ordered[Duration] {
* $ovf
*/
def plus(other: Duration) = this + other
+ /**
+ * Return duration which is equal to this duration but with a coarsest Unit, or self in case it is already the coarsest Unit
+ * <p/>
+ * Examples:
+ * {{{
+ * Duration(60, MINUTES).toCoarsest // Duration(1, HOURS)
+ * Duration(1000, MILLISECONDS).toCoarsest // Duration(1, SECONDS)
+ * Duration(48, HOURS).toCoarsest // Duration(2, DAYS)
+ * Duration(5, SECONDS).toCoarsest // Duration(5, SECONDS)
+ * }}}
+ */
+ def toCoarsest: Duration
}
object FiniteDuration {
@@ -663,8 +678,8 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio
* Long.MinValue is not a legal `length` anyway.
*/
private def safeMul(_a: Long, _b: Long): Long = {
- val a = math.abs(_a)
- val b = math.abs(_b)
+ val a = scala.math.abs(_a)
+ val b = scala.math.abs(_b)
import java.lang.Long.{ numberOfLeadingZeros => leading }
if (leading(a) + leading(b) < 64) throw new IllegalArgumentException("multiplication overflow")
val product = a * b
@@ -690,6 +705,28 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio
final def isFinite() = true
+ final def toCoarsest: Duration = {
+ def loop(length: Long, unit: TimeUnit): FiniteDuration = {
+ def coarserOrThis(coarser: TimeUnit, divider: Int) =
+ if (length % divider == 0) loop(length / divider, coarser)
+ else if (unit == this.unit) this
+ else FiniteDuration(length, unit)
+
+ unit match {
+ case DAYS => FiniteDuration(length, unit)
+ case HOURS => coarserOrThis(DAYS, 24)
+ case MINUTES => coarserOrThis(HOURS, 60)
+ case SECONDS => coarserOrThis(MINUTES, 60)
+ case MILLISECONDS => coarserOrThis(SECONDS, 1000)
+ case MICROSECONDS => coarserOrThis(MILLISECONDS, 1000)
+ case NANOSECONDS => coarserOrThis(MICROSECONDS, 1000)
+ }
+ }
+
+ if (unit == DAYS || length == 0) this
+ else loop(length, unit)
+ }
+
override def equals(other: Any) = other match {
case x: FiniteDuration => toNanos == x.toNanos
case _ => super.equals(other)
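A small usage sketch of the toCoarsest method added above (the values mirror the Scaladoc examples; expected outputs are noted in comments):

import scala.concurrent.duration._

object CoarsestDemo extends App {
  // The length is repeatedly folded into the next coarser unit while it divides evenly.
  println(Duration(120, SECONDS).toCoarsest)  // 2 minutes
  println(Duration(48, HOURS).toCoarsest)     // 2 days
  println(Duration(121, SECONDS).toCoarsest)  // 121 seconds (no even division, unchanged)
}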
diff --git a/src/library/scala/concurrent/duration/package.scala b/src/library/scala/concurrent/duration/package.scala
index 2fd735f19e..b32d2b20cb 100644
--- a/src/library/scala/concurrent/duration/package.scala
+++ b/src/library/scala/concurrent/duration/package.scala
@@ -36,12 +36,12 @@ package object duration {
final val NANOSECONDS = java.util.concurrent.TimeUnit.NANOSECONDS
final val SECONDS = java.util.concurrent.TimeUnit.SECONDS
- implicit def pairIntToDuration(p: (Int, TimeUnit)): Duration = Duration(p._1, p._2)
+ implicit def pairIntToDuration(p: (Int, TimeUnit)): Duration = Duration(p._1.toLong, p._2)
implicit def pairLongToDuration(p: (Long, TimeUnit)): FiniteDuration = Duration(p._1, p._2)
implicit def durationToPair(d: Duration): (Long, TimeUnit) = (d.length, d.unit)
implicit final class DurationInt(val n: Int) extends AnyVal with DurationConversions {
- override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n, unit)
+ override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n.toLong, unit)
}
implicit final class DurationLong(val n: Long) extends AnyVal with DurationConversions {
@@ -60,16 +60,16 @@ package object duration {
* Avoid reflection based invocation by using non-duck type
*/
implicit final class IntMult(val i: Int) extends AnyVal {
- def *(d: Duration) = d * i
- def *(d: FiniteDuration) = d * i
+ def *(d: Duration) = d * i.toDouble
+ def *(d: FiniteDuration) = d * i.toLong
}
implicit final class LongMult(val i: Long) extends AnyVal {
- def *(d: Duration) = d * i
- def *(d: FiniteDuration) = d * i
+ def *(d: Duration) = d * i.toDouble
+ def *(d: FiniteDuration) = d * i.toLong
}
implicit final class DoubleMult(val f: Double) extends AnyVal {
- def *(d: Duration) = d * f
+ def *(d: Duration) = d * f.toDouble
}
}
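The `.toLong`/`.toDouble` calls added above make the numeric widening explicit inside the enrichment classes rather than relying on implicit Int-to-Long/Double conversion at the multiplication site (that reading of the motivation is mine, not stated in the diff). Call sites are unchanged, for example:

import scala.concurrent.duration._

object MultDemo extends App {
  val finite: FiniteDuration = 5 * 1.second             // IntMult picks the FiniteDuration overload
  val general: Duration      = 5 * (1.second: Duration)  // IntMult picks the Duration (Double) overload
  println((finite, general))
}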
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
index e4a0f464f9..479720287c 100644
--- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -30,7 +30,7 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
}
// Implement BlockContext on FJP threads
- class DefaultThreadFactory(daemonic: Boolean) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory {
+ class DefaultThreadFactory(daemonic: Boolean) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory {
def wire[T <: Thread](thread: T): T = {
thread.setDaemon(daemonic)
thread.setUncaughtExceptionHandler(uncaughtExceptionHandler)
@@ -57,22 +57,22 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
def createExecutorService: ExecutorService = {
- def getInt(name: String, f: String => Int): Int =
- try f(System.getProperty(name)) catch { case e: Exception => Runtime.getRuntime.availableProcessors }
- def range(floor: Int, desired: Int, ceiling: Int): Int =
- if (ceiling < floor) range(ceiling, desired, floor) else scala.math.min(scala.math.max(desired, floor), ceiling)
+ def getInt(name: String, default: String) = (try System.getProperty(name, default) catch {
+ case e: SecurityException => default
+ }) match {
+ case s if s.charAt(0) == 'x' => (Runtime.getRuntime.availableProcessors * s.substring(1).toDouble).ceil.toInt
+ case other => other.toInt
+ }
+
+ def range(floor: Int, desired: Int, ceiling: Int) = scala.math.min(scala.math.max(floor, desired), ceiling)
val desiredParallelism = range(
- getInt("scala.concurrent.context.minThreads", _.toInt),
- getInt("scala.concurrent.context.numThreads", {
- case null | "" => Runtime.getRuntime.availableProcessors
- case s if s.charAt(0) == 'x' => (Runtime.getRuntime.availableProcessors * s.substring(1).toDouble).ceil.toInt
- case other => other.toInt
- }),
- getInt("scala.concurrent.context.maxThreads", _.toInt))
+ getInt("scala.concurrent.context.minThreads", "1"),
+ getInt("scala.concurrent.context.numThreads", "x1"),
+ getInt("scala.concurrent.context.maxThreads", "x1"))
val threadFactory = new DefaultThreadFactory(daemonic = true)
-
+
try {
new ForkJoinPool(
desiredParallelism,
@@ -98,13 +98,13 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
def execute(runnable: Runnable): Unit = executor match {
case fj: ForkJoinPool =>
- val fjt = runnable match {
+ val fjt: ForkJoinTask[_] = runnable match {
case t: ForkJoinTask[_] => t
- case r => new ExecutionContextImpl.AdaptedForkJoinTask(r)
+ case r => new ExecutionContextImpl.AdaptedForkJoinTask(r)
}
Thread.currentThread match {
case fjw: ForkJoinWorkerThread if fjw.getPool eq fj => fjt.fork()
- case _ => fj execute fjt
+ case _ => fj execute fjt
}
case generic => generic execute runnable
}
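The rewritten property parsing above accepts either a literal thread count or an "x<factor>" multiplier of the available processors. A small standalone sketch of the same resolution logic (re-implemented here for illustration; the default strings match the diff):

object PoolSizeDemo extends App {
  // "x<factor>" scales Runtime.availableProcessors; anything else is taken as a literal count.
  def parse(value: String): Int =
    if (value.charAt(0) == 'x') (Runtime.getRuntime.availableProcessors * value.substring(1).toDouble).ceil.toInt
    else value.toInt

  def clamp(floor: Int, desired: Int, ceiling: Int): Int = math.min(math.max(floor, desired), ceiling)

  // With the defaults "1", "x1", "x1" the desired parallelism equals the processor count.
  println(clamp(parse("1"), parse("x1"), parse("x1")))
}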
diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala
index 055ce6e4fa..042d32c234 100644
--- a/src/library/scala/concurrent/impl/Future.scala
+++ b/src/library/scala/concurrent/impl/Future.scala
@@ -28,7 +28,7 @@ private[concurrent] object Future {
def apply[T](body: =>T)(implicit executor: ExecutionContext): scala.concurrent.Future[T] = {
val runnable = new PromiseCompletingRunnable(body)
- executor.execute(runnable)
+ executor.prepare.execute(runnable)
runnable.promise.future
}
}
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index 7af70400ef..ffaea8de96 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -8,11 +8,14 @@
package scala.concurrent.impl
-import scala.concurrent.{ ExecutionContext, CanAwait, OnCompleteRunnable, TimeoutException, ExecutionException }
+import scala.concurrent.{ ExecutionContext, CanAwait, OnCompleteRunnable, TimeoutException, ExecutionException, blocking }
+import scala.concurrent.Future.InternalCallbackExecutor
import scala.concurrent.duration.{ Duration, Deadline, FiniteDuration, NANOSECONDS }
import scala.annotation.tailrec
import scala.util.control.NonFatal
import scala.util.{ Try, Success, Failure }
+import java.io.ObjectInputStream
+import java.util.concurrent.locks.AbstractQueuedSynchronizer
private[concurrent] trait Promise[T] extends scala.concurrent.Promise[T] with scala.concurrent.Future[T] {
def future: this.type = this
@@ -53,69 +56,68 @@ private[concurrent] object Promise {
case t => Failure(t)
}
+ /*
+ * Inspired by: http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/main/java/util/concurrent/locks/AbstractQueuedSynchronizer.java
+ * Written by Doug Lea with assistance from members of JCP JSR-166
+ * Expert Group and released to the public domain, as explained at
+ * http://creativecommons.org/publicdomain/zero/1.0/
+ */
+ private final class CompletionLatch[T] extends AbstractQueuedSynchronizer with (Try[T] => Unit) {
+ override protected def tryAcquireShared(ignored: Int): Int = if (getState != 0) 1 else -1
+ override protected def tryReleaseShared(ignore: Int): Boolean = {
+ setState(1)
+ true
+ }
+ override def apply(ignored: Try[T]): Unit = releaseShared(1)
+ }
+
+
/** Default promise implementation.
*/
class DefaultPromise[T] extends AbstractPromise with Promise[T] { self =>
updateState(null, Nil) // Start at "No callbacks"
- protected final def tryAwait(atMost: Duration): Boolean = {
- @tailrec
- def awaitUnsafe(deadline: Deadline, nextWait: FiniteDuration): Boolean = {
- if (!isCompleted && nextWait > Duration.Zero) {
- val ms = nextWait.toMillis
- val ns = (nextWait.toNanos % 1000000l).toInt // as per object.wait spec
-
- synchronized { if (!isCompleted) wait(ms, ns) }
-
- awaitUnsafe(deadline, deadline.timeLeft)
- } else
- isCompleted
- }
- @tailrec
- def awaitUnbounded(): Boolean = {
- if (isCompleted) true
- else {
- synchronized { if (!isCompleted) wait() }
- awaitUnbounded()
- }
- }
-
+ protected final def tryAwait(atMost: Duration): Boolean = if (!isCompleted) {
import Duration.Undefined
+ import scala.concurrent.Future.InternalCallbackExecutor
atMost match {
- case u if u eq Undefined => throw new IllegalArgumentException("cannot wait for Undefined period")
- case Duration.Inf => awaitUnbounded()
- case Duration.MinusInf => isCompleted
- case f: FiniteDuration => if (f > Duration.Zero) awaitUnsafe(f.fromNow, f) else isCompleted
+ case e if e eq Undefined => throw new IllegalArgumentException("cannot wait for Undefined period")
+ case Duration.Inf =>
+ val l = new CompletionLatch[T]()
+ onComplete(l)(InternalCallbackExecutor)
+ l.acquireSharedInterruptibly(1)
+ case Duration.MinusInf => // Drop out
+ case f: FiniteDuration =>
+ if (f > Duration.Zero) {
+ val l = new CompletionLatch[T]()
+ onComplete(l)(InternalCallbackExecutor)
+ l.tryAcquireSharedNanos(1, f.toNanos)
+ }
}
- }
+
+ isCompleted
+ } else true // Already completed
@throws(classOf[TimeoutException])
@throws(classOf[InterruptedException])
def ready(atMost: Duration)(implicit permit: CanAwait): this.type =
- if (isCompleted || tryAwait(atMost)) this
+ if (tryAwait(atMost)) this
else throw new TimeoutException("Futures timed out after [" + atMost + "]")
@throws(classOf[Exception])
def result(atMost: Duration)(implicit permit: CanAwait): T =
- ready(atMost).value.get match {
- case Failure(e) => throw e
- case Success(r) => r
- }
+ ready(atMost).value.get.get // ready throws TimeoutException if timeout so value.get is safe here
def value: Option[Try[T]] = getState match {
case c: Try[_] => Some(c.asInstanceOf[Try[T]])
case _ => None
}
- override def isCompleted: Boolean = getState match { // Cheaper than boxing result into Option due to "def value"
- case _: Try[_] => true
- case _ => false
- }
+ override def isCompleted: Boolean = getState.isInstanceOf[Try[_]]
def tryComplete(value: Try[T]): Boolean = {
val resolved = resolveTry(value)
- (try {
- @tailrec
+ @tailrec
def tryComplete(v: Try[T]): List[CallbackRunnable[T]] = {
getState match {
case raw: List[_] =>
@@ -124,10 +126,7 @@ private[concurrent] object Promise {
case _ => null
}
}
- tryComplete(resolved)
- } finally {
- synchronized { notifyAll() } //Notify any evil blockers
- }) match {
+ tryComplete(resolved) match {
case null => false
case rs if rs.isEmpty => true
case rs => rs.foreach(r => r.executeWithValue(resolved)); true
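The CompletionLatch introduced above replaces the wait/notifyAll-based blocking: tryAwait registers the latch as an onComplete callback and parks on it. A minimal standalone sketch of that latch pattern (independent of the internal promise types; names here are illustrative):

import java.util.concurrent.locks.AbstractQueuedSynchronizer
import scala.util.Try

// A one-shot latch that doubles as a completion callback:
// apply() flips the state and releases every thread blocked in await().
final class OneShotLatch[T] extends AbstractQueuedSynchronizer with (Try[T] => Unit) {
  override protected def tryAcquireShared(ignored: Int): Int = if (getState != 0) 1 else -1
  override protected def tryReleaseShared(ignored: Int): Boolean = { setState(1); true }
  override def apply(result: Try[T]): Unit = releaseShared(1)
  def await(): Unit = acquireSharedInterruptibly(1) // blocks until apply() has run
}

Registering such a latch with a future's onComplete then unblocks await() when the result arrives; tryAcquireSharedNanos gives the timed variant used for finite waits.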
diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala
index 3e849f1722..50a66a622a 100644
--- a/src/library/scala/concurrent/package.scala
+++ b/src/library/scala/concurrent/package.scala
@@ -24,10 +24,10 @@ package object concurrent {
*
* @tparam T the type of the result
* @param body the asynchronous computation
- * @param execctx the execution context on which the future is run
+ * @param executor the execution context on which the future is run
* @return the `Future` holding the result of the computation
*/
- def future[T](body: =>T)(implicit execctx: ExecutionContext): Future[T] = Future[T](body)
+ def future[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] = Future[T](body)
/** Creates a promise object which can be completed with a value or an exception.
*
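The rename from execctx to executor stays source-compatible for callers that pass the implicit explicitly, because @deprecatedName keeps the old name usable. A hedged sketch (assuming a 2.11-style scala.concurrent API):

    import scala.concurrent._
    implicit val ec: ExecutionContext = ExecutionContext.global

    val f1 = future(21 * 2)                 // implicit executor
    val f2 = future(21 * 2)(executor = ec)  // new parameter name
    val f3 = future(21 * 2)(execctx = ec)   // old name: still compiles, with a deprecation warning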
diff --git a/src/library/scala/deprecatedInheritance.scala b/src/library/scala/deprecatedInheritance.scala
index 70065560b1..7d20219d4d 100644
--- a/src/library/scala/deprecatedInheritance.scala
+++ b/src/library/scala/deprecatedInheritance.scala
@@ -11,7 +11,8 @@ package scala
/** An annotation that designates that inheriting from a class is deprecated.
*
* This is usually done to warn about a non-final class being made final in a future version.
- * Sub-classing such a class then generates a warning.
+ * Sub-classing such a class then generates a warning. No warnings are generated if the
+ * subclass is in the same compilation unit.
*
* @param message the message to print during compilation if the class was sub-classed
* @param since a string identifying the first version in which inheritance was deprecated
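For readers unfamiliar with the annotation, a small hypothetical example of the behaviour documented above (FooParser and LegacyParser are made-up names, not part of the patch):

    // FooParser.scala
    @deprecatedInheritance("FooParser will be made final", "2.11.0")
    class FooParser {
      def parse(s: String): Int = s.trim.toInt
    }

    // Subclassing in a *different* compilation unit triggers the warning;
    // a subclass written in FooParser.scala itself does not.
    class LegacyParser extends FooParser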
diff --git a/src/library/scala/io/BufferedSource.scala b/src/library/scala/io/BufferedSource.scala
index e250da27c3..c170d28127 100644
--- a/src/library/scala/io/BufferedSource.scala
+++ b/src/library/scala/io/BufferedSource.scala
@@ -55,7 +55,7 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod
// immediately on the source.
if (charReaderCreated && iter.hasNext) {
val pb = new PushbackReader(charReader)
- pb unread iter.next()
+ pb unread iter.next().toInt
new BufferedReader(pb, bufferSize)
}
else charReader
diff --git a/src/library/scala/io/Codec.scala b/src/library/scala/io/Codec.scala
index bda4234460..60f99199cb 100644
--- a/src/library/scala/io/Codec.scala
+++ b/src/library/scala/io/Codec.scala
@@ -6,8 +6,8 @@
** |/ **
\* */
-
-package scala.io
+package scala
+package io
import java.nio.charset.{ Charset, CharsetDecoder, CharsetEncoder, CharacterCodingException, CodingErrorAction => Action }
import scala.annotation.migration
diff --git a/src/library/scala/io/Position.scala b/src/library/scala/io/Position.scala
index b96349803d..85149223ee 100644
--- a/src/library/scala/io/Position.scala
+++ b/src/library/scala/io/Position.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.io
+package scala
+package io
/** The object Position provides convenience methods to encode
* line and column number in one single integer. The encoded line
diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala
index f976c7eb0a..74c3e06839 100644
--- a/src/library/scala/io/Source.scala
+++ b/src/library/scala/io/Source.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.io
+package scala
+package io
import scala.collection.AbstractIterator
import java.io.{ FileInputStream, InputStream, PrintStream, File => JFile }
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index d8f4337b8f..00a3686585 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -7,7 +7,8 @@
\* */
-package scala.math
+package scala
+package math
import java.{ lang => jl }
import java.math.{ MathContext, BigDecimal => BigDec }
@@ -43,6 +44,7 @@ object BigDecimal {
*/
def valueOf(d: Double): BigDecimal = apply(BigDec valueOf d)
def valueOf(d: Double, mc: MathContext): BigDecimal = apply(BigDec valueOf d, mc)
+ def valueOf(x: Long): BigDecimal = apply(x.toDouble)
/** Constructs a `BigDecimal` whose value is equal to that of the
* specified `Integer` value.
@@ -55,10 +57,10 @@ object BigDecimal {
if (mc == defaultMathContext && minCached <= i && i <= maxCached) {
val offset = i - minCached
var n = cache(offset)
- if (n eq null) { n = new BigDecimal(BigDec.valueOf(i), mc); cache(offset) = n }
+ if (n eq null) { n = new BigDecimal(BigDec.valueOf(i.toLong), mc); cache(offset) = n }
n
}
- else new BigDecimal(BigDec.valueOf(i), mc)
+ else new BigDecimal(BigDec.valueOf(i.toLong), mc)
/** Constructs a `BigDecimal` whose value is equal to that of the
* specified long value.
@@ -98,6 +100,9 @@ object BigDecimal {
def apply(d: Double, mc: MathContext): BigDecimal =
new BigDecimal(new BigDec(jl.Double.toString(d), mc), mc)
+ def apply(x: Float): BigDecimal = apply(x.toDouble)
+ def apply(x: Float, mc: MathContext): BigDecimal = apply(x.toDouble, mc)
+
/** Translates a character array representation of a `BigDecimal`
* into a `BigDecimal`.
*/
@@ -192,7 +197,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
*/
def isValidFloat = {
val f = toFloat
- !f.isInfinity && bigDecimal.compareTo(new java.math.BigDecimal(f)) == 0
+ !f.isInfinity && bigDecimal.compareTo(new java.math.BigDecimal(f.toDouble)) == 0
}
/** Returns `true` iff this can be represented exactly by [[scala.Double]]; otherwise returns `false`.
*/
@@ -333,21 +338,21 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
override def byteValue = intValue.toByte
/** Converts this BigDecimal to a Short.
- * If the BigDecimal is too big to fit in a Byte, only the low-order 16 bits are returned.
+ * If the BigDecimal is too big to fit in a Short, only the low-order 16 bits are returned.
* Note that this conversion can lose information about the overall magnitude of the
* BigDecimal value as well as return a result with the opposite sign.
*/
override def shortValue = intValue.toShort
/** Converts this BigDecimal to a Char.
- * If the BigDecimal is too big to fit in a char, only the low-order 16 bits are returned.
+ * If the BigDecimal is too big to fit in a Char, only the low-order 16 bits are returned.
* Note that this conversion can lose information about the overall magnitude of the
* BigDecimal value and that it always returns a positive result.
*/
def charValue = intValue.toChar
/** Converts this BigDecimal to an Int.
- * If the BigDecimal is too big to fit in a char, only the low-order 32 bits
+ * If the BigDecimal is too big to fit in an Int, only the low-order 32 bits
* are returned. Note that this conversion can lose information about the
* overall magnitude of the BigDecimal value as well as return a result with
* the opposite sign.
@@ -355,7 +360,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
def intValue = this.bigDecimal.intValue
/** Converts this BigDecimal to a Long.
- * If the BigDecimal is too big to fit in a char, only the low-order 64 bits
+ * If the BigDecimal is too big to fit in a Long, only the low-order 64 bits
* are returned. Note that this conversion can lose information about the
* overall magnitude of the BigDecimal value as well as return a result with
* the opposite sign.
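A brief usage sketch of the overloads added above (names as in scala.math.BigDecimal; behaviour hedged, not normative):

    import scala.math.BigDecimal
    import java.math.MathContext

    val a = BigDecimal.valueOf(42L)                  // new Long overload
    val b = BigDecimal(1.5f)                         // new Float overload (widened to Double internally)
    val c = BigDecimal(1.5f, MathContext.DECIMAL32)  // Float overload with an explicit MathContext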
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index 719099b405..afc4d5c535 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.math
+package scala
+package math
import java.math.BigInteger
import scala.language.implicitConversions
@@ -33,9 +34,9 @@ object BigInt {
if (minCached <= i && i <= maxCached) {
val offset = i - minCached
var n = cache(offset)
- if (n eq null) { n = new BigInt(BigInteger.valueOf(i)); cache(offset) = n }
+ if (n eq null) { n = new BigInt(BigInteger.valueOf(i.toLong)); cache(offset) = n }
n
- } else new BigInt(BigInteger.valueOf(i))
+ } else new BigInt(BigInteger.valueOf(i.toLong))
/** Constructs a `BigInt` whose value is equal to that of the
* specified long value.
diff --git a/src/library/scala/math/Equiv.scala b/src/library/scala/math/Equiv.scala
index 5f5e049941..45b2b3629d 100644
--- a/src/library/scala/math/Equiv.scala
+++ b/src/library/scala/math/Equiv.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.math
+package scala
+package math
import java.util.Comparator
diff --git a/src/library/scala/math/Fractional.scala b/src/library/scala/math/Fractional.scala
index ca33675b0a..b7e0ed5471 100644
--- a/src/library/scala/math/Fractional.scala
+++ b/src/library/scala/math/Fractional.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.math
+package scala
+package math
import scala.language.implicitConversions
diff --git a/src/library/scala/math/Integral.scala b/src/library/scala/math/Integral.scala
index f3684c4e5d..ff1f695f6d 100644
--- a/src/library/scala/math/Integral.scala
+++ b/src/library/scala/math/Integral.scala
@@ -8,7 +8,8 @@
-package scala.math
+package scala
+package math
import scala.language.implicitConversions
diff --git a/src/library/scala/math/Numeric.scala b/src/library/scala/math/Numeric.scala
index 5a76f4f5f2..e6644c0dfc 100644
--- a/src/library/scala/math/Numeric.scala
+++ b/src/library/scala/math/Numeric.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.math
+package scala
+package math
import scala.language.implicitConversions
@@ -50,9 +51,9 @@ object Numeric {
def negate(x: Int): Int = -x
def fromInt(x: Int): Int = x
def toInt(x: Int): Int = x
- def toLong(x: Int): Long = x
- def toFloat(x: Int): Float = x
- def toDouble(x: Int): Double = x
+ def toLong(x: Int): Long = x.toLong
+ def toFloat(x: Int): Float = x.toFloat
+ def toDouble(x: Int): Double = x.toDouble
}
implicit object IntIsIntegral extends IntIsIntegral with Ordering.IntOrdering
@@ -108,11 +109,11 @@ object Numeric {
def quot(x: Long, y: Long): Long = x / y
def rem(x: Long, y: Long): Long = x % y
def negate(x: Long): Long = -x
- def fromInt(x: Int): Long = x
+ def fromInt(x: Int): Long = x.toLong
def toInt(x: Long): Int = x.toInt
def toLong(x: Long): Long = x
- def toFloat(x: Long): Float = x
- def toDouble(x: Long): Double = x
+ def toFloat(x: Long): Float = x.toFloat
+ def toDouble(x: Long): Double = x.toDouble
}
implicit object LongIsIntegral extends LongIsIntegral with Ordering.LongOrdering
@@ -121,11 +122,11 @@ object Numeric {
def minus(x: Float, y: Float): Float = x - y
def times(x: Float, y: Float): Float = x * y
def negate(x: Float): Float = -x
- def fromInt(x: Int): Float = x
+ def fromInt(x: Int): Float = x.toFloat
def toInt(x: Float): Int = x.toInt
def toLong(x: Float): Long = x.toLong
def toFloat(x: Float): Float = x
- def toDouble(x: Float): Double = x
+ def toDouble(x: Float): Double = x.toDouble
}
trait FloatIsFractional extends FloatIsConflicted with Fractional[Float] {
def div(x: Float, y: Float): Float = x / y
@@ -143,7 +144,7 @@ object Numeric {
def minus(x: Double, y: Double): Double = x - y
def times(x: Double, y: Double): Double = x * y
def negate(x: Double): Double = -x
- def fromInt(x: Int): Double = x
+ def fromInt(x: Int): Double = x.toDouble
def toInt(x: Double): Int = x.toInt
def toLong(x: Double): Long = x.toLong
def toFloat(x: Double): Float = x.toFloat
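The edits above only make the numeric widenings explicit; behaviour is unchanged. For context, these conversions are what generic code relies on, e.g. a sketch like:

    // Hypothetical helper, not part of the patch.
    def mean[T](xs: Seq[T])(implicit num: Numeric[T]): Double =
      if (xs.isEmpty) 0.0 else num.toDouble(xs.sum) / xs.size

    mean(List(1, 2, 3))     // 2.0, via IntIsIntegral.toDouble
    mean(List(1L, 2L, 4L))  // via LongIsIntegral.toDouble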
diff --git a/src/library/scala/math/Ordered.scala b/src/library/scala/math/Ordered.scala
index e8be92eb4a..51f2765a63 100644
--- a/src/library/scala/math/Ordered.scala
+++ b/src/library/scala/math/Ordered.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.math
+package scala
+package math
import scala.language.implicitConversions
diff --git a/src/library/scala/math/PartialOrdering.scala b/src/library/scala/math/PartialOrdering.scala
index a9e317d536..9e35381528 100644
--- a/src/library/scala/math/PartialOrdering.scala
+++ b/src/library/scala/math/PartialOrdering.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.math
+package scala
+package math
/** A trait for representing partial orderings. It is important to
* distinguish between a type that has a partial order and a representation
diff --git a/src/library/scala/math/PartiallyOrdered.scala b/src/library/scala/math/PartiallyOrdered.scala
index 7823e5b396..f58210d6a7 100644
--- a/src/library/scala/math/PartiallyOrdered.scala
+++ b/src/library/scala/math/PartiallyOrdered.scala
@@ -8,7 +8,8 @@
-package scala.math
+package scala
+package math
/** A class for partially ordered data.
*
diff --git a/src/library/scala/math/ScalaNumber.java b/src/library/scala/math/ScalaNumber.java
index 7345147b0d..f03ba7bf08 100644
--- a/src/library/scala/math/ScalaNumber.java
+++ b/src/library/scala/math/ScalaNumber.java
@@ -6,8 +6,6 @@
** |/ **
\* */
-
-
package scala.math;
/** A marker class for Number types introduced by Scala
diff --git a/src/library/scala/math/ScalaNumericConversions.scala b/src/library/scala/math/ScalaNumericConversions.scala
index e748841c12..336991781e 100644
--- a/src/library/scala/math/ScalaNumericConversions.scala
+++ b/src/library/scala/math/ScalaNumericConversions.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.math
+package scala
+package math
/** A slightly more specific conversion trait for classes which
* extend ScalaNumber (which excludes value classes.)
diff --git a/src/library/scala/math/package.scala b/src/library/scala/math/package.scala
index cb033bda2c..af5dad5c38 100644
--- a/src/library/scala/math/package.scala
+++ b/src/library/scala/math/package.scala
@@ -16,12 +16,12 @@ package object math {
/** The `double` value that is closer than any other to `e`, the base of
* the natural logarithms.
*/
- val E = java.lang.Math.E
+ @inline final val E = java.lang.Math.E
/** The `double` value that is closer than any other to `pi`, the ratio of
* the circumference of a circle to its diameter.
*/
- val Pi = java.lang.Math.PI
+ @inline final val Pi = java.lang.Math.PI
/** Returns a `double` value with a positive sign, greater than or equal
* to `0.0` and less than `1.0`.
@@ -62,7 +62,7 @@ package object math {
def sqrt(x: Double): Double = java.lang.Math.sqrt(x)
def IEEEremainder(x: Double, y: Double): Double = java.lang.Math.IEEEremainder(x, y)
- def ceil(x: Double): Double = java.lang.Math.ceil(x)
+ def ceil(x: Double): Double = java.lang.Math.ceil(x)
def floor(x: Double): Double = java.lang.Math.floor(x)
/** Returns the `double` value that is closest in value to the
@@ -100,24 +100,30 @@ package object math {
*/
def round(x: Float): Int = java.lang.Math.round(x)
def round(x: Double): Long = java.lang.Math.round(x)
- def abs(x: Int): Int = java.lang.Math.abs(x)
- def abs(x: Long): Long = java.lang.Math.abs(x)
- def abs(x: Float): Float = java.lang.Math.abs(x)
+
+ def abs(x: Int): Int = java.lang.Math.abs(x)
+ def abs(x: Long): Long = java.lang.Math.abs(x)
+ def abs(x: Float): Float = java.lang.Math.abs(x)
def abs(x: Double): Double = java.lang.Math.abs(x)
- def max(x: Int, y: Int): Int = java.lang.Math.max(x, y)
- def max(x: Long, y: Long): Long = java.lang.Math.max(x, y)
- def max(x: Float, y: Float): Float = java.lang.Math.max(x, y)
+ def max(x: Int, y: Int): Int = java.lang.Math.max(x, y)
+ def max(x: Long, y: Long): Long = java.lang.Math.max(x, y)
+ def max(x: Float, y: Float): Float = java.lang.Math.max(x, y)
def max(x: Double, y: Double): Double = java.lang.Math.max(x, y)
- def min(x: Int, y: Int): Int = java.lang.Math.min(x, y)
- def min(x: Long, y: Long): Long = java.lang.Math.min(x, y)
- def min(x: Float, y: Float): Float = java.lang.Math.min(x, y)
+ def min(x: Int, y: Int): Int = java.lang.Math.min(x, y)
+ def min(x: Long, y: Long): Long = java.lang.Math.min(x, y)
+ def min(x: Float, y: Float): Float = java.lang.Math.min(x, y)
def min(x: Double, y: Double): Double = java.lang.Math.min(x, y)
- def signum(x: Int): Int = java.lang.Integer.signum(x)
- def signum(x: Long): Long = java.lang.Long.signum(x)
- def signum(x: Float): Float = java.lang.Math.signum(x)
+ /** Note that these are not pure forwarders to the java versions.
+ * In particular, the return type of java.lang.Long.signum is Int,
+ * but here it is widened to Long so that each overloaded variant
+ * will return the same numeric type it is passed.
+ */
+ def signum(x: Int): Int = java.lang.Integer.signum(x)
+ def signum(x: Long): Long = java.lang.Long.signum(x)
+ def signum(x: Float): Float = java.lang.Math.signum(x)
def signum(x: Double): Double = java.lang.Math.signum(x)
// -----------------------------------------------------------------------
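To illustrate the note above about signum not being a pure forwarder (a hedged example, not part of the patch):

    val i: Int    = math.signum(-7)    // Integer.signum already returns Int
    val l: Long   = math.signum(-7L)   // java.lang.Long.signum returns Int; widened to Long here
    val d: Double = math.signum(-7.0)  // Math.signum(Double) returns Double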
diff --git a/src/library/scala/reflect/ClassManifestDeprecatedApis.scala b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala
index 0a3d818fb9..798746851a 100644
--- a/src/library/scala/reflect/ClassManifestDeprecatedApis.scala
+++ b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.reflect
+package scala
+package reflect
import scala.collection.mutable.{ WrappedArray, ArrayBuilder }
import java.lang.{ Class => jClass }
diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala
index f62d0ecd16..3bc76da295 100644
--- a/src/library/scala/reflect/Manifest.scala
+++ b/src/library/scala/reflect/Manifest.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.reflect
+package scala
+package reflect
import scala.collection.mutable.{ ArrayBuilder, WrappedArray }
diff --git a/src/library/scala/reflect/NameTransformer.scala b/src/library/scala/reflect/NameTransformer.scala
index 6192971c74..a8430548f5 100755
--- a/src/library/scala/reflect/NameTransformer.scala
+++ b/src/library/scala/reflect/NameTransformer.scala
@@ -30,9 +30,9 @@ object NameTransformer {
private val op2code = new Array[String](nops)
private val code2op = new Array[OpCodes](ncodes)
private def enterOp(op: Char, code: String) = {
- op2code(op) = code
+ op2code(op.toInt) = code
val c = (code.charAt(1) - 'a') * 26 + code.charAt(2) - 'a'
- code2op(c) = new OpCodes(op, code, code2op(c))
+ code2op(c.toInt) = new OpCodes(op, code, code2op(c))
}
/* Note: decoding assumes opcodes are only ever lowercase. */
@@ -66,12 +66,12 @@ object NameTransformer {
var i = 0
while (i < len) {
val c = name charAt i
- if (c < nops && (op2code(c) ne null)) {
+ if (c < nops && (op2code(c.toInt) ne null)) {
if (buf eq null) {
buf = new StringBuilder()
buf.append(name.substring(0, i))
}
- buf.append(op2code(c))
+ buf.append(op2code(c.toInt))
/* Handle glyphs that are not valid Java/JVM identifiers */
}
else if (!Character.isJavaIdentifierPart(c)) {
diff --git a/src/library/scala/reflect/NoManifest.scala b/src/library/scala/reflect/NoManifest.scala
index 61bc5e28d3..2ef946c80c 100644
--- a/src/library/scala/reflect/NoManifest.scala
+++ b/src/library/scala/reflect/NoManifest.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.reflect
+package scala
+package reflect
/** One of the branches of an [[scala.reflect.OptManifest]].
*/
diff --git a/src/library/scala/reflect/OptManifest.scala b/src/library/scala/reflect/OptManifest.scala
index 5e373c7318..b69f55483c 100644
--- a/src/library/scala/reflect/OptManifest.scala
+++ b/src/library/scala/reflect/OptManifest.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.reflect
+package scala
+package reflect
/** A `OptManifest[T]` is an optional [[scala.reflect.Manifest]].
*
diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala
index 10e6d7d9a4..74c1f2172c 100644
--- a/src/library/scala/reflect/package.scala
+++ b/src/library/scala/reflect/package.scala
@@ -1,5 +1,7 @@
package scala
+import java.lang.reflect.{ AccessibleObject => jAccessibleObject }
+
package object reflect {
// in the new scheme of things ClassManifests are aliased to ClassTags
@@ -42,11 +44,23 @@ package object reflect {
def classTag[T](implicit ctag: ClassTag[T]) = ctag
+ /** Make a java reflection object accessible, if it is not already
+ * and it is possible to do so. If a SecurityException is thrown in the
+ * attempt, it is caught and discarded.
+ */
+ def ensureAccessible[T <: jAccessibleObject](m: T): T = {
+ if (!m.isAccessible) {
+ try m setAccessible true
+ catch { case _: SecurityException => } // does nothing
+ }
+ m
+ }
+
// anchor for the class tag materialization macro emitted during tag materialization in Implicits.scala
// implementation is hardwired into `scala.reflect.reify.Taggers`
// using the mechanism implemented in `scala.tools.reflect.FastTrack`
// todo. once we have implicit macros for tag generation, we can remove this anchor
- private[scala] def materializeClassTag[T](): ClassTag[T] = ??? // macro
+ private[scala] def materializeClassTag[T](): ClassTag[T] = macro ???
@deprecated("Use `@scala.beans.BeanDescription` instead", "2.10.0")
type BeanDescription = scala.beans.BeanDescription
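A minimal sketch of how the new ensureAccessible helper might be used on a plain java.lang.reflect object (Object#clone is just a convenient protected member to demonstrate with):

    import java.lang.reflect.Method

    val m: Method = classOf[Object].getDeclaredMethod("clone") // protected, not accessible by default
    scala.reflect.ensureAccessible(m)                           // best effort: a SecurityException is swallowed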
diff --git a/src/library/scala/runtime/AbstractPartialFunction.scala b/src/library/scala/runtime/AbstractPartialFunction.scala
index 57f8e2603b..e3516bc4d9 100644
--- a/src/library/scala/runtime/AbstractPartialFunction.scala
+++ b/src/library/scala/runtime/AbstractPartialFunction.scala
@@ -6,7 +6,10 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
+import scala.annotation.unspecialized
/** `AbstractPartialFunction` reformulates all operations of its supertrait `PartialFunction`
* in terms of `isDefinedAt` and `applyOrElse`.
diff --git a/src/library/scala/runtime/BooleanRef.java b/src/library/scala/runtime/BooleanRef.java
index 889db31e2a..92e8055351 100644
--- a/src/library/scala/runtime/BooleanRef.java
+++ b/src/library/scala/runtime/BooleanRef.java
@@ -17,4 +17,7 @@ public class BooleanRef implements java.io.Serializable {
public boolean elem;
public BooleanRef(boolean elem) { this.elem = elem; }
public String toString() { return String.valueOf(elem); }
+
+ public static BooleanRef create(boolean e) { return new BooleanRef(e); }
+ public static BooleanRef zero() { return new BooleanRef(false); }
}
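The new static factories on the *Ref classes (here and in the analogous files below) exist mainly for compiler-generated code, but they are ordinary statics and callable directly; a hedged sketch:

    val flag  = scala.runtime.BooleanRef.create(true)
    val count = scala.runtime.IntRef.zero()   // assuming the matching IntRef factory added below
    flag.elem  = false
    count.elem += 1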
diff --git a/src/library/scala/runtime/Boxed.scala b/src/library/scala/runtime/Boxed.scala
index 8b531076ac..855f0ff41a 100644
--- a/src/library/scala/runtime/Boxed.scala
+++ b/src/library/scala/runtime/Boxed.scala
@@ -8,7 +8,9 @@
-package scala.runtime
+package scala
+package runtime
+
trait Boxed {
diff --git a/src/library/scala/runtime/ByteRef.java b/src/library/scala/runtime/ByteRef.java
index cc10611e26..27d3259db3 100644
--- a/src/library/scala/runtime/ByteRef.java
+++ b/src/library/scala/runtime/ByteRef.java
@@ -17,4 +17,7 @@ public class ByteRef implements java.io.Serializable {
public byte elem;
public ByteRef(byte elem) { this.elem = elem; }
public String toString() { return java.lang.Byte.toString(elem); }
+
+ public static ByteRef create(byte e) { return new ByteRef(e); }
+ public static ByteRef zero() { return new ByteRef((byte)0); }
}
diff --git a/src/library/scala/runtime/CharRef.java b/src/library/scala/runtime/CharRef.java
index 03d3337b3d..31956f5b55 100644
--- a/src/library/scala/runtime/CharRef.java
+++ b/src/library/scala/runtime/CharRef.java
@@ -17,4 +17,7 @@ public class CharRef implements java.io.Serializable {
public char elem;
public CharRef(char elem) { this.elem = elem; }
public String toString() { return java.lang.Character.toString(elem); }
+
+ public static CharRef create(char e) { return new CharRef(e); }
+ public static CharRef zero() { return new CharRef((char)0); }
}
diff --git a/src/library/scala/runtime/DoubleRef.java b/src/library/scala/runtime/DoubleRef.java
index 317198ee48..0c7d9156d6 100644
--- a/src/library/scala/runtime/DoubleRef.java
+++ b/src/library/scala/runtime/DoubleRef.java
@@ -17,4 +17,7 @@ public class DoubleRef implements java.io.Serializable {
public double elem;
public DoubleRef(double elem) { this.elem = elem; }
public String toString() { return java.lang.Double.toString(elem); }
+
+ public static DoubleRef create(double e) { return new DoubleRef(e); }
+ public static DoubleRef zero() { return new DoubleRef(0); }
}
diff --git a/src/library/scala/runtime/FloatRef.java b/src/library/scala/runtime/FloatRef.java
index e26b89be60..f0e1d5f8f3 100644
--- a/src/library/scala/runtime/FloatRef.java
+++ b/src/library/scala/runtime/FloatRef.java
@@ -17,4 +17,7 @@ public class FloatRef implements java.io.Serializable {
public float elem;
public FloatRef(float elem) { this.elem = elem; }
public String toString() { return java.lang.Float.toString(elem); }
+
+ public static FloatRef create(float e) { return new FloatRef(e); }
+ public static FloatRef zero() { return new FloatRef(0); }
}
diff --git a/src/library/scala/runtime/IntRef.java b/src/library/scala/runtime/IntRef.java
index edb6fafe94..adcf474aae 100644
--- a/src/library/scala/runtime/IntRef.java
+++ b/src/library/scala/runtime/IntRef.java
@@ -17,4 +17,7 @@ public class IntRef implements java.io.Serializable {
public int elem;
public IntRef(int elem) { this.elem = elem; }
public String toString() { return java.lang.Integer.toString(elem); }
+
+ public static IntRef create(int e) { return new IntRef(e); }
+ public static IntRef zero() { return new IntRef(0); }
}
diff --git a/src/library/scala/runtime/LongRef.java b/src/library/scala/runtime/LongRef.java
index 12004b5bb5..51426ab8f6 100644
--- a/src/library/scala/runtime/LongRef.java
+++ b/src/library/scala/runtime/LongRef.java
@@ -17,4 +17,7 @@ public class LongRef implements java.io.Serializable {
public long elem;
public LongRef(long elem) { this.elem = elem; }
public String toString() { return java.lang.Long.toString(elem); }
+
+ public static LongRef create(long e) { return new LongRef(e); }
+ public static LongRef zero() { return new LongRef(0); }
}
diff --git a/src/library/scala/runtime/MethodCache.scala b/src/library/scala/runtime/MethodCache.scala
index 217b51893b..bbf80593db 100644
--- a/src/library/scala/runtime/MethodCache.scala
+++ b/src/library/scala/runtime/MethodCache.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
import java.lang.reflect.{ Method => JMethod }
import java.lang.{ Class => JClass }
diff --git a/src/library/scala/runtime/NonLocalReturnControl.scala b/src/library/scala/runtime/NonLocalReturnControl.scala
index b9525ef419..16b2fec6d7 100644
--- a/src/library/scala/runtime/NonLocalReturnControl.scala
+++ b/src/library/scala/runtime/NonLocalReturnControl.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
import scala.util.control.ControlThrowable
diff --git a/src/library/scala/runtime/Nothing$.scala b/src/library/scala/runtime/Nothing$.scala
index 04fcc55a1c..4ecc536223 100644
--- a/src/library/scala/runtime/Nothing$.scala
+++ b/src/library/scala/runtime/Nothing$.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
/**
 * Dummy class which exists only to satisfy the JVM. It corresponds
diff --git a/src/library/scala/runtime/Null$.scala b/src/library/scala/runtime/Null$.scala
index 25b797a606..1b7685c507 100644
--- a/src/library/scala/runtime/Null$.scala
+++ b/src/library/scala/runtime/Null$.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
/**
 * Dummy class which exists only to satisfy the JVM. It corresponds to
diff --git a/src/library/scala/runtime/ObjectRef.java b/src/library/scala/runtime/ObjectRef.java
index c8298b8b21..c553c780a8 100644
--- a/src/library/scala/runtime/ObjectRef.java
+++ b/src/library/scala/runtime/ObjectRef.java
@@ -17,4 +17,7 @@ public class ObjectRef<T> implements java.io.Serializable {
public T elem;
public ObjectRef(T elem) { this.elem = elem; }
public String toString() { return String.valueOf(elem); }
+
+ public static <U> ObjectRef create(U e) { return new ObjectRef(e); }
+ public static ObjectRef zero() { return new ObjectRef(null); }
}
diff --git a/src/library/scala/runtime/RichBoolean.scala b/src/library/scala/runtime/RichBoolean.scala
index 97e2b77f96..4f867960a0 100644
--- a/src/library/scala/runtime/RichBoolean.scala
+++ b/src/library/scala/runtime/RichBoolean.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
final class RichBoolean(val self: Boolean) extends AnyVal with OrderedProxy[Boolean] {
protected def ord = scala.math.Ordering.Boolean
diff --git a/src/library/scala/runtime/RichByte.scala b/src/library/scala/runtime/RichByte.scala
index ca578620cf..ea23cb8867 100644
--- a/src/library/scala/runtime/RichByte.scala
+++ b/src/library/scala/runtime/RichByte.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
final class RichByte(val self: Byte) extends AnyVal with ScalaWholeNumberProxy[Byte] {
protected def num = scala.math.Numeric.ByteIsIntegral
diff --git a/src/library/scala/runtime/RichChar.scala b/src/library/scala/runtime/RichChar.scala
index 5124ca00de..c069fd1fef 100644
--- a/src/library/scala/runtime/RichChar.scala
+++ b/src/library/scala/runtime/RichChar.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
import java.lang.Character
diff --git a/src/library/scala/runtime/RichException.scala b/src/library/scala/runtime/RichException.scala
index cf4eb71ded..f01788a4e9 100644
--- a/src/library/scala/runtime/RichException.scala
+++ b/src/library/scala/runtime/RichException.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
import scala.compat.Platform.EOL
diff --git a/src/library/scala/runtime/RichFloat.scala b/src/library/scala/runtime/RichFloat.scala
index cb0681bc19..0bca033b7b 100644
--- a/src/library/scala/runtime/RichFloat.scala
+++ b/src/library/scala/runtime/RichFloat.scala
@@ -15,22 +15,22 @@ final class RichFloat(val self: Float) extends AnyVal with FractionalProxy[Float
protected def integralNum = scala.math.Numeric.FloatAsIfIntegral
def round: Int = math.round(self)
- def ceil: Float = math.ceil(self).toFloat
- def floor: Float = math.floor(self).toFloat
+ def ceil: Float = math.ceil(self.toDouble).toFloat
+ def floor: Float = math.floor(self.toDouble).toFloat
/** Converts an angle measured in degrees to an approximately equivalent
* angle measured in radians.
*
* @return the measurement of the angle `x` in radians.
*/
- def toRadians: Float = math.toRadians(self).toFloat
+ def toRadians: Float = math.toRadians(self.toDouble).toFloat
/** Converts an angle measured in radians to an approximately equivalent
* angle measured in degrees.
*
* @return the measurement of the angle `x` in degrees.
*/
- def toDegrees: Float = math.toDegrees(self).toFloat
+ def toDegrees: Float = math.toDegrees(self.toDouble).toFloat
// isNaN is provided by the implicit conversion to java.lang.Float
// def isNaN: Boolean = java.lang.Float.isNaN(self)
diff --git a/src/library/scala/runtime/RichInt.scala b/src/library/scala/runtime/RichInt.scala
index 192f94f939..a39710b146 100644
--- a/src/library/scala/runtime/RichInt.scala
+++ b/src/library/scala/runtime/RichInt.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
import scala.collection.immutable.Range
diff --git a/src/library/scala/runtime/RichLong.scala b/src/library/scala/runtime/RichLong.scala
index ce2d1fdcbd..e5b39b1c09 100644
--- a/src/library/scala/runtime/RichLong.scala
+++ b/src/library/scala/runtime/RichLong.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
final class RichLong(val self: Long) extends AnyVal with IntegralProxy[Long] {
protected def num = scala.math.Numeric.LongIsIntegral
diff --git a/src/library/scala/runtime/RichShort.scala b/src/library/scala/runtime/RichShort.scala
index aa24dd2ba6..3e5d8781ff 100644
--- a/src/library/scala/runtime/RichShort.scala
+++ b/src/library/scala/runtime/RichShort.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
final class RichShort(val self: Short) extends AnyVal with ScalaWholeNumberProxy[Short] {
protected def num = scala.math.Numeric.ShortIsIntegral
diff --git a/src/library/scala/runtime/ScalaNumberProxy.scala b/src/library/scala/runtime/ScalaNumberProxy.scala
index e8460a203b..6ea6448b1a 100644
--- a/src/library/scala/runtime/ScalaNumberProxy.scala
+++ b/src/library/scala/runtime/ScalaNumberProxy.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
import scala.collection.{ mutable, immutable }
import scala.math.{ ScalaNumericConversions, ScalaNumericAnyConversions }
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index 753dd0205e..3a85207235 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -158,13 +158,7 @@ object ScalaRunTime {
// Java bug: http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4071957
// More background at ticket #2318.
- def ensureAccessible(m: JMethod): JMethod = {
- if (!m.isAccessible) {
- try m setAccessible true
- catch { case _: SecurityException => () }
- }
- m
- }
+ def ensureAccessible(m: JMethod): JMethod = scala.reflect.ensureAccessible(m)
def checkInitialized[T <: AnyRef](x: T): T =
if (x == null) throw new UninitializedError else x
@@ -356,4 +350,17 @@ object ScalaRunTime {
}
}
}
+
+ def box[T](clazz: jClass[T]): jClass[_] = clazz match {
+ case java.lang.Byte.TYPE => classOf[java.lang.Byte]
+ case java.lang.Short.TYPE => classOf[java.lang.Short]
+ case java.lang.Character.TYPE => classOf[java.lang.Character]
+ case java.lang.Integer.TYPE => classOf[java.lang.Integer]
+ case java.lang.Long.TYPE => classOf[java.lang.Long]
+ case java.lang.Float.TYPE => classOf[java.lang.Float]
+ case java.lang.Double.TYPE => classOf[java.lang.Double]
+ case java.lang.Void.TYPE => classOf[scala.runtime.BoxedUnit]
+ case java.lang.Boolean.TYPE => classOf[java.lang.Boolean]
+ case _ => clazz
+ }
}
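A short, non-normative illustration of the new ScalaRunTime.box helper:

    import scala.runtime.ScalaRunTime

    ScalaRunTime.box(classOf[Int])     // classOf[java.lang.Integer]
    ScalaRunTime.box(classOf[Unit])    // classOf[scala.runtime.BoxedUnit]
    ScalaRunTime.box(classOf[String])  // unchanged: classOf[String]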
diff --git a/src/library/scala/runtime/ShortRef.java b/src/library/scala/runtime/ShortRef.java
index 461b521e5f..e5e8de3d8b 100644
--- a/src/library/scala/runtime/ShortRef.java
+++ b/src/library/scala/runtime/ShortRef.java
@@ -17,4 +17,7 @@ public class ShortRef implements java.io.Serializable {
public short elem;
public ShortRef(short elem) { this.elem = elem; }
public String toString() { return java.lang.Short.toString(elem); }
+
+ public static ShortRef create(short e) { return new ShortRef(e); }
+ public static ShortRef zero() { return new ShortRef((short)0); }
}
diff --git a/src/library/scala/runtime/StringAdd.scala b/src/library/scala/runtime/StringAdd.scala
index 1456d9a4e4..d5b51a6e92 100644
--- a/src/library/scala/runtime/StringAdd.scala
+++ b/src/library/scala/runtime/StringAdd.scala
@@ -6,7 +6,9 @@
** **
\* */
-package scala.runtime
+package scala
+package runtime
+
/** A wrapper class that adds string concatenation `+` to any value */
@deprecated("Use Predef.StringAdd", "2.11.0")
diff --git a/src/library/scala/runtime/StringFormat.scala b/src/library/scala/runtime/StringFormat.scala
index 21e5efd1fc..de32ac7e86 100644
--- a/src/library/scala/runtime/StringFormat.scala
+++ b/src/library/scala/runtime/StringFormat.scala
@@ -6,7 +6,9 @@
** **
\* */
-package scala.runtime
+package scala
+package runtime
+
/** A wrapper class that adds a `formatted` operation to any value
*/
diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala
index bde69a0f54..b28f6d4269 100644
--- a/src/library/scala/runtime/Tuple2Zipped.scala
+++ b/src/library/scala/runtime/Tuple2Zipped.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
import scala.collection.{ TraversableLike, IterableLike }
import scala.collection.generic.{ CanBuildFrom => CBF }
diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala
index 34da42462a..7c501380a3 100644
--- a/src/library/scala/runtime/Tuple3Zipped.scala
+++ b/src/library/scala/runtime/Tuple3Zipped.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
import scala.collection.{ TraversableLike, IterableLike }
import scala.collection.generic.{ CanBuildFrom => CBF }
diff --git a/src/library/scala/runtime/VolatileBooleanRef.java b/src/library/scala/runtime/VolatileBooleanRef.java
index e3bd182345..ef5b691118 100755
--- a/src/library/scala/runtime/VolatileBooleanRef.java
+++ b/src/library/scala/runtime/VolatileBooleanRef.java
@@ -17,4 +17,7 @@ public class VolatileBooleanRef implements java.io.Serializable {
volatile public boolean elem;
public VolatileBooleanRef(boolean elem) { this.elem = elem; }
public String toString() { return String.valueOf(elem); }
+
+ public static VolatileBooleanRef create(boolean e) { return new VolatileBooleanRef(e); }
+ public static VolatileBooleanRef zero() { return new VolatileBooleanRef(false); }
}
diff --git a/src/library/scala/runtime/VolatileByteRef.java b/src/library/scala/runtime/VolatileByteRef.java
index 034b003017..d792b0a386 100755
--- a/src/library/scala/runtime/VolatileByteRef.java
+++ b/src/library/scala/runtime/VolatileByteRef.java
@@ -17,4 +17,7 @@ public class VolatileByteRef implements java.io.Serializable {
volatile public byte elem;
public VolatileByteRef(byte elem) { this.elem = elem; }
public String toString() { return java.lang.Byte.toString(elem); }
+
+ public static VolatileByteRef create(byte e) { return new VolatileByteRef(e); }
+ public static VolatileByteRef zero() { return new VolatileByteRef((byte)0); }
}
diff --git a/src/library/scala/runtime/VolatileCharRef.java b/src/library/scala/runtime/VolatileCharRef.java
index f90648c5e9..555b171283 100755
--- a/src/library/scala/runtime/VolatileCharRef.java
+++ b/src/library/scala/runtime/VolatileCharRef.java
@@ -17,4 +17,7 @@ public class VolatileCharRef implements java.io.Serializable {
volatile public char elem;
public VolatileCharRef(char elem) { this.elem = elem; }
public String toString() { return java.lang.Character.toString(elem); }
+
+ public static VolatileCharRef create(char e) { return new VolatileCharRef(e); }
+ public static VolatileCharRef zero() { return new VolatileCharRef((char)0); }
}
diff --git a/src/library/scala/runtime/VolatileDoubleRef.java b/src/library/scala/runtime/VolatileDoubleRef.java
index d47c9578c6..1932055c6a 100755
--- a/src/library/scala/runtime/VolatileDoubleRef.java
+++ b/src/library/scala/runtime/VolatileDoubleRef.java
@@ -16,4 +16,7 @@ public class VolatileDoubleRef implements java.io.Serializable {
volatile public double elem;
public VolatileDoubleRef(double elem) { this.elem = elem; }
public String toString() { return java.lang.Double.toString(elem); }
+
+ public static VolatileDoubleRef create(double e) { return new VolatileDoubleRef(e); }
+ public static VolatileDoubleRef zero() { return new VolatileDoubleRef(0); }
}
diff --git a/src/library/scala/runtime/VolatileFloatRef.java b/src/library/scala/runtime/VolatileFloatRef.java
index 97da95f7cc..3a81be1146 100755
--- a/src/library/scala/runtime/VolatileFloatRef.java
+++ b/src/library/scala/runtime/VolatileFloatRef.java
@@ -17,4 +17,7 @@ public class VolatileFloatRef implements java.io.Serializable {
volatile public float elem;
public VolatileFloatRef(float elem) { this.elem = elem; }
public String toString() { return java.lang.Float.toString(elem); }
+
+ public static VolatileFloatRef create(float e) { return new VolatileFloatRef(e); }
+ public static VolatileFloatRef zero() { return new VolatileFloatRef(0); }
}
diff --git a/src/library/scala/runtime/VolatileIntRef.java b/src/library/scala/runtime/VolatileIntRef.java
index e8a68a1a9a..ae015bc8b1 100755
--- a/src/library/scala/runtime/VolatileIntRef.java
+++ b/src/library/scala/runtime/VolatileIntRef.java
@@ -16,4 +16,7 @@ public class VolatileIntRef implements java.io.Serializable {
volatile public int elem;
public VolatileIntRef(int elem) { this.elem = elem; }
public String toString() { return java.lang.Integer.toString(elem); }
+
+ public static VolatileIntRef create(int e) { return new VolatileIntRef(e); }
+ public static VolatileIntRef zero() { return new VolatileIntRef(0); }
}
diff --git a/src/library/scala/runtime/VolatileLongRef.java b/src/library/scala/runtime/VolatileLongRef.java
index 80e627cd4b..e596f5aa69 100755
--- a/src/library/scala/runtime/VolatileLongRef.java
+++ b/src/library/scala/runtime/VolatileLongRef.java
@@ -17,4 +17,7 @@ public class VolatileLongRef implements java.io.Serializable {
volatile public long elem;
public VolatileLongRef(long elem) { this.elem = elem; }
public String toString() { return java.lang.Long.toString(elem); }
+
+ public static VolatileLongRef create(long e) { return new VolatileLongRef(e); }
+ public static VolatileLongRef zero() { return new VolatileLongRef(0); }
}
diff --git a/src/library/scala/runtime/VolatileObjectRef.java b/src/library/scala/runtime/VolatileObjectRef.java
index 848b0632ea..9f1f3ac0cf 100755
--- a/src/library/scala/runtime/VolatileObjectRef.java
+++ b/src/library/scala/runtime/VolatileObjectRef.java
@@ -17,4 +17,7 @@ public class VolatileObjectRef<T> implements java.io.Serializable {
volatile public T elem;
public VolatileObjectRef(T elem) { this.elem = elem; }
public String toString() { return String.valueOf(elem); }
+
+ public static <U> VolatileObjectRef create(U e) { return new VolatileObjectRef(e); }
+ public static VolatileObjectRef zero() { return new VolatileObjectRef(null); }
}
diff --git a/src/library/scala/runtime/VolatileShortRef.java b/src/library/scala/runtime/VolatileShortRef.java
index 4e91d0dc70..0a2825941f 100755
--- a/src/library/scala/runtime/VolatileShortRef.java
+++ b/src/library/scala/runtime/VolatileShortRef.java
@@ -17,4 +17,7 @@ public class VolatileShortRef implements java.io.Serializable {
volatile public short elem;
public VolatileShortRef(short elem) { this.elem = elem; }
public String toString() { return java.lang.Short.toString(elem); }
+
+ public static VolatileShortRef create(short e) { return new VolatileShortRef(e); }
+ public static VolatileShortRef zero() { return new VolatileShortRef((short)0); }
}
diff --git a/src/library/scala/runtime/WorksheetSupport.scala b/src/library/scala/runtime/WorksheetSupport.scala
index 016a0d04e0..2a0064494b 100644
--- a/src/library/scala/runtime/WorksheetSupport.scala
+++ b/src/library/scala/runtime/WorksheetSupport.scala
@@ -21,7 +21,7 @@ object WorksheetSupport {
private var lastFlush: Long = 0L
private var col = -1
override def write(b: Array[Byte], off: Int, len: Int) = {
- for (idx <- off until (off + len min b.length)) writeOne(b(idx))
+ for (idx <- off until (off + len min b.length)) writeOne(b(idx).toInt)
flush()
}
override def write(c: Int) {
diff --git a/src/library/scala/sys/BooleanProp.scala b/src/library/scala/sys/BooleanProp.scala
index e3c25bbd11..74b0a9077b 100644
--- a/src/library/scala/sys/BooleanProp.scala
+++ b/src/library/scala/sys/BooleanProp.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
import scala.language.implicitConversions
diff --git a/src/library/scala/sys/PropImpl.scala b/src/library/scala/sys/PropImpl.scala
index b50e0e18a7..3b451ab1d9 100644
--- a/src/library/scala/sys/PropImpl.scala
+++ b/src/library/scala/sys/PropImpl.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
import scala.collection.mutable
diff --git a/src/library/scala/sys/ShutdownHookThread.scala b/src/library/scala/sys/ShutdownHookThread.scala
index a8f4871870..6018ac852b 100644
--- a/src/library/scala/sys/ShutdownHookThread.scala
+++ b/src/library/scala/sys/ShutdownHookThread.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
/** A minimal Thread wrapper to enhance shutdown hooks. It knows
* how to unregister itself.
diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala
index 294be5cd71..39f66f5030 100644
--- a/src/library/scala/sys/SystemProperties.scala
+++ b/src/library/scala/sys/SystemProperties.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
import scala.collection.{ mutable, Iterator }
import scala.collection.JavaConverters._
diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala
index e2c4f13830..b31bbf0540 100644
--- a/src/library/scala/sys/process/BasicIO.scala
+++ b/src/library/scala/sys/process/BasicIO.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
package process
import processInternal._
@@ -161,21 +162,29 @@ object BasicIO {
*/
def processFully(processLine: String => Unit): InputStream => Unit = in => {
val reader = new BufferedReader(new InputStreamReader(in))
- processLinesFully(processLine)(reader.readLine)
- reader.close()
+ try processLinesFully(processLine)(reader.readLine)
+ finally reader.close()
}
/** Calls `processLine` with the result of `readLine` until the latter returns
- * `null`.
- */
+ * `null` or the current thread is interrupted.
+ */
def processLinesFully(processLine: String => Unit)(readLine: () => String) {
- def readFully() {
- val line = readLine()
- if (line != null) {
- processLine(line)
- readFully()
+ def working = (Thread.currentThread.isInterrupted == false)
+ def halting = { Thread.currentThread.interrupt(); null }
+ def readFully(): Unit =
+ if (working) {
+ val line =
+ try readLine()
+ catch {
+ case _: InterruptedException => halting
+ case e: IOException if !working => halting
+ }
+ if (line != null) {
+ processLine(line)
+ readFully()
+ }
}
- }
readFully()
}
diff --git a/src/library/scala/sys/process/Process.scala b/src/library/scala/sys/process/Process.scala
index 715b364e08..402183a1f0 100644
--- a/src/library/scala/sys/process/Process.scala
+++ b/src/library/scala/sys/process/Process.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
package process
import processInternal._
diff --git a/src/library/scala/sys/process/ProcessBuilder.scala b/src/library/scala/sys/process/ProcessBuilder.scala
index 5d89e45001..c8e548c76b 100644
--- a/src/library/scala/sys/process/ProcessBuilder.scala
+++ b/src/library/scala/sys/process/ProcessBuilder.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
package process
import processInternal._
diff --git a/src/library/scala/sys/process/ProcessBuilderImpl.scala b/src/library/scala/sys/process/ProcessBuilderImpl.scala
index 91e267d5e4..adf6d1e724 100644
--- a/src/library/scala/sys/process/ProcessBuilderImpl.scala
+++ b/src/library/scala/sys/process/ProcessBuilderImpl.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
package process
import processInternal._
diff --git a/src/library/scala/sys/process/ProcessIO.scala b/src/library/scala/sys/process/ProcessIO.scala
index f5b26680d9..eedf667c88 100644
--- a/src/library/scala/sys/process/ProcessIO.scala
+++ b/src/library/scala/sys/process/ProcessIO.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
package process
import processInternal._
diff --git a/src/library/scala/sys/process/ProcessImpl.scala b/src/library/scala/sys/process/ProcessImpl.scala
index c64ba246fc..2b7fcdeb73 100644
--- a/src/library/scala/sys/process/ProcessImpl.scala
+++ b/src/library/scala/sys/process/ProcessImpl.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
package process
import processInternal._
@@ -222,8 +223,8 @@ private[process] trait ProcessImpl {
p.exitValue()
}
override def destroy() = {
- try{
- outputThreads foreach (_.stop())
+ try {
+ outputThreads foreach (_.interrupt()) // on destroy, don't bother consuming any more output
p.destroy()
}
finally inputThread.interrupt()
diff --git a/src/library/scala/sys/process/ProcessLogger.scala b/src/library/scala/sys/process/ProcessLogger.scala
index a4acb065d0..ae347221ef 100644
--- a/src/library/scala/sys/process/ProcessLogger.scala
+++ b/src/library/scala/sys/process/ProcessLogger.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
package process
import java.io._
diff --git a/src/library/scala/util/DynamicVariable.scala b/src/library/scala/util/DynamicVariable.scala
index 52cba6850d..963fe1c497 100644
--- a/src/library/scala/util/DynamicVariable.scala
+++ b/src/library/scala/util/DynamicVariable.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util
+package scala
+package util
import java.lang.InheritableThreadLocal
diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala
index 5cd35ab6d9..1ed3f4becb 100644
--- a/src/library/scala/util/Either.scala
+++ b/src/library/scala/util/Either.scala
@@ -8,7 +8,8 @@
-package scala.util
+package scala
+package util
import scala.language.implicitConversions
diff --git a/src/library/scala/util/MurmurHash.scala b/src/library/scala/util/MurmurHash.scala
index b82259c217..e05fe0875b 100644
--- a/src/library/scala/util/MurmurHash.scala
+++ b/src/library/scala/util/MurmurHash.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util
+package scala
+package util
/** An implementation of Austin Appleby's MurmurHash 3.0 algorithm
* (32 bit version); reference: http://code.google.com/p/smhasher
@@ -81,6 +82,7 @@ class MurmurHash[@specialized(Int,Long,Float,Double) T](seed: Int) extends (T =>
* needs to be called to finalize the hash.
*/
@deprecated("Use the object MurmurHash3 instead.", "2.10.0")
+// NOTE: Used by SBT 0.13.0-M2 and below
object MurmurHash {
// Magic values used for MurmurHash's 32 bit hash.
// Don't change these without consulting a hashing expert!
@@ -170,7 +172,7 @@ object MurmurHash {
k = nextMagicB(k)
j += 2
}
- if (j < s.length) h = extendHash(h,s.charAt(j),c,k)
+ if (j < s.length) h = extendHash(h,s.charAt(j).toInt,c,k)
finalizeHash(h)
}
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index cc145134c4..fef9cef246 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -7,7 +7,8 @@
\* */
-package scala.util
+package scala
+package util
import java.io.{ IOException, PrintWriter }
import java.util.jar.Attributes.{ Name => AttributeName }
@@ -59,6 +60,8 @@ private[scala] trait PropertiesTrait {
def envOrElse(name: String, alt: String) = Option(System getenv name) getOrElse alt
def envOrNone(name: String) = Option(System getenv name)
+ def envOrSome(name: String, alt: Option[String]) = envOrNone(name) orElse alt
+
// for values based on propFilename
def scalaPropOrElse(name: String, alt: String): String = scalaProps.getProperty(name, alt)
def scalaPropOrEmpty(name: String): String = scalaPropOrElse(name, "")
@@ -72,7 +75,7 @@ private[scala] trait PropertiesTrait {
* it is an RC, Beta, etc. or was built from source, or if the version
* cannot be read.
*/
- val releaseVersion =
+ val releaseVersion =
for {
v <- scalaPropOrNone("maven.version.number")
if !(v endsWith "-SNAPSHOT")
@@ -86,7 +89,7 @@ private[scala] trait PropertiesTrait {
* @return Some(version) if this is a non-final version, None if this
* is a final release or the version cannot be read.
*/
- val developmentVersion =
+ val developmentVersion =
for {
v <- scalaPropOrNone("maven.version.number")
if v endsWith "-SNAPSHOT"
@@ -119,8 +122,7 @@ private[scala] trait PropertiesTrait {
*/
def lineSeparator = propOrElse("line.separator", "\n")
- /** Various well-known properties.
- */
+ /* Various well-known properties. */
def javaClassPath = propOrEmpty("java.class.path")
def javaHome = propOrEmpty("java.home")
def javaVendor = propOrEmpty("java.vendor")
@@ -136,10 +138,16 @@ private[scala] trait PropertiesTrait {
def userHome = propOrEmpty("user.home")
def userName = propOrEmpty("user.name")
- /** Some derived values.
- */
+ /* Some derived values. */
+ /** Returns `true` iff the underlying operating system is a version of Microsoft Windows. */
def isWin = osName startsWith "Windows"
- def isMac = javaVendor startsWith "Apple"
+ // See http://mail.openjdk.java.net/pipermail/macosx-port-dev/2012-November/005148.html for
+ // the reason why we don't follow developer.apple.com/library/mac/#technotes/tn2002/tn2110.
+ /** Returns `true` iff the underlying operating system is a version of Apple Mac OSX. */
+ def isMac = osName startsWith "Mac OS X"
+
+ /* Some runtime values. */
+ private[scala] def isAvian = javaVmName contains "Avian"
// This is looking for javac, tools.jar, etc.
// Tries JDK_HOME first, then the more common but likely jre JAVA_HOME,
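A small usage sketch for the additions above (scala.util.Properties is the public face of this trait; VISUAL and EDITOR are just example environment variables):

    import scala.util.Properties

    val editor = Properties.envOrSome("VISUAL", Properties.envOrNone("EDITOR"))
    if (Properties.isMac) println("running on Mac OS X")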
diff --git a/src/library/scala/util/Random.scala b/src/library/scala/util/Random.scala
index b3a8617f15..8d68c5be38 100644
--- a/src/library/scala/util/Random.scala
+++ b/src/library/scala/util/Random.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util
+package scala
+package util
import scala.collection.mutable.ArrayBuffer
import scala.collection.generic.CanBuildFrom
diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala
index fbfeb7d4d9..89db57a55e 100644
--- a/src/library/scala/util/Try.scala
+++ b/src/library/scala/util/Try.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util
+package scala
+package util
import scala.collection.Seq
import scala.util.control.NonFatal
diff --git a/src/library/scala/util/control/Breaks.scala b/src/library/scala/util/control/Breaks.scala
index 89e1b58d95..5524b10afa 100644
--- a/src/library/scala/util/control/Breaks.scala
+++ b/src/library/scala/util/control/Breaks.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.control
+package scala
+package util.control
/** A class that can be instantiated for the break control abstraction.
* Example usage:
diff --git a/src/library/scala/util/control/ControlThrowable.scala b/src/library/scala/util/control/ControlThrowable.scala
index 33c90c5815..7ed3d95cd3 100644
--- a/src/library/scala/util/control/ControlThrowable.scala
+++ b/src/library/scala/util/control/ControlThrowable.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.control
+package scala
+package util.control
/** A marker trait indicating that the `Throwable` it is mixed into is
* intended for flow control.
diff --git a/src/library/scala/util/control/Exception.scala b/src/library/scala/util/control/Exception.scala
index b97914c4b1..be6d03a145 100644
--- a/src/library/scala/util/control/Exception.scala
+++ b/src/library/scala/util/control/Exception.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util
+package scala
+package util
package control
import scala.collection.immutable.List
diff --git a/src/library/scala/util/control/NonFatal.scala b/src/library/scala/util/control/NonFatal.scala
index 74478f2a49..11fb988e8e 100644
--- a/src/library/scala/util/control/NonFatal.scala
+++ b/src/library/scala/util/control/NonFatal.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.control
+package scala
+package util.control
/**
* Extractor of non-fatal Throwables. Will not match fatal errors like `VirtualMachineError`
diff --git a/src/library/scala/util/control/TailCalls.scala b/src/library/scala/util/control/TailCalls.scala
index 955cee7657..ba3044c718 100644
--- a/src/library/scala/util/control/TailCalls.scala
+++ b/src/library/scala/util/control/TailCalls.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.control
+package scala
+package util.control
/** Methods exported by this object implement tail calls via trampolining.
* Tail calling methods have to return their result using `done` or call the
diff --git a/src/library/scala/util/hashing/ByteswapHashing.scala b/src/library/scala/util/hashing/ByteswapHashing.scala
index a96945788c..470479725b 100644
--- a/src/library/scala/util/hashing/ByteswapHashing.scala
+++ b/src/library/scala/util/hashing/ByteswapHashing.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.hashing
+package scala
+package util.hashing
@@ -16,20 +17,20 @@ package scala.util.hashing
/** A fast multiplicative hash by Phil Bagwell.
*/
final class ByteswapHashing[T] extends Hashing[T] {
-
+
def hash(v: T) = byteswap32(v.##)
-
+
}
object ByteswapHashing {
-
+
private class Chained[T](h: Hashing[T]) extends Hashing[T] {
def hash(v: T) = byteswap32(h.hash(v))
}
-
+
/** Composes another `Hashing` with the Byteswap hash.
*/
def chain[T](h: Hashing[T]): Hashing[T] = new Chained(h)
-
+
}
diff --git a/src/library/scala/util/hashing/Hashing.scala b/src/library/scala/util/hashing/Hashing.scala
index b57f858bed..2b72c1dbe3 100644
--- a/src/library/scala/util/hashing/Hashing.scala
+++ b/src/library/scala/util/hashing/Hashing.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.hashing
+package scala
+package util.hashing
import scala.annotation.implicitNotFound
diff --git a/src/library/scala/util/hashing/MurmurHash3.scala b/src/library/scala/util/hashing/MurmurHash3.scala
index 0aa7e6f1cb..c85664349e 100644
--- a/src/library/scala/util/hashing/MurmurHash3.scala
+++ b/src/library/scala/util/hashing/MurmurHash3.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.hashing
+package scala
+package util.hashing
import java.lang.Integer.{ rotateLeft => rotl }
@@ -76,7 +77,7 @@ private[hashing] class MurmurHash3 {
h = mix(h, data)
i += 2
}
- if (i < str.length) h = mixLast(h, str.charAt(i))
+ if (i < str.length) h = mixLast(h, str.charAt(i).toInt)
finalizeHash(h, str.length)
}
diff --git a/src/library/scala/util/hashing/package.scala b/src/library/scala/util/hashing/package.scala
index 7d38f151f9..2c8e0154fc 100644
--- a/src/library/scala/util/hashing/package.scala
+++ b/src/library/scala/util/hashing/package.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util
+package scala
+package util
@@ -14,7 +15,7 @@ package scala.util
package object hashing {
-
+
/** Fast multiplicative hash with a nice distribution.
*/
def byteswap32(v: Int): Int = {
@@ -22,7 +23,7 @@ package object hashing {
hc = java.lang.Integer.reverseBytes(hc)
hc * 0x9e3775cd
}
-
+
/** Fast multiplicative hash with a nice distribution
* for 64-bit values.
*/
@@ -31,5 +32,5 @@ package object hashing {
hc = java.lang.Long.reverseBytes(hc)
hc * 0x9e3775cd9e3775cdL
}
-
+
}
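
The hashing hunks above are whitespace cleanups plus the same package-clause rewrite; `byteswap32` itself is unchanged. A small usage sketch (hypothetical demo object, assuming Scala 2.10+):

    import scala.util.hashing.byteswap32

    object ByteswapDemo extends App {
      // byteswap32 multiplies, byte-reverses, and multiplies again,
      // spreading entropy into the low bits before masking into a bucket.
      val tableSize = 64                          // power of two, so the mask works
      val bucket = byteswap32("key".##) & (tableSize - 1)
      println(s"bucket = $bucket")
    }
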
diff --git a/src/library/scala/util/logging/ConsoleLogger.scala b/src/library/scala/util/logging/ConsoleLogger.scala
deleted file mode 100644
index 74f058b4ec..0000000000
--- a/src/library/scala/util/logging/ConsoleLogger.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.util.logging
-
-/**
- * The trait `ConsoleLogger` is mixed into a concrete class who
- * has class `Logged` among its base classes.
- *
- * @author Burak Emir
- * @version 1.0
- */
-@deprecated("This class will be removed.", "2.10.0")
-trait ConsoleLogger extends Logged {
-
- /** logs argument to Console using [[scala.Console.println]]
- */
- override def log(msg: String): Unit = Console.println(msg)
-}
diff --git a/src/library/scala/util/logging/Logged.scala b/src/library/scala/util/logging/Logged.scala
deleted file mode 100644
index f2661d3206..0000000000
--- a/src/library/scala/util/logging/Logged.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.util.logging
-
-/** Mixing in Logged indicates that a class provides support for logging.
- *
- * For instance:
- * {{{
- * // The developer of the library writes:
- * class MyClass extends Logged {
- * // do stuff, call log
- * }
- *
- * // The user of the library instantiates:
- * val x = new MyClass() with ConsoleLogger
- * }}}
- * and the logging is sent to the [[scala.util.logging.ConsoleLogger]] object.
- */
-@deprecated("This class will be removed.", "2.10.0")
-trait Logged {
- /** This method should log the message given as argument somewhere
- * as a side-effect.
- *
- * @param msg message to be logged
- */
- def log(msg: String): Unit = {}
-}
diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala
index 981d9af02f..8eac0a2520 100644
--- a/src/library/scala/util/matching/Regex.scala
+++ b/src/library/scala/util/matching/Regex.scala
@@ -28,7 +28,8 @@
* into a [[java.lang.String]].
*
*/
-package scala.util.matching
+package scala
+package util.matching
import scala.collection.AbstractIterator
import java.util.regex.{ Pattern, Matcher }
@@ -204,6 +205,20 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
else if (m.matcher.pattern == this.pattern) Some(1 to m.groupCount map m.group)
else unapplySeq(m.matched)
+ /** Tries to match target.
+ * @param target The string to match
+ * @return The matches
+ */
+ @deprecated("Extracting a match result from anything but a CharSequence or Match is deprecated", "2.11.0")
+ def unapplySeq(target: Any): Option[List[String]] = target match {
+ case s: CharSequence =>
+ val m = pattern matcher s
+ if (runMatcher(m)) Some((1 to m.groupCount).toList map m.group)
+ else None
+ case m: Match => unapplySeq(m.matched)
+ case _ => None
+ }
+
// @see UnanchoredRegex
protected def runMatcher(m: Matcher) = m.matches()
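
The `unapplySeq(target: Any)` overload added above appears to exist only so that `Any`-typed extractions keep compiling; `CharSequence` and `Match` scrutinees go through the non-deprecated overloads. A short sketch of the common, non-deprecated use (hypothetical demo object):

    import scala.util.matching.Regex

    object RegexExtractDemo extends App {
      val Date = new Regex("""(\d{4})-(\d{2})-(\d{2})""")

      // The scrutinee is a String (a CharSequence), so the typed overload is
      // selected; only a scrutinee statically typed as Any would fall back to
      // the overload deprecated in this diff.
      "2013-06-01" match {
        case Date(year, month, day) => println(s"$year-$month-$day")
        case _                      => println("no match")
      }
    }
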
diff --git a/src/library/scala/util/parsing/ast/AbstractSyntax.scala b/src/library/scala/util/parsing/ast/AbstractSyntax.scala
deleted file mode 100644
index 30b20d71c6..0000000000
--- a/src/library/scala/util/parsing/ast/AbstractSyntax.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.util.parsing.ast
-
-import scala.util.parsing.input.Positional
-
-/** This component provides the core abstractions for representing an Abstract Syntax Tree
- *
- * @author Adriaan Moors
- */
-@deprecated("This class will be removed", "2.10.0")
-trait AbstractSyntax {
- /** The base class for elements of the abstract syntax tree.
- */
- trait Element extends Positional
-
- /** The base class for elements in the AST that represent names [[scala.util.parsing.ast.Binders]].
- */
- trait NameElement extends Element {
- def name: String
- override def equals(that: Any): Boolean = that match {
- case n: NameElement => n.name == name
- case _ => false
- }
- }
-}
diff --git a/src/library/scala/util/parsing/ast/Binders.scala b/src/library/scala/util/parsing/ast/Binders.scala
deleted file mode 100644
index a6ad1907c2..0000000000
--- a/src/library/scala/util/parsing/ast/Binders.scala
+++ /dev/null
@@ -1,347 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.util.parsing.ast
-
-import scala.collection.AbstractIterable
-import scala.collection.mutable
-import scala.language.implicitConversions
-
-//DISCLAIMER: this code is highly experimental!
-
- // TODO: avoid clashes when substituting
- // TODO: check binders in the same scope are distinct
-
-/** This trait provides the core ''Scrap-Your-Boilerplate'' abstractions as
- * well as implementations for common datatypes.
- *
- * (Based on Ralf Lämmel's [[http://homepages.cwi.nl/~ralf/syb3/ SYB papers]].)
- *
- * @author Adriaan Moors
- */
-@deprecated("This class will be removed", "2.10.0")
-trait Mappable {
- trait Mapper { def apply[T <% Mappable[T]](x: T): T } /* TODO: having type `Forall T. T => T` is too strict:
- sometimes we want to allow `Forall T >: precision. T => T` for some type `precision`, so that,
- beneath a certain threshold, we have some leeway.
- concretely: to use gmap for substitution, we simply require that ast nodes are mapped to ast nodes,
- we can't require that the type is preserved precisely: a Name may map to e.g., a MethodCall
- */
-
- trait Mappable[T] {
- // one-layer traversal
- def gmap(f: Mapper): T
- // everywhere f x = f (gmapT (everywhere f) x)
- def everywhere(f: Mapper)(implicit c: T => Mappable[T]): T =
- f(gmap(new Mapper { def apply[T <% Mappable[T]](x: T): T = x.everywhere(f)}))
- }
-
- implicit def StringIsMappable(s: String): Mappable[String] =
- new Mappable[String] {
- def gmap(f: Mapper): String = f(s)
- }
-
- implicit def ListIsMappable[t <% Mappable[t]](xs: List[t]): Mappable[List[t]] =
- new Mappable[List[t]] {
- def gmap(f: Mapper): List[t] = (for (x <- xs) yield f(x)).toList
- }
-
- implicit def OptionIsMappable[t <% Mappable[t]](xs: Option[t]): Mappable[Option[t]] =
- new Mappable[Option[t]] {
- def gmap(f: Mapper): Option[t] = (for (x <- xs) yield f(x))
- }
-}
-
-/** This component provides functionality for enforcing variable binding
- * during parse-time.
- *
- * When parsing simple languages, like Featherweight Scala, these parser
- * combinators will fully enforce the binding discipline. When names are
- * allowed to be left unqualified, these mechanisms would have to be
- * complemented by an extra phase that resolves names that couldn't be
- * resolved using the naive binding rules. (Maybe some machinery to
- * model `implicit` binders (e.g., `this` and imported qualifiers)
- * and selection on a binder will suffice?)
- *
- * @author Adriaan Moors
- */
-trait Binders extends AbstractSyntax with Mappable {
- /** A `Scope` keeps track of one or more syntactic elements that represent bound names.
- * The elements it contains share the same scope and must all be distinct, as determined by `==`.
- *
- * A `NameElement` `n` in the AST that is conceptually bound by a `Scope` `s`, is replaced by a
- * `BoundElement(n, s)`. (For example, in `val x:Int=x+1`, the first `x` is modelled by a
- * Scope `s` that contains `x` and the second `x` is represented by a `BoundElement(x, s)`)
- * The term (`x+1`) in scope of the Scope becomes an `UnderBinder(s, x+1)`.
- *
- * A `NameElement` `n` is bound by a `Scope` `s` if it is wrapped as a `BoundElement(n, s)`, and
- * `s` has a binder element that is semantically equal (`equals` or `==`) to `n`.
- *
- * A `Scope` is represented textually by its list of binder elements, followed by the scope's `id`.
- * For example: `[x, y]!1` represents the scope with `id` `1` and binder elements `x` and `y`.
- * (`id` is solely used for this textual representation.)
- */
- class Scope[binderType <: NameElement] extends AbstractIterable[binderType] with Iterable[binderType] {
- private val substitution: mutable.Map[binderType, Element] =
- new mutable.LinkedHashMap[binderType, Element] // a LinkedHashMap is ordered by insertion order -- important!
-
- /** Returns a unique number identifying this Scope (only used for representation purposes). */
- val id: Int = _Binder.genId
-
- /** Returns the binders in this scope.
- * For a typical let-binding, this is just the variable name. For an argument list to a method body,
- * there is one binder per formal argument.
- */
- def iterator = substitution.keysIterator
-
- /** Return the `i`th binder in this scope. */
- def apply(i: Int): binderType = this.iterator.toList(i)
-
- /** Returns true if this container has a binder equal (as determined by `==`) to `b`. */
- def binds(b: binderType): Boolean = substitution.contains(b)
-
- def indexFor(b: binderType): Option[Int] = {
- val iter = this.iterator.zipWithIndex
- for ((that, count) <- iter) {
- if (that.name == b.name) // TODO: why do name equals and structural equals differ?
- return Some(count + 1)
- else
- Console.println(that+"!="+b)
- }
-
- None
- }
-
- /** Adds a new binder, for example the variable name in a local variable declaration.
- *
- * @param b a new binder that is distinct from the existing binders in this scope,
- * and shares their conceptual scope. `canAddBinder(b)` must hold.
- * @return `binds(b)` and `getElementFor(b) eq b` will hold.
- */
- def addBinder(b: binderType) { substitution += Pair(b, b) }
-
- // TODO: strengthen this condition so that no binders may be added after this scope has been
- // linked to its `UnderBinder` (i.e., while parsing, BoundElements may be added to the Scope
- // associated to the UnderBinder, but after that, no changes are allowed, except for substitution)?
- /** `canAddElement` indicates whether `b` may be added to this scope.
- *
- *
- * @return true if `b` had not been added yet
- */
- def canAddBinder(b: binderType): Boolean = !binds(b)
-
- /** ''Replaces'' the bound occurrences of a contained binder by their new value.
- * The bound occurrences of `b` are not actually replaced; the scope keeps track
- * of a substitution that maps every binder to its current value. Since a `BoundElement` is
- * a proxy for the element it is bound to by its binder, `substitute` may thus be thought of
- * as replacing all the bound occurrences of the given binder `b` by their new value `value`.
- *
- * @param b the binder whose bound occurrences should be given a new value. `binds(b)` must hold.
- * @param value the new value for the bound occurrences of `b`
- * @return `getElementFor(b) eq value` will hold.
- */
- def substitute(b: binderType, value: Element): Unit = substitution(b) = value
-
- /** Returns the current value for the bound occurrences of `b`.
- *
- * @param b the contained binder whose current value should be returned `binds(b)` must hold.
- */
- def getElementFor(b: binderType): Element = substitution(b)
-
- override def toString: String = this.iterator.toList.mkString("[",", ","]")+"!"+id // TODO show substitution?
-
- /** Returns a list of strings that represent the binder elements, each tagged with this scope's id. */
- def bindersToString: List[String] = (for(b <- this.iterator) yield b+"!"+id).toList
-
- /** Return a new inheriting scope that won't check whether binding is respected until the scope is left (so as to support forward references). */
- def allowForwardRef: Scope[binderType] = this // TODO
-
- /** Return a nested scope -- binders entered into it won't be visible in this scope, but if this scope allows forward references,
- * the binding in the returned scope also does, and thus the check that all variables are bound is deferred until this scope is left.
- */
- def nested: Scope[binderType] = this // TODO
-
- def onEnter() {}
- def onLeft() {}
- }
-
-
- trait BindingSensitive {
- // would like to specify this as one method:
- // def alpha_==[t <: NameElement](other: BoundElement[t]): Boolean
- // def alpha_==[bt <: binderType, st <: elementT](other: UnderBinder[bt, st]): Boolean
- }
-
- /** A `BoundElement` is bound in a certain scope `scope`, which keeps track of the actual element that
- * `el` stands for.
- *
- * A `BoundElement` is represented textually by its bound element, followed by its scope's `id`.
- * For example: `x@1` represents the variable `x` that is bound in the scope with `id` `1`.
- *
- * @note `scope.binds(el)` holds before and after.
- */
- case class BoundElement[boundElement <: NameElement](el: boundElement, scope: Scope[boundElement]) extends NameElement with Proxy with BindingSensitive {
- /** Returns the element this `BoundElement` stands for.
- * The `Proxy` trait ensures `equals`, `hashCode` and `toString` are forwarded to
- * the result of this method.
- */
- def self: Element = scope.getElementFor(el)
-
- def name = self.asInstanceOf[NameElement].name // TODO: this is only safe when substituted to a NameElement, which certainly isn't required -- I want dynamic inheritance! :)
-
- // decorate element's representation with the id of the scope it's bound in
- override def toString: String = super.toString+"@"+scope.id
-
- def alpha_==[t <: NameElement](other: BoundElement[t]): Boolean = scope.indexFor(el) == other.scope.indexFor(other.el)
- }
-
- /** A variable that escaped its scope (i.e., a free variable) -- we don't deal very well with these yet. */
- class UnboundElement[N <: NameElement](private val el: N) extends NameElement {
- def name = el.name+"@??"
- }
-
- // this is useless, as Element is a supertype of BoundElement --> the coercion will never be inferred
- // if we knew a more specific type for the element that the bound element represents, this could make sense
- // implicit def BoundElementProxy[t <: NameElement](e: BoundElement[t]): Element = e.self
-
- /** Represents an element with variables that are bound in a certain scope. */
- class UnderBinder[binderType <: NameElement, elementT <% Mappable[elementT]](val scope: Scope[binderType], private[Binders] val element: elementT) extends Element with BindingSensitive {
- override def toString: String = "(" + scope.toString + ") in { "+element.toString+" }"
-
- /** Alpha-equivalence -- TODO
- * Returns true if the `element` of the `other` `UnderBinder` is equal to this `element` up to alpha-conversion.
- *
- * That is, regular equality is used for all elements but `BoundElement`s: such an element is
- * equal to a `BoundElement` in `other` if their binders are equal. Binders are equal if they
- * are at the same index in their respective scope.
- *
- * Example:
- * {{{
- * UnderBinder([x, y]!1, x@1) alpha_== UnderBinder([a, b]!2, a@2)
- * ! (UnderBinder([x, y]!1, y@1) alpha_== UnderBinder([a, b]!2, a@2))
- * }}}
- */
- /*def alpha_==[bt <: binderType, st <: elementT](other: UnderBinder[bt, st]): Boolean = {
- var result = true
-
- // TODO: generic zip or gmap2
- element.gmap2(other.element, new Mapper2 {
- def apply[s <% Mappable[s], t <% Mappable[t]](x :{s, t}): {s, t} = x match {
- case {be1: BoundElement[_], be2: BoundElement[_]} => result == result && be1.alpha_==(be2) // monadic gmap (cheating using state directly)
- case {ub1: UnderBinder[_, _], ub2: UnderBinder[_, _]} => result == result && be1.alpha_==(be2)
- case {a, b} => result == result && a.equals(b)
- }; x
- })
- }*/
-
- def cloneElementWithSubst(subst: Map[NameElement, NameElement]) = element.gmap(new Mapper { def apply[t <% Mappable[t]](x :t): t = x match{
- case substable: NameElement if subst.contains(substable) => subst.get(substable).asInstanceOf[t] // TODO: wrong... substitution is not (necessarily) the identity function
- //Console.println("substed: "+substable+"-> "+subst.get(substable)+")");
- case x => x // Console.println("subst: "+x+"(keys: "+subst.keys+")");x
- }})
-
- // TODO
- def cloneElementNoBoundElements = element.gmap(new Mapper { def apply[t <% Mappable[t]](x :t): t = x match{
- case BoundElement(el, _) => new UnboundElement(el).asInstanceOf[t] // TODO: precision stuff
- case x => x
- }})
-
- def extract: elementT = cloneElementNoBoundElements
- def extract(subst: Map[NameElement, NameElement]): elementT = cloneElementWithSubst(subst)
-
- /** Get a string representation of element, normally we don't allow direct access to element, but just getting a string representation is ok. */
- def elementToString: String = element.toString
- }
-
- //SYB type class instances
- implicit def UnderBinderIsMappable[bt <: NameElement <% Mappable[bt], st <% Mappable[st]](ub: UnderBinder[bt, st]): Mappable[UnderBinder[bt, st]] =
- new Mappable[UnderBinder[bt, st]] {
- def gmap(f: Mapper): UnderBinder[bt, st] = UnderBinder(f(ub.scope), f(ub.element))
- }
-
- implicit def ScopeIsMappable[bt <: NameElement <% Mappable[bt]](scope: Scope[bt]): Mappable[Scope[bt]] =
- new Mappable[Scope[bt]] {
- def gmap(f: Mapper): Scope[bt] = { val newScope = new Scope[bt]()
- for(b <- scope) newScope.addBinder(f(b))
- newScope
- }
- }
-
- implicit def NameElementIsMappable(self: NameElement): Mappable[NameElement] = new Mappable[NameElement] {
- def gmap(f: Mapper): NameElement = self match {
- case BoundElement(el, scope) => BoundElement(f(el), f(scope))
- case _ => UserNameElementIsMappable(self).gmap(f)
- }
- }
-
- def UserNameElementIsMappable[t <: NameElement](self: t): Mappable[t]
-
- object UnderBinder {
- def apply[binderType <: NameElement, elementT <% Mappable[elementT]](scope: Scope[binderType], element: elementT) = new UnderBinder(scope, element)
- def unit[bt <: NameElement, elementT <% Mappable[elementT]](x: elementT) = UnderBinder(new Scope[bt](), x)
- }
-
- /** If a list of `UnderBinder`s all have the same scope, they can be turned in to an `UnderBinder`
- * containing a list of the elements in the original `UnderBinder`.
- *
- * The name `sequence` comes from the fact that this method's type is equal to the type of monadic sequence.
- *
- * @note `!orig.isEmpty` implies `orig.forall(ub => ub.scope eq orig(0).scope)`
- *
- */
- def sequence[bt <: NameElement, st <% Mappable[st]](orig: List[UnderBinder[bt, st]]): UnderBinder[bt, List[st]] =
- if(orig.isEmpty) UnderBinder.unit(Nil)
- else UnderBinder(orig(0).scope, orig.map(_.element))
-
- // couldn't come up with a better name...
- def unsequence[bt <: NameElement, st <% Mappable[st]](orig: UnderBinder[bt, List[st]]): List[UnderBinder[bt, st]] =
- orig.element.map(sc => UnderBinder(orig.scope, sc))
-
- //TODO: more documentation
- /** An environment that maps a `NameElement` to the scope in which it is bound.
- * This can be used to model scoping during parsing.
- *
- * @note This class uses similar techniques as described by ''Burak Emir'' in
- * [[http://library.epfl.ch/theses/?nr=3899 Object-oriented pattern matching]],
- * but uses `==` instead of `eq`, thus types can't be unified in general.
- */
- abstract class BinderEnv {
- def apply[A <: NameElement](v: A): Option[Scope[A]]
- def extend[a <: NameElement](v : a, x : Scope[a]) = new BinderEnv {
- def apply[b <: NameElement](w : b): Option[Scope[b]] =
- if(w == v) Some(x.asInstanceOf[Scope[b]])
- else BinderEnv.this.apply(w)
- }
- }
-
- object EmptyBinderEnv extends BinderEnv {
- def apply[A <: NameElement](v: A): Option[Scope[A]] = None
- }
-
- // TODO: move this to some utility object higher in the scala hierarchy?
- /** Returns a given result, but executes the supplied closure before returning.
- * (The effect of this closure does not influence the returned value.)
- */
- trait ReturnAndDo[T]{
- /**
- * @param block code to be executed, purely for its side-effects
- */
- def andDo(block: => Unit): T
- }
-
- def return_[T](result: T): ReturnAndDo[T] =
- new ReturnAndDo[T] {
- val r = result
- def andDo(block: => Unit): T = {block; r}
- }
-
- private object _Binder {
- private var currentId = 0
- private[Binders] def genId = return_(currentId) andDo {currentId=currentId+1}
- }
-}
diff --git a/src/library/scala/util/parsing/combinator/ImplicitConversions.scala b/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
index ad06749c0d..0683ea927d 100644
--- a/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
+++ b/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
@@ -7,7 +7,8 @@
\* */
-package scala.util.parsing.combinator
+package scala
+package util.parsing.combinator
import scala.language.implicitConversions
diff --git a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala b/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
index 89832d3fb2..01288a182e 100644
--- a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
@@ -7,7 +7,8 @@
\* */
-package scala.util.parsing.combinator
+package scala
+package util.parsing.combinator
import scala.annotation.migration
diff --git a/src/library/scala/util/parsing/combinator/PackratParsers.scala b/src/library/scala/util/parsing/combinator/PackratParsers.scala
index cd0907e40f..a11dd18e62 100644
--- a/src/library/scala/util/parsing/combinator/PackratParsers.scala
+++ b/src/library/scala/util/parsing/combinator/PackratParsers.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.parsing.combinator
+package scala
+package util.parsing.combinator
import scala.util.parsing.input.{ Reader, Position }
import scala.collection.mutable
diff --git a/src/library/scala/util/parsing/combinator/Parsers.scala b/src/library/scala/util/parsing/combinator/Parsers.scala
index 542a781b60..4602c3cc53 100644
--- a/src/library/scala/util/parsing/combinator/Parsers.scala
+++ b/src/library/scala/util/parsing/combinator/Parsers.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.parsing.combinator
+package scala
+package util.parsing.combinator
import scala.util.parsing.input._
import scala.collection.mutable.ListBuffer
@@ -157,12 +158,6 @@ trait Parsers {
private lazy val lastNoSuccessVar = new DynamicVariable[Option[NoSuccess]](None)
- @deprecated("lastNoSuccess was not thread-safe and will be removed in 2.11.0", "2.10.0")
- def lastNoSuccess: NoSuccess = lastNoSuccessVar.value.orNull
-
- @deprecated("lastNoSuccess was not thread-safe and will be removed in 2.11.0", "2.10.0")
- def lastNoSuccess_=(x: NoSuccess): Unit = lastNoSuccessVar.value = Option(x)
-
/** A common super-class for unsuccessful parse results. */
sealed abstract class NoSuccess(val msg: String, override val next: Input) extends ParseResult[Nothing] { // when we don't care about the difference between Failure and Error
val successful = false
diff --git a/src/library/scala/util/parsing/combinator/RegexParsers.scala b/src/library/scala/util/parsing/combinator/RegexParsers.scala
index d17d0cac8d..8ebbc573ad 100644
--- a/src/library/scala/util/parsing/combinator/RegexParsers.scala
+++ b/src/library/scala/util/parsing/combinator/RegexParsers.scala
@@ -7,7 +7,8 @@
\* */
-package scala.util.parsing.combinator
+package scala
+package util.parsing.combinator
import java.util.regex.Pattern
import scala.util.matching.Regex
diff --git a/src/library/scala/util/parsing/combinator/lexical/Lexical.scala b/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
index c25c97278f..d8029d068f 100644
--- a/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
@@ -8,7 +8,8 @@
-package scala.util.parsing
+package scala
+package util.parsing
package combinator
package lexical
diff --git a/src/library/scala/util/parsing/combinator/lexical/Scanners.scala b/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
index f6a8daabd9..2e12915bb8 100644
--- a/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.parsing
+package scala
+package util.parsing
package combinator
package lexical
diff --git a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala b/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
index 2fbc1ec136..32d7502cda 100644
--- a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.parsing
+package scala
+package util.parsing
package combinator
package lexical
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
index d3ae0ea54a..5b9d14c9a7 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
@@ -7,7 +7,8 @@
\* */
-package scala.util.parsing
+package scala
+package util.parsing
package combinator
package syntactical
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
index 7283b01da4..adcf85da7a 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
@@ -8,7 +8,8 @@
-package scala.util.parsing
+package scala
+package util.parsing
package combinator
package syntactical
diff --git a/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala
index 1c4b25b999..b06babcd7e 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala
@@ -7,7 +7,8 @@
\* */
-package scala.util.parsing
+package scala
+package util.parsing
package combinator
package syntactical
diff --git a/src/library/scala/util/parsing/combinator/testing/RegexTest.scala b/src/library/scala/util/parsing/combinator/testing/RegexTest.scala
deleted file mode 100644
index 80e9b0df39..0000000000
--- a/src/library/scala/util/parsing/combinator/testing/RegexTest.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-
-package scala.util.parsing.combinator.testing
-
-import scala.util.parsing.combinator._
-import scala.util.parsing.input._
-import scala.language.postfixOps
-
-@deprecated("This class will be removed", "2.10.0")
-case class Ident(s: String)
-@deprecated("This class will be removed", "2.10.0")
-case class Number(n: Int)
-@deprecated("This class will be removed", "2.10.0")
-case class Str(s: String)
-
-@deprecated("This class will be removed", "2.10.0")
-object RegexTest extends RegexParsers {
- val ident: Parser[Any] = """[a-zA-Z_]\w*""".r ^^ (s => Ident(s))
- val number: Parser[Any] = """\d\d*""".r ^^ (s => Number(s.toInt))
- val string: Parser[Any] = "\".*\"".r ^^ (s => Str(s.substring(1, s.length - 1)))
- val parser = (ident | number | string)*
-
- def main(args: Array[String]) = {
- val in = args mkString " "
- println("\nin : "+in)
- println(phrase[Any](parser)(new CharSequenceReader(in)))
- }
-}
diff --git a/src/library/scala/util/parsing/combinator/testing/Tester.scala b/src/library/scala/util/parsing/combinator/testing/Tester.scala
deleted file mode 100644
index 3cdab2a885..0000000000
--- a/src/library/scala/util/parsing/combinator/testing/Tester.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.util.parsing.combinator.testing
-
-import scala.util.parsing.combinator.lexical.Lexical
-import scala.util.parsing.combinator.syntactical.TokenParsers
-
-/** Facilitates testing a given parser on various input strings.
- *
- * Example use:
- * {{{
- * val syntactic = new MyParsers
- * }}}
- * and
- * {{{
- * val parser = syntactic.term
- * }}}
- * (If `MyParsers` extends [[scala.util.parsing.combinator.syntactical.TokenParsers]]
- * with a parser called `term`.)
- *
- * @author Martin Odersky
- * @author Adriaan Moors
- */
-@deprecated("This class will be removed", "2.10.0")
-abstract class Tester {
-
- val syntactic: TokenParsers { val lexical: Lexical }
- val parser: syntactic.Parser[Any]
-
- /** Scans a String (using a `syntactic.lexical.Scanner`), parses it using
- * `phrase(parser)`, and prints the input and the parsed result to the
- * console.
- */
- def test(in: String) {
- Console.println("\nin : "+in)
- Console.println(syntactic.phrase[Any](parser)(new syntactic.lexical.Scanner(in)))
- }
-}
diff --git a/src/library/scala/util/parsing/combinator/token/StdTokens.scala b/src/library/scala/util/parsing/combinator/token/StdTokens.scala
index 605f53bf1d..a102d1541e 100644
--- a/src/library/scala/util/parsing/combinator/token/StdTokens.scala
+++ b/src/library/scala/util/parsing/combinator/token/StdTokens.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.parsing
+package scala
+package util.parsing
package combinator
package token
diff --git a/src/library/scala/util/parsing/combinator/token/Tokens.scala b/src/library/scala/util/parsing/combinator/token/Tokens.scala
index ff92802d77..5c3f1f95b5 100644
--- a/src/library/scala/util/parsing/combinator/token/Tokens.scala
+++ b/src/library/scala/util/parsing/combinator/token/Tokens.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.parsing
+package scala
+package util.parsing
package combinator
package token
diff --git a/src/library/scala/util/parsing/input/CharArrayReader.scala b/src/library/scala/util/parsing/input/CharArrayReader.scala
index 3ba69b229b..22530cb9aa 100644
--- a/src/library/scala/util/parsing/input/CharArrayReader.scala
+++ b/src/library/scala/util/parsing/input/CharArrayReader.scala
@@ -7,7 +7,8 @@
\* */
-package scala.util.parsing.input
+package scala
+package util.parsing.input
/** An object encapsulating basic character constants.
*
diff --git a/src/library/scala/util/parsing/input/CharSequenceReader.scala b/src/library/scala/util/parsing/input/CharSequenceReader.scala
index 02aa2ab7b8..8e7751cc82 100644
--- a/src/library/scala/util/parsing/input/CharSequenceReader.scala
+++ b/src/library/scala/util/parsing/input/CharSequenceReader.scala
@@ -7,7 +7,8 @@
\* */
-package scala.util.parsing.input
+package scala
+package util.parsing.input
/** An object encapsulating basic character constants.
*
diff --git a/src/library/scala/util/parsing/input/NoPosition.scala b/src/library/scala/util/parsing/input/NoPosition.scala
index 40584b3293..4a32264b79 100644
--- a/src/library/scala/util/parsing/input/NoPosition.scala
+++ b/src/library/scala/util/parsing/input/NoPosition.scala
@@ -8,7 +8,8 @@
-package scala.util.parsing.input
+package scala
+package util.parsing.input
/** Undefined position.
*
diff --git a/src/library/scala/util/parsing/input/OffsetPosition.scala b/src/library/scala/util/parsing/input/OffsetPosition.scala
index 6b00af4ce2..23f79c74d1 100644
--- a/src/library/scala/util/parsing/input/OffsetPosition.scala
+++ b/src/library/scala/util/parsing/input/OffsetPosition.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.parsing.input
+package scala
+package util.parsing.input
import scala.collection.mutable.ArrayBuffer
diff --git a/src/library/scala/util/parsing/input/PagedSeqReader.scala b/src/library/scala/util/parsing/input/PagedSeqReader.scala
index 9140bf2a4f..468f1f9a5f 100644
--- a/src/library/scala/util/parsing/input/PagedSeqReader.scala
+++ b/src/library/scala/util/parsing/input/PagedSeqReader.scala
@@ -7,7 +7,8 @@
\* */
-package scala.util.parsing.input
+package scala
+package util.parsing.input
import scala.collection.immutable.PagedSeq
diff --git a/src/library/scala/util/parsing/input/Position.scala b/src/library/scala/util/parsing/input/Position.scala
index 5e0cbbff5e..b7995a6471 100644
--- a/src/library/scala/util/parsing/input/Position.scala
+++ b/src/library/scala/util/parsing/input/Position.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.parsing.input
+package scala
+package util.parsing.input
/** `Position` is the base trait for objects describing a position in a `document`.
*
diff --git a/src/library/scala/util/parsing/input/Positional.scala b/src/library/scala/util/parsing/input/Positional.scala
index 87cb16eac5..cfde67cadd 100644
--- a/src/library/scala/util/parsing/input/Positional.scala
+++ b/src/library/scala/util/parsing/input/Positional.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.parsing.input
+package scala
+package util.parsing.input
/** A trait for objects that have a source position.
*
diff --git a/src/library/scala/util/parsing/input/Reader.scala b/src/library/scala/util/parsing/input/Reader.scala
index bded57bee1..9dbf08a7ca 100644
--- a/src/library/scala/util/parsing/input/Reader.scala
+++ b/src/library/scala/util/parsing/input/Reader.scala
@@ -8,7 +8,8 @@
-package scala.util.parsing.input
+package scala
+package util.parsing.input
/** An interface for streams of values that have positions.
diff --git a/src/library/scala/util/parsing/input/StreamReader.scala b/src/library/scala/util/parsing/input/StreamReader.scala
index ba7ab65845..30eb097fd7 100644
--- a/src/library/scala/util/parsing/input/StreamReader.scala
+++ b/src/library/scala/util/parsing/input/StreamReader.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.parsing.input
+package scala
+package util.parsing.input
import java.io.BufferedReader
import scala.collection.immutable.PagedSeq
@@ -22,7 +23,7 @@ object StreamReader {
*
* @param in the `java.io.Reader` that provides the underlying
* stream of characters for this Reader.
- */
+ */
def apply(in: java.io.Reader): StreamReader = {
new StreamReader(PagedSeq.fromReader(in), 0, 1)
}
diff --git a/src/library/scala/util/parsing/json/JSON.scala b/src/library/scala/util/parsing/json/JSON.scala
index 8f951d519a..b06dddf532 100644
--- a/src/library/scala/util/parsing/json/JSON.scala
+++ b/src/library/scala/util/parsing/json/JSON.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.parsing.json
+package scala
+package util.parsing.json
/**
* This object provides a simple interface to the JSON parser class.
@@ -27,6 +28,7 @@ package scala.util.parsing.json
*
* @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
*/
+@deprecated("This object will be removed.", "2.11.0")
object JSON extends Parser {
/**
diff --git a/src/library/scala/util/parsing/json/Lexer.scala b/src/library/scala/util/parsing/json/Lexer.scala
index 762c1352a7..7fc4e0bab6 100644
--- a/src/library/scala/util/parsing/json/Lexer.scala
+++ b/src/library/scala/util/parsing/json/Lexer.scala
@@ -8,7 +8,8 @@
-package scala.util.parsing.json
+package scala
+package util.parsing.json
import scala.util.parsing.combinator._
import scala.util.parsing.combinator.lexical._
@@ -17,6 +18,7 @@ import scala.util.parsing.input.CharArrayReader.EofCh
/**
* @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
*/
+@deprecated("This class will be removed.", "2.11.0")
class Lexer extends StdLexical with ImplicitConversions {
override def token: Parser[Token] =
diff --git a/src/library/scala/util/parsing/json/Parser.scala b/src/library/scala/util/parsing/json/Parser.scala
index bf1162000b..521dfc6612 100644
--- a/src/library/scala/util/parsing/json/Parser.scala
+++ b/src/library/scala/util/parsing/json/Parser.scala
@@ -8,7 +8,8 @@
-package scala.util.parsing.json
+package scala
+package util.parsing.json
import scala.util.parsing.combinator._
import scala.util.parsing.combinator.syntactical._
@@ -18,6 +19,7 @@ import scala.util.parsing.combinator.syntactical._
*
* @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
*/
+@deprecated("This class will be removed.", "2.11.0")
sealed abstract class JSONType {
/**
* This version of toString allows you to provide your own value
@@ -39,6 +41,7 @@ sealed abstract class JSONType {
*
* @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
*/
+@deprecated("This object will be removed.", "2.11.0")
object JSONFormat {
/**
* This type defines a function that can be used to
@@ -80,7 +83,7 @@ object JSONFormat {
* Per RFC4627, section 2.5, we're not technically required to
* encode the C1 codes, but we do to be safe.
*/
- case c if ((c >= '\u0000' && c <= '\u001f') || (c >= '\u007f' && c <= '\u009f')) => "\\u%04x".format(c: Int)
+ case c if ((c >= '\u0000' && c <= '\u001f') || (c >= '\u007f' && c <= '\u009f')) => "\\u%04x".format(c.toInt)
case c => c
}.mkString
}
@@ -90,6 +93,7 @@ object JSONFormat {
*
* @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
*/
+@deprecated("This class will be removed.", "2.11.0")
case class JSONObject (obj : Map[String,Any]) extends JSONType {
def toString (formatter : JSONFormat.ValueFormatter) =
"{" + obj.map({ case (k,v) => formatter(k.toString) + " : " + formatter(v) }).mkString(", ") + "}"
@@ -99,6 +103,7 @@ case class JSONObject (obj : Map[String,Any]) extends JSONType {
* Represents a JSON Array (list).
* @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
*/
+@deprecated("This class will be removed.", "2.11.0")
case class JSONArray (list : List[Any]) extends JSONType {
def toString (formatter : JSONFormat.ValueFormatter) =
"[" + list.map(formatter).mkString(", ") + "]"
@@ -109,6 +114,7 @@ case class JSONArray (list : List[Any]) extends JSONType {
*
* @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
*/
+@deprecated("This class will be removed.", "2.11.0")
class Parser extends StdTokenParsers with ImplicitConversions {
// Fill in abstract defs
type Tokens = Lexer
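
The JSON changes above only attach deprecation annotations and fix a char-to-int conversion; behaviour is otherwise unchanged. A minimal usage sketch of the now-deprecated entry point (hypothetical demo object):

    import scala.util.parsing.json.JSON

    object JsonDemo extends App {
      // parseFull returns Option[Any]: a Map for objects, a List for arrays,
      // and Double for numbers by default. Deprecated as of 2.11 per the
      // annotations added in this diff.
      JSON.parseFull("""{"name": "scala", "version": 2.11}""") match {
        case Some(m: Map[_, _]) => println(m)
        case _                  => println("parse failed")
      }
    }
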
diff --git a/src/library/scala/xml/Atom.scala b/src/library/scala/xml/Atom.scala
index cba0b96875..33e58ba7e7 100644
--- a/src/library/scala/xml/Atom.scala
+++ b/src/library/scala/xml/Atom.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** The class `Atom` provides an XML node for text (`PCDATA`).
* It is used in both non-bound and bound XML representations.
diff --git a/src/library/scala/xml/Attribute.scala b/src/library/scala/xml/Attribute.scala
index 234281163d..e4b2b69fc6 100644
--- a/src/library/scala/xml/Attribute.scala
+++ b/src/library/scala/xml/Attribute.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** This singleton object contains the `apply` and `unapply` methods for
* convenient construction and deconstruction.
diff --git a/src/library/scala/xml/Comment.scala b/src/library/scala/xml/Comment.scala
index ff4280d691..b8dccdcb16 100644
--- a/src/library/scala/xml/Comment.scala
+++ b/src/library/scala/xml/Comment.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** The class `Comment` implements an XML node for comments.
*
diff --git a/src/library/scala/xml/Document.scala b/src/library/scala/xml/Document.scala
index a064c4d8e8..9a725014fc 100644
--- a/src/library/scala/xml/Document.scala
+++ b/src/library/scala/xml/Document.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** A document information item (according to InfoSet spec). The comments
* are copied from the Infoset spec, only augmented with some information
diff --git a/src/library/scala/xml/Elem.scala b/src/library/scala/xml/Elem.scala
index fc32e45a5e..4200b7046a 100755
--- a/src/library/scala/xml/Elem.scala
+++ b/src/library/scala/xml/Elem.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** This singleton object contains the `apply` and `unapplySeq` methods for
* convenient construction and deconstruction. It is possible to deconstruct
diff --git a/src/library/scala/xml/EntityRef.scala b/src/library/scala/xml/EntityRef.scala
index a7b9835a7e..7a58831075 100644
--- a/src/library/scala/xml/EntityRef.scala
+++ b/src/library/scala/xml/EntityRef.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** The class `EntityRef` implements an XML node for entity references.
*
diff --git a/src/library/scala/xml/Equality.scala b/src/library/scala/xml/Equality.scala
index 20f2405967..021d185812 100644
--- a/src/library/scala/xml/Equality.scala
+++ b/src/library/scala/xml/Equality.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** In an attempt to contain the damage being inflicted on consistency by the
* ad hoc `equals` methods spread around `xml`, the logic is centralized and
diff --git a/src/library/scala/xml/Group.scala b/src/library/scala/xml/Group.scala
index 2ee3941aa1..e3af615008 100644
--- a/src/library/scala/xml/Group.scala
+++ b/src/library/scala/xml/Group.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** A hack to group XML nodes in one node for output.
*
diff --git a/src/library/scala/xml/MalformedAttributeException.scala b/src/library/scala/xml/MalformedAttributeException.scala
index 3431cb6765..d499ad3e10 100644
--- a/src/library/scala/xml/MalformedAttributeException.scala
+++ b/src/library/scala/xml/MalformedAttributeException.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
case class MalformedAttributeException(msg: String) extends RuntimeException(msg)
diff --git a/src/library/scala/xml/MetaData.scala b/src/library/scala/xml/MetaData.scala
index 3bf3ebb1c0..8b5ea187cb 100644
--- a/src/library/scala/xml/MetaData.scala
+++ b/src/library/scala/xml/MetaData.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
import Utility.sbToString
import scala.annotation.tailrec
diff --git a/src/library/scala/xml/NamespaceBinding.scala b/src/library/scala/xml/NamespaceBinding.scala
index 32c378f3ef..b320466976 100644
--- a/src/library/scala/xml/NamespaceBinding.scala
+++ b/src/library/scala/xml/NamespaceBinding.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
import Utility.sbToString
diff --git a/src/library/scala/xml/Node.scala b/src/library/scala/xml/Node.scala
index 7b1a97e8f2..e121284252 100755
--- a/src/library/scala/xml/Node.scala
+++ b/src/library/scala/xml/Node.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** This singleton object contains the `unapplySeq` method for
* convenient deconstruction.
diff --git a/src/library/scala/xml/NodeBuffer.scala b/src/library/scala/xml/NodeBuffer.scala
index 2db4338fb2..ae7c7b2bf8 100644
--- a/src/library/scala/xml/NodeBuffer.scala
+++ b/src/library/scala/xml/NodeBuffer.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/**
* This class acts as a Buffer for nodes. If it is used as a sequence of
diff --git a/src/library/scala/xml/NodeSeq.scala b/src/library/scala/xml/NodeSeq.scala
index d2efc947b1..b8022472fb 100644
--- a/src/library/scala/xml/NodeSeq.scala
+++ b/src/library/scala/xml/NodeSeq.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
import scala.collection.{ mutable, immutable, generic, SeqLike, AbstractSeq }
import mutable.{ Builder, ListBuffer }
diff --git a/src/library/scala/xml/Null.scala b/src/library/scala/xml/Null.scala
index b39ef5dc67..f763c023c4 100644
--- a/src/library/scala/xml/Null.scala
+++ b/src/library/scala/xml/Null.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
import Utility.isNameStart
import scala.collection.Iterator
diff --git a/src/library/scala/xml/PCData.scala b/src/library/scala/xml/PCData.scala
index 64818a9c00..31eea2b6d7 100644
--- a/src/library/scala/xml/PCData.scala
+++ b/src/library/scala/xml/PCData.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** This class (which is not used by all XML parsers, but always used by the
* XHTML one) represents parseable character data, which appeared as CDATA
diff --git a/src/library/scala/xml/PrefixedAttribute.scala b/src/library/scala/xml/PrefixedAttribute.scala
index 429cd682d6..4ab79c8677 100644
--- a/src/library/scala/xml/PrefixedAttribute.scala
+++ b/src/library/scala/xml/PrefixedAttribute.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
/** prefixed attributes always have a non-null namespace.
*
diff --git a/src/library/scala/xml/PrettyPrinter.scala b/src/library/scala/xml/PrettyPrinter.scala
index 720fe79b1d..9e01905357 100755
--- a/src/library/scala/xml/PrettyPrinter.scala
+++ b/src/library/scala/xml/PrettyPrinter.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
import Utility.sbToString
diff --git a/src/library/scala/xml/ProcInstr.scala b/src/library/scala/xml/ProcInstr.scala
index 64a9dd5ca3..189c1c6878 100644
--- a/src/library/scala/xml/ProcInstr.scala
+++ b/src/library/scala/xml/ProcInstr.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
/** an XML node for processing instructions (PI)
*
diff --git a/src/library/scala/xml/QNode.scala b/src/library/scala/xml/QNode.scala
index d4d3872181..f9e3f1854b 100644
--- a/src/library/scala/xml/QNode.scala
+++ b/src/library/scala/xml/QNode.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** This object provides an extractor method to match a qualified node with
* its namespace URI
diff --git a/src/library/scala/xml/SpecialNode.scala b/src/library/scala/xml/SpecialNode.scala
index 4c1b81c7ff..5fef8ef66c 100644
--- a/src/library/scala/xml/SpecialNode.scala
+++ b/src/library/scala/xml/SpecialNode.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** `SpecialNode` is a special XML node which represents either text
* `(PCDATA)`, a comment, a `PI`, or an entity ref.
diff --git a/src/library/scala/xml/Text.scala b/src/library/scala/xml/Text.scala
index 782c80f100..debea0c025 100644
--- a/src/library/scala/xml/Text.scala
+++ b/src/library/scala/xml/Text.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** The class `Text` implements an XML node for text (PCDATA).
* It is used in both non-bound and bound XML representations.
diff --git a/src/library/scala/xml/TextBuffer.scala b/src/library/scala/xml/TextBuffer.scala
index 0b96379d85..514b1701af 100644
--- a/src/library/scala/xml/TextBuffer.scala
+++ b/src/library/scala/xml/TextBuffer.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
import Utility.isSpace
diff --git a/src/library/scala/xml/TopScope.scala b/src/library/scala/xml/TopScope.scala
index 1ed1d50e10..474fbbbdb5 100644
--- a/src/library/scala/xml/TopScope.scala
+++ b/src/library/scala/xml/TopScope.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
/** top level namespace scope. only contains the predefined binding
* for the &quot;xml&quot; prefix which is bound to
diff --git a/src/library/scala/xml/TypeSymbol.scala b/src/library/scala/xml/TypeSymbol.scala
index f02c0263c0..fb371ee340 100644
--- a/src/library/scala/xml/TypeSymbol.scala
+++ b/src/library/scala/xml/TypeSymbol.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
abstract class TypeSymbol
diff --git a/src/library/scala/xml/Unparsed.scala b/src/library/scala/xml/Unparsed.scala
index ef80823611..bc190eb724 100644
--- a/src/library/scala/xml/Unparsed.scala
+++ b/src/library/scala/xml/Unparsed.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** An XML node for unparsed content. It will be output verbatim, all bets
* are off regarding wellformedness etc.
diff --git a/src/library/scala/xml/UnprefixedAttribute.scala b/src/library/scala/xml/UnprefixedAttribute.scala
index 2985591c95..6fa827da5f 100644
--- a/src/library/scala/xml/UnprefixedAttribute.scala
+++ b/src/library/scala/xml/UnprefixedAttribute.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
/** Unprefixed attributes have the null namespace, and no prefix field
*
diff --git a/src/library/scala/xml/Utility.scala b/src/library/scala/xml/Utility.scala
index fff27c6e30..9134476401 100755
--- a/src/library/scala/xml/Utility.scala
+++ b/src/library/scala/xml/Utility.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
import scala.collection.mutable
import parsing.XhtmlEntities
diff --git a/src/library/scala/xml/XML.scala b/src/library/scala/xml/XML.scala
index ec5e5e9e1c..020264e509 100755
--- a/src/library/scala/xml/XML.scala
+++ b/src/library/scala/xml/XML.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
import parsing.NoBindingFactoryAdapter
import factory.XMLLoader
diff --git a/src/library/scala/xml/Xhtml.scala b/src/library/scala/xml/Xhtml.scala
index 6730548b73..6a12c1a89a 100644
--- a/src/library/scala/xml/Xhtml.scala
+++ b/src/library/scala/xml/Xhtml.scala
@@ -1,5 +1,6 @@
-package scala.xml
+package scala
+package xml
import parsing.XhtmlEntities
import Utility.{ sbToString, isAtomAndNotText }
diff --git a/src/library/scala/xml/dtd/ContentModel.scala b/src/library/scala/xml/dtd/ContentModel.scala
index debdf37975..4007985dce 100644
--- a/src/library/scala/xml/dtd/ContentModel.scala
+++ b/src/library/scala/xml/dtd/ContentModel.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-
-
-package scala.xml
+package scala
+package xml
package dtd
import scala.xml.dtd.impl._
diff --git a/src/library/scala/xml/dtd/ContentModelParser.scala b/src/library/scala/xml/dtd/ContentModelParser.scala
index ca84bcad70..71b391c422 100644
--- a/src/library/scala/xml/dtd/ContentModelParser.scala
+++ b/src/library/scala/xml/dtd/ContentModelParser.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package dtd
/** Parser for regexps (content models in DTD element declarations) */
diff --git a/src/library/scala/xml/dtd/DTD.scala b/src/library/scala/xml/dtd/DTD.scala
index 1f8af3b59e..16a824fe2c 100644
--- a/src/library/scala/xml/dtd/DTD.scala
+++ b/src/library/scala/xml/dtd/DTD.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
package dtd
import scala.collection.mutable
diff --git a/src/library/scala/xml/dtd/Decl.scala b/src/library/scala/xml/dtd/Decl.scala
index fd2eaa30ba..e6804478bd 100644
--- a/src/library/scala/xml/dtd/Decl.scala
+++ b/src/library/scala/xml/dtd/Decl.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package dtd
import Utility.sbToString
diff --git a/src/library/scala/xml/dtd/DocType.scala b/src/library/scala/xml/dtd/DocType.scala
index af7e77e76f..849d560cc9 100644
--- a/src/library/scala/xml/dtd/DocType.scala
+++ b/src/library/scala/xml/dtd/DocType.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
package dtd
/** An XML node for document type declaration.
diff --git a/src/library/scala/xml/dtd/ElementValidator.scala b/src/library/scala/xml/dtd/ElementValidator.scala
index ad74acb77e..4830769a7d 100644
--- a/src/library/scala/xml/dtd/ElementValidator.scala
+++ b/src/library/scala/xml/dtd/ElementValidator.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package dtd
import PartialFunction._
diff --git a/src/library/scala/xml/dtd/ExternalID.scala b/src/library/scala/xml/dtd/ExternalID.scala
index 80ada0caaa..5a1b5d1a19 100644
--- a/src/library/scala/xml/dtd/ExternalID.scala
+++ b/src/library/scala/xml/dtd/ExternalID.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
package dtd
/** an ExternalIDs - either PublicID or SystemID
diff --git a/src/library/scala/xml/dtd/Scanner.scala b/src/library/scala/xml/dtd/Scanner.scala
index 53404e34a7..5f9d1ccaed 100644
--- a/src/library/scala/xml/dtd/Scanner.scala
+++ b/src/library/scala/xml/dtd/Scanner.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
package dtd
/** Scanner for regexps (content models in DTD element declarations)
diff --git a/src/library/scala/xml/dtd/Tokens.scala b/src/library/scala/xml/dtd/Tokens.scala
index eaffba99a4..07e888e77a 100644
--- a/src/library/scala/xml/dtd/Tokens.scala
+++ b/src/library/scala/xml/dtd/Tokens.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package dtd
diff --git a/src/library/scala/xml/dtd/ValidationException.scala b/src/library/scala/xml/dtd/ValidationException.scala
index 15640e2da7..1bfae55286 100644
--- a/src/library/scala/xml/dtd/ValidationException.scala
+++ b/src/library/scala/xml/dtd/ValidationException.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package dtd
diff --git a/src/library/scala/xml/dtd/impl/Base.scala b/src/library/scala/xml/dtd/impl/Base.scala
index dd277779f6..91ff03a93a 100644
--- a/src/library/scala/xml/dtd/impl/Base.scala
+++ b/src/library/scala/xml/dtd/impl/Base.scala
@@ -8,7 +8,8 @@
-package scala.xml.dtd.impl
+package scala
+package xml.dtd.impl
/** Basic regular expressions.
*
diff --git a/src/library/scala/xml/dtd/impl/BaseBerrySethi.scala b/src/library/scala/xml/dtd/impl/BaseBerrySethi.scala
index 99d5ab62e1..f30309b037 100644
--- a/src/library/scala/xml/dtd/impl/BaseBerrySethi.scala
+++ b/src/library/scala/xml/dtd/impl/BaseBerrySethi.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml.dtd.impl
+package scala
+package xml.dtd.impl
import scala.collection.{ mutable, immutable }
diff --git a/src/library/scala/xml/dtd/impl/DetWordAutom.scala b/src/library/scala/xml/dtd/impl/DetWordAutom.scala
index 5c1dcb7ff8..6f8ba4de72 100644
--- a/src/library/scala/xml/dtd/impl/DetWordAutom.scala
+++ b/src/library/scala/xml/dtd/impl/DetWordAutom.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml.dtd.impl
+package scala
+package xml.dtd.impl
import scala.collection.{ mutable, immutable }
diff --git a/src/library/scala/xml/dtd/impl/Inclusion.scala b/src/library/scala/xml/dtd/impl/Inclusion.scala
index 0ae78519ca..07b6afaeba 100644
--- a/src/library/scala/xml/dtd/impl/Inclusion.scala
+++ b/src/library/scala/xml/dtd/impl/Inclusion.scala
@@ -8,7 +8,8 @@
-package scala.xml.dtd.impl
+package scala
+package xml.dtd.impl
/** A fast test of language inclusion between minimal automata.
diff --git a/src/library/scala/xml/dtd/impl/NondetWordAutom.scala b/src/library/scala/xml/dtd/impl/NondetWordAutom.scala
index 8e0b5a3a4c..0bb19a7e3e 100644
--- a/src/library/scala/xml/dtd/impl/NondetWordAutom.scala
+++ b/src/library/scala/xml/dtd/impl/NondetWordAutom.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml.dtd.impl
+package scala
+package xml.dtd.impl
import scala.collection.{ immutable, mutable }
@@ -51,7 +52,7 @@ private[dtd] abstract class NondetWordAutom[T <: AnyRef] {
override def toString = {
val finalString = Map(finalStates map (j => j -> finals(j)) : _*).toString
- val deltaString = (0 until nstates)
+ val deltaString = (0 until nstates)
.map(i => " %d->%s\n _>%s\n".format(i, delta(i), default(i))).mkString
"[NondetWordAutom nstates=%d finals=%s delta=\n%s".format(nstates, finalString, deltaString)
diff --git a/src/library/scala/xml/dtd/impl/PointedHedgeExp.scala b/src/library/scala/xml/dtd/impl/PointedHedgeExp.scala
index 0b5297510d..1720604132 100644
--- a/src/library/scala/xml/dtd/impl/PointedHedgeExp.scala
+++ b/src/library/scala/xml/dtd/impl/PointedHedgeExp.scala
@@ -8,7 +8,8 @@
-package scala.xml.dtd.impl
+package scala
+package xml.dtd.impl
/** Pointed regular hedge expressions, a useful subclass of regular hedge expressions.
*
diff --git a/src/library/scala/xml/dtd/impl/SubsetConstruction.scala b/src/library/scala/xml/dtd/impl/SubsetConstruction.scala
index d1ea4b6e9e..632ca1eb18 100644
--- a/src/library/scala/xml/dtd/impl/SubsetConstruction.scala
+++ b/src/library/scala/xml/dtd/impl/SubsetConstruction.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml.dtd.impl
+package scala
+package xml.dtd.impl
import scala.collection.{ mutable, immutable }
diff --git a/src/library/scala/xml/dtd/impl/SyntaxError.scala b/src/library/scala/xml/dtd/impl/SyntaxError.scala
index b0e0b8b6cd..a5b8a5aba0 100644
--- a/src/library/scala/xml/dtd/impl/SyntaxError.scala
+++ b/src/library/scala/xml/dtd/impl/SyntaxError.scala
@@ -8,7 +8,8 @@
-package scala.xml.dtd.impl
+package scala
+package xml.dtd.impl
/** This runtime exception is thrown if an attempt to instantiate a
* syntactically incorrect expression is detected.
diff --git a/src/library/scala/xml/dtd/impl/WordBerrySethi.scala b/src/library/scala/xml/dtd/impl/WordBerrySethi.scala
index 90d7fe760a..9bf3fa518b 100644
--- a/src/library/scala/xml/dtd/impl/WordBerrySethi.scala
+++ b/src/library/scala/xml/dtd/impl/WordBerrySethi.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml.dtd.impl
+package scala
+package xml.dtd.impl
import scala.collection.{ immutable, mutable }
diff --git a/src/library/scala/xml/dtd/impl/WordExp.scala b/src/library/scala/xml/dtd/impl/WordExp.scala
index 38f8aea697..a4bb54c1ea 100644
--- a/src/library/scala/xml/dtd/impl/WordExp.scala
+++ b/src/library/scala/xml/dtd/impl/WordExp.scala
@@ -8,7 +8,8 @@
-package scala.xml.dtd.impl
+package scala
+package xml.dtd.impl
/**
* The class `WordExp` provides regular word expressions.
diff --git a/src/library/scala/xml/factory/Binder.scala b/src/library/scala/xml/factory/Binder.scala
index b463fda5ba..947f99e6a4 100755
--- a/src/library/scala/xml/factory/Binder.scala
+++ b/src/library/scala/xml/factory/Binder.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package factory
import parsing.ValidatingMarkupHandler
diff --git a/src/library/scala/xml/factory/LoggedNodeFactory.scala b/src/library/scala/xml/factory/LoggedNodeFactory.scala
index 49a6d622a7..bc074bfc83 100644
--- a/src/library/scala/xml/factory/LoggedNodeFactory.scala
+++ b/src/library/scala/xml/factory/LoggedNodeFactory.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package factory
/** This class logs what the nodefactory is actually doing.
@@ -14,8 +15,9 @@ package factory
{{{
object testLogged extends App {
val x = new scala.xml.parsing.NoBindingFactoryAdapter
- with scala.xml.factory.LoggedNodeFactory[scala.xml.Elem]
- with scala.util.logging.ConsoleLogger
+ with scala.xml.factory.LoggedNodeFactory[scala.xml.Elem] {
+ override def log(s: String) = println(s)
+ }
Console.println("Start")
val doc = x.load(new java.net.URL("http://example.com/file.xml"))
@@ -27,7 +29,8 @@ object testLogged extends App {
* @author Burak Emir
* @version 1.0
*/
-trait LoggedNodeFactory[A <: Node] extends NodeFactory[A] with scala.util.logging.Logged {
+@deprecated("This trait will be removed.", "2.11")
+trait LoggedNodeFactory[A <: Node] extends NodeFactory[A] {
// configuration values
val logNode = true
val logText = false
@@ -82,4 +85,6 @@ trait LoggedNodeFactory[A <: Node] extends NodeFactory[A] with scala.util.loggin
super.makeProcInstr(t, s)
}
+ @deprecated("This method and its usages will be removed. Use a debugger to debug code.", "2.11")
+ def log(msg: String): Unit = {}
}
diff --git a/src/library/scala/xml/factory/NodeFactory.scala b/src/library/scala/xml/factory/NodeFactory.scala
index 28a1b6fff4..94801bb554 100644
--- a/src/library/scala/xml/factory/NodeFactory.scala
+++ b/src/library/scala/xml/factory/NodeFactory.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
package factory
import parsing.{ FactoryAdapter, NoBindingFactoryAdapter }
diff --git a/src/library/scala/xml/factory/XMLLoader.scala b/src/library/scala/xml/factory/XMLLoader.scala
index bd18f2a699..b69f187039 100644
--- a/src/library/scala/xml/factory/XMLLoader.scala
+++ b/src/library/scala/xml/factory/XMLLoader.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
package factory
import javax.xml.parsers.SAXParserFactory
diff --git a/src/library/scala/xml/include/CircularIncludeException.scala b/src/library/scala/xml/include/CircularIncludeException.scala
index 5e74967d54..351f403008 100644
--- a/src/library/scala/xml/include/CircularIncludeException.scala
+++ b/src/library/scala/xml/include/CircularIncludeException.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package include
/**
diff --git a/src/library/scala/xml/include/UnavailableResourceException.scala b/src/library/scala/xml/include/UnavailableResourceException.scala
index f00cc58699..47b176e0f3 100644
--- a/src/library/scala/xml/include/UnavailableResourceException.scala
+++ b/src/library/scala/xml/include/UnavailableResourceException.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package include
/**
diff --git a/src/library/scala/xml/include/XIncludeException.scala b/src/library/scala/xml/include/XIncludeException.scala
index 84033f853f..11e1644d83 100644
--- a/src/library/scala/xml/include/XIncludeException.scala
+++ b/src/library/scala/xml/include/XIncludeException.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package include
/**
diff --git a/src/library/scala/xml/include/sax/EncodingHeuristics.scala b/src/library/scala/xml/include/sax/EncodingHeuristics.scala
index 8d8ce5b290..57ab5ed91c 100644
--- a/src/library/scala/xml/include/sax/EncodingHeuristics.scala
+++ b/src/library/scala/xml/include/sax/EncodingHeuristics.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package include.sax
import java.io.InputStream
diff --git a/src/library/scala/xml/include/sax/XIncludeFilter.scala b/src/library/scala/xml/include/sax/XIncludeFilter.scala
index 9079b5f9c7..3fa3beefb0 100644
--- a/src/library/scala/xml/include/sax/XIncludeFilter.scala
+++ b/src/library/scala/xml/include/sax/XIncludeFilter.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package include.sax
import scala.xml.include._
diff --git a/src/library/scala/xml/include/sax/XIncluder.scala b/src/library/scala/xml/include/sax/XIncluder.scala
index 8fcd66d4c0..1939fa1875 100644
--- a/src/library/scala/xml/include/sax/XIncluder.scala
+++ b/src/library/scala/xml/include/sax/XIncluder.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package include.sax
import scala.collection.mutable
@@ -94,7 +95,7 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit
// However, it is required if text contains ]]>
// (The end CDATA section delimiter)
else if (c == '>') out.write("&gt;")
- else out.write(c)
+ else out.write(c.toInt)
i += 1
}
}
diff --git a/src/library/scala/xml/parsing/ConstructingHandler.scala b/src/library/scala/xml/parsing/ConstructingHandler.scala
index 6fda4dabfb..ba416e4301 100755
--- a/src/library/scala/xml/parsing/ConstructingHandler.scala
+++ b/src/library/scala/xml/parsing/ConstructingHandler.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package parsing
/** Implementation of MarkupHandler that constructs nodes.
diff --git a/src/library/scala/xml/parsing/ConstructingParser.scala b/src/library/scala/xml/parsing/ConstructingParser.scala
index 404411812e..3caeddabf4 100644
--- a/src/library/scala/xml/parsing/ConstructingParser.scala
+++ b/src/library/scala/xml/parsing/ConstructingParser.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package parsing
import java.io.File
diff --git a/src/library/scala/xml/parsing/DefaultMarkupHandler.scala b/src/library/scala/xml/parsing/DefaultMarkupHandler.scala
index 0152e44cda..6ec7474843 100755
--- a/src/library/scala/xml/parsing/DefaultMarkupHandler.scala
+++ b/src/library/scala/xml/parsing/DefaultMarkupHandler.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package parsing
diff --git a/src/library/scala/xml/parsing/ExternalSources.scala b/src/library/scala/xml/parsing/ExternalSources.scala
index aaac588092..bb939bca95 100644
--- a/src/library/scala/xml/parsing/ExternalSources.scala
+++ b/src/library/scala/xml/parsing/ExternalSources.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package parsing
import java.net.URL
diff --git a/src/library/scala/xml/parsing/FactoryAdapter.scala b/src/library/scala/xml/parsing/FactoryAdapter.scala
index 8659d3f0c4..2154bdf5ba 100644
--- a/src/library/scala/xml/parsing/FactoryAdapter.scala
+++ b/src/library/scala/xml/parsing/FactoryAdapter.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package parsing
import java.io.{ InputStream, Reader, File, FileDescriptor, FileInputStream }
diff --git a/src/library/scala/xml/parsing/FatalError.scala b/src/library/scala/xml/parsing/FatalError.scala
index a8b4f8f8cf..ab3cb2a74d 100644
--- a/src/library/scala/xml/parsing/FatalError.scala
+++ b/src/library/scala/xml/parsing/FatalError.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package parsing
/** !!! This is poorly named, but I guess it's in the API.
diff --git a/src/library/scala/xml/parsing/MarkupHandler.scala b/src/library/scala/xml/parsing/MarkupHandler.scala
index 7028161821..1ebffb9c90 100755
--- a/src/library/scala/xml/parsing/MarkupHandler.scala
+++ b/src/library/scala/xml/parsing/MarkupHandler.scala
@@ -8,12 +8,12 @@
-package scala.xml
+package scala
+package xml
package parsing
import scala.collection.mutable
import scala.io.Source
-import scala.util.logging.Logged
import scala.xml.dtd._
/** class that handles markup - provides callback methods to MarkupParser.
@@ -25,8 +25,8 @@ import scala.xml.dtd._
* @todo can we ignore more entity declarations (i.e. those with extIDs)?
* @todo expanding entity references
*/
-abstract class MarkupHandler extends Logged
-{
+abstract class MarkupHandler {
+
/** returns true is this markup handler is validating */
val isValidating: Boolean = false
@@ -121,4 +121,7 @@ abstract class MarkupHandler extends Logged
def unparsedEntityDecl(name: String, extID: ExternalID, notat: String): Unit = ()
def notationDecl(notat: String, extID: ExternalID): Unit = ()
def reportSyntaxError(pos: Int, str: String): Unit
+
+ @deprecated("This method and its usages will be removed. Use a debugger to debug code.", "2.11")
+ def log(msg: String): Unit = {}
}
diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala
index 9c7bb60475..3bbd136b67 100755
--- a/src/library/scala/xml/parsing/MarkupParser.scala
+++ b/src/library/scala/xml/parsing/MarkupParser.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package parsing
import scala.io.Source
diff --git a/src/library/scala/xml/parsing/MarkupParserCommon.scala b/src/library/scala/xml/parsing/MarkupParserCommon.scala
index 7bfbcc7fff..57c1651558 100644
--- a/src/library/scala/xml/parsing/MarkupParserCommon.scala
+++ b/src/library/scala/xml/parsing/MarkupParserCommon.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package parsing
import scala.io.Source
diff --git a/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala b/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala
index 22dd450072..56ac185f47 100644
--- a/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala
+++ b/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
package parsing
import factory.NodeFactory
diff --git a/src/library/scala/xml/parsing/TokenTests.scala b/src/library/scala/xml/parsing/TokenTests.scala
index c9cafaeea1..8dd9cdfaa3 100644
--- a/src/library/scala/xml/parsing/TokenTests.scala
+++ b/src/library/scala/xml/parsing/TokenTests.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package parsing
/**
diff --git a/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala b/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
index 018ae4d2cd..1b20901249 100644
--- a/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
+++ b/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
@@ -8,13 +8,13 @@
-package scala.xml
+package scala
+package xml
package parsing
import scala.xml.dtd._
-import scala.util.logging.Logged
-abstract class ValidatingMarkupHandler extends MarkupHandler with Logged {
+abstract class ValidatingMarkupHandler extends MarkupHandler {
var rootLabel:String = _
var qStack: List[Int] = Nil
@@ -25,20 +25,6 @@ abstract class ValidatingMarkupHandler extends MarkupHandler with Logged {
final override val isValidating = true
- override def log(msg: String) {}
-
- /*
- override def checkChildren(pos: Int, pre: String, label:String,ns:NodeSeq): Unit = {
- Console.println("checkChildren()");
- val decl = lookupElemDecl(label);
- // @todo: nice error message
- val res = decl.contentModel.validate(ns);
- Console.println("res = "+res);
- if(!res)
- //sys.error("invalid!");
- }
- */
-
override def endDTD(n:String) = {
rootLabel = n
}
@@ -115,5 +101,4 @@ abstract class ValidatingMarkupHandler extends MarkupHandler with Logged {
/** report a syntax error */
def reportValidationError(pos: Int, str: String): Unit
-
}
diff --git a/src/library/scala/xml/parsing/XhtmlEntities.scala b/src/library/scala/xml/parsing/XhtmlEntities.scala
index 1bb843818a..3683af202c 100644
--- a/src/library/scala/xml/parsing/XhtmlEntities.scala
+++ b/src/library/scala/xml/parsing/XhtmlEntities.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package parsing
import scala.xml.dtd.{ IntDef, ParsedEntityDecl }
diff --git a/src/library/scala/xml/parsing/XhtmlParser.scala b/src/library/scala/xml/parsing/XhtmlParser.scala
index 33b94c9bd7..6ce5bec8d0 100644
--- a/src/library/scala/xml/parsing/XhtmlParser.scala
+++ b/src/library/scala/xml/parsing/XhtmlParser.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package parsing
import scala.io.Source
diff --git a/src/library/scala/xml/persistent/CachedFileStorage.scala b/src/library/scala/xml/persistent/CachedFileStorage.scala
index fc510b5f18..a1489ef3f4 100644
--- a/src/library/scala/xml/persistent/CachedFileStorage.scala
+++ b/src/library/scala/xml/persistent/CachedFileStorage.scala
@@ -6,14 +6,15 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package persistent
import java.io.{ File, FileOutputStream }
import java.nio.ByteBuffer
import java.nio.channels.Channels
import java.lang.Thread
-import scala.util.logging.Logged
+
import scala.collection.Iterator
/** Mutable storage of immutable xml trees. Everything is kept in memory,
@@ -25,7 +26,7 @@ import scala.collection.Iterator
*
* @author Burak Emir
*/
-abstract class CachedFileStorage(private val file1: File) extends Thread with Logged {
+abstract class CachedFileStorage(private val file1: File) extends Thread {
private val file2 = new File(file1.getParent, file1.getName+"$")
@@ -111,7 +112,7 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo
override def run = {
log("[run]\nstarting storage thread, checking every "+interval+" ms")
while (true) {
- Thread.sleep( this.interval )
+ Thread.sleep( this.interval.toLong )
save()
}
}
@@ -122,4 +123,7 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo
this.dirty = true
save()
}
+
+ @deprecated("This method and its usages will be removed. Use a debugger to debug code.", "2.11")
+ def log(msg: String): Unit = {}
}
diff --git a/src/library/scala/xml/persistent/Index.scala b/src/library/scala/xml/persistent/Index.scala
index defaf67d52..9ee45e7086 100644
--- a/src/library/scala/xml/persistent/Index.scala
+++ b/src/library/scala/xml/persistent/Index.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package persistent
/** an Index returns some unique key that is part of a node
diff --git a/src/library/scala/xml/persistent/SetStorage.scala b/src/library/scala/xml/persistent/SetStorage.scala
index d16c71c9f7..8db56a2e71 100644
--- a/src/library/scala/xml/persistent/SetStorage.scala
+++ b/src/library/scala/xml/persistent/SetStorage.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
package persistent
import scala.collection.mutable
diff --git a/src/library/scala/xml/pull/XMLEvent.scala b/src/library/scala/xml/pull/XMLEvent.scala
index a266380f87..3beb3648e7 100644
--- a/src/library/scala/xml/pull/XMLEvent.scala
+++ b/src/library/scala/xml/pull/XMLEvent.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package pull
/** An XML event for pull parsing. All events received during
diff --git a/src/library/scala/xml/pull/XMLEventReader.scala b/src/library/scala/xml/pull/XMLEventReader.scala
index 3f9584fd04..76e51e17fd 100755
--- a/src/library/scala/xml/pull/XMLEventReader.scala
+++ b/src/library/scala/xml/pull/XMLEventReader.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package pull
import scala.io.Source
diff --git a/src/library/scala/xml/pull/package.scala b/src/library/scala/xml/pull/package.scala
index 3742c55513..0e3019446b 100644
--- a/src/library/scala/xml/pull/package.scala
+++ b/src/library/scala/xml/pull/package.scala
@@ -1,4 +1,5 @@
-package scala.xml
+package scala
+package xml
/**
* Classes needed to view an XML document as a series of events. The document
diff --git a/src/library/scala/xml/transform/BasicTransformer.scala b/src/library/scala/xml/transform/BasicTransformer.scala
index e427071177..c98339fd67 100644
--- a/src/library/scala/xml/transform/BasicTransformer.scala
+++ b/src/library/scala/xml/transform/BasicTransformer.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package transform
/** A class for XML transformations.
diff --git a/src/library/scala/xml/transform/RewriteRule.scala b/src/library/scala/xml/transform/RewriteRule.scala
index 13210a6fd2..1399ee538d 100644
--- a/src/library/scala/xml/transform/RewriteRule.scala
+++ b/src/library/scala/xml/transform/RewriteRule.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package transform
/** A RewriteRule, when applied to a term, yields either
diff --git a/src/library/scala/xml/transform/RuleTransformer.scala b/src/library/scala/xml/transform/RuleTransformer.scala
index 85e92e5773..3a222ba759 100644
--- a/src/library/scala/xml/transform/RuleTransformer.scala
+++ b/src/library/scala/xml/transform/RuleTransformer.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-
-
-package scala.xml
+package scala
+package xml
package transform
class RuleTransformer(rules: RewriteRule*) extends BasicTransformer {
diff --git a/src/partest/scala/tools/partest/BytecodeTest.scala b/src/partest/scala/tools/partest/BytecodeTest.scala
index 2699083069..172fa29189 100644
--- a/src/partest/scala/tools/partest/BytecodeTest.scala
+++ b/src/partest/scala/tools/partest/BytecodeTest.scala
@@ -2,10 +2,9 @@ package scala.tools.partest
import scala.tools.nsc.util.JavaClassPath
import scala.collection.JavaConverters._
-import scala.tools.asm
-import asm.{ ClassReader }
-import asm.tree.{ClassNode, MethodNode, InsnList}
-import java.io.InputStream
+import scala.tools.asm.{ClassWriter, ClassReader}
+import scala.tools.asm.tree.{ClassNode, MethodNode, InsnList}
+import java.io.{FileOutputStream, FileInputStream, File => JFile, InputStream}
import AsmNode._
/**
@@ -127,3 +126,31 @@ abstract class BytecodeTest extends ASMConverters {
new JavaClassPath(containers, DefaultJavaContext)
}
}
+
+object BytecodeTest {
+ /** Parses `file` as a class file, transforms the ASM representation with `f`,
+ * and overwrites the original file.
+ */
+ def modifyClassFile(file: JFile)(f: ClassNode => ClassNode) {
+ val rfile = new reflect.io.File(file)
+ def readClass: ClassNode = {
+ val cr = new ClassReader(rfile.toByteArray())
+ val cn = new ClassNode()
+ cr.accept(cn, 0)
+ cn
+ }
+
+ def writeClass(cn: ClassNode) {
+ val writer = new ClassWriter(0)
+ cn.accept(writer)
+ val os = rfile.bufferedOutput()
+ try {
+ os.write(writer.toByteArray)
+ } finally {
+ os.close()
+ }
+ }
+
+ writeClass(f(readClass))
+ }
+}
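A minimal usage sketch for the new helper (the target file name and the transform are hypothetical, not part of the patch):

    import java.io.File
    import scala.tools.asm.Opcodes
    import scala.tools.partest.BytecodeTest

    object ModifyClassFileSketch extends App {
      // Rewrites Foo.class in place, marking the class final.
      BytecodeTest.modifyClassFile(new File("Foo.class")) { cn =>
        cn.access = cn.access | Opcodes.ACC_FINAL
        cn
      }
    }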
diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala
index 7495f97efd..df4a81dee2 100644
--- a/src/partest/scala/tools/partest/CompilerTest.scala
+++ b/src/partest/scala/tools/partest/CompilerTest.scala
@@ -19,7 +19,7 @@ abstract class CompilerTest extends DirectTest {
def check(source: String, unit: global.CompilationUnit): Unit
lazy val global: Global = newCompiler()
- lazy val units = compilationUnits(global)(sources: _ *)
+ lazy val units: List[global.CompilationUnit] = compilationUnits(global)(sources: _ *)
import global._
import definitions.{ compilerTypeFromTag }
diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala
index 3f61062073..953b5e5535 100644
--- a/src/partest/scala/tools/partest/DirectTest.scala
+++ b/src/partest/scala/tools/partest/DirectTest.scala
@@ -6,8 +6,8 @@
package scala.tools.partest
import scala.tools.nsc._
-import io.Directory
-import util.{BatchSourceFile, CommandLineParser}
+import settings.ScalaVersion
+import util.{ SourceFile, BatchSourceFile, CommandLineParser }
import reporters.{Reporter, ConsoleReporter}
/** A class for testing code which is embedded as a string.
@@ -21,8 +21,8 @@ abstract class DirectTest extends App {
def show(): Unit
// the test file or dir, and output directory
- def testPath = io.File(sys.props("partest.test-path"))
- def testOutput = io.Directory(sys.props("partest.output"))
+ def testPath = SFile(sys.props("partest.test-path"))
+ def testOutput = Directory(sys.props("partest.output"))
// override to add additional settings with strings
def extraSettings: String = ""
@@ -46,18 +46,32 @@ abstract class DirectTest extends App {
def reporter(settings: Settings): Reporter = new ConsoleReporter(settings)
- def newSources(sourceCodes: String*) = sourceCodes.toList.zipWithIndex map {
- case (src, idx) => new BatchSourceFile("newSource" + (idx + 1), src)
- }
+ private def newSourcesWithExtension(ext: String)(codes: String*): List[BatchSourceFile] =
+ codes.toList.zipWithIndex map {
+ case (src, idx) => new BatchSourceFile(s"newSource${idx + 1}.$ext", src)
+ }
+
+ def newJavaSources(codes: String*) = newSourcesWithExtension("java")(codes: _*)
+ def newSources(codes: String*) = newSourcesWithExtension("scala")(codes: _*)
+
def compileString(global: Global)(sourceCode: String): Boolean = {
withRun(global)(_ compileSources newSources(sourceCode))
!global.reporter.hasErrors
}
- def compilationUnits(global: Global)(sourceCodes: String*): List[global.CompilationUnit] = {
- val units = withRun(global) { run =>
- run compileSources newSources(sourceCodes: _*)
+
+ def javaCompilationUnits(global: Global)(sourceCodes: String*) = {
+ sourceFilesToCompiledUnits(global)(newJavaSources(sourceCodes: _*))
+ }
+
+ def sourceFilesToCompiledUnits(global: Global)(files: List[SourceFile]) = {
+ withRun(global) { run =>
+ run compileSources files
run.units.toList
}
+ }
+
+ def compilationUnits(global: Global)(sourceCodes: String*): List[global.CompilationUnit] = {
+ val units = sourceFilesToCompiledUnits(global)(newSources(sourceCodes: _*))
if (global.reporter.hasErrors) {
global.reporter.flush()
sys.error("Compilation failure.")
@@ -84,4 +98,30 @@ abstract class DirectTest extends App {
final def log(msg: => Any) {
if (isDebug) Console.err println msg
}
+
+ /**
+ * Run a test only if the current java version is at least the version specified.
+ */
+ def testUnderJavaAtLeast[A](version: String)(yesRun: =>A) = new TestUnderJavaAtLeast(version, { yesRun })
+
+ class TestUnderJavaAtLeast[A](version: String, yesRun: => A) {
+ val javaVersion = System.getProperty("java.specification.version")
+
+ // the "ScalaVersion" class parses Java specification versions just fine
+ val requiredJavaVersion = ScalaVersion(version)
+ val executingJavaVersion = ScalaVersion(javaVersion)
+ val shouldRun = executingJavaVersion >= requiredJavaVersion
+ val preamble = if (shouldRun) "Attempting" else "Doing fallback for"
+
+ def logInfo() = log(s"$preamble java $version specific test under java version $javaVersion")
+
+ /*
+ * If the current java version is at least 'version' then 'yesRun' is evaluated
+ * otherwise 'fallback' is evaluated.
+ */
+ def otherwise(fallback: =>A): A = {
+ logInfo()
+ if (shouldRun) yesRun else fallback
+ }
+ }
}
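For illustration (a hypothetical partest source file, not part of the patch), the new guard reads like this inside a DirectTest: the body runs only on a 1.8-or-newer JVM, and the fallback prints the same text so the checkfile matches regardless of the JVM running the suite.

    object Test extends scala.tools.partest.DirectTest {
      def code = ""   // unused in this sketch
      def show(): Unit =
        testUnderJavaAtLeast("1.8") {
          println("ok")
        } otherwise {
          println("ok")   // fallback keeps the transcript identical on older JVMs
        }
    }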
diff --git a/src/partest/scala/tools/partest/IcodeTest.scala b/src/partest/scala/tools/partest/IcodeTest.scala
index f5333cc5f9..b12ec0de61 100644
--- a/src/partest/scala/tools/partest/IcodeTest.scala
+++ b/src/partest/scala/tools/partest/IcodeTest.scala
@@ -5,9 +5,7 @@
package scala.tools.partest
-import scala.tools.nsc._
-import nest.FileUtil._
-import io.Directory
+import scala.tools.partest.nest.FileUtil.compareContents
/** A trait for testing icode. All you need is this in a
* partest source file:
diff --git a/src/partest/scala/tools/partest/PartestDefaults.scala b/src/partest/scala/tools/partest/PartestDefaults.scala
index 5d98a8be81..8478edeb4d 100644
--- a/src/partest/scala/tools/partest/PartestDefaults.scala
+++ b/src/partest/scala/tools/partest/PartestDefaults.scala
@@ -1,13 +1,11 @@
package scala.tools
package partest
-import nsc.io.{ File, Path, Directory }
-import scala.tools.util.PathResolver
-import nsc.Properties.{ propOrElse, propOrNone, propOrEmpty }
-import java.lang.Runtime.getRuntime
+import scala.concurrent.duration.Duration
+import scala.tools.nsc.Properties.{ propOrElse, propOrNone, propOrEmpty }
+import java.lang.Runtime.{ getRuntime => runtime }
object PartestDefaults {
- import nsc.Properties._
def testRootName = propOrNone("partest.root")
def srcDirName = propOrElse("partest.srcdir", "files")
@@ -23,7 +21,8 @@ object PartestDefaults {
def testBuild = propOrNone("partest.build")
def errorCount = propOrElse("partest.errors", "0").toInt
- def numThreads = propOrNone("partest.threads") map (_.toInt) getOrElse getRuntime.availableProcessors
+ def numThreads = propOrNone("partest.threads") map (_.toInt) getOrElse runtime.availableProcessors
+ def waitTime = propOrNone("partest.timeout") map (Duration.apply) getOrElse Duration("4 hours")
- def timeout = "1200000"
+ //def timeout = "1200000" // per-test timeout
}
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
index 13207b16fd..b5b09a753a 100644
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ b/src/partest/scala/tools/partest/PartestTask.scala
@@ -10,14 +10,10 @@ package scala.tools
package partest
import scala.util.Properties.setProp
-import scala.tools.nsc.io.{ Directory, Path => SPath }
-import nsc.util.ClassPath
-import util.PathResolver
import scala.tools.ant.sabbus.CompilationPathProperty
-import java.io.File
import java.lang.reflect.Method
import org.apache.tools.ant.Task
-import org.apache.tools.ant.types.{Path, Reference, FileSet}
+import org.apache.tools.ant.types.{ Reference, FileSet}
import org.apache.tools.ant.types.Commandline.Argument
/** An Ant task to execute the Scala test suite (NSC).
@@ -26,92 +22,42 @@ import org.apache.tools.ant.types.Commandline.Argument
* - `srcdir`,
* - `classpath`,
* - `classpathref`,
- * - `showlog`,
- * - `showdiff`,
* - `erroronfailed`,
* - `javacmd`,
* - `javaccmd`,
* - `scalacopts`,
- * - `timeout`,
* - `debug`,
* - `junitreportdir`.
*
* It also takes the following parameters as nested elements:
* - `compilationpath`.
- * - `postests`,
- * - `negtests`,
- * - `runtests`,
- * - `jvmtests`,
- * - `residenttests`,
- * - `shootouttests`,
- * - `scalaptests`,
- * - `scalachecktests`,
- * - `specializedtests`,
- * - `instrumentedtests`,
- * - `presentationtests`,
- * - `scripttests`.
*
* @author Philippe Haller
*/
class PartestTask extends Task with CompilationPathProperty {
-
- def addConfiguredPosTests(input: FileSet) {
- posFiles = Some(input)
- }
-
- def addConfiguredNegTests(input: FileSet) {
- negFiles = Some(input)
- }
-
- def addConfiguredRunTests(input: FileSet) {
- runFiles = Some(input)
- }
-
- def addConfiguredJvmTests(input: FileSet) {
- jvmFiles = Some(input)
- }
-
- def addConfiguredResidentTests(input: FileSet) {
- residentFiles = Some(input)
- }
-
- def addConfiguredScalacheckTests(input: FileSet) {
- scalacheckFiles = Some(input)
- }
-
- def addConfiguredScriptTests(input: FileSet) {
- scriptFiles = Some(input)
- }
-
- def addConfiguredShootoutTests(input: FileSet) {
- shootoutFiles = Some(input)
- }
-
- def addConfiguredScalapTests(input: FileSet) {
- scalapFiles = Some(input)
- }
-
- def addConfiguredSpecializedTests(input: FileSet) {
- specializedFiles = Some(input)
- }
-
- def addConfiguredInstrumentedTests(input: FileSet) {
- instrumentedFiles = Some(input)
- }
-
- def addConfiguredPresentationTests(input: FileSet) {
- presentationFiles = Some(input)
- }
-
- def addConfiguredAntTests(input: FileSet) {
- antFiles = Some(input)
- }
-
+ type Path = org.apache.tools.ant.types.Path
+
+ private var kinds: List[String] = Nil
+ private var classpath: Option[Path] = None
+ private var debug = false
+ private var errorOnFailed: Boolean = true
+ private var jUnitReportDir: Option[File] = None
+ private var javaccmd: Option[File] = None
+ private var javacmd: Option[File] = Option(sys.props("java.home")) map (x => new File(x, "bin/java"))
+ private var scalacArgs: Option[Seq[Argument]] = None
+ private var srcDir: Option[String] = None
+ private var colors: Int = 0
def setSrcDir(input: String) {
srcDir = Some(input)
}
+ def setColors(input: String) {
+ try colors = input.toInt catch { case _: NumberFormatException => () }
+ if (colors > 0)
+ sys.props("partest.colors") = colors.toString
+ }
+
def setClasspath(input: Path) {
if (classpath.isEmpty)
classpath = Some(input)
@@ -127,15 +73,6 @@ class PartestTask extends Task with CompilationPathProperty {
def setClasspathref(input: Reference) {
createClasspath().setRefid(input)
}
-
- def setShowLog(input: Boolean) {
- showLog = input
- }
-
- def setShowDiff(input: Boolean) {
- showDiff = input
- }
-
def setErrorOnFailed(input: Boolean) {
errorOnFailed = input
}
@@ -144,6 +81,10 @@ class PartestTask extends Task with CompilationPathProperty {
javacmd = Some(input)
}
+ def setKinds(input: String) {
+ kinds = words(input)
+ }
+
def setJavacCmd(input: File) {
javaccmd = Some(input)
}
@@ -159,10 +100,6 @@ class PartestTask extends Task with CompilationPathProperty {
a
}
- def setTimeout(delay: String) {
- timeout = Some(delay)
- }
-
def setDebug(input: Boolean) {
debug = input
}
@@ -171,172 +108,35 @@ class PartestTask extends Task with CompilationPathProperty {
jUnitReportDir = Some(input)
}
- private var classpath: Option[Path] = None
- private var srcDir: Option[String] = None
- private var javacmd: Option[File] = None
- private var javaccmd: Option[File] = None
- private var showDiff: Boolean = false
- private var showLog: Boolean = false
- private var posFiles: Option[FileSet] = None
- private var negFiles: Option[FileSet] = None
- private var runFiles: Option[FileSet] = None
- private var jvmFiles: Option[FileSet] = None
- private var residentFiles: Option[FileSet] = None
- private var scalacheckFiles: Option[FileSet] = None
- private var scriptFiles: Option[FileSet] = None
- private var shootoutFiles: Option[FileSet] = None
- private var scalapFiles: Option[FileSet] = None
- private var specializedFiles: Option[FileSet] = None
- private var instrumentedFiles: Option[FileSet] = None
- private var presentationFiles: Option[FileSet] = None
- private var antFiles: Option[FileSet] = None
- private var errorOnFailed: Boolean = false
- private var scalacArgs: Option[Seq[Argument]] = None
- private var timeout: Option[String] = None
- private var jUnitReportDir: Option[File] = None
- private var debug = false
-
- def fileSetToDir(fs: FileSet) = Directory(fs getDir getProject)
- def fileSetToArray(fs: FileSet): Array[SPath] = {
- val root = fileSetToDir(fs)
- (fs getDirectoryScanner getProject).getIncludedFiles map (root / _)
- }
-
- private def getFiles(fileSet: Option[FileSet]): Array[File] = fileSet match {
- case None => Array()
- case Some(fs) => fileSetToArray(fs) filterNot (_ hasExtension "log") map (_.jfile)
- }
-
- private def getFilesAndDirs(fileSet: Option[FileSet]): Array[File] = fileSet match {
- case None => Array()
- case Some(fs) =>
- def shouldExclude(name: String) = (name endsWith ".obj") || (name startsWith ".")
- // println("----> " + fileSet)
-
- val fileTests = getFiles(Some(fs)) filterNot (x => shouldExclude(x.getName))
- val dirResult = getDirs(Some(fs)) filterNot (x => shouldExclude(x.getName))
- // println("dirs: " + dirResult.toList)
- // println("files: " + fileTests.toList)
-
- dirResult ++ fileTests
- }
-
- private def getDirs(fileSet: Option[FileSet]): Array[File] = fileSet match {
- case None => Array()
- case Some(fs) =>
- def shouldExclude(name: String) = (name endsWith ".obj") || (name startsWith ".")
-
- val dirTests: Iterator[SPath] = fileSetToDir(fs).dirs filterNot (x => shouldExclude(x.name))
- val dirResult = dirTests.toList.toArray map (_.jfile)
-
- dirResult
- }
-
-
- private def getPosFiles = getFilesAndDirs(posFiles)
- private def getNegFiles = getFilesAndDirs(negFiles)
- private def getRunFiles = getFilesAndDirs(runFiles)
- private def getJvmFiles = getFilesAndDirs(jvmFiles)
- private def getResidentFiles = getFiles(residentFiles)
- private def getScalacheckFiles = getFilesAndDirs(scalacheckFiles)
- private def getScriptFiles = getFiles(scriptFiles)
- private def getShootoutFiles = getFiles(shootoutFiles)
- private def getScalapFiles = getFiles(scalapFiles)
- private def getSpecializedFiles = getFiles(specializedFiles)
- private def getInstrumentedFiles = getFilesAndDirs(instrumentedFiles)
- private def getPresentationFiles = getDirs(presentationFiles)
- private def getAntFiles = getFiles(antFiles)
-
override def execute() {
- val opts = getProject().getProperties() get "env.PARTEST_OPTS"
- if (opts != null && opts.toString != "")
- opts.toString.split(" ") foreach { propDef =>
- log("setting system property " + propDef)
- val kv = propDef split "="
- val key = kv(0) substring 2
- val value = kv(1)
- setProp(key, value)
- }
-
- if (isPartestDebug || debug) {
- setProp("partest.debug", "true")
- nest.NestUI._verbose = true
+ if (debug || sys.props.contains("partest.debug")) {
+ nest.NestUI.setDebug()
}
srcDir foreach (x => setProp("partest.srcdir", x))
val classpath = this.compilationPath getOrElse sys.error("Mandatory attribute 'compilationPath' is not set.")
-
- val scalaLibrary = {
- (classpath.list map { fs => new File(fs) }) find { f =>
- f.getName match {
- case "scala-library.jar" => true
- case "library" if (f.getParentFile.getName == "classes") => true
- case _ => false
- }
- }
- } getOrElse sys.error("Provided classpath does not contain a Scala library.")
-
- val scalaReflect = {
- (classpath.list map { fs => new File(fs) }) find { f =>
- f.getName match {
- case "scala-reflect.jar" => true
- case "reflect" if (f.getParentFile.getName == "classes") => true
- case _ => false
- }
- }
- } getOrElse sys.error("Provided classpath does not contain a Scala reflection library.")
-
- val scalaCompiler = {
- (classpath.list map { fs => new File(fs) }) find { f =>
- f.getName match {
- case "scala-compiler.jar" => true
- case "compiler" if (f.getParentFile.getName == "classes") => true
- case _ => false
- }
- }
- } getOrElse sys.error("Provided classpath does not contain a Scala compiler.")
-
- val scalaPartest = {
- (classpath.list map { fs => new File(fs) }) find { f =>
- f.getName match {
- case "scala-partest.jar" => true
- case "partest" if (f.getParentFile.getName == "classes") => true
- case _ => false
- }
- }
- } getOrElse sys.error("Provided classpath does not contain a Scala partest.")
-
- val scalaActors = {
- (classpath.list map { fs => new File(fs) }) find { f =>
- f.getName match {
- case "scala-actors.jar" => true
- case "actors" if (f.getParentFile.getName == "classes") => true
- case _ => false
- }
- }
- } getOrElse sys.error("Provided classpath does not contain a Scala actors.")
+ val cpfiles = classpath.list map { fs => new File(fs) } toList
+ def findCp(name: String) = cpfiles find (f =>
+ (f.getName == s"scala-$name.jar")
+ || (f.absolutePathSegments endsWith Seq("classes", name))
+ ) getOrElse sys.error(s"Provided classpath does not contain a Scala $name element.")
+
+ val scalaLibrary = findCp("library")
+ val scalaReflect = findCp("reflect")
+ val scalaCompiler = findCp("compiler")
+ val scalaPartest = findCp("partest")
+ val scalaActors = findCp("actors")
def scalacArgsFlat: Option[Seq[String]] = scalacArgs map (_ flatMap { a =>
val parts = a.getParts
- if(parts eq null) Seq[String]() else parts.toSeq
+ if (parts eq null) Nil else parts.toSeq
})
val antRunner = new scala.tools.partest.nest.AntRunner
val antFileManager = antRunner.fileManager
- // this is a workaround for https://issues.scala-lang.org/browse/SI-5433
- // when that bug is fixed, this paragraph of code can be safely removed
- // we hack into the classloader that will become parent classloader for scalac
- // this way we ensure that reflective macro lookup will pick correct Code.lift
- val loader = getClass.getClassLoader.asInstanceOf[org.apache.tools.ant.AntClassLoader]
- val path = new org.apache.tools.ant.types.Path(getProject())
- val newClassPath = ClassPath.join(nest.PathSettings.srcCodeLib.toString, loader.getClasspath)
- path.setPath(newClassPath)
- loader.setClassPath(path)
-
- antFileManager.showDiff = showDiff
- antFileManager.showLog = showLog
+ // antFileManager.failed = runFailed
antFileManager.CLASSPATH = ClassPath.join(classpath.list: _*)
antFileManager.LATEST_LIB = scalaLibrary.getAbsolutePath
antFileManager.LATEST_REFLECT = scalaReflect.getAbsolutePath
@@ -347,53 +147,35 @@ class PartestTask extends Task with CompilationPathProperty {
javacmd foreach (x => antFileManager.JAVACMD = x.getAbsolutePath)
javaccmd foreach (x => antFileManager.JAVAC_CMD = x.getAbsolutePath)
scalacArgsFlat foreach (antFileManager.SCALAC_OPTS ++= _)
- timeout foreach (antFileManager.timeout = _)
-
- type TFSet = (Array[File], String, String)
- val testFileSets = List(
- (getPosFiles, "pos", "Compiling files that are expected to build"),
- (getNegFiles, "neg", "Compiling files that are expected to fail"),
- (getRunFiles, "run", "Compiling and running files"),
- (getJvmFiles, "jvm", "Compiling and running files"),
- (getResidentFiles, "res", "Running resident compiler scenarii"),
- (getScalacheckFiles, "scalacheck", "Running scalacheck tests"),
- (getScriptFiles, "script", "Running script files"),
- (getShootoutFiles, "shootout", "Running shootout tests"),
- (getScalapFiles, "scalap", "Running scalap tests"),
- (getSpecializedFiles, "specialized", "Running specialized files"),
- (getInstrumentedFiles, "instrumented", "Running instrumented files"),
- (getPresentationFiles, "presentation", "Running presentation compiler test files"),
- (getAntFiles, "ant", "Running ant task tests")
- )
-
- def runSet(set: TFSet): (Int, Int, Iterable[String]) = {
- val (files, name, msg) = set
+
+ def runSet(kind: String, files: Array[File]): (Int, Int, List[String]) = {
if (files.isEmpty) (0, 0, List())
else {
- log(msg)
- val results: Iterable[(String, TestState)] = antRunner.reflectiveRunTestsForFiles(files, name)
- val (succs, fails) = resultsToStatistics(results)
+ log(s"Running ${files.length} tests in '$kind' at $now")
+ // log(s"Tests: ${files.toList}")
+ val results = antRunner.reflectiveRunTestsForFiles(files, kind)
+ val (passed, failed) = results partition (_.isOk)
+ val numPassed = passed.size
+ val numFailed = failed.size
+ def failedMessages = failed map (_.longStatus)
- val failed: Iterable[String] = results collect {
- case (path, TestState.Fail) => path + " [FAILED]"
- case (path, TestState.Timeout) => path + " [TIMOUT]"
- }
+ log(s"Completed '$kind' at $now")
// create JUnit Report xml files if directory was specified
jUnitReportDir foreach { d =>
d.mkdir
- val report = testReport(name, results, succs, fails)
- scala.xml.XML.save(d.getAbsolutePath+"/"+name+".xml", report)
+ val report = testReport(kind, results, numPassed, numFailed)
+ scala.xml.XML.save(d.getAbsolutePath+"/"+kind+".xml", report)
}
- (succs, fails, failed)
+ (numPassed, numFailed, failedMessages)
}
}
- val _results = testFileSets map runSet
- val allSuccesses = _results map (_._1) sum
- val allFailures = _results map (_._2) sum
+ val _results = kinds map (k => runSet(k, TestKinds testsFor k map (_.jfile) toArray))
+ val allSuccesses = _results map (_._1) sum
+ val allFailures = _results map (_._2) sum
val allFailedPaths = _results flatMap (_._3)
def f = if (errorOnFailed && allFailures > 0) (sys error _) else log(_: String)
@@ -408,20 +190,17 @@ class PartestTask extends Task with CompilationPathProperty {
f(msg)
}
- private def oneResult(res: (String, TestState)) =
- <testcase name={res._1}>{
- res._2 match {
- case TestState.Ok => scala.xml.NodeSeq.Empty
- case TestState.Fail => <failure message="Test failed"/>
- case TestState.Timeout => <failure message="Test timed out"/>
- }
+ private def oneResult(res: TestState) =
+ <testcase name={res.testIdent}>{
+ if (res.isOk) scala.xml.NodeSeq.Empty
+ else <failure message="Test failed"/>
}</testcase>
- private def testReport(kind: String, results: Iterable[(String, TestState)], succs: Int, fails: Int) =
+ private def testReport(kind: String, results: Iterable[TestState], succs: Int, fails: Int) =
<testsuite name={kind} tests={(succs + fails).toString} failures={fails.toString}>
<properties/>
{
- results.map(oneResult(_))
+ results map oneResult
}
</testsuite>
}
diff --git a/src/partest/scala/tools/partest/TestKinds.scala b/src/partest/scala/tools/partest/TestKinds.scala
new file mode 100644
index 0000000000..ec682690ca
--- /dev/null
+++ b/src/partest/scala/tools/partest/TestKinds.scala
@@ -0,0 +1,67 @@
+package scala.tools
+package partest
+
+import nest.PathSettings.srcDir
+
+object TestKinds {
+ val standardKinds = "pos neg run jvm res buildmanager scalacheck scalap specialized instrumented presentation ant" split "\\s+" toList
+ val standardArgs = standardKinds map ("--" + _)
+
+ def denotesTestFile(p: Path) = p.isFile && p.hasExtension("scala", "res", "xml")
+ def denotesTestDir(p: Path) = kindOf(p) match {
+ case "res" => false
+ case _ => p.isDirectory && p.extension == ""
+ }
+ def denotesTestPath(p: Path) = denotesTestDir(p) || denotesTestFile(p)
+
+ // TODO
+ def isTestForPartest(p: Path) = (
+ (p.name == "intentional-failure.scala")
+ || (p.path contains "test-for-partest")
+ )
+
+ def kindOf(p: Path) = {
+ p.toAbsolute.segments takeRight 2 head
+
+ // (srcDir relativize p.toCanonical).segments match {
+ // case (".." :: "scaladoc" :: xs) => xs.head
+ // case xs => xs.head
+ // }
+ }
+ def logOf(p: Path) = {
+ p.parent / s"${p.stripExtension}-${kindOf(p)}.log"
+ // p.parent / s"${p.stripExtension}.log"
+ }
+
+ // true if a test path matches the --grep expression.
+ private def pathMatchesExpr(path: Path, expr: String) = {
+ // Matches the expression if any source file contains the expr,
+ // or if the checkfile contains it, or if the filename contains
+ // it (the last is case-insensitive.)
+ def matches(p: Path) = (
+ (p.path.toLowerCase contains expr.toLowerCase)
+ || (p.fileContents contains expr)
+ )
+ def candidates = {
+ (path changeExtension "check") +: {
+ if (path.isFile) List(path)
+ else path.toDirectory.deepList() filter (_.isJavaOrScala) toList
+ }
+ }
+
+ (candidates exists matches)
+ }
+
+ def groupedTests(paths: List[Path]): List[(String, List[Path])] =
+ (paths.distinct groupBy kindOf).toList sortBy (standardKinds indexOf _._1)
+
+ /** Includes tests for testing partest. */
+ private def allTestsForKind(kind: String): List[Path] =
+ (srcDir / kind toDirectory).list.toList filter denotesTestPath
+
+ def testsForPartest: List[Path] = standardKinds flatMap allTestsForKind filter isTestForPartest
+ def testsFor(kind: String): List[Path] = allTestsForKind(kind) filterNot isTestForPartest
+ def grepFor(expr: String): List[Path] = standardTests filter (t => pathMatchesExpr(t, expr))
+ def standardTests: List[Path] = standardKinds flatMap testsFor
+ def failedTests: List[Path] = standardTests filter (p => logOf(p).isFile)
+}
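A small sketch (not part of the patch) of how a runner might consume the new object; it assumes the partest source tree is where nest.PathSettings.srcDir points, and the object name is hypothetical:

    object TestKindsSketch extends App {
      import scala.tools.partest.TestKinds
      // Group every standard test by kind and report the counts.
      for ((kind, paths) <- TestKinds.groupedTests(TestKinds.standardTests))
        println(f"$kind%-14s ${paths.size}%4d tests")
    }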
diff --git a/src/partest/scala/tools/partest/TestState.scala b/src/partest/scala/tools/partest/TestState.scala
new file mode 100644
index 0000000000..dbe8a222a5
--- /dev/null
+++ b/src/partest/scala/tools/partest/TestState.scala
@@ -0,0 +1,65 @@
+package scala.tools.partest
+
+import scala.tools.nsc.FatalError
+import scala.tools.nsc.util.stackTraceString
+
+sealed abstract class TestState {
+ def testFile: File
+ def what: String
+ def reason: String
+ def transcript: List[String]
+
+ def isOk = false
+ def isSkipped = false
+ def testIdent = testFile.testIdent
+ def transcriptString = transcript mkString EOL
+
+ def identAndReason = testIdent + reasonString
+ def status = s"$what - $identAndReason"
+ def longStatus = status + transcriptString
+ def reasonString = if (reason == "") "" else s" [$reason]"
+
+ def shortStatus = if (isOk) "ok" else "!!"
+
+ override def toString = status
+}
+
+object TestState {
+ case class Uninitialized(testFile: File) extends TestState {
+ def what = "uninitialized"
+ def reason = what
+ def transcript = Nil
+ override def shortStatus = "??"
+ }
+ case class Pass(testFile: File) extends TestState {
+ def what = "pass"
+ override def isOk = true
+ def transcript: List[String] = Nil
+ def reason = ""
+ }
+ case class Updated(testFile: File) extends TestState {
+ def what = "updated"
+ override def isOk = true
+ def transcript: List[String] = Nil
+ def reason = "updated check file"
+ override def shortStatus = "++"
+ }
+ case class Skip(testFile: File, reason: String) extends TestState {
+ def what = "skip"
+ override def isOk = true
+ override def isSkipped = true
+ def transcript: List[String] = Nil
+ override def shortStatus = "--"
+ }
+ case class Fail(testFile: File, reason: String, transcript: List[String]) extends TestState {
+ def what = "fail"
+ }
+ case class Crash(testFile: File, caught: Throwable, transcript: List[String]) extends TestState {
+ def what = "crash"
+ def reason = s"caught $caught_s - ${caught.getMessage}"
+
+ private def caught_s = (caught.getClass.getName split '.').last
+ private def stack_s = stackTraceString(caught)
+ override def transcriptString = nljoin(super.transcriptString, caught_s)
+ }
+}
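Because TestState is sealed, consumers can match on it exhaustively; a hypothetical summary helper (not part of the patch) might look like:

    object TestStateSketch {
      import scala.tools.partest.TestState
      import scala.tools.partest.TestState._

      def summarize(state: TestState): String = state match {
        case Pass(_) | Updated(_) => state.shortStatus            // "ok" / "++"
        case Skip(_, why)         => s"skipped: $why"
        case Fail(_, why, _)      => s"failed: $why"
        case Crash(_, t, _)       => s"crashed: ${t.getClass.getName}"
        case Uninitialized(_)     => state.shortStatus            // "??"
      }
    }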
diff --git a/src/partest/scala/tools/partest/nest/AntRunner.scala b/src/partest/scala/tools/partest/nest/AntRunner.scala
index 93045b8c1d..1d3b79171b 100644
--- a/src/partest/scala/tools/partest/nest/AntRunner.scala
+++ b/src/partest/scala/tools/partest/nest/AntRunner.scala
@@ -10,24 +10,21 @@
package scala.tools.partest
package nest
-import java.io.File
-import scala.tools.nsc.io.{ Directory }
-
class AntRunner extends DirectRunner {
val fileManager = new FileManager {
- var JAVACMD: String = "java"
- var JAVAC_CMD: String = "javac"
- var CLASSPATH: String = _
- var LATEST_LIB: String = _
- var LATEST_REFLECT: String = _
- var LATEST_COMP: String = _
- var LATEST_PARTEST: String = _
- var LATEST_ACTORS: String = _
- val testRootPath: String = "test"
- val testRootDir: Directory = Directory(testRootPath)
+ var JAVACMD: String = "java"
+ var JAVAC_CMD: String = "javac"
+ var CLASSPATH: String = _
+ var LATEST_LIB: String = _
+ var LATEST_REFLECT: String = _
+ var LATEST_COMP: String = _
+ var LATEST_PARTEST: String = _
+ var LATEST_ACTORS: String = _
+ val testRootPath: String = "test"
+ val testRootDir: Directory = Directory(testRootPath)
}
- def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String) =
+ def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String): List[TestState] =
runTestsForFiles(kindFiles.toList, kind)
}
diff --git a/src/partest/scala/tools/partest/nest/CompileManager.scala b/src/partest/scala/tools/partest/nest/CompileManager.scala
deleted file mode 100644
index a8694cc0d6..0000000000
--- a/src/partest/scala/tools/partest/nest/CompileManager.scala
+++ /dev/null
@@ -1,182 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError, io }
-import scala.reflect.io.{ Directory, File => SFile, FileOperationException }
-import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter }
-import scala.tools.nsc.util.{ ClassPath, FakePos }
-import scala.tools.nsc.Properties.{ setProp, propOrEmpty }
-import scala.tools.util.PathResolver
-import io.Path
-import java.io.{ File, BufferedReader, PrintWriter, FileReader, Writer, FileWriter, StringWriter }
-import File.pathSeparator
-
-sealed abstract class CompilationOutcome {
- def merge(other: CompilationOutcome): CompilationOutcome
- def isPositive = this eq CompileSuccess
- def isNegative = this eq CompileFailed
-}
-case object CompileSuccess extends CompilationOutcome {
- def merge(other: CompilationOutcome) = other
-}
-case object CompileFailed extends CompilationOutcome {
- def merge(other: CompilationOutcome) = if (other eq CompileSuccess) this else other
-}
-case object CompilerCrashed extends CompilationOutcome {
- def merge(other: CompilationOutcome) = this
-}
-
-class ExtConsoleReporter(settings: Settings, val writer: PrintWriter) extends ConsoleReporter(settings, Console.in, writer) {
- shortname = true
-}
-
-class TestSettings(cp: String, error: String => Unit) extends Settings(error) {
- def this(cp: String) = this(cp, _ => ())
-
- nowarnings.value = false
- encoding.value = "UTF-8"
- classpath.value = cp
-}
-
-abstract class SimpleCompiler {
- def compile(out: Option[File], files: List[File], kind: String, log: File): CompilationOutcome
-}
-
-class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
- def newGlobal(settings: Settings, reporter: Reporter): Global =
- Global(settings, reporter)
-
- def newGlobal(settings: Settings, logWriter: FileWriter): Global =
- newGlobal(settings, new ExtConsoleReporter(settings, new PrintWriter(logWriter)))
-
- def newSettings(): TestSettings = new TestSettings(fileManager.LATEST_LIB)
- def newSettings(outdir: String): TestSettings = {
- val cp = ClassPath.join(fileManager.LATEST_LIB, outdir)
- val s = new TestSettings(cp)
- s.outdir.value = outdir
-
- s
- }
-
- implicit class Copier(f: SFile) {
- // But what if f is bigger than CHUNK?!
- def copyTo(dest: Path) {
- dest.toFile writeAll f.slurp
- }
- }
-
- // plugin path can be relative to test root, or cwd is out
- private def updatePluginPath(options: String, out: Option[File], srcdir: Directory): String = {
- val dir = fileManager.testRootDir
- def pathOrCwd(p: String) =
- if (p == "." && out.isDefined) {
- val plugxml = "scalac-plugin.xml"
- val pout = Path(out.get)
- val pd = (srcdir / plugxml).toFile
- if (pd.exists) pd copyTo (pout / plugxml)
- pout
- } else Path(p)
- def absolutize(path: String) = pathOrCwd(path) match {
- case x if x.isAbsolute => x.path
- case x => (dir / x).toAbsolute.path
- }
-
- val (opt1, opt2) = (options split "\\s").toList partition (_ startsWith "-Xplugin:")
- val plugins = opt1 map (_ stripPrefix "-Xplugin:") flatMap (_ split pathSeparator) map absolutize
- val pluginOption = if (opt1.isEmpty) Nil else List("-Xplugin:" + (plugins mkString pathSeparator))
-
- (opt2 ::: pluginOption) mkString " "
- }
-
- def compile(out: Option[File], files: List[File], kind: String, log: File): CompilationOutcome = {
- val testSettings = out match {
- case Some(f) => newSettings(f.getAbsolutePath)
- case _ => newSettings()
- }
- val logWriter = new FileWriter(log)
-
- // this api has no notion of srcdir, so fake it
- val fstFile = SFile(files(0))
- val srcdir = fstFile.parent
-
- // check whether there is a ".flags" file
- def convertFlags(f: SFile) = updatePluginPath(f.slurp(), out, srcdir)
- val logFile = basename(log.getName)
- val flagsFileName = "%s.flags" format (logFile.substring(0, logFile.lastIndexOf("-")))
- val argString = (SFile(log).parent / flagsFileName) ifFile (convertFlags) getOrElse ""
-
- // slurp local flags (e.g., "A_1.flags")
- def isInGroup(num: Int) = fstFile.stripExtension endsWith ("_" + num)
- val inGroup = (1 to 9) flatMap (group => if (isInGroup(group)) List(group) else List())
- val localFlagsList = if (inGroup.nonEmpty) {
- val localArgString = (srcdir / (fstFile.stripExtension + ".flags")) ifFile (convertFlags) getOrElse ""
- localArgString.split(' ').toList.filter(_.length > 0)
- } else List()
-
- val allOpts = fileManager.SCALAC_OPTS.toList ::: argString.split(' ').toList.filter(_.length > 0) ::: localFlagsList
- val args = allOpts.toList
-
- NestUI.verbose("scalac options: "+allOpts)
-
- val command = new CompilerCommand(args, testSettings)
- val global = newGlobal(command.settings, logWriter)
- val testRep: ExtConsoleReporter = global.reporter.asInstanceOf[ExtConsoleReporter]
-
- val testFileFn: (File, FileManager) => TestFile = kind match {
- case "pos" => PosTestFile.apply
- case "neg" => NegTestFile.apply
- case "run" => RunTestFile.apply
- case "jvm" => JvmTestFile.apply
- case "shootout" => ShootoutTestFile.apply
- case "scalap" => ScalapTestFile.apply
- case "scalacheck" => ScalaCheckTestFile.apply
- case "specialized" => SpecializedTestFile.apply
- case "instrumented" => InstrumentedTestFile.apply
- case "presentation" => PresentationTestFile.apply
- case "ant" => AntTestFile.apply
- }
- val test: TestFile = testFileFn(files.head, fileManager)
- if (!test.defineSettings(command.settings, out.isEmpty)) {
- testRep.error(FakePos("partest"), test.flags match {
- case Some(flags) => "bad flags: " + flags
- case _ => "bad settings: " + command.settings
- })
- }
-
- val toCompile = files map (_.getPath)
-
- try {
- NestUI.verbose("compiling "+toCompile)
- NestUI.verbose("with classpath: "+global.classPath.toString)
- NestUI.verbose("and java classpath: "+ propOrEmpty("java.class.path"))
- try {
- if (command.shouldStopWithInfo) logWriter append (command getInfoMessage global)
- else new global.Run compile toCompile
- } catch {
- case FatalError(msg) =>
- testRep.error(null, "fatal error: " + msg)
- return CompilerCrashed
- }
-
- testRep.printSummary()
- testRep.writer.close()
- }
- finally logWriter.close()
-
- if (testRep.hasErrors || command.shouldStopWithInfo) CompileFailed
- else CompileSuccess
- }
-}
-
-class CompileManager(val fileManager: FileManager) {
- private def newCompiler = new DirectCompiler(fileManager)
- def attemptCompile(outdir: Option[File], sources: List[File], kind: String, log: File): CompilationOutcome =
- newCompiler.compile(outdir, sources, kind, log)
-}
diff --git a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
index 0ec3f60bf5..b436675d3a 100644
--- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
+++ b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
@@ -3,20 +3,15 @@
* @author Philipp Haller
*/
-// $Id$
+
package scala.tools.partest
package nest
-import java.io.{ File, FilenameFilter, IOException, StringWriter }
+import java.io.{ FilenameFilter, IOException }
import java.net.URI
import scala.util.Properties.{ propOrElse, scalaCmd, scalacCmd }
-import scala.tools.util.PathResolver
import scala.tools.nsc.{ io, util }
-import util.{ ClassPath }
-import io.{ Path, Directory }
-import File.pathSeparator
-import ClassPath.{ join }
import PathResolver.{ Environment, Defaults }
class ConsoleFileManager extends FileManager {
@@ -55,7 +50,7 @@ class ConsoleFileManager extends FileManager {
var JAVAC_CMD = PartestDefaults.javacCmd
- NestUI.verbose("CLASSPATH: "+CLASSPATH)
+ vlog("CLASSPATH: "+CLASSPATH)
if (!srcDir.isDirectory) {
NestUI.failure("Source directory \"" + srcDir.path + "\" not found")
@@ -70,14 +65,14 @@ class ConsoleFileManager extends FileManager {
}
def findLatest() {
- NestUI.verbose("test parent: "+testParent)
+ vlog("test parent: "+testParent)
- def prefixFileWith(parent: File, relPath: String) = (io.File(parent) / relPath).toCanonical
+ def prefixFileWith(parent: File, relPath: String) = (SFile(parent) / relPath).toCanonical
def prefixFile(relPath: String) = (testParent / relPath).toCanonical
if (!testClasses.isEmpty) {
testClassesDir = Path(testClasses.get).toCanonical.toDirectory
- NestUI.verbose("Running with classes in "+testClassesDir)
+ vlog("Running with classes in "+testClassesDir)
latestLibFile = testClassesDir / "library"
latestActorsFile = testClassesDir / "library" / "actors"
@@ -87,7 +82,7 @@ class ConsoleFileManager extends FileManager {
}
else if (testBuild.isDefined) {
val dir = Path(testBuild.get)
- NestUI.verbose("Running on "+dir)
+ vlog("Running on "+dir)
latestLibFile = dir / "lib/scala-library.jar"
latestActorsFile = dir / "lib/scala-actors.jar"
latestReflectFile = dir / "lib/scala-reflect.jar"
@@ -96,7 +91,7 @@ class ConsoleFileManager extends FileManager {
}
else {
def setupQuick() {
- NestUI.verbose("Running build/quick")
+ vlog("Running build/quick")
latestLibFile = prefixFile("build/quick/classes/library")
latestActorsFile = prefixFile("build/quick/classes/library/actors")
latestReflectFile = prefixFile("build/quick/classes/reflect")
@@ -105,7 +100,7 @@ class ConsoleFileManager extends FileManager {
}
def setupInst() {
- NestUI.verbose("Running dist (installed)")
+ vlog("Running dist (installed)")
val p = testParent.getParentFile
latestLibFile = prefixFileWith(p, "lib/scala-library.jar")
latestActorsFile = prefixFileWith(p, "lib/scala-actors.jar")
@@ -115,7 +110,7 @@ class ConsoleFileManager extends FileManager {
}
def setupDist() {
- NestUI.verbose("Running dists/latest")
+ vlog("Running dists/latest")
latestLibFile = prefixFile("dists/latest/lib/scala-library.jar")
latestActorsFile = prefixFile("dists/latest/lib/scala-actors.jar")
latestReflectFile = prefixFile("dists/latest/lib/scala-reflect.jar")
@@ -124,7 +119,7 @@ class ConsoleFileManager extends FileManager {
}
def setupPack() {
- NestUI.verbose("Running build/pack")
+ vlog("Running build/pack")
latestLibFile = prefixFile("build/pack/lib/scala-library.jar")
latestActorsFile = prefixFile("build/pack/lib/scala-actors.jar")
latestReflectFile = prefixFile("build/pack/lib/scala-reflect.jar")
@@ -170,11 +165,13 @@ class ConsoleFileManager extends FileManager {
var latestReflectFile: File = _
var latestCompFile: File = _
var latestPartestFile: File = _
- def latestScalapFile: File = (latestLibFile.parent / "scalap.jar").jfile
+ //def latestScalapFile: File = (latestLibFile.parent / "scalap.jar").jfile
+ //def latestScalapFile: File = new File(latestLibFile.getParentFile, "scalap.jar")
var testClassesDir: Directory = _
// initialize above fields
findLatest()
+ /*
def getFiles(kind: String, cond: Path => Boolean): List[File] = {
def ignoreDir(p: Path) = List("svn", "obj") exists (p hasExtension _)
@@ -187,4 +184,6 @@ class ConsoleFileManager extends FileManager {
( if (failed) files filter (x => logFileExists(x, kind)) else files ) map (_.jfile)
}
+ */
+ var latestFjbgFile: File = _
}
diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
index fd4d52f603..8161e53bf9 100644
--- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
@@ -3,85 +3,121 @@
* @author Philipp Haller
*/
-// $Id$
-
-package scala.tools.partest
+package scala.tools
+package partest
package nest
-import java.io.{File, PrintStream, FileOutputStream, BufferedReader,
- InputStreamReader, StringWriter, PrintWriter}
import utils.Properties._
import scala.tools.nsc.Properties.{ versionMsg, setProp }
import scala.tools.nsc.util.CommandLineParser
-import scala.tools.nsc.io
-import io.{ Path }
import scala.collection.{ mutable, immutable }
+import PathSettings.srcDir
+import TestKinds._
+import scala.reflect.internal.util.Collections.distinctBy
class ConsoleRunner extends DirectRunner {
- import PathSettings.{ srcDir, testRoot }
-
- case class TestSet(kind: String, filter: Path => Boolean, msg: String)
- private def stdFilter(p: Path) = p.isDirectory || (p hasExtension "scala")
- private def antFilter(p: Path) = p.isFile && (p endsWith "build.xml")
-
- val testSets = {
- List(
- TestSet("pos", stdFilter, "Testing compiler (on files whose compilation should succeed)"),
- TestSet("neg", stdFilter, "Testing compiler (on files whose compilation should fail)"),
- TestSet("run", stdFilter, "Testing interpreter and backend"),
- TestSet("jvm", stdFilter, "Testing JVM backend"),
- TestSet("res", x => x.isFile && (x hasExtension "res"), "Testing resident compiler"),
- TestSet("shootout", stdFilter, "Testing shootout tests"),
- TestSet("script", stdFilter, "Testing script tests"),
- TestSet("scalacheck", stdFilter, "Testing ScalaCheck tests"),
- TestSet("scalap", _.isDirectory, "Run scalap decompiler tests"),
- TestSet("specialized", stdFilter, "Testing specialized tests"),
- TestSet("instrumented", stdFilter, "Testing instrumented tests"),
- TestSet("presentation", _.isDirectory, "Testing presentation compiler tests."),
- TestSet("ant", antFilter, "Run Ant task tests.")
- )
- }
+ import NestUI._
+ import NestUI.color._
+
+ // So we can ctrl-C a test run and still hear all
+ // the buffered failure info.
+ scala.sys addShutdownHook issueSummaryReport()
var fileManager: ConsoleFileManager = _
- private var testFiles: List[File] = List()
- private val errors = PartestDefaults.errorCount
- private val testSetKinds = testSets map (_.kind)
- private val testSetArgs = testSets map ("--" + _.kind)
- private val testSetArgMap = testSetArgs zip testSets toMap
+ private var totalTests = 0
+ private val passedTests = mutable.ListBuffer[TestState]()
+ private val failedTests = mutable.ListBuffer[TestState]()
+
+ def comment(s: String) = echo(magenta("# " + s))
+ def levyJudgment() = {
+ if (totalTests == 0) echoMixed("No tests to run.")
+ else if (elapsedMillis == 0) echoMixed("Test Run ABORTED")
+ else if (isSuccess) echoPassed("Test Run PASSED")
+ else echoFailed("Test Run FAILED")
+ }
+
+ def passFailString(passed: Int, failed: Int, skipped: Int): String = {
+ val total = passed + failed + skipped
+ val isSuccess = failed == 0
+ def p0 = s"$passed/$total"
+ def p = ( if (isSuccess) bold(green(p0)) else p0 ) + " passed"
+ def f = if (failed == 0) "" else bold(red("" + failed)) + " failed"
+ def s = if (skipped == 0) "" else bold(yellow("" + skipped)) + " skipped"
- private def printVersion() { NestUI outline (versionMsg + "\n") }
+ oempty(p, f, s) mkString ", "
+ }
+
+ private var summarizing = false
+ private var elapsedMillis = 0L
+ private var expectedFailures = 0
+ private def isSuccess = failedTests.size == expectedFailures
+
+ def issueSummaryReport() {
+ // Don't run twice
+ if (!summarizing) {
+ summarizing = true
+
+ val passed0 = passedTests.toList
+ val failed0 = failedTests.toList
+ val passed = passed0.size
+ val failed = failed0.size
+ val skipped = totalTests - (passed + failed)
+ val passFail = passFailString(passed, failed, skipped)
+ val elapsed = if (elapsedMillis > 0) " (elapsed time: " + elapsedString(elapsedMillis) + ")" else ""
+ val message = passFail + elapsed
+
+ if (failed0.nonEmpty) {
+ if (isPartestVerbose) {
+ echo(bold(cyan("##### Transcripts from failed tests #####\n")))
+ failed0 foreach { state =>
+ comment("partest " + state.testFile)
+ echo(state.transcriptString + "\n")
+ }
+ }
+
+ def files_s = failed0.map(_.testFile).mkString(""" \""" + "\n ")
+ echo("# Failed test paths (this command will update checkfiles)")
+ echo("test/partest --update-check \\\n " + files_s + "\n")
+ }
+
+ echo(message)
+ levyJudgment()
+ }
+ }
private val unaryArgs = List(
- "--pack", "--all", "--verbose", "--show-diff", "--show-log",
+ "--pack", "--all",
+ "--terse", "--verbose", "--show-diff", "--show-log", "--self-test",
"--failed", "--update-check", "--version", "--ansi", "--debug", "--help"
- ) ::: testSetArgs
+ ) ::: standardArgs
private val binaryArgs = List(
- "--grep", "--srcpath", "--buildpath", "--classpath"
+ "--grep", "--srcpath", "--buildpath", "--classpath", "--timeout"
)
- // true if a test path matches the --grep expression.
- private def pathMatchesExpr(path: Path, expr: String) = {
- def pred(p: Path) = file2String(p.toFile) contains expr
- def greppable(f: Path) = f.isFile && (f hasExtension ("scala", "java"))
- def any(d: Path) = d.toDirectory.deepList() exists (f => greppable(f) && pred(f))
-
- (path.isFile && pred(path)) ||
- (path.isDirectory && any(path)) ||
- (pred(path changeExtension "check"))
- }
-
def main(argstr: String) {
val parsed = CommandLineParser(argstr) withUnaryArgs unaryArgs withBinaryArgs binaryArgs
- val args = onlyValidTestPaths(parsed.residualArgs)
- /* Early return on no args, version, or invalid args */
- if (argstr == "") return NestUI.usage()
- if (parsed isSet "--version") return printVersion
- if (parsed isSet "--help") return NestUI.usage()
+ if (parsed isSet "--debug") NestUI.setDebug()
+ if (parsed isSet "--verbose") NestUI.setVerbose()
+ if (parsed isSet "--terse") NestUI.setTerse()
+ if (parsed isSet "--show-diff") NestUI.setDiffOnFail()
+
+ // Early return on no args, version, or invalid args
+ if (parsed isSet "--version") return echo(versionMsg)
+ if ((argstr == "") || (parsed isSet "--help")) return NestUI.usage()
+
+ val (individualTests, invalid) = parsed.residualArgs map (p => Path(p)) partition denotesTestPath
+ if (invalid.nonEmpty) {
+ if (isPartestVerbose)
+ invalid foreach (p => echoWarning(s"Discarding invalid test path " + p))
+ else if (!isPartestTerse)
+ echoWarning(s"Discarding ${invalid.size} invalid test paths")
+ }
parsed get "--srcpath" foreach (x => setProp("partest.srcdir", x))
+ parsed get "--timeout" foreach (x => setProp("partest.timeout", x))
fileManager =
if (parsed isSet "--buildpath") new ConsoleFileManager(parsed("--buildpath"))
@@ -89,144 +125,102 @@ class ConsoleRunner extends DirectRunner {
else if (parsed isSet "--pack") new ConsoleFileManager("build/pack")
else new ConsoleFileManager // auto detection, see ConsoleFileManager.findLatest
- NestUI._verbose = parsed isSet "--verbose"
- fileManager.showDiff = true
- // parsed isSet "--show-diff"
fileManager.updateCheck = parsed isSet "--update-check"
- fileManager.showLog = parsed isSet "--show-log"
fileManager.failed = parsed isSet "--failed"
- if (parsed isSet "--ansi") NestUI initialize NestUI.MANY
- if (parsed isSet "--timeout") fileManager.timeout = parsed("--timeout")
- if (parsed isSet "--debug") setProp("partest.debug", "true")
+ val partestTests = (
+ if (parsed isSet "--self-test") TestKinds.testsForPartest
+ else Nil
+ )
- def addTestFile(file: File) = {
- if (!file.exists)
- NestUI.failure("Test file '%s' not found, skipping.\n" format file)
- else {
- NestUI.verbose("adding test file " + file)
- testFiles +:= file
- }
- }
+ val grepExpr = parsed get "--grep" getOrElse ""
// If --grep is given we suck in every file it matches.
-
- val grepOption = parsed get "--grep"
- val grepPaths = grepOption.toList flatMap { expr =>
- val subjectDirs = testSetKinds map (srcDir / _ toDirectory)
- val testPaths = subjectDirs flatMap (_.list filter stdFilter)
- val paths = testPaths filter (p => pathMatchesExpr(p, expr))
-
+ var grepMessage = ""
+ val greppedTests = if (grepExpr == "") Nil else {
+ val paths = grepFor(grepExpr)
if (paths.isEmpty)
- NestUI.failure("--grep string '%s' matched no tests." format expr)
+ echoWarning(s"grep string '$grepExpr' matched no tests.\n")
- paths map (_.jfile)
+ paths.sortBy(_.toString)
}
- val grepMessage = grepOption map (x => "Argument '%s' matched %d test(s)".format(x, grepPaths.size)) getOrElse ""
- grepPaths foreach addTestFile
- args foreach (x => addTestFile(new File(x)))
-
- // If no file arguments were given, we assume --all
- val enabledTestSets: List[TestSet] = {
- val enabledArgs = testSetArgs filter parsed.isSet
-
- if (args.isEmpty && !(parsed isSet "--grep") && (enabledArgs.isEmpty || (parsed isSet "--all"))) testSets
- else enabledArgs map testSetArgMap
- }
+ val isRerun = parsed isSet "--failed"
+ val rerunTests = if (isRerun) TestKinds.failedTests else Nil
+ def miscTests = partestTests ++ individualTests ++ greppedTests ++ rerunTests
+ val givenKinds = standardArgs filter parsed.isSet
+ val kinds = (
+ if (parsed isSet "--all") standardKinds
+ else if (givenKinds.nonEmpty) givenKinds map (_ stripPrefix "--")
+ else if (invalid.isEmpty && miscTests.isEmpty && !isRerun) standardKinds // If no kinds, --grep, or individual tests were given, assume --all
+ else Nil
+ )
+ val kindsTests = kinds flatMap testsFor
val dir =
if (fileManager.testClasses.isDefined) fileManager.testClassesDir
else fileManager.testBuildFile getOrElse {
fileManager.latestCompFile.getParentFile.getParentFile.getAbsoluteFile
}
- val vmBin = javaHome + File.separator + "bin"
- val vmName = "%s (build %s, %s)".format(javaVmName, javaVmVersion, javaVmInfo)
- val vmOpts = fileManager.JAVA_OPTS
-
- NestUI.verbose("enabled test sets: " + (enabledTestSets map (_.kind) mkString " "))
-
- List(
- "Scala compiler classes in: " + dir,
- "Scala version is: " + versionMsg,
- "Scalac options are: " + fileManager.SCALAC_OPTS,
- "Java binaries in: " + vmBin,
- "Java runtime is: " + vmName,
- "Java options are: " + vmOpts,
- "Source directory is: " + srcDir,
- ""
- ) foreach (x => NestUI verbose (x + "\n"))
-
- NestUI.verbose("available processors: " + Runtime.getRuntime().availableProcessors())
-
- // Dragged down here so it isn't buried under the banner.
- if (grepMessage != "")
- NestUI.normal(grepMessage + "\n")
-
- val ((successes, failures), elapsedMillis) = timed(testCheckAll(enabledTestSets))
- val total = successes + failures
-
- val elapsedSecs = elapsedMillis/1000
- val elapsedMins = elapsedSecs/60
- val elapsedHrs = elapsedMins/60
- val dispMins = elapsedMins - elapsedHrs * 60
- val dispSecs = elapsedSecs - elapsedMins * 60
-
- val dispElapsed = {
- def form(num: Long) = if (num < 10) "0"+num else ""+num
- form(elapsedHrs)+":"+form(dispMins)+":"+form(dispSecs)
+ def testContributors = {
+ List(
+ if (partestTests.isEmpty) "" else "partest self-tests",
+ if (rerunTests.isEmpty) "" else "previously failed tests",
+ if (kindsTests.isEmpty) "" else s"${kinds.size} named test categories",
+ if (greppedTests.isEmpty) "" else s"${greppedTests.size} tests matching '$grepExpr'",
+ if (individualTests.isEmpty) "" else "specified tests"
+ ) filterNot (_ == "") mkString ", "
}
- if (failures == 0)
- NestUI.success("All of "+total+" tests were successful (elapsed time: "+dispElapsed+")\n")
- else
- NestUI.failure(failures+" of "+total+" tests failed (elapsed time: "+dispElapsed+")\n")
+ def banner = {
+ val vmBin = javaHome + fileSeparator + "bin"
+ val vmName = "%s (build %s, %s)".format(javaVmName, javaVmVersion, javaVmInfo)
+ val vmOpts = fileManager.JAVA_OPTS
+
+ s"""|Scala compiler classes in: $dir
+ |Scala version is: $versionMsg
+ |Scalac options are: ${fileManager.SCALAC_OPTS mkString " "}
+ |Java binaries in: $vmBin
+ |Java runtime is: $vmName
+ |Java options are: $vmOpts
+ |Source directory is: $srcDir
+ |Available processors: ${Runtime.getRuntime().availableProcessors()}
+ |Java Classpath: ${sys.props("java.class.path")}
+ """.stripMargin
+ }
- System exit ( if (failures == errors) 0 else 1 )
- }
+ chatty(banner)
- def runTests(testSet: TestSet): (Int, Int) = {
- val TestSet(kind, filter, msg) = testSet
+ val allTests: List[Path] = distinctBy(miscTests ++ kindsTests)(_.toCanonical) sortBy (_.toString)
+ val grouped = (allTests groupBy kindOf).toList sortBy (x => standardKinds indexOf x._1)
- fileManager.getFiles(kind, filter) match {
- case Nil => NestUI.verbose("test dir empty\n") ; (0, 0)
- case files =>
- NestUI.verbose("test files: "+files)
- NestUI.outline("\n"+msg+"\n")
- resultsToStatistics(runTestsForFiles(files, kind))
+ totalTests = allTests.size
+ expectedFailures = propOrNone("partest.errors") match {
+ case Some(num) => num.toInt
+ case _ => 0
}
- }
-
- /**
- * @return (success count, failure count)
- */
- def testCheckAll(enabledSets: List[TestSet]): (Int, Int) = {
- def kindOf(f: File) = {
- (srcDir relativize Path(f).toCanonical).segments match {
- case (".." :: "scaladoc" :: xs) => xs.head
- case xs => xs.head
+ val expectedFailureMessage = if (expectedFailures == 0) "" else s" (expecting $expectedFailures to fail)"
+ echo(s"Selected $totalTests tests drawn from $testContributors$expectedFailureMessage\n")
+
+ val (_, millis) = timed {
+ for ((kind, paths) <- grouped) {
+ val num = paths.size
+ val ss = if (num == 1) "" else "s"
+ comment(s"starting $num test$ss in $kind")
+ val results = runTestsForFiles(paths map (_.jfile), kind)
+ val (passed, failed) = results partition (_.isOk)
+
+ passedTests ++= passed
+ failedTests ++= failed
+ if (failed.nonEmpty) {
+ comment(passFailString(passed.size, failed.size, 0) + " in " + kind)
+ }
+ echo("")
}
}
-
- val (valid, invalid) = testFiles partition (x => testSetKinds contains kindOf(x))
- invalid foreach (x => NestUI.failure(
- "Invalid test file '%s', skipping.\n".format(x) +
- "(Test kind '%s' not in known set '%s')".format(kindOf(x), testSetKinds))
- )
-
- val grouped = (valid groupBy kindOf).toList sortBy (x => testSetKinds indexOf x._1)
- val runTestsFileLists =
- for ((kind, files) <- grouped) yield {
- NestUI.outline("\nTesting individual files\n")
- resultsToStatistics(runTestsForFiles(files, kind))
- }
-
- if (enabledSets.nonEmpty)
- NestUI.verbose("Run sets: "+enabledSets)
-
- val results = runTestsFileLists ::: (enabledSets map runTests)
-
- (results map (_._1) sum, results map (_._2) sum)
+ this.elapsedMillis = millis
+ issueSummaryReport()
+ System exit ( if (isSuccess) 0 else 1 )
}
}
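
The rewritten ConsoleRunner registers its summary as a JVM shutdown hook so a ctrl-C'd run still prints the buffered failure information, and a flag keeps the report from being issued twice. A stripped-down sketch of that pattern (all names below are made up):

    object SummaryOnExit {
      @volatile private var summarized = false
      private val failures = scala.collection.mutable.ListBuffer[String]()

      scala.sys addShutdownHook issueSummary()   // also runs on normal exit

      def record(name: String): Unit = synchronized { failures += name }

      def issueSummary(): Unit = synchronized {
        if (!summarized) {                       // guard against double reporting
          summarized = true
          Console.err.println(s"${failures.size} failed: ${failures mkString ", "}")
        }
      }
    }
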
diff --git a/src/partest/scala/tools/partest/nest/DirectCompiler.scala b/src/partest/scala/tools/partest/nest/DirectCompiler.scala
new file mode 100644
index 0000000000..8e5ff2abc4
--- /dev/null
+++ b/src/partest/scala/tools/partest/nest/DirectCompiler.scala
@@ -0,0 +1,105 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Philipp Haller
+ */
+
+package scala.tools.partest
+package nest
+
+import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError }
+import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter }
+import scala.tools.nsc.util.{ FakePos, stackTraceString }
+import scala.tools.nsc.Properties.{ setProp, propOrEmpty }
+import scala.reflect.io.AbstractFile
+import scala.reflect.internal.util.Position
+import java.io.{ BufferedReader, PrintWriter, FileReader, Writer, FileWriter }
+
+class ExtConsoleReporter(settings: Settings, val writer: PrintWriter) extends ConsoleReporter(settings, Console.in, writer) {
+ shortname = true
+ // override def error(pos: Position, msg: String): Unit
+}
+
+class TestSettings(cp: String, error: String => Unit) extends Settings(error) {
+ def this(cp: String) = this(cp, _ => ())
+
+ nowarnings.value = false
+ encoding.value = "UTF-8"
+ classpath.value = cp
+}
+
+class PartestGlobal(settings: Settings, reporter: Reporter) extends Global(settings, reporter) {
+ // override def abort(msg: String): Nothing
+ // override def globalError(msg: String): Unit
+ // override def supplementErrorMessage(msg: String): String
+}
+class DirectCompiler(val fileManager: FileManager) {
+ def newGlobal(settings: Settings, reporter: Reporter): PartestGlobal =
+ new PartestGlobal(settings, reporter)
+
+ def newGlobal(settings: Settings, logWriter: FileWriter): Global =
+ newGlobal(settings, new ExtConsoleReporter(settings, new PrintWriter(logWriter)))
+
+ def newSettings(): TestSettings = new TestSettings(fileManager.LATEST_LIB)
+ def newSettings(outdir: String): TestSettings = {
+ val cp = ClassPath.join(fileManager.LATEST_LIB, outdir)
+ val s = new TestSettings(cp)
+ s.outdir.value = outdir
+ s
+ }
+
+ def compile(runner: Runner, opts0: List[String], sources: List[File]): TestState = {
+ import runner.{ sources => _, _ }
+
+ val testSettings = new TestSettings(ClassPath.join(fileManager.LATEST_LIB, outDir.getPath))
+ val logWriter = new FileWriter(logFile)
+ val srcDir = if (testFile.isDirectory) testFile else Path(testFile).parent.jfile
+ val opts = fileManager.updatePluginPath(opts0, AbstractFile getDirectory outDir, AbstractFile getDirectory srcDir)
+ val command = new CompilerCommand(opts, testSettings)
+ val global = newGlobal(testSettings, logWriter)
+ val reporter = global.reporter.asInstanceOf[ExtConsoleReporter]
+ def errorCount = reporter.ERROR.count
+
+ def defineSettings(s: Settings) = {
+ s.outputDirs setSingleOutput outDir.getPath
+ // adding codelib.jar to the classpath
+ // codelib provides the possibility to override standard reify
+ // this shields the massive amount of reification tests from changes in the API
+ prependToClasspaths(s, codelib)
+ s.classpath append fileManager.CLASSPATH // adding this why?
+
+ // add the instrumented library version to classpath
+ if (kind == "specialized")
+ prependToClasspaths(s, speclib)
+
+ // check that option processing succeeded
+ opts0.isEmpty || command.ok
+ }
+
+ if (!defineSettings(testSettings))
+ if (opts0.isEmpty)
+ reporter.error(null, s"bad settings: $testSettings")
+ else
+ reporter.error(null, opts0.mkString("bad options: ", space, ""))
+
+ def ids = sources.map(_.testIdent) mkString space
+ vlog(s"% scalac $ids")
+
+ def execCompile() =
+ if (command.shouldStopWithInfo) {
+ logWriter append (command getInfoMessage global)
+ runner genFail "compilation stopped with info"
+ } else {
+ new global.Run compile sources.map(_.getPath)
+ if (!reporter.hasErrors) runner.genPass()
+ else {
+ reporter.printSummary()
+ reporter.writer.close()
+ runner.genFail(s"compilation failed with $errorCount errors")
+ }
+ }
+
+ try { execCompile() }
+ catch { case t: Throwable => reporter.error(null, t.getMessage) ; runner.genCrash(t) }
+ finally { logWriter.close() }
+ }
+}
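
The new DirectCompiler assembles its classpath with ClassPath.join, and the Runner later prepends entries such as codelib.jar without introducing duplicates. A small sketch of a deduplicating prepend in that spirit, assuming the scala.tools.nsc.util.ClassPath helpers (the entry names are illustrative):

    import scala.tools.nsc.util.ClassPath

    // Put `entry` first unless it is already on the classpath.
    def prependEntry(classpath: String, entry: String): String = {
      val segments = ClassPath split classpath
      if (segments contains entry) classpath
      else ClassPath.join(entry :: segments: _*)
    }

    val base = ClassPath.join("/opt/scala/lib/scala-library.jar", "out/classes")
    val full = prependEntry(base, "lib/codelib.jar")   // codelib ends up first
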
diff --git a/src/partest/scala/tools/partest/nest/DirectRunner.scala b/src/partest/scala/tools/partest/nest/DirectRunner.scala
deleted file mode 100644
index 7e4c3b842c..0000000000
--- a/src/partest/scala/tools/partest/nest/DirectRunner.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io.File
-import scala.util.Properties.setProp
-import scala.tools.nsc.util.ScalaClassLoader
-import scala.tools.nsc.io.Path
-import scala.collection.{ mutable, immutable }
-import java.util.concurrent._
-
-case class TestRunParams(val scalaCheckParentClassLoader: ScalaClassLoader)
-
-trait DirectRunner {
- def fileManager: FileManager
-
- import PartestDefaults.numThreads
-
- def denotesTestFile(arg: String) = Path(arg).hasExtension("scala", "res", "xml")
- def denotesTestDir(arg: String) = Path(arg).ifDirectory(_.files.nonEmpty) exists (x => x)
- def denotesTestPath(arg: String) = denotesTestDir(arg) || denotesTestFile(arg)
-
- /** No duplicate, no empty directories, don't mess with this unless
- * you like partest hangs.
- */
- def onlyValidTestPaths[T](args: List[T]): List[T] = {
- args.distinct filter (arg => denotesTestPath("" + arg) || {
- NestUI.warning("Discarding invalid test path '%s'\n" format arg)
- false
- })
- }
- def runTestsForFiles(_kindFiles: List[File], kind: String): immutable.Map[String, TestState] = {
- System.setProperty("line.separator", "\n")
-
- val allUrls = PathSettings.scalaCheck.toURL :: fileManager.latestUrls
- val scalaCheckParentClassLoader = ScalaClassLoader.fromURLs(allUrls)
- val kindFiles = onlyValidTestPaths(_kindFiles)
- val pool = Executors.newFixedThreadPool(numThreads)
- val manager = new RunnerManager(kind, fileManager, TestRunParams(scalaCheckParentClassLoader))
- val futures = kindFiles map (f => (f, pool submit callable(manager runTest f))) toMap
-
- pool.shutdown()
-
- try if (!pool.awaitTermination(4, TimeUnit.HOURS))
- NestUI.warning("Thread pool timeout elapsed before all tests were complete!")
- catch { case t: InterruptedException =>
- NestUI.warning("Thread pool was interrupted")
- t.printStackTrace()
- }
-
- for ((file, future) <- futures) yield {
- val state = if (future.isCancelled) TestState.Timeout else future.get
- (file.getAbsolutePath, state)
- }
- }
-}
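
The deleted DirectRunner above fanned test files out over a fixed-size thread pool, shut the pool down, and bounded the wait with awaitTermination before collecting results. A generic sketch of that scheduling shape (task type, thread count and timeout are illustrative):

    import java.util.concurrent.{ Callable, Executors, TimeUnit }

    // Run every task on `threads` workers; a cancelled future maps to None.
    def runAll[A](tasks: List[() => A], threads: Int): List[Option[A]] = {
      val pool    = Executors.newFixedThreadPool(threads)
      val futures = tasks map (t => pool.submit(new Callable[A] { def call(): A = t() }))
      pool.shutdown()                              // accept no new work, finish the queue
      if (!pool.awaitTermination(4, TimeUnit.HOURS))
        Console.err.println("thread pool timeout elapsed before all tasks completed")
      futures map (f => if (f.isCancelled) None else Some(f.get))
    }
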
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
index 23546c6511..230ada4803 100644
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ b/src/partest/scala/tools/partest/nest/FileManager.scala
@@ -12,7 +12,7 @@ import java.io.{File, FilenameFilter, IOException, StringWriter,
FileInputStream, FileOutputStream, BufferedReader,
FileReader, PrintWriter, FileWriter}
import java.net.URI
-import scala.tools.nsc.io.{ Path, Directory, File => SFile }
+import scala.reflect.io.AbstractFile
import scala.collection.mutable
trait FileUtil {
@@ -62,16 +62,19 @@ trait FileManager extends FileUtil {
var LATEST_ACTORS: String
protected def relativeToLibrary(what: String): String = {
- if (LATEST_LIB endsWith ".jar") {
- (SFile(LATEST_LIB).parent / s"scala-$what.jar").toAbsolute.path
- }
- else {
+ def jarname = if (what startsWith "scala") s"$what.jar" else s"scala-$what.jar"
+ if (LATEST_LIB endsWith ".jar")
+ (SFile(LATEST_LIB).parent / jarname).toAbsolute.path
+ else
(SFile(LATEST_LIB).parent.parent / "classes" / what).toAbsolute.path
- }
}
def latestScaladoc = relativeToLibrary("scaladoc")
def latestInteractive = relativeToLibrary("interactive")
- def latestPaths = List(LATEST_LIB, LATEST_REFLECT, LATEST_COMP, LATEST_PARTEST, LATEST_ACTORS, latestScaladoc, latestInteractive)
+ def latestScalapFile = relativeToLibrary("scalap")
+ def latestPaths = List(
+ LATEST_LIB, LATEST_REFLECT, LATEST_COMP, LATEST_PARTEST, LATEST_ACTORS,
+ latestScalapFile, latestScaladoc, latestInteractive
+ )
def latestFiles = latestPaths map (p => new java.io.File(p))
def latestUrls = latestFiles map (_.toURI.toURL)
@@ -82,7 +85,6 @@ trait FileManager extends FileUtil {
var SCALAC_OPTS = PartestDefaults.scalacOpts.split(' ').toSeq
var JAVA_OPTS = PartestDefaults.javaOpts
- var timeout = PartestDefaults.timeout
/** Only when --debug is given. */
lazy val testTimings = new mutable.HashMap[String, Long]
@@ -128,4 +130,34 @@ trait FileManager extends FileUtil {
f.printlnAll(f.lines.toList map replace: _*)
}
+
+ /** Massage args to merge plugins and fix paths.
+ * Plugin path can be relative to test root, or cwd is out.
+ * While we're at it, mix in the baseline options, too.
+ * That's how ant passes in the plugins dir.
+ */
+ def updatePluginPath(args: List[String], out: AbstractFile, srcdir: AbstractFile): List[String] = {
+ val dir = testRootDir
+ // The given path, or the output dir if ".", or a temp dir if output is virtual (since plugin loading doesn't like virtual)
+ def pathOrCwd(p: String) =
+ if (p == ".") {
+ val plugxml = "scalac-plugin.xml"
+ val pout = if (out.isVirtual) Directory.makeTemp() else Path(out.path)
+ val srcpath = Path(srcdir.path)
+ val pd = (srcpath / plugxml).toFile
+ if (pd.exists) pd copyTo (pout / plugxml)
+ pout
+ } else Path(p)
+ def absolutize(path: String) = pathOrCwd(path) match {
+ case x if x.isAbsolute => x.path
+ case x => (dir / x).toAbsolute.path
+ }
+
+ val xprefix = "-Xplugin:"
+ val (xplugs, others) = args partition (_ startsWith xprefix)
+ val Xplugin = if (xplugs.isEmpty) Nil else List(xprefix +
+ (xplugs map (_ stripPrefix xprefix) flatMap (_ split pathSeparator) map absolutize mkString pathSeparator)
+ )
+ SCALAC_OPTS.toList ::: others ::: Xplugin
+ }
}
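
updatePluginPath above folds any number of -Xplugin: arguments into a single option whose entries are made absolute against the test root (plus special handling for "." and virtual directories, elided here). A simplified sketch of just the folding step, using plain java.io.File; the root and sample arguments are made up:

    import java.io.File

    def mergePlugins(args: List[String], root: File): List[String] = {
      val sep    = File.pathSeparator
      val prefix = "-Xplugin:"
      def absolutize(p: String): String = {
        val f = new File(p)
        if (f.isAbsolute) f.getPath else new File(root, p).getAbsolutePath
      }
      val (plugs, others) = args partition (_ startsWith prefix)
      val merged =
        if (plugs.isEmpty) Nil
        else List(prefix + plugs.map(_ stripPrefix prefix).flatMap(_ split sep).map(absolutize).mkString(sep))
      others ::: merged                            // at most one -Xplugin:, all paths absolute
    }

    mergePlugins(List("-Xplugin:plugins/continuations.jar", "-deprecation"), new File("/home/me/scala/test"))
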
diff --git a/src/partest/scala/tools/partest/nest/NestUI.scala b/src/partest/scala/tools/partest/nest/NestUI.scala
index df90b22448..564270e531 100644
--- a/src/partest/scala/tools/partest/nest/NestUI.scala
+++ b/src/partest/scala/tools/partest/nest/NestUI.scala
@@ -3,14 +3,42 @@
* @author Philipp Haller
*/
-// $Id$
-
package scala.tools.partest
package nest
import java.io.PrintWriter
+class Colors(enabled: => Boolean) {
+ import Console._
+
+ val bold = colored(BOLD)
+ val yellow = colored(YELLOW)
+ val green = colored(GREEN)
+ val blue = colored(BLUE)
+ val red = colored(RED)
+ val red_b = colored(RED_B)
+ val green_b = colored(GREEN_B)
+ val cyan = colored(CYAN)
+ val magenta = colored(MAGENTA)
+
+ private def colored(code: String): String => String =
+ s => if (enabled) code + s + RESET else s
+}
+
object NestUI {
+ private val testNum = new java.util.concurrent.atomic.AtomicInteger(1)
+ @volatile private var testNumberFmt = "%3d"
+ // @volatile private var testNumber = 1
+ private def testNumber = testNumberFmt format testNum.getAndIncrement()
+ def resetTestNumber(max: Int = -1) {
+ testNum set 1
+ val width = if (max > 0) max.toString.length else 3
+ testNumberFmt = s"%${width}d"
+ }
+
+ var colorEnabled = sys.props contains "partest.colors"
+ val color = new Colors(colorEnabled)
+ import color._
val NONE = 0
val SOME = 1
@@ -22,11 +50,66 @@ object NestUI {
private var _warning = ""
private var _default = ""
+ private var dotCount = 0
+ private val DotWidth = 72
+
+ def leftFlush() {
+ if (dotCount != 0) {
+ normal("\n")
+ dotCount = 0
+ }
+ }
+
+ def statusLine(state: TestState) = {
+ import state._
+ import TestState._
+ val colorizer = state match {
+ case _: Skip => yellow
+ case _: Updated => cyan
+ case s if s.isOk => green
+ case _ => red
+ }
+ val word = bold(colorizer(state.shortStatus))
+ f"$word $testNumber - $testIdent%-40s$reasonString"
+ }
+
+ def reportTest(state: TestState) = {
+ if (isTerse && state.isOk) {
+ if (dotCount >= DotWidth) {
+ outline("\n.")
+ dotCount = 1
+ }
+ else {
+ outline(".")
+ dotCount += 1
+ }
+ }
+ else {
+ echo(statusLine(state))
+ if (!state.isOk && isDiffy) {
+ val differ = bold(red("% ")) + "diff "
+ state.transcript find (_ startsWith differ) foreach (echo(_))
+ }
+ }
+ }
+
+ def echo(message: String): Unit = synchronized {
+ leftFlush()
+ print(message + "\n")
+ }
+ def chatty(msg: String) = if (isVerbose) echo(msg)
+
+ def echoSkipped(msg: String) = echo(yellow(msg))
+ def echoPassed(msg: String) = echo(bold(green(msg)))
+ def echoFailed(msg: String) = echo(bold(red(msg)))
+ def echoMixed(msg: String) = echo(bold(yellow(msg)))
+ def echoWarning(msg: String) = echo(bold(red(msg)))
+
def initialize(number: Int) = number match {
case MANY =>
_outline = Console.BOLD + Console.BLACK
_success = Console.BOLD + Console.GREEN
- _failure = Console.BOLD + Console.RED
+ _failure = Console.BOLD + Console.RED
_warning = Console.BOLD + Console.YELLOW
_default = Console.RESET
case SOME =>
@@ -61,10 +144,7 @@ object NestUI {
}
def usage() {
- println("Usage: NestRunner [<options>] [<testfile> ..] [<resfile>]")
- println(" <testfile>: list of files ending in '.scala'")
- println(" <resfile>: a file not ending in '.scala'")
- println(" <options>:")
+ println("Usage: NestRunner [options] [test test ...]")
println
println(" Test categories:")
println(" --all run all tests")
@@ -74,16 +154,12 @@ object NestUI {
println(" --jvm run JVM backend tests")
println(" --res run resident compiler tests")
println(" --scalacheck run ScalaCheck tests")
- println(" --script run script runner tests")
- println(" --shootout run shootout tests")
println(" --instrumented run instrumented tests")
println(" --presentation run presentation compiler tests")
- println(" --grep <expr> run all tests whose source file contains <expr>")
println
println(" Other options:")
println(" --pack pick compiler/reflect/library in build/pack, and run all tests")
- println(" --show-log show log")
- println(" --show-diff show diff between log and check file")
+ println(" --grep <expr> run all tests whose source file contains <expr>")
println(" --failed run only those tests that failed during the last run")
println(" --update-check instead of failing tests with output change, update checkfile. (Use with care!)")
println(" --verbose show progress information")
@@ -100,11 +176,33 @@ object NestUI {
}
var _verbose = false
+ var _debug = false
+ var _terse = false
+ var _diff = false
+ def isVerbose = _verbose
+ def isDebug = _debug
+ def isTerse = _terse
+ def isDiffy = _diff
+
+ def setVerbose() {
+ _verbose = true
+ }
+ def setDebug() {
+ _debug = true
+ }
+ def setTerse() {
+ _terse = true
+ }
+ def setDiffOnFail() {
+ _diff = true
+ }
def verbose(msg: String) {
- if (_verbose) {
- outline("debug: ")
- println(msg)
- }
+ if (isVerbose)
+ System.err.println(msg)
+ }
+ def debug(msg: String) {
+ if (isDebug)
+ System.err.println(msg)
}
}
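
The new Colors class takes its enabling switch by name, so whether a string gets wrapped in ANSI escapes is decided at each call rather than frozen when the object is created; flipping the colorEnabled var above is therefore picked up by the already-constructed color functions. A cut-down, self-contained analogue (Colorizer and useColor are made up):

    class Colorizer(enabled: => Boolean) {
      def red(s: String) = if (enabled) Console.RED + s + Console.RESET else s
    }

    var useColor = false
    val c = new Colorizer(useColor)   // captures the flag by name, not its current value
    println(c.red("fail"))            // printed plain: useColor is still false
    useColor = true
    println(c.red("fail"))            // now wrapped in ANSI red / reset
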
diff --git a/src/partest/scala/tools/partest/nest/PathSettings.scala b/src/partest/scala/tools/partest/nest/PathSettings.scala
index 7c005b4f61..030c515947 100644
--- a/src/partest/scala/tools/partest/nest/PathSettings.scala
+++ b/src/partest/scala/tools/partest/nest/PathSettings.scala
@@ -5,10 +5,9 @@
package scala.tools.partest
package nest
-import scala.tools.nsc.Properties.{ setProp, propOrEmpty, propOrNone, propOrElse }
import scala.tools.nsc.util.ClassPath
-import scala.tools.nsc.io
-import io.{ Path, File, Directory }
+import scala.tools.nsc.io.{ Path, File, Directory }
+import Path._
object PathSettings {
import PartestDefaults.{ testRootDir, srcDirName }
@@ -18,6 +17,8 @@ object PathSettings {
private def findJar(d: Directory, name: String): Option[File] = findJar(d.files, name)
private def findJar(files: Iterator[File], name: String): Option[File] =
files filter (_ hasExtension "jar") find { _.name startsWith name }
+ private def findJarOrFail(name: String, ds: Directory*): File = findJar(ds flatMap (_.files) iterator, name) getOrElse
+ sys.error(s"'${name}.jar' not found in '${ds map (_.path) mkString ", "}'.")
// Directory <root>/test
lazy val testRoot: Directory = testRootDir getOrElse {
@@ -26,8 +27,8 @@ object PathSettings {
candidates find isPartestDir getOrElse sys.error("Directory 'test' not found.")
}
- // Directory <root>/test/files
- lazy val srcDir = Directory(testRoot / srcDirName toCanonical)
+ // Directory <root>/test/files or .../scaladoc
+ def srcDir = Directory(testRoot / srcDirName toCanonical)
// Directory <root>/test/files/lib
lazy val srcLibDir = Directory(srcDir / "lib")
@@ -72,8 +73,14 @@ object PathSettings {
sys.error("No scalacheck jar found in '%s' or '%s'".format(buildPackLibDir, srcLibDir))
}
+ lazy val testInterface: File = findJarOrFail("test-interface", buildPackLibDir, srcLibDir)
+
lazy val diffUtils: File =
findJar(buildPackLibDir.files, "diffutils") getOrElse sys.error(s"No diffutils.jar found in '$buildPackLibDir'.")
+
+ /** The platform-specific support jar, `tools.jar`.
+ */
+ lazy val platformTools: Option[File] = PathResolver.SupplementalLocations.platformTools
}
class PathSettings() {
diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
index 9780e82cd9..734affa153 100644
--- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
@@ -55,7 +55,7 @@ class ReflectiveRunner {
// we hack into the classloader that will become parent classloader for scalac
// this way we ensure that reflective macro lookup will pick correct Code.lift
// it's also used to inject diffutils into the classpath when running partest from the test/partest script
- val srcCodeLibAndDiff = List(PathSettings.srcCodeLib, PathSettings.diffUtils)
+ val srcCodeLibAndDiff = List(PathSettings.srcCodeLib, PathSettings.diffUtils, PathSettings.testInterface)
val sepUrls = srcCodeLibAndDiff.map(_.toURI.toURL) ::: fileManager.latestUrls
// this seems to be the core classloader that determines which classes can be found when running partest from the test/partest script
val sepLoader = new URLClassLoader(sepUrls.toArray, null)
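
sepLoader is built with a null parent deliberately: class lookups then bypass the application classloader and resolve only against the listed URLs, so the jars named above are the ones actually loaded. A tiny sketch of the same isolation trick (the jar path is illustrative):

    import java.net.URLClassLoader

    val urls     = Array(new java.io.File("build/pack/lib/scala-library.jar").toURI.toURL)
    val isolated = new URLClassLoader(urls, null)        // bootstrap only; host classpath invisible
    val cls      = isolated.loadClass("scala.Option")    // must come from the listed jar
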
diff --git a/src/partest/scala/tools/partest/nest/Runner.scala b/src/partest/scala/tools/partest/nest/Runner.scala
new file mode 100644
index 0000000000..a53698eb77
--- /dev/null
+++ b/src/partest/scala/tools/partest/nest/Runner.scala
@@ -0,0 +1,901 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+package scala.tools.partest
+package nest
+
+import java.io.{ Console => _, _ }
+import java.net.URL
+import java.nio.charset.{ Charset, CharsetDecoder, CharsetEncoder, CharacterCodingException, CodingErrorAction => Action }
+import java.util.concurrent.Executors
+import java.util.concurrent.TimeUnit.NANOSECONDS
+import scala.collection.mutable.ListBuffer
+import scala.concurrent.duration.Duration
+import scala.io.Codec
+import scala.reflect.internal.FatalError
+import scala.sys.process.{ Process, ProcessLogger }
+import scala.tools.nsc.Properties.{ envOrElse, isWin, jdkHome, javaHome, propOrElse, propOrEmpty, setProp }
+import scala.tools.nsc.{ Settings, CompilerCommand, Global }
+import scala.tools.nsc.io.{ AbstractFile, PlainFile }
+import scala.tools.nsc.reporters.ConsoleReporter
+import scala.tools.nsc.util.{ Exceptional, ScalaClassLoader, stackTraceString }
+import scala.tools.scalap.Main.decompileScala
+import scala.tools.scalap.scalax.rules.scalasig.ByteCode
+import scala.util.{ Try, Success, Failure }
+import ClassPath.{ join, split }
+import PartestDefaults.{ javaCmd, javacCmd }
+import TestState.{ Pass, Fail, Crash, Uninitialized, Updated }
+
+trait PartestRunSettings {
+ def gitPath: Path
+ def reportPath: Path
+ def logPath: Path
+
+ def testPaths: List[Path]
+
+ def gitDiffOptions: List[String]
+ def extraScalacOptions: List[String]
+ def extraJavaOptions: List[String]
+}
+
+class TestTranscript {
+ import NestUI.color._
+ private val buf = ListBuffer[String]()
+ private def pass(s: String) = bold(green("% ")) + s
+ private def fail(s: String) = bold(red("% ")) + s
+
+ def add(action: String): this.type = { buf += action ; this }
+ def append(text: String) { val s = buf.last ; buf.trimEnd(1) ; buf += (s + text) }
+
+ // Colorize prompts according to pass/fail
+ def fail: List[String] = buf.toList match {
+ case Nil => Nil
+ case xs => (xs.init map pass) :+ fail(xs.last)
+ }
+}
+
+/** Run a single test. Rubber meets road. */
+class Runner(val testFile: File, fileManager: FileManager, val testRunParams: TestRunParams) {
+ import fileManager._
+
+ // Override to true to have the outcome of this test displayed
+ // whether it passes or not; in general only failures are reported,
+ // except for a . per passing test to show progress.
+ def isEnumeratedTest = false
+
+ private var _lastState: TestState = null
+ private var _transcript = new TestTranscript
+
+ def lastState = if (_lastState == null) Uninitialized(testFile) else _lastState
+ def setLastState(s: TestState) = _lastState = s
+ def transcript: List[String] = _transcript.fail ++ logFile.fileLines
+ def pushTranscript(msg: String) = _transcript add msg
+
+ val parentFile = testFile.getParentFile
+ val kind = parentFile.getName
+ val fileBase = basename(testFile.getName)
+ val logFile = new File(parentFile, s"$fileBase-$kind.log")
+ val outFile = logFile changeExtension "obj"
+ val checkFile = testFile changeExtension "check"
+ val flagsFile = testFile changeExtension "flags"
+ val testIdent = testFile.testIdent // e.g. pos/t1234
+
+ lazy val outDir = { outFile.mkdirs() ; outFile }
+
+ type RanOneTest = (Boolean, LogContext)
+
+ def showCrashInfo(t: Throwable) {
+ System.err.println("Crashed running test $testIdent: " + t)
+ if (!isPartestTerse)
+ System.err.println(stackTraceString(t))
+ }
+ protected def crashHandler: PartialFunction[Throwable, TestState] = {
+ case t: InterruptedException =>
+ genTimeout()
+ case t: Throwable =>
+ showCrashInfo(t)
+ logFile.appendAll(stackTraceString(t))
+ genCrash(t)
+ }
+
+ def genPass() = Pass(testFile)
+ def genFail(reason: String) = Fail(testFile, reason, _transcript.fail)
+ def genTimeout() = Fail(testFile, "timed out", _transcript.fail)
+ def genCrash(caught: Throwable) = Crash(testFile, caught, _transcript.fail)
+ def genUpdated() = Updated(testFile)
+
+ def speclib = PathSettings.srcSpecLib.toString // specialization lib
+ def codelib = PathSettings.srcCodeLib.toString // reify lib
+
+ // Prepend to a classpath, but without incurring duplicate entries
+ def prependTo(classpath: String, path: String): String = {
+ val segments = ClassPath split classpath
+
+ if (segments startsWith path) classpath
+ else ClassPath.join(path :: segments distinct: _*)
+ }
+
+ def prependToJavaClasspath(path: String) {
+ val jcp = sys.props.getOrElse("java.class.path", "")
+ prependTo(jcp, path) match {
+ case `jcp` =>
+ case cp => sys.props("java.class.path") = cp
+ }
+ }
+ def prependToClasspaths(s: Settings, path: String) {
+ prependToJavaClasspath(path)
+ val scp = s.classpath.value
+ prependTo(scp, path) match {
+ case `scp` =>
+ case cp => s.classpath.value = cp
+ }
+ }
+
+ private def workerError(msg: String): Unit = System.err.println("Error: " + msg)
+
+ def javac(files: List[File]): TestState = {
+ // compile using command-line javac compiler
+ val args = Seq(
+ javacCmd,
+ "-d",
+ outDir.getAbsolutePath,
+ "-classpath",
+ join(outDir.toString, CLASSPATH)
+ ) ++ files.map(_.getAbsolutePath)
+
+ pushTranscript(args mkString " ")
+ val captured = StreamCapture(runCommand(args, logFile))
+ if (captured.result) genPass() else {
+ logFile appendAll captured.stderr
+ genFail("java compilation failed")
+ }
+ }
+
+ def testPrompt = kind match {
+ case "res" => "nsc> "
+ case _ => "% "
+ }
+
+ /** Evaluate an action body and update the test state.
+ * @param failFn optionally map a result to a test state.
+ */
+ def nextTestAction[T](body: => T)(failFn: PartialFunction[T, TestState]): T = {
+ val result = body
+ setLastState( if (failFn isDefinedAt result) failFn(result) else genPass() )
+ result
+ }
+ def nextTestActionExpectTrue(reason: String, body: => Boolean): Boolean = (
+ nextTestAction(body) { case false => genFail(reason) }
+ )
+ def nextTestActionFailing(reason: String): Boolean = nextTestActionExpectTrue(reason, false)
+
+ private def assembleTestCommand(outDir: File, logFile: File): List[String] = {
+ // check whether there is a ".javaopts" file
+ val argsFile = testFile changeExtension "javaopts"
+ val argString = file2String(argsFile)
+ if (argString != "")
+ NestUI.verbose("Found javaopts file '%s', using options: '%s'".format(argsFile, argString))
+
+ val testFullPath = testFile.getAbsolutePath
+
+ // Note! As this currently functions, JAVA_OPTS must precede argString
+ // because when an option is repeated to java only the last one wins.
+ // That means until now all the .javaopts files were being ignored because
+ // they all attempt to change options which are also defined in
+ // partest.java_opts, leading to debug output like:
+ //
+ // debug: Found javaopts file 'files/shootout/message.scala-2.javaopts', using options: '-Xss32k'
+ // debug: java -Xss32k -Xss2m -Xms256M -Xmx1024M -classpath [...]
+ val extras = if (isPartestDebug) List("-Dpartest.debug=true") else Nil
+ val propertyOptions = List(
+ "-Dfile.encoding=UTF-8",
+ "-Djava.library.path="+logFile.getParentFile.getAbsolutePath,
+ "-Dpartest.output="+outDir.getAbsolutePath,
+ "-Dpartest.lib="+LATEST_LIB,
+ "-Dpartest.reflect="+LATEST_REFLECT,
+ "-Dpartest.cwd="+outDir.getParent,
+ "-Dpartest.test-path="+testFullPath,
+ "-Dpartest.testname="+fileBase,
+ "-Djavacmd="+javaCmd,
+ "-Djavaccmd="+javacCmd,
+ "-Duser.language=en",
+ "-Duser.country=US"
+ ) ++ extras
+
+ val classpath = if (extraClasspath != "") join(extraClasspath, CLASSPATH) else CLASSPATH
+
+ javaCmd +: (
+ (JAVA_OPTS.split(' ') ++ extraJavaOptions.split(' ') ++ argString.split(' ')).map(_.trim).filter(_ != "").toList ++ Seq(
+ "-classpath",
+ join(outDir.toString, classpath)
+ ) ++ propertyOptions ++ Seq(
+ "scala.tools.nsc.MainGenericRunner",
+ "-usejavacp",
+ "Test",
+ "jvm"
+ )
+ )
+ }
+
+ /** Runs command redirecting standard out and
+ * error out to output file.
+ */
+ private def runCommand(args: Seq[String], outFile: File): Boolean = {
+ //(Process(args) #> outFile !) == 0 or (Process(args) ! pl) == 0
+ val pl = ProcessLogger(outFile)
+ val nonzero = 17 // rounding down from 17.3
+ def run: Int = {
+ val p = Process(args) run pl
+ try p.exitValue
+ catch {
+ case e: InterruptedException =>
+ NestUI verbose s"Interrupted waiting for command to finish (${args mkString " "})"
+ p.destroy
+ nonzero
+ case t: Throwable =>
+ NestUI verbose s"Exception waiting for command to finish: $t (${args mkString " "})"
+ p.destroy
+ throw t
+ }
+ finally pl.close()
+ }
+ (pl buffer run) == 0
+ }
+
+ private def execTest(outDir: File, logFile: File): Boolean = {
+ val cmd = assembleTestCommand(outDir, logFile)
+
+ pushTranscript((cmd mkString s" \\$EOL ") + " > " + logFile.getName)
+ nextTestAction(runCommand(cmd, logFile)) {
+ case false =>
+ _transcript append EOL + logFile.fileContents
+ genFail("non-zero exit code")
+ }
+ }
+
+ override def toString = s"""Test($testIdent, lastState = $lastState)"""
+
+ // result is unused
+ def newTestWriters() = {
+ val swr = new StringWriter
+ val wr = new PrintWriter(swr, true)
+ // diff = ""
+
+ ((swr, wr))
+ }
+
+ def fail(what: Any) = {
+ NestUI.verbose("scalac: compilation of "+what+" failed\n")
+ false
+ }
+
+ /** Filter the diff for conditional blocks.
+ * The check file can contain lines of the form:
+ * `#partest java7`
+ * where the line contains a conventional flag name.
+ * In the diff output, these lines have the form:
+ * `> #partest java7`
+ * Blocks which don't apply are filtered out,
+ * and what remains is the desired diff.
+ * Line edit commands such as `0a1,6` don't count
+ * as diff, so return a nonempty diff only if
+ * material diff output was seen.
+ * Filtering the diff output (instead of every check
+ * file) means that we only post-process a test that
+ * might be failing, in the normal case.
+ */
+ def diffilter(d: String) = {
+ import scala.util.Properties.{javaVersion, isAvian}
+ val prefix = "#partest"
+ val margin = "> "
+ val leader = margin + prefix
+ // use lines in block so labeled? Default to sorry, Charlie.
+ def retainOn(f: String) = {
+ val (invert, token) =
+ if (f startsWith "!") (true, f drop 1) else (false, f)
+ val cond = token match {
+ case "java7" => javaVersion startsWith "1.7"
+ case "java6" => javaVersion startsWith "1.6"
+ case "avian" => isAvian
+ case "true" => true
+ case _ => false
+ }
+ if (invert) !cond else cond
+ }
+ if (d contains prefix) {
+ val sb = new StringBuilder
+ var retain = true // use the current line
+ var material = false // saw a line of diff
+ for (line <- d.lines)
+ if (line startsWith leader) {
+ val rest = (line stripPrefix leader).trim
+ retain = retainOn(rest)
+ } else if (retain) {
+ if (line startsWith margin) material = true
+ sb ++= line
+ sb ++= EOL
+ }
+ if (material) sb.toString else ""
+ } else d
+ }
+
+ def currentDiff = (
+ if (checkFile.canRead) diffilter(compareFiles(logFile, checkFile))
+ else compareContents(augmentString(file2String(logFile)).lines.toList, Nil)
+ )
+
+ val gitRunner = List("/usr/local/bin/git", "/usr/bin/git") map (f => new java.io.File(f)) find (_.canRead)
+ val gitDiffOptions = "--ignore-space-at-eol --no-index " + propOrEmpty("partest.git_diff_options")
+ // --color=always --word-diff
+
+ def gitDiff(f1: File, f2: File): Option[String] = {
+ try gitRunner map { git =>
+ val cmd = s"$git diff $gitDiffOptions $f1 $f2"
+ val diff = Process(cmd).lines_!.drop(4).map(_ + "\n").mkString
+
+ "\n" + diff
+ }
+ catch { case t: Exception => None }
+ }
+
+ /** Normalize the log output by applying test-specific filters
+ * and fixing filesystem-specific paths.
+ *
+ * Line filters are picked up from `filter: pattern` at the top of sources.
+ * The filtered line is detected with a simple "contains" test,
+ * and yes, "filter" means "filter out" in this context.
+ *
+ * File paths are detected using the absolute path of the test root.
+ * A string that looks like a file path is normalized by replacing
+ * the leading segments (the root) with "$ROOT" and by replacing
+ * any Windows backslashes with the one true file separator char.
+ */
+ def normalizeLog() {
+ // Apply judiciously; there are line comments in the "stub implementations" error output.
+ val slashes = """[/\\]+""".r
+ def squashSlashes(s: String) = slashes replaceAllIn (s, "/")
+
+ // this string identifies a path and is also snipped from log output.
+ // to preserve more of the path, could use fileManager.testRootPath
+ val elided = parentFile.getAbsolutePath
+
+ // something to mark the elision in the log file (disabled)
+ val ellipsis = "" //".../" // using * looks like a comment
+
+ // no spaces in test file paths below root, because otherwise how to detect end of path string?
+ val pathFinder = raw"""(?i)\Q${elided}${File.separator}\E([\${File.separator}\w]*)""".r
+ def canonicalize(s: String): String = (
+ pathFinder replaceAllIn (s, m => ellipsis + squashSlashes(m group 1))
+ )
+
+ def masters = {
+ val files = List(new File(parentFile, "filters"), new File(PathSettings.srcDir.path, "filters"))
+ files filter (_.exists) flatMap (_.fileLines) map (_.trim) filter (s => !(s startsWith "#"))
+ }
+ val filters = toolArgs("filter", split = false) ++ masters
+ val elisions = ListBuffer[String]()
+ //def lineFilter(s: String): Boolean = !(filters exists (s contains _))
+ def lineFilter(s: String): Boolean = (
+ filters map (_.r) forall { r =>
+ val res = (r findFirstIn s).isEmpty
+ if (!res) elisions += s
+ res
+ }
+ )
+
+ logFile.mapInPlace(canonicalize)(lineFilter)
+ if (isPartestVerbose && elisions.nonEmpty) {
+ import NestUI.color._
+ val emdash = bold(yellow("--"))
+ pushTranscript(s"filtering ${logFile.getName}$EOL${elisions mkString (emdash, EOL + emdash, EOL)}")
+ }
+ }
+
+ def diffIsOk: Boolean = {
+ // always normalize the log first
+ normalizeLog()
+ val diff = currentDiff
+ // if diff is not empty, is update needed?
+ val updating: Option[Boolean] = (
+ if (diff == "") None
+ else Some(fileManager.updateCheck)
+ )
+ pushTranscript(s"diff $logFile $checkFile")
+ nextTestAction(updating) {
+ case Some(true) =>
+ NestUI.verbose("Updating checkfile " + checkFile)
+ checkFile writeAll file2String(logFile)
+ genUpdated()
+ case Some(false) =>
+ // Get a word-highlighted diff from git if we can find it
+ val bestDiff = if (updating.isEmpty) "" else {
+ if (checkFile.canRead)
+ gitDiff(logFile, checkFile) getOrElse {
+ s"diff $logFile $checkFile\n$diff"
+ }
+ else diff
+ }
+ _transcript append bestDiff
+ genFail("output differs")
+ // TestState.fail("output differs", "output differs",
+ // genFail("output differs")
+ // TestState.Fail("output differs", bestDiff)
+ case None => genPass() // redundant default case
+ } getOrElse true
+ }
+
+ /** 1. Creates log file and output directory.
+ * 2. Runs script function, providing log file and output directory as arguments.
+ * 2b. or, just run the script without context and return a new context
+ */
+ def runInContext(body: => Boolean): (Boolean, LogContext) = {
+ val (swr, wr) = newTestWriters()
+ val succeeded = body
+ (succeeded, LogContext(logFile, swr, wr))
+ }
+
+ /** Grouped files in group order, and lex order within each group. */
+ def groupedFiles(sources: List[File]): List[List[File]] = (
+ if (sources.tail.nonEmpty) {
+ val grouped = sources groupBy (_.group)
+ grouped.keys.toList.sorted map (k => grouped(k) sortBy (_.getName))
+ }
+ else List(sources)
+ )
+
+ /** Source files for the given test file. */
+ def sources(file: File): List[File] = (
+ if (file.isDirectory)
+ file.listFiles.toList filter (_.isJavaOrScala)
+ else
+ List(file)
+ )
+
+ def newCompiler = new DirectCompiler(fileManager)
+
+ def attemptCompile(sources: List[File]): TestState = {
+ val state = newCompiler.compile(this, flagsForCompilation(sources), sources)
+ if (!state.isOk)
+ _transcript append ("\n" + file2String(logFile))
+
+ state
+ }
+
+ // snort or scarf all the contributing flags files
+ def flagsForCompilation(sources: List[File]): List[String] = {
+ def argsplitter(s: String) = words(s) filter (_.nonEmpty)
+ val perTest = argsplitter(flagsFile.fileContents)
+ val perGroup = if (testFile.isDirectory) {
+ sources flatMap { f => SFile(Path(f) changeExtension "flags").safeSlurp map argsplitter getOrElse Nil }
+ } else Nil
+ perTest ++ perGroup
+ }
+
+ def toolArgs(tool: String, split: Boolean = true): List[String] = {
+ def argsplitter(s: String) = if (split) words(s) filter (_.nonEmpty) else List(s)
+ def argsFor(f: File): List[String] = {
+ import scala.util.matching.Regex
+ val p = new Regex(s"(?:.*\\s)?${tool}:(?:\\s*)(.*)?", "args")
+ val max = 10
+ val src = Path(f).toFile.chars(codec)
+ val args = try {
+ src.getLines take max collectFirst {
+ case s if (p findFirstIn s).nonEmpty => for (m <- p findFirstMatchIn s) yield m group "args"
+ }
+ } finally src.close()
+ args.flatten map argsplitter getOrElse Nil
+ }
+ sources(testFile) flatMap argsFor
+ }
+
+ abstract class CompileRound {
+ def fs: List[File]
+ def result: TestState
+ def description: String
+
+ def fsString = fs map (_.toString stripPrefix parentFile.toString + "/") mkString " "
+ def isOk = result.isOk
+ def mkScalacString(): String = {
+ val flags = file2String(flagsFile) match {
+ case "" => ""
+ case s => " " + s
+ }
+ s"""scalac $fsString"""
+ }
+ override def toString = description + ( if (result.isOk) "" else "\n" + result.status )
+ }
+ case class OnlyJava(fs: List[File]) extends CompileRound {
+ def description = s"""javac $fsString"""
+ lazy val result = { pushTranscript(description) ; javac(fs) }
+ }
+ case class OnlyScala(fs: List[File]) extends CompileRound {
+ def description = mkScalacString()
+ lazy val result = { pushTranscript(description) ; attemptCompile(fs) }
+ }
+ case class ScalaAndJava(fs: List[File]) extends CompileRound {
+ def description = mkScalacString()
+ lazy val result = { pushTranscript(description) ; attemptCompile(fs) }
+ }
+
+ def compilationRounds(file: File): List[CompileRound] = (
+ (groupedFiles(sources(file)) map mixedCompileGroup).flatten
+ )
+ def mixedCompileGroup(allFiles: List[File]): List[CompileRound] = {
+ val (scalaFiles, javaFiles) = allFiles partition (_.isScala)
+ val isMixed = javaFiles.nonEmpty && scalaFiles.nonEmpty
+ val round1 = if (scalaFiles.isEmpty) None else Some(ScalaAndJava(allFiles))
+ val round2 = if (javaFiles.isEmpty) None else Some(OnlyJava(javaFiles))
+ val round3 = if (!isMixed) None else Some(OnlyScala(scalaFiles))
+
+ List(round1, round2, round3).flatten
+ }
+
+ def runNegTest() = runInContext {
+ val rounds = compilationRounds(testFile)
+
+ // failing means Does Not Compile
+ val failing = rounds find (x => nextTestActionExpectTrue("compilation failed", x.isOk) == false)
+
+ // which means passing if it checks and didn't crash the compiler
+ // or, OK, we'll let you crash the compiler with a FatalError if you supply a check file
+ def checked(r: CompileRound) = r.result match {
+ case Crash(_, t, _) if !checkFile.canRead || !t.isInstanceOf[FatalError] => false
+ case _ => diffIsOk
+ }
+
+ failing map (checked) getOrElse nextTestActionFailing("expected compilation failure")
+ }
+
+ def runTestCommon(andAlso: => Boolean): (Boolean, LogContext) = runInContext {
+ compilationRounds(testFile).forall(x => nextTestActionExpectTrue("compilation failed", x.isOk)) && andAlso
+ }
+
+ // Apache Ant 1.6 or newer
+ def ant(args: Seq[String], output: File): Boolean = {
+ val antDir = Directory(envOrElse("ANT_HOME", "/opt/ant/"))
+ val antLibDir = Directory(antDir / "lib")
+ val antLauncherPath = SFile(antLibDir / "ant-launcher.jar").path
+ val antOptions =
+ if (NestUI._verbose) List("-verbose", "-noinput")
+ else List("-noinput")
+ val cmd = javaCmd +: (
+ JAVA_OPTS.split(' ').map(_.trim).filter(_ != "") ++ Seq(
+ "-classpath",
+ antLauncherPath,
+ "org.apache.tools.ant.launch.Launcher"
+ ) ++ antOptions ++ args
+ )
+
+ runCommand(cmd, output)
+ }
+
+ def runAntTest(): (Boolean, LogContext) = {
+ val (swr, wr) = newTestWriters()
+
+ val succeeded = try {
+ val binary = "-Dbinary="+(
+ if (fileManager.LATEST_LIB endsWith "build/quick/classes/library") "quick"
+ else if (fileManager.LATEST_LIB endsWith "build/pack/lib/scala-library.jar") "pack"
+ else if (fileManager.LATEST_LIB endsWith "dists/latest/lib/scala-library.jar/") "latest"
+ else "installed"
+ )
+ val args = Array(binary, "-logfile", logFile.getPath, "-file", testFile.getPath)
+ NestUI.verbose("ant "+args.mkString(" "))
+
+ pushTranscript(s"ant ${args.mkString(" ")}")
+ nextTestActionExpectTrue("ant failed", ant(args, logFile)) && diffIsOk
+ }
+ catch { // *catch-all*
+ case e: Exception =>
+ NestUI.warning("caught "+e)
+ false
+ }
+
+ (succeeded, LogContext(logFile, swr, wr))
+ }
+
+ def extraClasspath = kind match {
+ case "specialized" => PathSettings.srcSpecLib.toString
+ case _ => ""
+ }
+ def extraJavaOptions = kind match {
+ case "instrumented" => "-javaagent:"+PathSettings.instrumentationAgentLib
+ case _ => ""
+ }
+
+ def runScalacheckTest() = runTestCommon {
+ NestUI verbose f"compilation of $testFile succeeded%n"
+
+ // this classloader is test specific: its parent contains library classes and others
+ val loader = {
+ import PathSettings.scalaCheck
+ val locations = List(outDir, scalaCheck.jfile) map (_.getAbsoluteFile.toURI.toURL)
+ ScalaClassLoader.fromURLs(locations, getClass.getClassLoader)
+ }
+ val logWriter = new PrintStream(new FileOutputStream(logFile), true)
+
+ def runInFramework(): Boolean = {
+ import org.scalatools.testing._
+ val f: Framework = loader.instantiate[Framework]("org.scalacheck.ScalaCheckFramework")
+ val logger = new Logger {
+ def ansiCodesSupported = false //params.env.isSet("colors")
+ def error(msg: String) = logWriter println msg
+ def warn(msg: String) = logWriter println msg
+ def info(msg: String) = logWriter println msg
+ def debug(msg: String) = logWriter println msg
+ def trace(t: Throwable) = t printStackTrace logWriter
+ }
+ var bad = 0
+ val handler = new EventHandler {
+ // testName, description, result, error
+ // Result = Success, Failure, Error, Skipped
+ def handle(event: Event): Unit = event.result match {
+ case Result.Success =>
+ //case Result.Skipped => // an exhausted test is skipped, therefore bad
+ case _ => bad += 1
+ }
+ }
+ val loggers = Array(logger)
+ val r = f.testRunner(loader, loggers).asInstanceOf[Runner2] // cast: the Runner2 overload of run accepts a plain Fingerprint
+ val claas = "Test"
+ val fingerprint = f.tests collectFirst { case x: SubclassFingerprint if x.isModule => x }
+ val args = toolArgs("scalacheck")
+ vlog(s"Run $testFile with args $args")
+ // set the context class loader for scaladoc/scalacheck tests (FIX ME)
+ ScalaClassLoader(testRunParams.scalaCheckParentClassLoader).asContext {
+ r.run(claas, fingerprint.get, handler, args.toArray) // synchronous?
+ }
+ val ok = (bad == 0)
+ if (!ok) _transcript append logFile.fileContents
+ ok
+ }
+ try nextTestActionExpectTrue("ScalaCheck test failed", runInFramework()) finally logWriter.close()
+ }
+
+ def runResidentTest() = {
+ // simulate resident compiler loop
+ val prompt = "\nnsc> "
+ val (swr, wr) = newTestWriters()
+
+ NestUI.verbose(this+" running test "+fileBase)
+ val dir = parentFile
+ val resFile = new File(dir, fileBase + ".res")
+
+ // run compiler in resident mode
+ // $SCALAC -d "$os_dstbase".obj -Xresident -sourcepath . "$@"
+ val sourcedir = logFile.getParentFile.getAbsoluteFile
+ val sourcepath = sourcedir.getAbsolutePath+File.separator
+ NestUI.verbose("sourcepath: "+sourcepath)
+
+ val argList = List(
+ "-d", outDir.getAbsoluteFile.getPath,
+ "-Xresident",
+ "-sourcepath", sourcepath)
+
+ // configure input/output files
+ val logOut = new FileOutputStream(logFile)
+ val logWriter = new PrintStream(logOut, true)
+ val resReader = new BufferedReader(new FileReader(resFile))
+ val logConsoleWriter = new PrintWriter(new OutputStreamWriter(logOut), true)
+
+ // create compiler
+ val settings = new Settings(workerError)
+ settings.sourcepath.value = sourcepath
+ settings.classpath.value = fileManager.CLASSPATH
+ val reporter = new ConsoleReporter(settings, scala.Console.in, logConsoleWriter)
+ val command = new CompilerCommand(argList, settings)
+ object compiler extends Global(command.settings, reporter)
+
+ def resCompile(line: String): Boolean = {
+ // NestUI.verbose("compiling "+line)
+ val cmdArgs = (line split ' ').toList map (fs => new File(dir, fs).getAbsolutePath)
+ // NestUI.verbose("cmdArgs: "+cmdArgs)
+ val sett = new Settings(workerError)
+ sett.sourcepath.value = sourcepath
+ val command = new CompilerCommand(cmdArgs, sett)
+ // "scalac " + command.files.mkString(" ")
+ pushTranscript("scalac " + command.files.mkString(" "))
+ nextTestActionExpectTrue(
+ "compilation failed",
+ command.ok && {
+ (new compiler.Run) compile command.files
+ !reporter.hasErrors
+ }
+ )
+ }
+ def loop(): Boolean = {
+ logWriter.print(prompt)
+ resReader.readLine() match {
+ case null | "" => logWriter.close() ; true
+ case line => resCompile(line) && loop()
+ }
+ }
+ // res/t687.res depends on ignoring its compilation failure
+ // and just looking at the diff, so I made them all do that
+ // because this is long enough.
+ if (!Output.withRedirected(logWriter)(try loop() finally resReader.close()))
+ setLastState(genPass())
+
+ (diffIsOk, LogContext(logFile, swr, wr))
+ }
+
+ def run(): TestState = {
+ if (kind == "neg" || (kind endsWith "-neg")) runNegTest()
+ else kind match {
+ case "pos" => runTestCommon(true)
+ case "ant" => runAntTest()
+ case "scalacheck" => runScalacheckTest()
+ case "res" => runResidentTest()
+ case "scalap" => runScalapTest()
+ case "script" => runScriptTest()
+ case _ => runTestCommon(execTest(outDir, logFile) && diffIsOk)
+ }
+
+ lastState
+ }
+
+ def runScalapTest() = runTestCommon {
+ val isPackageObject = testFile.getName startsWith "package"
+ val className = testFile.getName.stripSuffix(".scala").capitalize + (if (!isPackageObject) "" else ".package")
+ val loader = ScalaClassLoader.fromURLs(List(outDir.toURI.toURL), this.getClass.getClassLoader)
+ val byteCode = ByteCode forClass (loader loadClass className)
+ val result = decompileScala(byteCode.bytes, isPackageObject)
+
+ logFile writeAll result
+ diffIsOk
+ }
+ def runScriptTest() = {
+ import scala.sys.process._
+ val (swr, wr) = newTestWriters()
+
+ val args = file2String(testFile changeExtension "args")
+ val cmdFile = if (isWin) testFile changeExtension "bat" else testFile
+ val succeeded = (((cmdFile + " " + args) #> logFile !) == 0) && diffIsOk
+
+ (succeeded, LogContext(logFile, swr, wr))
+ }
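// A minimal, self-contained sketch of the scala.sys.process idiom runScriptTest relies on:
// run an external command, redirect its standard output into a file, and treat exit code 0
// as success. The command string and file name below are illustrative, not partest values.
import java.io.File
import scala.sys.process._

object RedirectDemo {
  def runToFile(command: String, log: File): Boolean =
    (Process(command) #> log).! == 0          // '!' blocks until the process exits and returns its exit code

  def main(args: Array[String]): Unit =
    println(runToFile("echo hello", new File("demo.log")))
}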
+
+ def cleanup() {
+ if (lastState.isOk)
+ logFile.delete()
+ if (!isPartestDebug)
+ Directory(outDir).deleteRecursively()
+ }
+}
+
+case class TestRunParams(val scalaCheckParentClassLoader: ScalaClassLoader)
+
+/** Extended by Ant- and ConsoleRunner for running a set of tests. */
+trait DirectRunner {
+ def fileManager: FileManager
+
+ import PartestDefaults.{ numThreads, waitTime }
+
+ Thread.setDefaultUncaughtExceptionHandler(
+ new Thread.UncaughtExceptionHandler {
+ def uncaughtException(thread: Thread, t: Throwable) {
+ val t1 = Exceptional unwrap t
+ System.err.println(s"Uncaught exception on thread $thread: $t1")
+ t1.printStackTrace()
+ }
+ }
+ )
+ def runTestsForFiles(kindFiles: List[File], kind: String): List[TestState] = {
+
+ NestUI.resetTestNumber(kindFiles.size)
+
+ // this special class loader is for the benefit of scaladoc tests, which need a class path
+ import PathSettings.{ testInterface, scalaCheck }
+ val allUrls = scalaCheck.toURL :: testInterface.toURL :: fileManager.latestUrls
+ val parentClassLoader = ScalaClassLoader fromURLs allUrls
+ // add scalacheck.jar to a special classloader, but use our loader as parent with test-interface
+ //val parentClassLoader = ScalaClassLoader fromURLs (List(scalaCheck.toURL), getClass().getClassLoader)
+ val pool = Executors newFixedThreadPool numThreads
+ val manager = new RunnerManager(kind, fileManager, TestRunParams(parentClassLoader))
+ val futures = kindFiles map (f => pool submit callable(manager runTest f))
+
+ pool.shutdown()
+ Try (pool.awaitTermination(waitTime) {
+ throw TimeoutException(waitTime)
+ }) match {
+ case Success(_) => futures map (_.get)
+ case Failure(e) =>
+ e match {
+ case TimeoutException(d) =>
+ NestUI warning "Thread pool timeout elapsed before all tests were complete!"
+ case ie: InterruptedException =>
+ NestUI warning "Thread pool was interrupted"
+ ie.printStackTrace()
+ }
+ pool.shutdownNow() // little point in continuing
+ // try to get as many completions as possible, in case someone cares
+ val results = for (f <- futures) yield {
+ try {
+ Some(f.get(0, NANOSECONDS))
+ } catch {
+ case _: Throwable => None
+ }
+ }
+ results.flatten
+ }
+ }
+}
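// A standalone sketch of the pool-and-salvage pattern in runTestsForFiles, written against
// plain java.util.concurrent; the awaitTermination(duration)(onTimeout) call above relies on
// the ExecutorOps enrichment from the partest package object. Thread count, timeout and the
// squared-number workload are illustrative values only.
import java.util.concurrent.{ Callable, Executors, TimeUnit }

object PoolDemo {
  def main(args: Array[String]): Unit = {
    val pool    = Executors.newFixedThreadPool(4)
    val futures = (1 to 10).toList map { i =>
      pool.submit(new Callable[Int] { def call() = { Thread.sleep(100); i * i } })
    }
    pool.shutdown()                                        // accept no new tasks, let queued ones finish
    val finished = pool.awaitTermination(30, TimeUnit.SECONDS)
    if (!finished) pool.shutdownNow()                      // timed out: interrupt whatever is still running
    // salvage whatever completed, mirroring the f.get(0, NANOSECONDS) loop above
    val results = futures flatMap { f =>
      try Some(f.get(0, TimeUnit.NANOSECONDS)) catch { case _: Exception => None }
    }
    println(results)
  }
}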
+
+case class TimeoutException(duration: Duration) extends RuntimeException
+
+class LogContext(val file: File, val writers: Option[(StringWriter, PrintWriter)])
+
+object LogContext {
+ def apply(file: File, swr: StringWriter, wr: PrintWriter): LogContext = {
+ require (file != null)
+ new LogContext(file, Some((swr, wr)))
+ }
+ def apply(file: File): LogContext = new LogContext(file, None)
+}
+
+object Output {
+ object outRedirect extends Redirecter(out)
+ object errRedirect extends Redirecter(err)
+
+ System.setOut(outRedirect)
+ System.setErr(errRedirect)
+
+ import scala.util.DynamicVariable
+ private def out = java.lang.System.out
+ private def err = java.lang.System.err
+ private val redirVar = new DynamicVariable[Option[PrintStream]](None)
+
+ class Redirecter(stream: PrintStream) extends PrintStream(new OutputStream {
+ def write(b: Int) = withStream(_ write b)
+
+ private def withStream(f: PrintStream => Unit) = f(redirVar.value getOrElse stream)
+
+ override def write(b: Array[Byte]) = withStream(_ write b)
+ override def write(b: Array[Byte], off: Int, len: Int) = withStream(_.write(b, off, len))
+ override def flush = withStream(_.flush)
+ override def close = withStream(_.close)
+ })
+
+ // this supports thread-safe nested output redirects
+ def withRedirected[T](newstream: PrintStream)(func: => T): T = {
+ // note down old redirect destination
+ // this may be None in which case outRedirect and errRedirect print to stdout and stderr
+ val saved = redirVar.value
+ // set new redirecter
+ // this one will redirect both out and err to newstream
+ redirVar.value = Some(newstream)
+
+ try func
+ finally {
+ newstream.flush()
+ redirVar.value = saved
+ }
+ }
+}
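// A minimal sketch of the DynamicVariable technique behind Output: the redirect target is a
// dynamically scoped variable, so nested withRedirected calls and concurrent threads each see
// their own destination. The names and the captured text below are illustrative only.
import java.io.{ ByteArrayOutputStream, PrintStream }
import scala.util.DynamicVariable

object RedirectSketch {
  private val target = new DynamicVariable[PrintStream](System.out)

  def emit(msg: String): Unit = target.value println msg    // always writes to the current target

  def withTarget[T](ps: PrintStream)(body: => T): T = target.withValue(ps)(body)

  def main(args: Array[String]): Unit = {
    val buf = new ByteArrayOutputStream()
    withTarget(new PrintStream(buf, true)) { emit("captured") }   // redirected within this scope only
    emit("to stdout")
    println("buffer contains: " + buf.toString.trim)
  }
}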
+
+/** Use a Runner to run a test. */
+class RunnerManager(kind: String, fileManager: FileManager, params: TestRunParams) {
+ import fileManager._
+ fileManager.CLASSPATH += File.pathSeparator + PathSettings.scalaCheck
+ fileManager.CLASSPATH += File.pathSeparator + PathSettings.diffUtils // needed to put diffutils on test/partest's classpath
+
+ def runTest(testFile: File): TestState = {
+ val runner = new Runner(testFile, fileManager, params)
+
+ // when the "--failed" option is provided, execute the test only if the log
+ // file is present (which means it failed before)
+ if (fileManager.failed && !runner.logFile.canRead)
+ runner.genPass()
+ else {
+ val (state, elapsed) =
+ try timed(runner.run())
+ catch {
+ case t: Throwable => throw new RuntimeException(s"Error running $testFile", t)
+ }
+ NestUI.reportTest(state)
+ runner.cleanup()
+ state
+ }
+ }
+}
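// The timed(...) wrapper used in runTest pairs a computation's result with its wall-clock
// duration; this is a standalone restatement of that helper (it lives in the partest package
// object, shown further down in this patch) with an illustrative workload.
object TimedDemo {
  def timed[T](body: => T): (T, Long) = {
    val start  = System.currentTimeMillis
    val result = body
    (result, System.currentTimeMillis - start)
  }

  def main(args: Array[String]): Unit = {
    val (sum, millis) = timed((1 to 1000000).map(_.toLong).sum)
    println(s"sum=$sum in ${millis}ms")
  }
}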
diff --git a/src/partest/scala/tools/partest/nest/RunnerManager.scala b/src/partest/scala/tools/partest/nest/RunnerManager.scala
deleted file mode 100644
index a63a81c47b..0000000000
--- a/src/partest/scala/tools/partest/nest/RunnerManager.scala
+++ /dev/null
@@ -1,764 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools.partest
-package nest
-
-import java.io._
-import java.net.URL
-import java.util.{ Timer, TimerTask }
-
-import scala.tools.nsc.Properties.{ jdkHome, javaHome, propOrElse }
-import scala.util.Properties.{ envOrElse, isWin }
-import scala.tools.nsc.{ Settings, CompilerCommand, Global }
-import scala.tools.nsc.io.{ AbstractFile, PlainFile, Path, Directory, File => SFile }
-import scala.tools.nsc.reporters.ConsoleReporter
-import scala.tools.nsc.util.{ ClassPath, FakePos, ScalaClassLoader, stackTraceString }
-import ClassPath.{ join, split }
-import scala.tools.scalap.scalax.rules.scalasig.ByteCode
-import scala.collection.{ mutable, immutable }
-import scala.sys.process._
-import java.util.concurrent.{ Executors, TimeUnit, TimeoutException }
-import PartestDefaults.{ javaCmd, javacCmd }
-
-class LogContext(val file: File, val writers: Option[(StringWriter, PrintWriter)])
-
-object LogContext {
- def apply(file: File, swr: StringWriter, wr: PrintWriter): LogContext = {
- require (file != null)
- new LogContext(file, Some((swr, wr)))
- }
- def apply(file: File): LogContext = new LogContext(file, None)
-}
-
-object Output {
- object outRedirect extends Redirecter(out)
- object errRedirect extends Redirecter(err)
-
- System.setOut(outRedirect)
- System.setErr(errRedirect)
-
- import scala.util.DynamicVariable
- private def out = java.lang.System.out
- private def err = java.lang.System.err
- private val redirVar = new DynamicVariable[Option[PrintStream]](None)
-
- class Redirecter(stream: PrintStream) extends PrintStream(new OutputStream {
- def write(b: Int) = withStream(_ write b)
-
- private def withStream(f: PrintStream => Unit) = f(redirVar.value getOrElse stream)
-
- override def write(b: Array[Byte]) = withStream(_ write b)
- override def write(b: Array[Byte], off: Int, len: Int) = withStream(_.write(b, off, len))
- override def flush = withStream(_.flush)
- override def close = withStream(_.close)
- })
-
- // this supports thread-safe nested output redirects
- def withRedirected[T](newstream: PrintStream)(func: => T): T = {
- // note down old redirect destination
- // this may be None in which case outRedirect and errRedirect print to stdout and stderr
- val saved = redirVar.value
- // set new redirecter
- // this one will redirect both out and err to newstream
- redirVar.value = Some(newstream)
-
- try func
- finally {
- newstream.flush()
- redirVar.value = saved
- }
- }
-}
-
-class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunParams) {
- import fileManager._
-
- val compileMgr = new CompileManager(fileManager)
- fileManager.CLASSPATH += File.pathSeparator + PathSettings.scalaCheck
- fileManager.CLASSPATH += File.pathSeparator + PathSettings.diffUtils // needed to put diffutils on test/partest's classpath
-
- private def compareFiles(f1: File, f2: File): String =
- try fileManager.compareFiles(f1, f2)
- catch { case t: Exception => t.toString }
-
- /** This does something about absolute paths and file separator
- * chars before diffing.
- */
- private def replaceSlashes(dir: File, s: String): String = {
- val base = (dir.getAbsolutePath + File.separator).replace('\\', '/')
- var regex = """\Q%s\E""" format base
- if (isWin) regex = "(?i)" + regex
- s.replace('\\', '/').replaceAll(regex, "")
- }
-
- private def workerError(msg: String): Unit = System.err.println("Error: " + msg)
-
- private def printInfoStart(file: File, printer: PrintWriter) {
- NestUI.outline("testing: ", printer)
- val filesdir = file.getAbsoluteFile.getParentFile.getParentFile
- val testdir = filesdir.getParentFile
- val totalWidth = 56
- val name = {
- // 1. try with [...]/files/run/test.scala
- val name = file.getAbsolutePath drop testdir.getAbsolutePath.length
- if (name.length <= totalWidth) name
- // 2. try with [...]/run/test.scala
- else file.getAbsolutePath drop filesdir.getAbsolutePath.length
- }
- NestUI.normal("[...]%s%s".format(name, " " * (totalWidth - name.length)), printer)
- }
-
- private def printInfoEnd(success: Boolean, printer: PrintWriter) {
- NestUI.normal("[", printer)
- if (success) NestUI.success(" OK ", printer)
- else NestUI.failure("FAILED", printer)
- NestUI.normal("]\n", printer)
- }
-
- private def printInfoTimeout(printer: PrintWriter) {
- NestUI.normal("[", printer)
- NestUI.failure("TIMOUT", printer)
- NestUI.normal("]\n", printer)
- }
-
- private def javac(outDir: File, files: List[File], output: File): CompilationOutcome = {
- // compile using command-line javac compiler
- val args = Seq(
- javacCmd,
- "-d",
- outDir.getAbsolutePath,
- "-classpath",
- join(outDir.toString, CLASSPATH)
- ) ++ files.map("" + _)
-
- try if (runCommand(args, output)) CompileSuccess else CompileFailed
- catch exHandler(output, "javac command failed:\n" + args.map(" " + _ + "\n").mkString + "\n", CompilerCrashed)
- }
-
- /** Runs command redirecting standard out and error out to output file.
- * Overloaded to accept a sequence of arguments.
- */
- private def runCommand(args: Seq[String], outFile: File): Boolean = {
- NestUI.verbose("running command:\n"+args.map(" " + _ + "\n").mkString)
- runCommandImpl(Process(args), outFile)
- }
-
- /** Runs command redirecting standard out and error out to output file.
- * Overloaded to accept a single string = concatenated command + arguments.
- */
- private def runCommand(command: String, outFile: File): Boolean = {
- NestUI.verbose("running command:"+command)
- runCommandImpl(Process(command), outFile)
- }
-
- private def runCommandImpl(process: => ProcessBuilder, outFile: File): Boolean = {
- val exitCode = (process #> outFile !)
- // normalize line endings
- // System.getProperty("line.separator") should be "\n" here
- // so reading a file and writing it back should convert all CRLFs to LFs
- SFile(outFile).printlnAll(SFile(outFile).lines.toList: _*)
- exitCode == 0
- }
-
- @inline private def isJava(f: File) = SFile(f) hasExtension "java"
- @inline private def isScala(f: File) = SFile(f) hasExtension "scala"
- @inline private def isJavaOrScala(f: File) = isJava(f) || isScala(f)
-
- private def outputLogFile(logFile: File) {
- val lines = SFile(logFile).lines
- if (lines.nonEmpty) {
- NestUI.normal("Log file '" + logFile + "': \n")
- lines foreach (x => NestUI.normal(x + "\n"))
- }
- }
- private def logStackTrace(logFile: File, t: Throwable, msg: String): Boolean = {
- SFile(logFile).writeAll(msg, stackTraceString(t))
- outputLogFile(logFile) // if running the test threw an exception, output log file
- false
- }
-
- private def exHandler[T](logFile: File, msg: String, value: T): PartialFunction[Throwable, T] = {
- case e: Exception => logStackTrace(logFile, e, msg) ; value
- }
-
- class Runner(testFile: File) {
- var testDiff: String = ""
- var passed: Option[Boolean] = None
-
- val fileBase = basename(testFile.getName)
- val logFile = fileManager.getLogFile(testFile, kind)
- val parent = testFile.getParentFile
- val outDir = new File(parent, "%s-%s.obj".format(fileBase, kind))
- def toDelete = if (isPartestDebug) Nil else List(
- if (passed exists (x => x)) Some(logFile) else None,
- if (outDir.isDirectory) Some(outDir) else None
- ).flatten
-
- private def createOutputDir(): File = {
- outDir.mkdirs()
- outDir
- }
-
- private def execTest(outDir: File, logFile: File, classpathPrefix: String = "", javaOpts: String = ""): Boolean = {
- // check whether there is a ".javaopts" file
- val argsFile = new File(logFile.getParentFile, fileBase + ".javaopts")
- val argString = file2String(argsFile)
- if (argString != "")
- NestUI.verbose("Found javaopts file '%s', using options: '%s'".format(argsFile, argString))
-
- val testFullPath = {
- val d = new File(logFile.getParentFile, fileBase)
- if (d.isDirectory) d.getAbsolutePath
- else {
- val f = new File(logFile.getParentFile, fileBase + ".scala")
- if (f.isFile) f.getAbsolutePath
- else ""
- }
- }
-
- // Note! As this currently functions, JAVA_OPTS must precede argString
- // because when an option is repeated to java only the last one wins.
- // That means until now all the .javaopts files were being ignored because
- // they all attempt to change options which are also defined in
- // partest.java_opts, leading to debug output like:
- //
- // debug: Found javaopts file 'files/shootout/message.scala-2.javaopts', using options: '-Xss32k'
- // debug: java -Xss32k -Xss2m -Xms256M -Xmx1024M -classpath [...]
- val extras = if (isPartestDebug) List("-Dpartest.debug=true") else Nil
- val propertyOptions = List(
- "-Dfile.encoding=UTF-8",
- "-Djava.library.path="+logFile.getParentFile.getAbsolutePath,
- "-Dpartest.output="+outDir.getAbsolutePath,
- "-Dpartest.lib="+LATEST_LIB,
- "-Dpartest.reflect="+LATEST_REFLECT,
- "-Dpartest.comp="+LATEST_COMP,
- "-Dpartest.cwd="+outDir.getParent,
- "-Dpartest.test-path="+testFullPath,
- "-Dpartest.testname="+fileBase,
- "-Djavacmd="+javaCmd,
- "-Djavaccmd="+javacCmd,
- "-Duser.language=en",
- "-Duser.country=US"
- ) ++ extras
-
- val classpath = if (classpathPrefix != "") join(classpathPrefix, CLASSPATH) else CLASSPATH
- val cmd = javaCmd +: (
- (JAVA_OPTS.split(' ') ++ javaOpts.split(' ') ++ argString.split(' ')).map(_.trim).filter(_ != "") ++ Seq(
- "-classpath",
- join(outDir.toString, classpath)
- ) ++ propertyOptions ++ Seq(
- "scala.tools.nsc.MainGenericRunner",
- "-usejavacp",
- "Test",
- "jvm"
- )
- )
-
- runCommand(cmd, logFile)
- }
-
- private def getCheckFilePath(dir: File, suffix: String) = {
- def chkFile(s: String) = (Directory(dir) / "%s%s.check".format(fileBase, s)).toFile
-
- if (chkFile("").isFile || suffix == "") chkFile("")
- else chkFile("-" + suffix)
- }
-
- private def compareOutput(dir: File, logFile: File): String = {
- val checkFile = getCheckFilePath(dir, kind)
- val diff =
- if (checkFile.canRead) compareFiles(logFile, checkFile.jfile)
- else file2String(logFile)
-
- // if check file exists, compare with log file
- if (diff != "" && fileManager.updateCheck) {
- NestUI.verbose("Updating checkfile " + checkFile.jfile)
- val toWrite = if (checkFile.exists) checkFile else getCheckFilePath(dir, "")
- toWrite writeAll file2String(logFile)
- ""
- }
- else diff
- }
-
- def newTestWriters() = {
- val swr = new StringWriter
- val wr = new PrintWriter(swr, true)
-
- ((swr, wr))
- }
-
- def diffCheck(testFile: File, diff: String) = {
- testDiff = diff
- testDiff == ""
- }
-
- /** 1. Creates log file and output directory.
- * 2. Runs script function, providing log file and output directory as arguments.
- */
- def runInContext(file: File, script: (File, File) => Boolean): (Boolean, LogContext) = {
- val (swr, wr) = newTestWriters()
- printInfoStart(file, wr)
-
- NestUI.verbose(this+" running test "+fileBase)
- val outDir = createOutputDir()
- NestUI.verbose("output directory: "+outDir)
-
- // run test-specific code
- val succeeded = try {
- if (isPartestDebug) {
- val (result, millis) = timed(script(logFile, outDir))
- fileManager.recordTestTiming(file.getPath, millis)
- result
- }
- else script(logFile, outDir)
- }
- catch exHandler(logFile, "", false)
-
- (succeeded, LogContext(logFile, swr, wr))
- }
-
- def groupedFiles(dir: File): List[List[File]] = {
- val testFiles = dir.listFiles.toList filter isJavaOrScala
-
- def isInGroup(f: File, num: Int) = SFile(f).stripExtension endsWith ("_" + num)
- val groups = (0 to 9).toList map (num => (testFiles filter (f => isInGroup(f, num))).sorted)
- val noGroupSuffix = (testFiles filterNot (groups.flatten contains)).sorted
-
- noGroupSuffix :: groups filterNot (_.isEmpty)
- }
-
- def compileFilesIn(dir: File, logFile: File, outDir: File): CompilationOutcome = {
- def compileGroup(g: List[File]): CompilationOutcome = {
- val (scalaFiles, javaFiles) = g partition isScala
- val allFiles = javaFiles ++ scalaFiles
-
- /* The test can contain both java and scala files, each of which should be compiled with the corresponding
- * compiler. Since the source files can reference each other both ways (java referencing scala classes and
- * vice versa, the partest compilation routine attempts to reach a "bytecode fixpoint" between the two
- * compilers -- that's when bytecode generated by each compiler implements the signatures expected by the other.
- *
- * In theory this property can't be guaranteed, as neither compiler can know what signatures the other
- * compiler expects and how to implement them. (see SI-1240 for the full story)
- *
- * In practice, this happens in 3 steps:
- * STEP1: Feed all the files to scalac if there are also non-Scala sources.
- * It will parse java files and obtain their expected signatures and generate bytecode for scala files
- * STEP2: Feed the java files to javac if there are any.
- * It will generate the bytecode for the java files and link to the scalac-generated bytecode for scala
- * STEP3: (Re-)compile the scala sources so they link to the correct
- * java signatures, in case the signatures deduced by scalac from the source files were wrong. Since the
- * bytecode for java is already in place, we only feed the scala files to scalac so it will take the
- * java signatures from the existing javac-generated bytecode.
- * Note that no artifacts are deleted before this step.
- */
- List(1, 2, 3).foldLeft(CompileSuccess: CompilationOutcome) {
- case (CompileSuccess, 1) if scalaFiles.nonEmpty && javaFiles.nonEmpty =>
- compileMgr.attemptCompile(Some(outDir), allFiles, kind, logFile)
- case (CompileSuccess, 2) if javaFiles.nonEmpty =>
- javac(outDir, javaFiles, logFile)
- case (CompileSuccess, 3) if scalaFiles.nonEmpty =>
- // TODO: Do we actually need this? SI-1240 is known to require this, but we don't know if other tests
- // require it: https://groups.google.com/forum/?fromgroups#!topic/scala-internals/rFDKAcOKciU
- compileMgr.attemptCompile(Some(outDir), scalaFiles, kind, logFile)
-
- case (outcome, _) => outcome
- }
- }
- groupedFiles(dir).foldLeft(CompileSuccess: CompilationOutcome) {
- case (CompileSuccess, files) => compileGroup(files)
- case (outcome, _) => outcome
- }
- }
-
- def runTestCommon(file: File, expectFailure: Boolean)(
- onSuccess: (File, File) => Boolean,
- onFail: (File, File) => Unit = (_, _) => ()): (Boolean, LogContext) =
- {
- runInContext(file, (logFile: File, outDir: File) => {
- val outcome = (
- if (file.isDirectory) compileFilesIn(file, logFile, outDir)
- else compileMgr.attemptCompile(None, List(file), kind, logFile)
- )
- val result = (
- if (expectFailure) outcome.isNegative
- else outcome.isPositive
- )
-
- if (result) onSuccess(logFile, outDir)
- else { onFail(logFile, outDir) ; false }
- })
- }
-
- def runJvmTest(file: File): (Boolean, LogContext) =
- runTestCommon(file, expectFailure = false)((logFile, outDir) => {
- val dir = file.getParentFile
-
- // adding codelib.jar to the classpath
- // codelib provides the possibility to override standard reify
- // this shields the massive amount of reification tests from changes in the API
- execTest(outDir, logFile, PathSettings.srcCodeLib.toString) && {
- // cannot replace paths here since this also inverts slashes
- // which affects a bunch of tests
- //fileManager.mapFile(logFile, replaceSlashes(dir, _))
- diffCheck(file, compareOutput(dir, logFile))
- }
- })
-
- // Apache Ant 1.6 or newer
- def ant(args: Seq[String], output: File): Boolean = {
- val antDir = Directory(envOrElse("ANT_HOME", "/opt/ant/"))
- val antLibDir = Directory(antDir / "lib")
- val antLauncherPath = SFile(antLibDir / "ant-launcher.jar").path
- val antOptions =
- if (NestUI._verbose) List("-verbose", "-noinput")
- else List("-noinput")
- val cmd = javaCmd +: (
- JAVA_OPTS.split(' ').map(_.trim).filter(_ != "") ++ Seq(
- "-classpath",
- antLauncherPath,
- "org.apache.tools.ant.launch.Launcher"
- ) ++ antOptions ++ args
- )
-
- try runCommand(cmd, output)
- catch exHandler(output, "ant command '" + cmd + "' failed:\n", false)
- }
-
- def runAntTest(file: File): (Boolean, LogContext) = {
- val (swr, wr) = newTestWriters()
- printInfoStart(file, wr)
-
- NestUI.verbose(this+" running test "+fileBase)
-
- val succeeded = try {
- val binary = "-Dbinary="+(
- if (fileManager.LATEST_LIB endsWith "build/quick/classes/library") "quick"
- else if (fileManager.LATEST_LIB endsWith "build/pack/lib/scala-library.jar") "pack"
- else if (fileManager.LATEST_LIB endsWith "dists/latest/lib/scala-library.jar/") "latest"
- else "installed"
- )
- val args = Array(binary, "-logfile", logFile.path, "-file", file.path)
- NestUI.verbose("ant "+args.mkString(" "))
- ant(args, logFile) && diffCheck(file, compareOutput(file.getParentFile, logFile))
- }
- catch { // *catch-all*
- case e: Exception =>
- NestUI.verbose("caught "+e)
- false
- }
-
- (succeeded, LogContext(logFile, swr, wr))
- }
-
- def runSpecializedTest(file: File): (Boolean, LogContext) =
- runTestCommon(file, expectFailure = false)((logFile, outDir) => {
- val dir = file.getParentFile
-
- // adding the instrumented library to the classpath
- ( execTest(outDir, logFile, PathSettings.srcSpecLib.toString) &&
- diffCheck(file, compareOutput(dir, logFile))
- )
- })
-
- def runInstrumentedTest(file: File): (Boolean, LogContext) =
- runTestCommon(file, expectFailure = false)((logFile, outDir) => {
- val dir = file.getParentFile
-
- // adding the javagent option with path to instrumentation agent
- execTest(outDir, logFile, javaOpts = "-javaagent:"+PathSettings.instrumentationAgentLib) &&
- diffCheck(file, compareOutput(dir, logFile))
- })
-
- def processSingleFile(file: File): (Boolean, LogContext) = kind match {
- case "scalacheck" =>
- val succFn: (File, File) => Boolean = { (logFile, outDir) =>
- NestUI.verbose("compilation of "+file+" succeeded\n")
-
- val outURL = outDir.getAbsoluteFile.toURI.toURL
- val logWriter = new PrintStream(new FileOutputStream(logFile), true)
-
- Output.withRedirected(logWriter) {
- // this classloader is test specific: its parent contains library classes and others
- ScalaClassLoader.fromURLs(List(outURL), params.scalaCheckParentClassLoader).run("Test", Nil)
- }
-
- NestUI.verbose(file2String(logFile))
- // obviously this must be improved upon
- val lines = SFile(logFile).lines map (_.trim) filterNot (_ == "") toBuffer;
- lines.forall(x => !x.startsWith("!")) || {
- NestUI.normal("ScalaCheck test failed. Output:\n")
- lines foreach (x => NestUI.normal(x + "\n"))
- false
- }
- }
- runTestCommon(file, expectFailure = false)(
- succFn,
- (logFile, outDir) => outputLogFile(logFile)
- )
-
- case "pos" =>
- runTestCommon(file, expectFailure = false)(
- (logFile, outDir) => true,
- (_, _) => ()
- )
-
- case "neg" =>
- runTestCommon(file, expectFailure = true)((logFile, outDir) => {
- // compare log file to check file
- val dir = file.getParentFile
-
- // diff is contents of logFile
- fileManager.mapFile(logFile, replaceSlashes(dir, _))
- diffCheck(file, compareOutput(dir, logFile))
- })
-
- case "run" | "jvm" =>
- runJvmTest(file)
-
- case "specialized" =>
- runSpecializedTest(file)
-
- case "instrumented" =>
- runInstrumentedTest(file)
-
- case "presentation" =>
- runJvmTest(file) // for the moment, it's exactly the same as for a run test
-
- case "ant" =>
- runAntTest(file)
-
- case "res" => {
- // simulate resident compiler loop
- val prompt = "\nnsc> "
-
- val (swr, wr) = newTestWriters()
- printInfoStart(file, wr)
-
- NestUI.verbose(this+" running test "+fileBase)
- val dir = file.getParentFile
- val outDir = createOutputDir()
- val resFile = new File(dir, fileBase + ".res")
- NestUI.verbose("outDir: "+outDir)
- NestUI.verbose("logFile: "+logFile)
- //NestUI.verbose("logFileErr: "+logFileErr)
- NestUI.verbose("resFile: "+resFile)
-
- // run compiler in resident mode
- // $SCALAC -d "$os_dstbase".obj -Xresident -sourcepath . "$@"
- val sourcedir = logFile.getParentFile.getAbsoluteFile
- val sourcepath = sourcedir.getAbsolutePath+File.separator
- NestUI.verbose("sourcepath: "+sourcepath)
-
- val argList = List(
- "-d", outDir.getAbsoluteFile.getPath,
- "-Xresident",
- "-sourcepath", sourcepath)
-
- // configure input/output files
- val logOut = new FileOutputStream(logFile)
- val logWriter = new PrintStream(logOut, true)
- val resReader = new BufferedReader(new FileReader(resFile))
- val logConsoleWriter = new PrintWriter(new OutputStreamWriter(logOut), true)
-
- // create compiler
- val settings = new Settings(workerError)
- settings.sourcepath.value = sourcepath
- settings.classpath.value = fileManager.CLASSPATH
- val reporter = new ConsoleReporter(settings, scala.Console.in, logConsoleWriter)
- val command = new CompilerCommand(argList, settings)
- object compiler extends Global(command.settings, reporter)
-
- val resCompile = (line: String) => {
- NestUI.verbose("compiling "+line)
- val cmdArgs = (line split ' ').toList map (fs => new File(dir, fs).getAbsolutePath)
- NestUI.verbose("cmdArgs: "+cmdArgs)
- val sett = new Settings(workerError)
- sett.sourcepath.value = sourcepath
- val command = new CompilerCommand(cmdArgs, sett)
- command.ok && {
- (new compiler.Run) compile command.files
- !reporter.hasErrors
- }
- }
-
- def loop(action: String => Boolean): Boolean = {
- logWriter.print(prompt)
- resReader.readLine() match {
- case null | "" => logWriter.flush() ; true
- case line => action(line) && loop(action)
- }
- }
-
- Output.withRedirected(logWriter) {
- try loop(resCompile)
- finally resReader.close()
- }
- fileManager.mapFile(logFile, replaceSlashes(dir, _))
-
- (diffCheck(file, compareOutput(dir, logFile)), LogContext(logFile, swr, wr))
- }
-
- case "shootout" =>
- val (swr, wr) = newTestWriters()
- printInfoStart(file, wr)
-
- NestUI.verbose(this+" running test "+fileBase)
- val outDir = createOutputDir()
-
- // 2. define file {outDir}/test.scala that contains code to compile/run
- val testFile = new File(outDir, "test.scala")
- NestUI.verbose("outDir: "+outDir)
- NestUI.verbose("logFile: "+logFile)
- NestUI.verbose("testFile: "+testFile)
-
- // 3. cat {test}.scala.runner {test}.scala > testFile
- val runnerFile = new File(parent, fileBase+".scala.runner")
- val bodyFile = new File(parent, fileBase+".scala")
- SFile(testFile).writeAll(
- file2String(runnerFile),
- file2String(bodyFile)
- )
-
- // 4. compile testFile
- val ok = compileMgr.attemptCompile(None, List(testFile), kind, logFile) eq CompileSuccess
- NestUI.verbose("compilation of " + testFile + (if (ok) "succeeded" else "failed"))
- val result = ok && {
- execTest(outDir, logFile) && {
- NestUI.verbose(this+" finished running "+fileBase)
- diffCheck(file, compareOutput(parent, logFile))
- }
- }
-
- (result, LogContext(logFile, swr, wr))
-
- case "scalap" =>
- runInContext(file, (logFile: File, outDir: File) => {
- val sourceDir = Directory(if (file.isFile) file.getParent else file)
- val sources = sourceDir.files filter (_ hasExtension "scala") map (_.jfile) toList
- val results = sourceDir.files filter (_.name == "result.test") map (_.jfile) toList
-
- if (sources.length != 1 || results.length != 1) {
- NestUI.warning("Misconfigured scalap test directory: " + sourceDir + " \n")
- false
- }
- else {
- val resFile = results.head
- // 2. Compile source file
-
- if (!compileMgr.attemptCompile(Some(outDir), sources, kind, logFile).isPositive) {
- NestUI.normal("compilerMgr failed to compile %s to %s".format(sources mkString ", ", outDir))
- false
- }
- else {
- // 3. Decompile file and compare results
- val isPackageObject = sourceDir.name startsWith "package"
- val className = sourceDir.name.capitalize + (if (!isPackageObject) "" else ".package")
- val url = outDir.toURI.toURL
- val loader = ScalaClassLoader.fromURLs(List(url), this.getClass.getClassLoader)
- val clazz = loader.loadClass(className)
-
- val byteCode = ByteCode.forClass(clazz)
- val result = scala.tools.scalap.Main.decompileScala(byteCode.bytes, isPackageObject)
-
- SFile(logFile) writeAll result
- diffCheck(file, compareFiles(logFile, resFile))
- }
- }
- })
-
- case "script" =>
- val (swr, wr) = newTestWriters()
- printInfoStart(file, wr)
-
- NestUI.verbose(this+" running test "+fileBase)
-
- // check whether there is an args file
- val argsFile = new File(file.getParentFile, fileBase+".args")
- NestUI.verbose("argsFile: "+argsFile)
- val argString = file2String(argsFile)
- val succeeded = try {
- val cmdString =
- if (isWin) {
- val batchFile = new File(file.getParentFile, fileBase+".bat")
- NestUI.verbose("batchFile: "+batchFile)
- batchFile.getAbsolutePath
- }
- else file.getAbsolutePath
-
- val ok = runCommand(cmdString+argString, logFile)
- ( ok && diffCheck(file, compareOutput(file.getParentFile, logFile)) )
- }
- catch { case e: Exception => NestUI.verbose("caught "+e) ; false }
-
- (succeeded, LogContext(logFile, swr, wr))
- }
-
- private def crashContext(t: Throwable): LogContext = {
- try {
- logStackTrace(logFile, t, "Possible compiler crash during test of: " + testFile + "\n")
- LogContext(logFile)
- }
- catch { case t: Throwable => LogContext(null) }
- }
-
- def run(): (Boolean, LogContext) = {
- val result = try processSingleFile(testFile) catch { case t: Throwable => (false, crashContext(t)) }
- passed = Some(result._1)
- result
- }
-
- def reportResult(writers: Option[(StringWriter, PrintWriter)]) {
- writers foreach { case (swr, wr) =>
- if (passed.isEmpty) printInfoTimeout(wr)
- else printInfoEnd(passed.get, wr)
- wr.flush()
- swr.flush()
- NestUI.normal(swr.toString)
-
- if (passed exists (x => !x)) {
- if (fileManager.showDiff || isPartestDebug)
- NestUI.normal(testDiff)
- if (fileManager.showLog)
- showLog(logFile)
- }
- }
- toDelete foreach (_.deleteRecursively())
- }
- }
-
- def runTest(testFile: File): TestState = {
- val runner = new Runner(testFile)
- // when option "--failed" is provided execute test only if log
- // is present (which means it failed before)
- if (fileManager.failed && !runner.logFile.canRead)
- return TestState.Ok
-
- val (success, ctx) = runner.run()
- val state = if (success) TestState.Ok else TestState.Fail
-
- runner.reportResult(ctx.writers)
- state
- }
-
- private def filesToSet(pre: String, fs: List[String]): Set[AbstractFile] =
- fs flatMap (s => Option(AbstractFile getFile (pre + s))) toSet
-
- private def copyTestFiles(testDir: File, destDir: File) {
- val invalidExts = List("changes", "svn", "obj")
- testDir.listFiles.toList filter (
- f => (isJavaOrScala(f) && f.isFile) ||
- (f.isDirectory && !(invalidExts.contains(SFile(f).extension)))) foreach
- { f => fileManager.copyFile(f, destDir) }
- }
-
- private def showLog(logFile: File) {
- file2String(logFile) match {
- case "" if logFile.canRead => ()
- case "" => NestUI.failure("Couldn't open log file: " + logFile + "\n")
- case s => NestUI.normal(s)
- }
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/SBTRunner.scala b/src/partest/scala/tools/partest/nest/SBTRunner.scala
index 20f9c701d5..1cf3aa858f 100644
--- a/src/partest/scala/tools/partest/nest/SBTRunner.scala
+++ b/src/partest/scala/tools/partest/nest/SBTRunner.scala
@@ -1,3 +1,6 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ */
package scala.tools.partest
package nest
@@ -21,7 +24,7 @@ object SBTRunner extends DirectRunner {
val testRootDir: Directory = Directory(testRootPath)
}
- def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String):java.util.Map[String, TestState] = {
+ def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String): java.util.List[TestState] = {
def failedOnlyIfRequired(files:List[File]):List[File]={
if (fileManager.failed) files filter (x => fileManager.logFileExists(x, kind)) else files
}
@@ -33,7 +36,7 @@ object SBTRunner extends DirectRunner {
scalacOptions: Seq[String] = Seq(),
justFailedTests: Boolean = false)
- def mainReflect(args: Array[String]): java.util.Map[String, String] = {
+ def mainReflect(args: Array[String]): java.util.List[TestState] = {
setProp("partest.debug", "true")
val Argument = new scala.util.matching.Regex("-(.*)")
@@ -46,7 +49,7 @@ object SBTRunner extends DirectRunner {
case x => sys.error("Unknown command line options: " + x)
}
val config = parseArgs(args, CommandLineOptions())
- fileManager.SCALAC_OPTS ++= config.scalacOptions
+ fileManager.SCALAC_OPTS = config.scalacOptions
fileManager.CLASSPATH = config.classpath getOrElse sys.error("No classpath set")
def findClasspath(jar: String, name: String): Option[String] = {
@@ -67,22 +70,14 @@ object SBTRunner extends DirectRunner {
// TODO - Make this a flag?
//fileManager.updateCheck = true
// Now run and report...
- val runs = config.tests.filterNot(_._2.isEmpty)
- (for {
- (testType, files) <- runs
- (path, result) <- reflectiveRunTestsForFiles(files,testType).asScala
- } yield (path, fixResult(result))).seq.asJava
- }
- def fixResult(result: TestState): String = result match {
- case TestState.Ok => "OK"
- case TestState.Fail => "FAIL"
- case TestState.Timeout => "TIMEOUT"
+ val runs = config.tests.filterNot(_._2.isEmpty)
+ val result = runs.toList flatMap { case (kind, files) => reflectiveRunTestsForFiles(files, kind).asScala }
+
+ result.asJava
}
+
def main(args: Array[String]): Unit = {
- val failures = (
- for ((path, result) <- mainReflect(args).asScala ; if result != TestState.Ok) yield
- path + ( if (result == TestState.Fail) " [FAILED]" else " [TIMEOUT]" )
- )
+ val failures = mainReflect(args).asScala collect { case s if !s.isOk => s.longStatus }
// Re-list all failures so we can go figure out what went wrong.
failures foreach System.err.println
if(!failures.isEmpty) sys.exit(1)
diff --git a/src/partest/scala/tools/partest/nest/StreamCapture.scala b/src/partest/scala/tools/partest/nest/StreamCapture.scala
new file mode 100644
index 0000000000..dc155b1787
--- /dev/null
+++ b/src/partest/scala/tools/partest/nest/StreamCapture.scala
@@ -0,0 +1,53 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+package scala.tools.partest
+package nest
+
+import java.io.{ Console => _, _ }
+
+object StreamCapture {
+ case class Captured[T](stdout: String, stderr: String, result: T) {
+ override def toString = s"""
+ |result: $result
+ |[stdout]
+ |$stdout
+ |[stderr]
+ |$stderr""".stripMargin.trim
+ }
+
+ private def mkStream = {
+ val swr = new StringWriter
+ val wr = new PrintWriter(swr, true)
+ val ostream = new PrintStream(new OutputStream { def write(b: Int): Unit = wr write b }, true) // autoFlush = true
+
+ (ostream, () => { ostream.close() ; swr.toString })
+ }
+
+ def savingSystem[T](body: => T): T = {
+ val savedOut = System.out
+ val savedErr = System.err
+ try body
+ finally {
+ System setErr savedErr
+ System setOut savedOut
+ }
+ }
+
+ def apply[T](body: => T): Captured[T] = {
+ val (outstream, stdoutFn) = mkStream
+ val (errstream, stderrFn) = mkStream
+
+ val result = savingSystem {
+ System setOut outstream
+ System setErr errstream
+ Console.withOut(outstream) {
+ Console.withErr(errstream) {
+ body
+ }
+ }
+ }
+ Captured(stdoutFn(), stderrFn(), result)
+ }
+}
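// A hedged usage sketch for the new StreamCapture helper: run a block, capturing everything it
// prints on stdout and stderr together with its result. Assumes the partest classes are on the
// classpath; the body shown is illustrative.
import scala.tools.partest.nest.StreamCapture

object CaptureDemo {
  def main(args: Array[String]): Unit = {
    val captured = StreamCapture {
      println("progress message")          // goes to the captured stdout
      System.err.println("a warning")      // goes to the captured stderr
      21 * 2                               // becomes `result`
    }
    println(captured.result)               // 42
    println(captured.stdout.trim)          // progress message
    println(captured.stderr.trim)          // a warning
  }
}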
diff --git a/src/partest/scala/tools/partest/nest/TestFile.scala b/src/partest/scala/tools/partest/nest/TestFile.scala
deleted file mode 100644
index 880c6e431b..0000000000
--- a/src/partest/scala/tools/partest/nest/TestFile.scala
+++ /dev/null
@@ -1,80 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io.{ File => JFile }
-import scala.tools.nsc.Settings
-import scala.tools.nsc.util.ClassPath
-import scala.tools.nsc.io._
-import scala.util.Properties.{ propIsSet, propOrElse, setProp }
-
-trait TestFileCommon {
- def file: JFile
- def kind: String
-
- val dir = file.toAbsolute.parent
- val fileBase = file.stripExtension
- val flags = dir / (fileBase + ".flags") ifFile (f => f.slurp().trim)
-
- lazy val objectDir = dir / (fileBase + "-" + kind + ".obj") createDirectory true
- def setOutDirTo = objectDir
-}
-
-abstract class TestFile(val kind: String) extends TestFileCommon {
- def file: JFile
- def fileManager: FileManager
-
- def defineSettings(settings: Settings, setOutDir: Boolean) = {
- settings.classpath append dir.path
- if (setOutDir)
- settings.outputDirs setSingleOutput setOutDirTo.path
-
- // adding codelib.jar to the classpath
- // codelib provides the possibility to override standard reify
- // this shields the massive amount of reification tests from changes in the API
- settings.classpath prepend PathSettings.srcCodeLib.toString
- if (propIsSet("java.class.path")) setProp("java.class.path", PathSettings.srcCodeLib.toString + ";" + propOrElse("java.class.path", ""))
-
- // have to catch bad flags somewhere
- (flags forall (f => settings.processArgumentString(f)._1)) && {
- settings.classpath append fileManager.CLASSPATH
- true
- }
- }
-
- override def toString(): String = "%s %s".format(kind, file)
-}
-
-case class PosTestFile(file: JFile, fileManager: FileManager) extends TestFile("pos")
-case class NegTestFile(file: JFile, fileManager: FileManager) extends TestFile("neg")
-case class RunTestFile(file: JFile, fileManager: FileManager) extends TestFile("run")
-case class ScalaCheckTestFile(file: JFile, fileManager: FileManager) extends TestFile("scalacheck")
-case class JvmTestFile(file: JFile, fileManager: FileManager) extends TestFile("jvm")
-case class ShootoutTestFile(file: JFile, fileManager: FileManager) extends TestFile("shootout") {
- override def setOutDirTo = file.parent
-}
-case class ScalapTestFile(file: JFile, fileManager: FileManager) extends TestFile("scalap") {
- override def setOutDirTo = file.parent
-}
-case class SpecializedTestFile(file: JFile, fileManager: FileManager) extends TestFile("specialized") {
- override def defineSettings(settings: Settings, setOutDir: Boolean): Boolean = {
- super.defineSettings(settings, setOutDir) && {
- // add the instrumented library version to classpath
- settings.classpath prepend PathSettings.srcSpecLib.toString
- // @partest maintainer: if we use a custom Scala build (specified via --classpath)
- // then the classes provided by it will come earlier than instrumented.jar in the resulting classpath
- // this entire classpath business needs a thorough solution
- if (propIsSet("java.class.path")) setProp("java.class.path", PathSettings.srcSpecLib.toString + ";" + propOrElse("java.class.path", ""))
- true
- }
- }
-}
-case class PresentationTestFile(file: JFile, fileManager: FileManager) extends TestFile("presentation")
-case class AntTestFile(file: JFile, fileManager: FileManager) extends TestFile("ant")
-case class InstrumentedTestFile(file: JFile, fileManager: FileManager) extends TestFile("instrumented")
diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala
index 2b2ce2e435..5a1afeb77f 100644
--- a/src/partest/scala/tools/partest/package.scala
+++ b/src/partest/scala/tools/partest/package.scala
@@ -4,31 +4,124 @@
package scala.tools
-import java.io.{ FileNotFoundException, File => JFile }
-import nsc.io.{ Path, Directory, File => SFile }
-import scala.tools.util.PathResolver
-import nsc.Properties.{ propOrElse, propOrNone, propOrEmpty }
+import java.util.concurrent.{ Callable, ExecutorService }
+import scala.concurrent.duration.Duration
import scala.sys.process.javaVmArguments
-import java.util.concurrent.Callable
-
-package partest {
- class TestState { }
- object TestState {
- val Ok = new TestState
- val Fail = new TestState
- val Timeout = new TestState
- }
-}
+import scala.tools.partest.nest.NestUI
+import scala.tools.nsc.util.{ ScalaClassLoader, Exceptional }
package object partest {
- import nest.NestUI
+ type File = java.io.File
+ type SFile = scala.reflect.io.File
+ type Directory = scala.reflect.io.Directory
+ type Path = scala.reflect.io.Path
+ type PathResolver = scala.tools.util.PathResolver
+ type ClassPath[T] = scala.tools.nsc.util.ClassPath[T]
+ type StringWriter = java.io.StringWriter
+
+ val SFile = scala.reflect.io.File
+ val Directory = scala.reflect.io.Directory
+ val Path = scala.reflect.io.Path
+ val PathResolver = scala.tools.util.PathResolver
+ val ClassPath = scala.tools.nsc.util.ClassPath
+
+ val space = "\u0020"
+ val EOL = scala.compat.Platform.EOL
+ def onull(s: String) = if (s == null) "" else s
+ def oempty(xs: String*) = xs filterNot (x => x == null || x == "")
+ def ojoin(xs: String*): String = oempty(xs: _*) mkString space
+ def nljoin(xs: String*): String = oempty(xs: _*) mkString EOL
+
+ implicit val codec = scala.io.Codec.UTF8
+
+ def setUncaughtHandler() = {
+ Thread.setDefaultUncaughtExceptionHandler(
+ new Thread.UncaughtExceptionHandler {
+ def uncaughtException(thread: Thread, t: Throwable) {
+ val t1 = Exceptional unwrap t
+ System.err.println(s"Uncaught exception on thread $thread: $t1")
+ t1.printStackTrace()
+ }
+ }
+ )
+ }
+
+ /** Sources have a numerical group, specified by name_7 and so on. */
+ private val GroupPattern = """.*_(\d+)""".r
+
+ implicit class FileOps(val f: File) {
+ private def sf = SFile(f)
+
+ def testIdent = {
+ f.toString split """[/\\]+""" takeRight 2 mkString "/" // e.g. pos/t1234
+ }
+
+ def mapInPlace(mapFn: String => String)(filterFn: String => Boolean = _ => true): Unit =
+ writeAll(fileLines filter filterFn map (x => mapFn(x) + EOL): _*)
+
+ def appendAll(strings: String*): Unit = sf.appendAll(strings: _*)
+ def writeAll(strings: String*): Unit = sf.writeAll(strings: _*)
+ def absolutePathSegments: List[String] = f.getAbsolutePath split """[/\\]+""" toList
+
+ def isJava = f.isFile && (sf hasExtension "java")
+ def isScala = f.isFile && (sf hasExtension "scala")
+ def isJavaOrScala = isJava || isScala
- implicit private[partest] def temporaryPath2File(x: Path): JFile = x.jfile
- implicit private[partest] def temporaryFile2Path(x: JFile): Path = Path(x)
+ def extension = sf.extension
+ def hasExtension(ext: String) = sf hasExtension ext
+ def changeExtension(ext: String): File = (sf changeExtension ext).jfile
+
+ /** The group number for this source file, or -1 for no group. */
+ def group: Int =
+ sf.stripExtension match {
+ case GroupPattern(g) if g.toInt >= 0 => g.toInt
+ case _ => -1
+ }
+
+ def fileContents: String = try sf.slurp() catch { case _: java.io.FileNotFoundException => "" }
+ def fileLines: List[String] = augmentString(fileContents).lines.toList
+ }
+
+ implicit class PathOps(p: Path) extends FileOps(p.jfile) { }
+
+ implicit class Copier(val f: SFile) extends AnyVal {
+ def copyTo(dest: Path): Unit = dest.toFile writeAll f.slurp(scala.io.Codec.UTF8)
+ }
+
+ implicit class LoaderOps(val loader: ClassLoader) extends AnyVal {
+ import scala.util.control.Exception.catching
+ /** Like ScalaClassLoader.create for the case where the result type is
+ * available to the current class loader, implying that the current
+ * loader is a parent of `loader`.
+ */
+ def instantiate[A >: Null](name: String): A = (
+ catching(classOf[ClassNotFoundException], classOf[SecurityException]) opt
+ (loader loadClass name).newInstance.asInstanceOf[A] orNull
+ )
+ }
+
+ implicit class ExecutorOps(val executor: ExecutorService) {
+ def awaitTermination[A](wait: Duration)(failing: => A = ()): Option[A] = (
+ if (executor awaitTermination (wait.length, wait.unit)) None
+ else Some(failing)
+ )
+ }
+
+ implicit def temporaryPath2File(x: Path): File = x.jfile
+ implicit def stringPathToJavaFile(path: String): File = new File(path)
implicit lazy val postfixOps = scala.language.postfixOps
implicit lazy val implicitConversions = scala.language.implicitConversions
+ def fileSeparator = java.io.File.separator
+ def pathSeparator = java.io.File.pathSeparator
+
+ def pathToTestIdent(path: Path) = path.jfile.testIdent
+
+ def canonicalizeSlashes(line: String) = line.replaceAll("""[/\\]+""", "/")
+
+ def words(s: String): List[String] = (s.trim split "\\s+").toList
+
def timed[T](body: => T): (T, Long) = {
val t1 = System.currentTimeMillis
val result = body
@@ -39,15 +132,40 @@ package object partest {
def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body }
- def file2String(f: JFile) =
- try SFile(f).slurp(scala.io.Codec.UTF8)
- catch { case _: FileNotFoundException => "" }
+ def file2String(f: File): String = f.fileContents
def basename(name: String): String = Path(name).stripExtension
- def resultsToStatistics(results: Iterable[(_, TestState)]): (Int, Int) = {
- val (files, failures) = results map (_._2 == TestState.Ok) partition (_ == true)
- (files.size, failures.size)
+ /** In order to allow for spaces in flags/options, this
+ * parses .flags, .javaopts, javacopts etc files as follows:
+ * If it is exactly one line, it is split (naively) on spaces.
+ * If it contains more than one line, each line is its own
+ * token, spaces and all.
+ */
+ def readOptionsFile(file: File): List[String] = {
+ file.fileLines match {
+ case x :: Nil => words(x)
+ case xs => xs map (_.trim)
+ }
+ }
+
+ def findProgram(name: String): Option[File] = {
+ val pathDirs = sys.env("PATH") match {
+ case null => List("/usr/local/bin", "/usr/bin", "/bin")
+ case path => path split "[:;]" filterNot (_ == "") toList
+ }
+ pathDirs.iterator map (d => new File(d, name)) find (_.canExecute)
+ }
+
+ def now = (new java.util.Date).toString
+ def elapsedString(millis: Long): String = {
+ val elapsedSecs = millis/1000
+ val elapsedMins = elapsedSecs/60
+ val elapsedHrs = elapsedMins/60
+ val dispMins = elapsedMins - elapsedHrs * 60
+ val dispSecs = elapsedSecs - elapsedMins * 60
+
+ "%02d:%02d:%02d".format(elapsedHrs, dispMins, dispSecs)
}
def vmArgString = javaVmArguments.mkString(
@@ -62,13 +180,10 @@ package object partest {
}
def showAllJVMInfo() {
- NestUI.verbose(vmArgString)
- NestUI.verbose(allPropertiesString)
+ vlog(vmArgString)
+ vlog(allPropertiesString)
}
- def isPartestDebug: Boolean =
- propOrEmpty("partest.debug") == "true"
-
import scala.language.experimental.macros
/**
@@ -117,4 +232,10 @@ package object partest {
a.tree))))),
a.tree))
}
+
+ def isPartestTerse = NestUI.isTerse
+ def isPartestDebug = NestUI.isDebug
+ def isPartestVerbose = NestUI.isVerbose
+
+ def vlog(msg: => String) = if (isPartestVerbose) System.err.println(msg)
}
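// A small worked example of the options-file convention documented above for readOptionsFile:
// a one-line file is split naively on whitespace, while a multi-line file yields one token per
// line, so options containing spaces survive. This restates the rule standalone; the sample
// option strings are illustrative.
object OptionsFileDemo {
  def parse(lines: List[String]): List[String] = lines match {
    case x :: Nil => x.trim.split("\\s+").filter(_.nonEmpty).toList
    case xs       => xs map (_.trim)
  }

  def main(args: Array[String]): Unit = {
    println(parse(List("-deprecation -Xlint")))             // List(-deprecation, -Xlint)
    println(parse(List("-Dname=some value", "-Xmx512m")))   // List(-Dname=some value, -Xmx512m)
  }
}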
diff --git a/src/partest/scala/tools/partest/utils/Properties.scala b/src/partest/scala/tools/partest/utils/Properties.scala
index 1263c96e9e..b9394b50c9 100644
--- a/src/partest/scala/tools/partest/utils/Properties.scala
+++ b/src/partest/scala/tools/partest/utils/Properties.scala
@@ -14,4 +14,5 @@ package utils
object Properties extends scala.util.PropertiesTrait {
protected def propCategory = "partest"
protected def pickJarBasedOn = classOf[nest.RunnerManager]
+ override def isAvian = super.isAvian
}
diff --git a/src/reflect/scala/reflect/api/Annotations.scala b/src/reflect/scala/reflect/api/Annotations.scala
index 09eaf7afb4..e19e0cefad 100644
--- a/src/reflect/scala/reflect/api/Annotations.scala
+++ b/src/reflect/scala/reflect/api/Annotations.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
import scala.collection.immutable.ListMap
@@ -45,12 +46,6 @@ trait Annotations { self: Universe =>
*/
type Annotation >: Null <: AnyRef with AnnotationApi
- /** A tag that preserves the identity of the `Annotation` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val AnnotationTag: ClassTag[Annotation]
-
/** The constructor/extractor for `Annotation` instances.
* @group Extractors
*/
@@ -90,11 +85,6 @@ trait Annotations { self: Universe =>
*/
type JavaArgument >: Null <: AnyRef
- /** A tag that preserves the identity of the `JavaArgument` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val JavaArgumentTag: ClassTag[JavaArgument]
/** A literal argument to a Java annotation as `"Use X instead"` in `@Deprecated("Use X instead")`
* @template
@@ -102,12 +92,6 @@ trait Annotations { self: Universe =>
*/
type LiteralArgument >: Null <: AnyRef with JavaArgument with LiteralArgumentApi
- /** A tag that preserves the identity of the `LiteralArgument` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val LiteralArgumentTag: ClassTag[LiteralArgument]
-
/** The constructor/extractor for `LiteralArgument` instances.
* @group Extractors
*/
@@ -137,12 +121,6 @@ trait Annotations { self: Universe =>
*/
type ArrayArgument >: Null <: AnyRef with JavaArgument with ArrayArgumentApi
- /** A tag that preserves the identity of the `ArrayArgument` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ArrayArgumentTag: ClassTag[ArrayArgument]
-
/** The constructor/extractor for `ArrayArgument` instances.
* @group Extractors
*/
@@ -172,12 +150,6 @@ trait Annotations { self: Universe =>
*/
type NestedArgument >: Null <: AnyRef with JavaArgument with NestedArgumentApi
- /** A tag that preserves the identity of the `NestedArgument` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val NestedArgumentTag: ClassTag[NestedArgument]
-
/** The constructor/extractor for `NestedArgument` instances.
* @group Extractors
*/
@@ -200,4 +172,4 @@ trait Annotations { self: Universe =>
/** The underlying nested annotation. */
def annotation: Annotation
}
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/api/BuildUtils.scala b/src/reflect/scala/reflect/api/BuildUtils.scala
index 8f256aa1f5..2e95a01176 100644
--- a/src/reflect/scala/reflect/api/BuildUtils.scala
+++ b/src/reflect/scala/reflect/api/BuildUtils.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
diff --git a/src/reflect/scala/reflect/api/Constants.scala b/src/reflect/scala/reflect/api/Constants.scala
index f3d75c3c00..c654961f4a 100644
--- a/src/reflect/scala/reflect/api/Constants.scala
+++ b/src/reflect/scala/reflect/api/Constants.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package api
/**
@@ -183,12 +184,6 @@ trait Constants {
*/
type Constant >: Null <: AnyRef with ConstantApi
- /** A tag that preserves the identity of the `Constant` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ConstantTag: ClassTag[Constant]
-
/** The constructor/extractor for `Constant` instances.
* @group Extractors
*/
diff --git a/src/reflect/scala/reflect/api/Exprs.scala b/src/reflect/scala/reflect/api/Exprs.scala
index 2ba18a8207..009d9dbfdb 100644
--- a/src/reflect/scala/reflect/api/Exprs.scala
+++ b/src/reflect/scala/reflect/api/Exprs.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package api
import scala.reflect.runtime.{universe => ru}
@@ -174,4 +175,4 @@ private[scala] class SerializedExpr(var treec: TreeCreator, var tag: ru.WeakType
import ru._
Expr(rootMirror, treec)(tag)
}
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/api/FlagSets.scala b/src/reflect/scala/reflect/api/FlagSets.scala
index 4357aec9c9..8a1d2f7f1d 100644
--- a/src/reflect/scala/reflect/api/FlagSets.scala
+++ b/src/reflect/scala/reflect/api/FlagSets.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
import scala.language.implicitConversions
@@ -61,12 +62,6 @@ trait FlagSets { self: Universe =>
*/
type FlagSet
- /** A tag that preserves the identity of the `FlagSet` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val FlagSetTag: ClassTag[FlagSet]
-
/** The API of `FlagSet` instances.
* The main source of information about flag sets is the [[scala.reflect.api.FlagSets]] page.
* @group Flags
diff --git a/src/reflect/scala/reflect/api/ImplicitTags.scala b/src/reflect/scala/reflect/api/ImplicitTags.scala
index 3f377d6cff..1b654a4a8d 100644
--- a/src/reflect/scala/reflect/api/ImplicitTags.scala
+++ b/src/reflect/scala/reflect/api/ImplicitTags.scala
@@ -1,108 +1,117 @@
-package scala.reflect
+package scala
+package reflect
package api
+/** Tags which preserve the identity of abstract types in the face of erasure.
+ * Can be used for pattern matching, instance tests, serialization and the like.
+ * @group Tags
+ */
trait ImplicitTags {
- self: Types =>
+ self: Universe =>
- /** A tag that preserves the identity of the `Type` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypeTagg: ClassTag[Type]
-
- /** A tag that preserves the identity of the `SingletonType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val SingletonTypeTag: ClassTag[SingletonType]
-
- /** A tag that preserves the identity of the `ThisType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ThisTypeTag: ClassTag[ThisType]
-
- /** A tag that preserves the identity of the `SingleType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val SingleTypeTag: ClassTag[SingleType]
-
- /** A tag that preserves the identity of the `SuperType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val SuperTypeTag: ClassTag[SuperType]
-
- /** A tag that preserves the identity of the `ConstantType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ConstantTypeTag: ClassTag[ConstantType]
-
- /** A tag that preserves the identity of the `TypeRef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypeRefTag: ClassTag[TypeRef]
-
- /** A tag that preserves the identity of the `CompoundType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val CompoundTypeTag: ClassTag[CompoundType]
-
- /** A tag that preserves the identity of the `RefinedType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val RefinedTypeTag: ClassTag[RefinedType]
-
- /** A tag that preserves the identity of the `ClassInfoType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
+ // Tags for Types.
+ implicit val AnnotatedTypeTag: ClassTag[AnnotatedType]
+ implicit val BoundedWildcardTypeTag: ClassTag[BoundedWildcardType]
implicit val ClassInfoTypeTag: ClassTag[ClassInfoType]
-
- /** A tag that preserves the identity of the `MethodType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
+ implicit val CompoundTypeTag: ClassTag[CompoundType]
+ implicit val ConstantTypeTag: ClassTag[ConstantType]
+ implicit val ExistentialTypeTag: ClassTag[ExistentialType]
implicit val MethodTypeTag: ClassTag[MethodType]
-
- /** A tag that preserves the identity of the `NullaryMethodType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
implicit val NullaryMethodTypeTag: ClassTag[NullaryMethodType]
-
- /** A tag that preserves the identity of the `PolyType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
implicit val PolyTypeTag: ClassTag[PolyType]
-
- /** A tag that preserves the identity of the `ExistentialType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ExistentialTypeTag: ClassTag[ExistentialType]
-
- /** A tag that preserves the identity of the `AnnotatedType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val AnnotatedTypeTag: ClassTag[AnnotatedType]
-
- /** A tag that preserves the identity of the `TypeBounds` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
+ implicit val RefinedTypeTag: ClassTag[RefinedType]
+ implicit val SingleTypeTag: ClassTag[SingleType]
+ implicit val SingletonTypeTag: ClassTag[SingletonType]
+ implicit val SuperTypeTag: ClassTag[SuperType]
+ implicit val ThisTypeTag: ClassTag[ThisType]
implicit val TypeBoundsTag: ClassTag[TypeBounds]
+ implicit val TypeRefTag: ClassTag[TypeRef]
+ implicit val TypeTagg: ClassTag[Type]
- /** A tag that preserves the identity of the `BoundedWildcardType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val BoundedWildcardTypeTag: ClassTag[BoundedWildcardType]
+ // Tags for Names.
+ implicit val NameTag: ClassTag[Name]
+ implicit val TermNameTag: ClassTag[TermName]
+ implicit val TypeNameTag: ClassTag[TypeName]
+
+ // Tags for Scopes.
+ implicit val ScopeTag: ClassTag[Scope]
+ implicit val MemberScopeTag: ClassTag[MemberScope]
+
+ // Tags for Annotations.
+ implicit val AnnotationTag: ClassTag[Annotation]
+ implicit val JavaArgumentTag: ClassTag[JavaArgument]
+ implicit val LiteralArgumentTag: ClassTag[LiteralArgument]
+ implicit val ArrayArgumentTag: ClassTag[ArrayArgument]
+ implicit val NestedArgumentTag: ClassTag[NestedArgument]
+
+ // Tags for Symbols.
+ implicit val TermSymbolTag: ClassTag[TermSymbol]
+ implicit val MethodSymbolTag: ClassTag[MethodSymbol]
+ implicit val SymbolTag: ClassTag[Symbol]
+ implicit val TypeSymbolTag: ClassTag[TypeSymbol]
+ implicit val ModuleSymbolTag: ClassTag[ModuleSymbol]
+ implicit val ClassSymbolTag: ClassTag[ClassSymbol]
+ implicit val FreeTermSymbolTag: ClassTag[FreeTermSymbol]
+ implicit val FreeTypeSymbolTag: ClassTag[FreeTypeSymbol]
+
+ // Tags for misc Tree relatives.
+ implicit val PositionTag: ClassTag[Position]
+ implicit val ConstantTag: ClassTag[Constant]
+ implicit val FlagSetTag: ClassTag[FlagSet]
+ implicit val ModifiersTag: ClassTag[Modifiers]
+
+ // Tags for Trees. WTF.
+ implicit val AlternativeTag: ClassTag[Alternative]
+ implicit val AnnotatedTag: ClassTag[Annotated]
+ implicit val AppliedTypeTreeTag: ClassTag[AppliedTypeTree]
+ implicit val ApplyTag: ClassTag[Apply]
+ implicit val AssignOrNamedArgTag: ClassTag[AssignOrNamedArg]
+ implicit val AssignTag: ClassTag[Assign]
+ implicit val BindTag: ClassTag[Bind]
+ implicit val BlockTag: ClassTag[Block]
+ implicit val CaseDefTag: ClassTag[CaseDef]
+ implicit val ClassDefTag: ClassTag[ClassDef]
+ implicit val CompoundTypeTreeTag: ClassTag[CompoundTypeTree]
+ implicit val DefDefTag: ClassTag[DefDef]
+ implicit val DefTreeTag: ClassTag[DefTree]
+ implicit val ExistentialTypeTreeTag: ClassTag[ExistentialTypeTree]
+ implicit val FunctionTag: ClassTag[Function]
+ implicit val GenericApplyTag: ClassTag[GenericApply]
+ implicit val IdentTag: ClassTag[Ident]
+ implicit val IfTag: ClassTag[If]
+ implicit val ImplDefTag: ClassTag[ImplDef]
+ implicit val ImportSelectorTag: ClassTag[ImportSelector]
+ implicit val ImportTag: ClassTag[Import]
+ implicit val LabelDefTag: ClassTag[LabelDef]
+ implicit val LiteralTag: ClassTag[Literal]
+ implicit val MatchTag: ClassTag[Match]
+ implicit val MemberDefTag: ClassTag[MemberDef]
+ implicit val ModuleDefTag: ClassTag[ModuleDef]
+ implicit val NameTreeTag: ClassTag[NameTree]
+ implicit val NewTag: ClassTag[New]
+ implicit val PackageDefTag: ClassTag[PackageDef]
+ implicit val RefTreeTag: ClassTag[RefTree]
+ implicit val ReferenceToBoxedTag: ClassTag[ReferenceToBoxed]
+ implicit val ReturnTag: ClassTag[Return]
+ implicit val SelectFromTypeTreeTag: ClassTag[SelectFromTypeTree]
+ implicit val SelectTag: ClassTag[Select]
+ implicit val SingletonTypeTreeTag: ClassTag[SingletonTypeTree]
+ implicit val StarTag: ClassTag[Star]
+ implicit val SuperTag: ClassTag[Super]
+ implicit val SymTreeTag: ClassTag[SymTree]
+ implicit val TemplateTag: ClassTag[Template]
+ implicit val TermTreeTag: ClassTag[TermTree]
+ implicit val ThisTag: ClassTag[This]
+ implicit val ThrowTag: ClassTag[Throw]
+ implicit val TreeTag: ClassTag[Tree]
+ implicit val TryTag: ClassTag[Try]
+ implicit val TypTreeTag: ClassTag[TypTree]
+ implicit val TypeApplyTag: ClassTag[TypeApply]
+ implicit val TypeBoundsTreeTag: ClassTag[TypeBoundsTree]
+ implicit val TypeDefTag: ClassTag[TypeDef]
+ implicit val TypeTreeTag: ClassTag[TypeTree]
+ implicit val TypedTag: ClassTag[Typed]
+ implicit val UnApplyTag: ClassTag[UnApply]
+ implicit val ValDefTag: ClassTag[ValDef]
+ implicit val ValOrDefDefTag: ClassTag[ValOrDefDef]
}
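This rewrite is the destination of all the deletions in the surrounding files: every implicit ClassTag now lives in one ImplicitTags trait under a single trait-level doc comment, and the self type widens from Types to Universe so that tags for names, scopes, symbols, trees and friends can sit together. A toy cake with the same shape (illustrative names, not the real API):

    import scala.reflect.ClassTag

    trait MiniTags { self: MiniUniverse =>
      // One flat block of tag declarations, as in the rewritten ImplicitTags.scala.
      implicit val TreeTag: ClassTag[Tree]
      implicit val NameTag: ClassTag[Name]
    }

    trait MiniUniverse extends MiniTags {
      type Tree
      type Name
    }

    object MiniImpl extends MiniUniverse {
      final case class TreeImpl(children: List[TreeImpl])
      type Tree = TreeImpl
      type Name = String

      // The implementation pins the abstract types and materializes every tag in one place.
      implicit val TreeTag: ClassTag[Tree] = ClassTag(classOf[TreeImpl])
      implicit val NameTag: ClassTag[Name] = ClassTag(classOf[String])
    }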
diff --git a/src/reflect/scala/reflect/api/Importers.scala b/src/reflect/scala/reflect/api/Importers.scala
index afc4f2f25d..e6f314b712 100644
--- a/src/reflect/scala/reflect/api/Importers.scala
+++ b/src/reflect/scala/reflect/api/Importers.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
diff --git a/src/reflect/scala/reflect/api/JavaMirrors.scala b/src/reflect/scala/reflect/api/JavaMirrors.scala
index b678033e1a..23abc23eb9 100644
--- a/src/reflect/scala/reflect/api/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/api/JavaMirrors.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
diff --git a/src/reflect/scala/reflect/api/JavaUniverse.scala b/src/reflect/scala/reflect/api/JavaUniverse.scala
index 04d091ee9d..6fc76c9693 100644
--- a/src/reflect/scala/reflect/api/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/api/JavaUniverse.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
diff --git a/src/reflect/scala/reflect/api/Mirror.scala b/src/reflect/scala/reflect/api/Mirror.scala
index 1223326d7c..e0219c9074 100644
--- a/src/reflect/scala/reflect/api/Mirror.scala
+++ b/src/reflect/scala/reflect/api/Mirror.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
@@ -49,16 +50,16 @@ abstract class Mirror[U <: Universe with Singleton] {
* If you need a symbol that corresponds to the type alias itself, load it directly from the package class:
*
* scala> cm.staticClass("scala.List")
- * res0: reflect.runtime.universe.ClassSymbol = class List
+ * res0: scala.reflect.runtime.universe.ClassSymbol = class List
*
* scala> res0.fullName
* res1: String = scala.collection.immutable.List
*
* scala> cm.staticPackage("scala")
- * res2: reflect.runtime.universe.ModuleSymbol = package scala
+ * res2: scala.reflect.runtime.universe.ModuleSymbol = package scala
*
* scala> res2.moduleClass.typeSignature member newTypeName("List")
- * res3: reflect.runtime.universe.Symbol = type List
+ * res3: scala.reflect.runtime.universe.Symbol = type List
*
* scala> res3.fullName
* res4: String = scala.List
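The corrected REPL transcript above can be reproduced as a small program; a sketch assuming Scala 2.10-era runtime reflection (newTypeName was later superseded by TypeName):

    import scala.reflect.runtime.{currentMirror => cm}
    import scala.reflect.runtime.universe._

    object StaticClassDemo extends App {
      // staticClass dealiases: scala.List is an alias for the immutable List class.
      println(cm.staticClass("scala.List").fullName)         // scala.collection.immutable.List

      // To reach the alias itself, look it up as a member of the scala package class.
      val scalaPkg  = cm.staticPackage("scala")
      val listAlias = scalaPkg.moduleClass.typeSignature member newTypeName("List")
      println(listAlias.fullName)                             // scala.List
    }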
diff --git a/src/reflect/scala/reflect/api/Mirrors.scala b/src/reflect/scala/reflect/api/Mirrors.scala
index d30563c706..d702555ba6 100644
--- a/src/reflect/scala/reflect/api/Mirrors.scala
+++ b/src/reflect/scala/reflect/api/Mirrors.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
@@ -133,9 +134,7 @@ package api
* scala> fmX.get
* res0: Any = 2
*
- * scala> fmX.set(3)
- * scala.ScalaReflectionException: cannot set an immutable field x
- * ...
+ * scala> fmX.set(3) // NOTE: can set an underlying value of an immutable field!
*
* scala> val fieldY = typeOf[C].declaration(newTermName("y")).asTerm.accessed.asTerm
* fieldY: reflect.runtime.universe.TermSymbol = variable y
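The replaced lines document a behavioural change: FieldMirror.set no longer refuses to write to a val's backing field. A sketch of what the surrounding example now does end to end (class C with val x is the class from the documentation; 2.10-era API):

    import scala.reflect.runtime.{currentMirror => cm}
    import scala.reflect.runtime.universe._

    class C { val x = 2; var y = 3 }

    object FieldMirrorDemo extends App {
      val c      = new C
      val im     = cm.reflect(c)
      val fieldX = typeOf[C].declaration(newTermName("x")).asTerm.accessed.asTerm
      val fmX    = im.reflectField(fieldX)
      println(fmX.get)  // 2
      fmX.set(3)        // previously threw ScalaReflectionException; now writes the backing field
      println(fmX.get)  // 3
    }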
diff --git a/src/reflect/scala/reflect/api/Names.scala b/src/reflect/scala/reflect/api/Names.scala
index 6290b88d33..f74e0ce014 100644
--- a/src/reflect/scala/reflect/api/Names.scala
+++ b/src/reflect/scala/reflect/api/Names.scala
@@ -1,6 +1,9 @@
-package scala.reflect
+package scala
+package reflect
package api
+import scala.language.implicitConversions
+
/**
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
*
@@ -43,34 +46,16 @@ trait Names {
*/
type Name >: Null <: NameApi
- /** A tag that preserves the identity of the `Name` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val NameTag: ClassTag[Name]
-
/** The abstract type of names representing terms.
* @group Names
*/
type TypeName >: Null <: Name
- /** A tag that preserves the identity of the `TypeName` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypeNameTag: ClassTag[TypeName]
-
/** The abstract type of names representing types.
* @group Names
*/
type TermName >: Null <: Name
- /** A tag that preserves the identity of the `TermName` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TermNameTag: ClassTag[TermName]
-
/** The API of Name instances.
* @group API
*/
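Besides dropping the three name tags, the hunk adds an import of scala.language.implicitConversions for the String-to-name conversions declared elsewhere in this trait. A short sketch of the two name namespaces (2.10-era factories; the TermName/TypeName constructors arrived later):

    import scala.reflect.runtime.universe._

    object NamesDemo extends App {
      val term: TermName = newTermName("List")  // names the value/object List
      val tpe:  TypeName = newTypeName("List")  // names the type List

      def kind(n: Name): String = if (n.isTermName) "term" else "type"
      println(kind(term))     // term
      println(kind(tpe))      // type
      println(tpe.toTermName) // List — conversion between the namespaces is explicit
    }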
diff --git a/src/reflect/scala/reflect/api/Position.scala b/src/reflect/scala/reflect/api/Position.scala
index 63c67627a3..4d5f1f6e67 100644
--- a/src/reflect/scala/reflect/api/Position.scala
+++ b/src/reflect/scala/reflect/api/Position.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
import scala.reflect.macros.Attachments
diff --git a/src/reflect/scala/reflect/api/Positions.scala b/src/reflect/scala/reflect/api/Positions.scala
index 87f00fdb88..8ad46418f8 100644
--- a/src/reflect/scala/reflect/api/Positions.scala
+++ b/src/reflect/scala/reflect/api/Positions.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
@@ -19,13 +20,6 @@ trait Positions {
* @group Positions
*/
type Position >: Null <: scala.reflect.api.Position { type Pos = Position }
-
- /** A tag that preserves the identity of the `Position` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val PositionTag: ClassTag[Position]
-
/** A special "missing" position.
* @group Positions
*/
diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala
index 162fe1296b..81d30dec1e 100644
--- a/src/reflect/scala/reflect/api/Printers.scala
+++ b/src/reflect/scala/reflect/api/Printers.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
import java.io.{ PrintWriter, StringWriter }
diff --git a/src/reflect/scala/reflect/api/Scopes.scala b/src/reflect/scala/reflect/api/Scopes.scala
index 7f9799393c..2eb477f652 100644
--- a/src/reflect/scala/reflect/api/Scopes.scala
+++ b/src/reflect/scala/reflect/api/Scopes.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
@@ -33,12 +34,6 @@ trait Scopes { self: Universe =>
*/
trait ScopeApi extends Iterable[Symbol]
- /** A tag that preserves the identity of the `Scope` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ScopeTag: ClassTag[Scope]
-
/** Create a new scope with the given initial elements.
* @group Scopes
*/
@@ -61,10 +56,4 @@ trait Scopes { self: Universe =>
*/
def sorted: List[Symbol]
}
-
- /** A tag that preserves the identity of the `MemberScope` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val MemberScopeTag: ClassTag[MemberScope]
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/api/StandardDefinitions.scala b/src/reflect/scala/reflect/api/StandardDefinitions.scala
index 721b0bc7f2..bbfebcb434 100644
--- a/src/reflect/scala/reflect/api/StandardDefinitions.scala
+++ b/src/reflect/scala/reflect/api/StandardDefinitions.scala
@@ -2,7 +2,8 @@
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package api
/**
diff --git a/src/reflect/scala/reflect/api/StandardNames.scala b/src/reflect/scala/reflect/api/StandardNames.scala
index 6c78f18716..aec5f19fa0 100644
--- a/src/reflect/scala/reflect/api/StandardNames.scala
+++ b/src/reflect/scala/reflect/api/StandardNames.scala
@@ -2,7 +2,8 @@
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package api
// Q: I have a pretty name. Can I put it here?
diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala
index dbad3dd478..1250545497 100644
--- a/src/reflect/scala/reflect/api/Symbols.scala
+++ b/src/reflect/scala/reflect/api/Symbols.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
@@ -61,12 +62,6 @@ trait Symbols { self: Universe =>
*/
type Symbol >: Null <: SymbolApi
- /** A tag that preserves the identity of the `Symbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val SymbolTag: ClassTag[Symbol]
-
/** The type of type symbols representing type, class, and trait declarations,
* as well as type parameters.
* @group Symbols
@@ -74,12 +69,6 @@ trait Symbols { self: Universe =>
*/
type TypeSymbol >: Null <: Symbol with TypeSymbolApi
- /** A tag that preserves the identity of the `TypeSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypeSymbolTag: ClassTag[TypeSymbol]
-
/** The type of term symbols representing val, var, def, and object declarations as
* well as packages and value parameters.
* @group Symbols
@@ -87,72 +76,36 @@ trait Symbols { self: Universe =>
*/
type TermSymbol >: Null <: Symbol with TermSymbolApi
- /** A tag that preserves the identity of the `TermSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TermSymbolTag: ClassTag[TermSymbol]
-
/** The type of method symbols representing def declarations.
* @group Symbols
* @template
*/
type MethodSymbol >: Null <: TermSymbol with MethodSymbolApi
- /** A tag that preserves the identity of the `MethodSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val MethodSymbolTag: ClassTag[MethodSymbol]
-
/** The type of module symbols representing object declarations.
* @group Symbols
* @template
*/
type ModuleSymbol >: Null <: TermSymbol with ModuleSymbolApi
- /** A tag that preserves the identity of the `ModuleSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ModuleSymbolTag: ClassTag[ModuleSymbol]
-
/** The type of class symbols representing class and trait definitions.
* @group Symbols
* @template
*/
type ClassSymbol >: Null <: TypeSymbol with ClassSymbolApi
- /** A tag that preserves the identity of the `ClassSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ClassSymbolTag: ClassTag[ClassSymbol]
-
/** The type of free terms introduced by reification.
* @group Symbols
* @template
*/
type FreeTermSymbol >: Null <: TermSymbol with FreeTermSymbolApi
- /** A tag that preserves the identity of the `FreeTermSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val FreeTermSymbolTag: ClassTag[FreeTermSymbol]
-
/** The type of free types introduced by reification.
* @group Symbols
* @template
*/
type FreeTypeSymbol >: Null <: TypeSymbol with FreeTypeSymbolApi
- /** A tag that preserves the identity of the `FreeTypeSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val FreeTypeSymbolTag: ClassTag[FreeTypeSymbol]
-
/** A special "missing" symbol. Commonly used in the API to denote a default or empty value.
* @group Symbols
* @template
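All symbol tags removed here are regrouped in ImplicitTags.scala; they are what makes instance tests against the abstract symbol kinds checkable. A small sketch, assuming the runtime universe:

    import scala.reflect.runtime.universe._

    object SymbolKinds extends App {
      // The type tests below rely on MethodSymbolTag, ClassSymbolTag and TermSymbolTag
      // being in implicit scope via the universe import.
      def describe(sym: Symbol): String = sym match {
        case m: MethodSymbol => s"method ${m.name}"
        case c: ClassSymbol  => s"class ${c.name}"
        case t: TermSymbol   => s"term ${t.name}"
        case other           => s"other ${other.name}"
      }
      typeOf[List[Int]].members.take(5).foreach(s => println(describe(s)))
    }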
diff --git a/src/reflect/scala/reflect/api/TagInterop.scala b/src/reflect/scala/reflect/api/TagInterop.scala
index 5de811578e..51b7c519c5 100644
--- a/src/reflect/scala/reflect/api/TagInterop.scala
+++ b/src/reflect/scala/reflect/api/TagInterop.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
diff --git a/src/reflect/scala/reflect/api/TreeCreator.scala b/src/reflect/scala/reflect/api/TreeCreator.scala
index 6969418470..027c840955 100644
--- a/src/reflect/scala/reflect/api/TreeCreator.scala
+++ b/src/reflect/scala/reflect/api/TreeCreator.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/** This is an internal implementation class.
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
index 18985fe83d..f7a6a68946 100644
--- a/src/reflect/scala/reflect/api/Trees.scala
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -2,7 +2,8 @@
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package api
/**
@@ -60,12 +61,6 @@ trait Trees { self: Universe =>
*/
type Tree >: Null <: TreeApi
- /** A tag that preserves the identity of the `Tree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TreeTag: ClassTag[Tree]
-
/** The API that all trees support.
* The main source of information about trees is the [[scala.reflect.api.Trees]] page.
* @group API
@@ -230,12 +225,6 @@ trait Trees { self: Universe =>
*/
type TermTree >: Null <: AnyRef with Tree with TermTreeApi
- /** A tag that preserves the identity of the `TermTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TermTreeTag: ClassTag[TermTree]
-
/** The API that all term trees support
* @group API
*/
@@ -249,12 +238,6 @@ trait Trees { self: Universe =>
*/
type TypTree >: Null <: AnyRef with Tree with TypTreeApi
- /** A tag that preserves the identity of the `TypTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypTreeTag: ClassTag[TypTree]
-
/** The API that all typ trees support
* @group API
*/
@@ -267,12 +250,6 @@ trait Trees { self: Universe =>
*/
type SymTree >: Null <: AnyRef with Tree with SymTreeApi
- /** A tag that preserves the identity of the `SymTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val SymTreeTag: ClassTag[SymTree]
-
/** The API that all sym trees support
* @group API
*/
@@ -287,12 +264,6 @@ trait Trees { self: Universe =>
*/
type NameTree >: Null <: AnyRef with Tree with NameTreeApi
- /** A tag that preserves the identity of the `NameTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val NameTreeTag: ClassTag[NameTree]
-
/** The API that all name trees support
* @group API
*/
@@ -311,12 +282,6 @@ trait Trees { self: Universe =>
*/
type RefTree >: Null <: SymTree with NameTree with RefTreeApi
- /** A tag that preserves the identity of the `RefTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val RefTreeTag: ClassTag[RefTree]
-
/** The API that all ref trees support
* @group API
*/
@@ -331,18 +296,26 @@ trait Trees { self: Universe =>
def name: Name
}
+ /** The constructor/extractor for `RefTree` instances.
+ * @group Extractors
+ */
+ val RefTree: RefTreeExtractor
+
+ /** An extractor class to create and pattern match with syntax `RefTree(qual, name)`.
+ * This AST node corresponds to either Ident, Select or SelectFromTypeTree.
+ * @group Extractors
+ */
+ abstract class RefTreeExtractor {
+ def apply(qualifier: Tree, name: Name): RefTree
+ def unapply(refTree: RefTree): Option[(Tree, Name)]
+ }
+
/** A tree which defines a symbol-carrying entity.
* @group Trees
* @template
*/
type DefTree >: Null <: SymTree with NameTree with DefTreeApi
- /** A tag that preserves the identity of the `DefTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val DefTreeTag: ClassTag[DefTree]
-
/** The API that all def trees support
* @group API
*/
@@ -358,12 +331,6 @@ trait Trees { self: Universe =>
*/
type MemberDef >: Null <: DefTree with MemberDefApi
- /** A tag that preserves the identity of the `MemberDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val MemberDefTag: ClassTag[MemberDef]
-
/** The API that all member defs support
* @group API
*/
@@ -378,12 +345,6 @@ trait Trees { self: Universe =>
*/
type PackageDef >: Null <: MemberDef with PackageDefApi
- /** A tag that preserves the identity of the `PackageDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val PackageDefTag: ClassTag[PackageDef]
-
/** The constructor/extractor for `PackageDef` instances.
* @group Extractors
*/
@@ -417,12 +378,6 @@ trait Trees { self: Universe =>
*/
type ImplDef >: Null <: MemberDef with ImplDefApi
- /** A tag that preserves the identity of the `ImplDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ImplDefTag: ClassTag[ImplDef]
-
/** The API that all impl defs support
* @group API
*/
@@ -437,12 +392,6 @@ trait Trees { self: Universe =>
*/
type ClassDef >: Null <: ImplDef with ClassDefApi
- /** A tag that preserves the identity of the `ClassDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ClassDefTag: ClassTag[ClassDef]
-
/** The constructor/extractor for `ClassDef` instances.
* @group Extractors
*/
@@ -488,12 +437,6 @@ trait Trees { self: Universe =>
*/
type ModuleDef >: Null <: ImplDef with ModuleDefApi
- /** A tag that preserves the identity of the `ModuleDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ModuleDefTag: ClassTag[ModuleDef]
-
/** The constructor/extractor for `ModuleDef` instances.
* @group Extractors
*/
@@ -534,12 +477,6 @@ trait Trees { self: Universe =>
*/
type ValOrDefDef >: Null <: MemberDef with ValOrDefDefApi
- /** A tag that preserves the identity of the `ValOrDefDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ValOrDefDefTag: ClassTag[ValOrDefDef]
-
/** The API that all val defs and def defs support
* @group API
*/
@@ -571,12 +508,6 @@ trait Trees { self: Universe =>
*/
type ValDef >: Null <: ValOrDefDef with ValDefApi
- /** A tag that preserves the identity of the `ValDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ValDefTag: ClassTag[ValDef]
-
/** The constructor/extractor for `ValDef` instances.
* @group Extractors
*/
@@ -626,12 +557,6 @@ trait Trees { self: Universe =>
*/
type DefDef >: Null <: ValOrDefDef with DefDefApi
- /** A tag that preserves the identity of the `DefDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val DefDefTag: ClassTag[DefDef]
-
/** The constructor/extractor for `DefDef` instances.
* @group Extractors
*/
@@ -681,12 +606,6 @@ trait Trees { self: Universe =>
*/
type TypeDef >: Null <: MemberDef with TypeDefApi
- /** A tag that preserves the identity of the `TypeDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypeDefTag: ClassTag[TypeDef]
-
/** The constructor/extractor for `TypeDef` instances.
* @group Extractors
*/
@@ -746,12 +665,6 @@ trait Trees { self: Universe =>
*/
type LabelDef >: Null <: DefTree with TermTree with LabelDefApi
- /** A tag that preserves the identity of the `LabelDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val LabelDefTag: ClassTag[LabelDef]
-
/** The constructor/extractor for `LabelDef` instances.
* @group Extractors
*/
@@ -808,12 +721,6 @@ trait Trees { self: Universe =>
*/
type ImportSelector >: Null <: AnyRef with ImportSelectorApi
- /** A tag that preserves the identity of the `ImportSelector` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ImportSelectorTag: ClassTag[ImportSelector]
-
/** The constructor/extractor for `ImportSelector` instances.
* @group Extractors
*/
@@ -860,12 +767,6 @@ trait Trees { self: Universe =>
*/
type Import >: Null <: SymTree with ImportApi
- /** A tag that preserves the identity of the `Import` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ImportTag: ClassTag[Import]
-
/** The constructor/extractor for `Import` instances.
* @group Extractors
*/
@@ -918,12 +819,6 @@ trait Trees { self: Universe =>
*/
type Template >: Null <: SymTree with TemplateApi
- /** A tag that preserves the identity of the `Template` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TemplateTag: ClassTag[Template]
-
/** The constructor/extractor for `Template` instances.
* @group Extractors
*/
@@ -976,12 +871,6 @@ trait Trees { self: Universe =>
*/
type Block >: Null <: TermTree with BlockApi
- /** A tag that preserves the identity of the `Block` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val BlockTag: ClassTag[Block]
-
/** The constructor/extractor for `Block` instances.
* @group Extractors
*/
@@ -1021,12 +910,6 @@ trait Trees { self: Universe =>
*/
type CaseDef >: Null <: AnyRef with Tree with CaseDefApi
- /** A tag that preserves the identity of the `CaseDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val CaseDefTag: ClassTag[CaseDef]
-
/** The constructor/extractor for `CaseDef` instances.
* @group Extractors
*/
@@ -1074,12 +957,6 @@ trait Trees { self: Universe =>
*/
type Alternative >: Null <: TermTree with AlternativeApi
- /** A tag that preserves the identity of the `Alternative` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val AlternativeTag: ClassTag[Alternative]
-
/** The constructor/extractor for `Alternative` instances.
* @group Extractors
*/
@@ -1112,12 +989,6 @@ trait Trees { self: Universe =>
*/
type Star >: Null <: TermTree with StarApi
- /** A tag that preserves the identity of the `Star` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val StarTag: ClassTag[Star]
-
/** The constructor/extractor for `Star` instances.
* @group Extractors
*/
@@ -1153,12 +1024,6 @@ trait Trees { self: Universe =>
*/
type Bind >: Null <: DefTree with BindApi
- /** A tag that preserves the identity of the `Bind` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val BindTag: ClassTag[Bind]
-
/** The constructor/extractor for `Bind` instances.
* @group Extractors
*/
@@ -1222,12 +1087,6 @@ trait Trees { self: Universe =>
*/
type UnApply >: Null <: TermTree with UnApplyApi
- /** A tag that preserves the identity of the `UnApply` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val UnApplyTag: ClassTag[UnApply]
-
/** The constructor/extractor for `UnApply` instances.
* @group Extractors
*/
@@ -1264,12 +1123,6 @@ trait Trees { self: Universe =>
*/
type Function >: Null <: TermTree with SymTree with FunctionApi
- /** A tag that preserves the identity of the `Function` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val FunctionTag: ClassTag[Function]
-
/** The constructor/extractor for `Function` instances.
* @group Extractors
*/
@@ -1308,12 +1161,6 @@ trait Trees { self: Universe =>
*/
type Assign >: Null <: TermTree with AssignApi
- /** A tag that preserves the identity of the `Assign` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val AssignTag: ClassTag[Assign]
-
/** The constructor/extractor for `Assign` instances.
* @group Extractors
*/
@@ -1350,12 +1197,6 @@ trait Trees { self: Universe =>
*/
type AssignOrNamedArg >: Null <: TermTree with AssignOrNamedArgApi
- /** A tag that preserves the identity of the `AssignOrNamedArg` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val AssignOrNamedArgTag: ClassTag[AssignOrNamedArg]
-
/** The constructor/extractor for `AssignOrNamedArg` instances.
* @group Extractors
*/
@@ -1397,12 +1238,6 @@ trait Trees { self: Universe =>
*/
type If >: Null <: TermTree with IfApi
- /** A tag that preserves the identity of the `If` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val IfTag: ClassTag[If]
-
/** The constructor/extractor for `If` instances.
* @group Extractors
*/
@@ -1454,12 +1289,6 @@ trait Trees { self: Universe =>
*/
type Match >: Null <: TermTree with MatchApi
- /** A tag that preserves the identity of the `Match` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val MatchTag: ClassTag[Match]
-
/** The constructor/extractor for `Match` instances.
* @group Extractors
*/
@@ -1495,12 +1324,6 @@ trait Trees { self: Universe =>
*/
type Return >: Null <: TermTree with SymTree with ReturnApi
- /** A tag that preserves the identity of the `Return` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ReturnTag: ClassTag[Return]
-
/** The constructor/extractor for `Return` instances.
* @group Extractors
*/
@@ -1533,12 +1356,6 @@ trait Trees { self: Universe =>
*/
type Try >: Null <: TermTree with TryApi
- /** A tag that preserves the identity of the `Try` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TryTag: ClassTag[Try]
-
/** The constructor/extractor for `Try` instances.
* @group Extractors
*/
@@ -1577,12 +1394,6 @@ trait Trees { self: Universe =>
*/
type Throw >: Null <: TermTree with ThrowApi
- /** A tag that preserves the identity of the `Throw` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ThrowTag: ClassTag[Throw]
-
/** The constructor/extractor for `Throw` instances.
* @group Extractors
*/
@@ -1613,12 +1424,6 @@ trait Trees { self: Universe =>
*/
type New >: Null <: TermTree with NewApi
- /** A tag that preserves the identity of the `New` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val NewTag: ClassTag[New]
-
/** The constructor/extractor for `New` instances.
* @group Extractors
*/
@@ -1669,12 +1474,6 @@ trait Trees { self: Universe =>
*/
type Typed >: Null <: TermTree with TypedApi
- /** A tag that preserves the identity of the `Typed` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypedTag: ClassTag[Typed]
-
/** The constructor/extractor for `Typed` instances.
* @group Extractors
*/
@@ -1708,12 +1507,6 @@ trait Trees { self: Universe =>
*/
type GenericApply >: Null <: TermTree with GenericApplyApi
- /** A tag that preserves the identity of the `GenericApply` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val GenericApplyTag: ClassTag[GenericApply]
-
/** The API that all applies support
* @group API
*/
@@ -1735,12 +1528,6 @@ trait Trees { self: Universe =>
*/
type TypeApply >: Null <: GenericApply with TypeApplyApi
- /** A tag that preserves the identity of the `TypeApply` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypeApplyTag: ClassTag[TypeApply]
-
/** The constructor/extractor for `TypeApply` instances.
* @group Extractors
*/
@@ -1779,12 +1566,6 @@ trait Trees { self: Universe =>
*/
type Apply >: Null <: GenericApply with ApplyApi
- /** A tag that preserves the identity of the `Apply` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ApplyTag: ClassTag[Apply]
-
/** The constructor/extractor for `Apply` instances.
* @group Extractors
*/
@@ -1822,12 +1603,6 @@ trait Trees { self: Universe =>
*/
type Super >: Null <: TermTree with SuperApi
- /** A tag that preserves the identity of the `Super` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val SuperTag: ClassTag[Super]
-
/** The constructor/extractor for `Super` instances.
* @group Extractors
*/
@@ -1874,12 +1649,6 @@ trait Trees { self: Universe =>
*/
type This >: Null <: TermTree with SymTree with ThisApi
- /** A tag that preserves the identity of the `This` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ThisTag: ClassTag[This]
-
/** The constructor/extractor for `This` instances.
* @group Extractors
*/
@@ -1915,12 +1684,6 @@ trait Trees { self: Universe =>
*/
type Select >: Null <: RefTree with SelectApi
- /** A tag that preserves the identity of the `Select` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val SelectTag: ClassTag[Select]
-
/** The constructor/extractor for `Select` instances.
* @group Extractors
*/
@@ -1960,12 +1723,6 @@ trait Trees { self: Universe =>
*/
type Ident >: Null <: RefTree with IdentApi
- /** A tag that preserves the identity of the `Ident` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val IdentTag: ClassTag[Ident]
-
/** The constructor/extractor for `Ident` instances.
* @group Extractors
*/
@@ -2005,12 +1762,6 @@ trait Trees { self: Universe =>
*/
type ReferenceToBoxed >: Null <: TermTree with ReferenceToBoxedApi
- /** A tag that preserves the identity of the `ReferenceToBoxed` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ReferenceToBoxedTag: ClassTag[ReferenceToBoxed]
-
/** The constructor/extractor for `ReferenceToBoxed` instances.
* @group Extractors
*/
@@ -2055,12 +1806,6 @@ trait Trees { self: Universe =>
*/
type Literal >: Null <: TermTree with LiteralApi
- /** A tag that preserves the identity of the `Literal` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val LiteralTag: ClassTag[Literal]
-
/** The constructor/extractor for `Literal` instances.
* @group Extractors
*/
@@ -2094,12 +1839,6 @@ trait Trees { self: Universe =>
*/
type Annotated >: Null <: AnyRef with Tree with AnnotatedApi
- /** A tag that preserves the identity of the `Annotated` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val AnnotatedTag: ClassTag[Annotated]
-
/** The constructor/extractor for `Annotated` instances.
* @group Extractors
*/
@@ -2134,12 +1873,6 @@ trait Trees { self: Universe =>
*/
type SingletonTypeTree >: Null <: TypTree with SingletonTypeTreeApi
- /** A tag that preserves the identity of the `SingletonTypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val SingletonTypeTreeTag: ClassTag[SingletonTypeTree]
-
/** The constructor/extractor for `SingletonTypeTree` instances.
* @group Extractors
*/
@@ -2170,12 +1903,6 @@ trait Trees { self: Universe =>
*/
type SelectFromTypeTree >: Null <: TypTree with RefTree with SelectFromTypeTreeApi
- /** A tag that preserves the identity of the `SelectFromTypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val SelectFromTypeTreeTag: ClassTag[SelectFromTypeTree]
-
/** The constructor/extractor for `SelectFromTypeTree` instances.
* @group Extractors
*/
@@ -2217,12 +1944,6 @@ trait Trees { self: Universe =>
*/
type CompoundTypeTree >: Null <: TypTree with CompoundTypeTreeApi
- /** A tag that preserves the identity of the `CompoundTypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val CompoundTypeTreeTag: ClassTag[CompoundTypeTree]
-
/** The constructor/extractor for `CompoundTypeTree` instances.
* @group Extractors
*/
@@ -2253,12 +1974,6 @@ trait Trees { self: Universe =>
*/
type AppliedTypeTree >: Null <: TypTree with AppliedTypeTreeApi
- /** A tag that preserves the identity of the `AppliedTypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val AppliedTypeTreeTag: ClassTag[AppliedTypeTree]
-
/** The constructor/extractor for `AppliedTypeTree` instances.
* @group Extractors
*/
@@ -2301,12 +2016,6 @@ trait Trees { self: Universe =>
*/
type TypeBoundsTree >: Null <: TypTree with TypeBoundsTreeApi
- /** A tag that preserves the identity of the `TypeBoundsTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypeBoundsTreeTag: ClassTag[TypeBoundsTree]
-
/** The constructor/extractor for `TypeBoundsTree` instances.
* @group Extractors
*/
@@ -2344,12 +2053,6 @@ trait Trees { self: Universe =>
*/
type ExistentialTypeTree >: Null <: TypTree with ExistentialTypeTreeApi
- /** A tag that preserves the identity of the `ExistentialTypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ExistentialTypeTreeTag: ClassTag[ExistentialTypeTree]
-
/** The constructor/extractor for `ExistentialTypeTree` instances.
* @group Extractors
*/
@@ -2387,12 +2090,6 @@ trait Trees { self: Universe =>
*/
type TypeTree >: Null <: TypTree with TypeTreeApi
- /** A tag that preserves the identity of the `TypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypeTreeTag: ClassTag[TypeTree]
-
/** The constructor/extractor for `TypeTree` instances.
* @group Extractors
*/
@@ -2990,12 +2687,6 @@ trait Trees { self: Universe =>
*/
type Modifiers >: Null <: AnyRef with ModifiersApi
- /** A tag that preserves the identity of the `Modifiers` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Traversal
- */
- implicit val ModifiersTag: ClassTag[Modifiers]
-
/** The API that all Modifiers support
* @group API
*/
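Apart from the tag removals, Trees.scala gains one genuinely new piece of API: the RefTree constructor/extractor, which gives a uniform (qualifier, name) view over Ident, Select and SelectFromTypeTree. A usage sketch, assuming a Scala version in which this change has landed:

    import scala.reflect.runtime.universe._

    object RefTreeDemo extends App {
      def describe(t: Tree): String = t match {
        case RefTree(qual, name) => s"reference to $name via $qual"
        case other               => "not a reference"
      }
      println(describe(Ident(newTermName("x"))))                                  // qualifier is the empty tree
      println(describe(Select(Ident(newTermName("scala")), newTermName("List")))) // reference to List via scala
      println(describe(Literal(Constant(42))))                                    // not a reference
    }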
diff --git a/src/reflect/scala/reflect/api/TypeCreator.scala b/src/reflect/scala/reflect/api/TypeCreator.scala
index 24271cb48d..37fff90b43 100644
--- a/src/reflect/scala/reflect/api/TypeCreator.scala
+++ b/src/reflect/scala/reflect/api/TypeCreator.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/** A mirror-aware factory for types.
diff --git a/src/reflect/scala/reflect/api/TypeTags.scala b/src/reflect/scala/reflect/api/TypeTags.scala
index e988971ace..7457910226 100644
--- a/src/reflect/scala/reflect/api/TypeTags.scala
+++ b/src/reflect/scala/reflect/api/TypeTags.scala
@@ -221,24 +221,7 @@ trait TypeTags { self: Universe =>
def apply[T](mirror1: scala.reflect.api.Mirror[self.type], tpec1: TypeCreator): WeakTypeTag[T] =
- tpec1(mirror1) match {
- case ByteTpe => WeakTypeTag.Byte.asInstanceOf[WeakTypeTag[T]]
- case ShortTpe => WeakTypeTag.Short.asInstanceOf[WeakTypeTag[T]]
- case CharTpe => WeakTypeTag.Char.asInstanceOf[WeakTypeTag[T]]
- case IntTpe => WeakTypeTag.Int.asInstanceOf[WeakTypeTag[T]]
- case LongTpe => WeakTypeTag.Long.asInstanceOf[WeakTypeTag[T]]
- case FloatTpe => WeakTypeTag.Float.asInstanceOf[WeakTypeTag[T]]
- case DoubleTpe => WeakTypeTag.Double.asInstanceOf[WeakTypeTag[T]]
- case BooleanTpe => WeakTypeTag.Boolean.asInstanceOf[WeakTypeTag[T]]
- case UnitTpe => WeakTypeTag.Unit.asInstanceOf[WeakTypeTag[T]]
- case AnyTpe => WeakTypeTag.Any.asInstanceOf[WeakTypeTag[T]]
- case AnyValTpe => WeakTypeTag.AnyVal.asInstanceOf[WeakTypeTag[T]]
- case AnyRefTpe => WeakTypeTag.AnyRef.asInstanceOf[WeakTypeTag[T]]
- case ObjectTpe => WeakTypeTag.Object.asInstanceOf[WeakTypeTag[T]]
- case NothingTpe => WeakTypeTag.Nothing.asInstanceOf[WeakTypeTag[T]]
- case NullTpe => WeakTypeTag.Null.asInstanceOf[WeakTypeTag[T]]
- case _ => new WeakTypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
- }
+ new WeakTypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
def unapply[T](ttag: WeakTypeTag[T]): Option[Type] = Some(ttag.tpe)
}
@@ -299,24 +282,7 @@ trait TypeTags { self: Universe =>
val Null: TypeTag[scala.Null] = new PredefTypeTag[scala.Null] (NullTpe, _.TypeTag.Null)
def apply[T](mirror1: scala.reflect.api.Mirror[self.type], tpec1: TypeCreator): TypeTag[T] =
- tpec1(mirror1) match {
- case ByteTpe => TypeTag.Byte.asInstanceOf[TypeTag[T]]
- case ShortTpe => TypeTag.Short.asInstanceOf[TypeTag[T]]
- case CharTpe => TypeTag.Char.asInstanceOf[TypeTag[T]]
- case IntTpe => TypeTag.Int.asInstanceOf[TypeTag[T]]
- case LongTpe => TypeTag.Long.asInstanceOf[TypeTag[T]]
- case FloatTpe => TypeTag.Float.asInstanceOf[TypeTag[T]]
- case DoubleTpe => TypeTag.Double.asInstanceOf[TypeTag[T]]
- case BooleanTpe => TypeTag.Boolean.asInstanceOf[TypeTag[T]]
- case UnitTpe => TypeTag.Unit.asInstanceOf[TypeTag[T]]
- case AnyTpe => TypeTag.Any.asInstanceOf[TypeTag[T]]
- case AnyValTpe => TypeTag.AnyVal.asInstanceOf[TypeTag[T]]
- case AnyRefTpe => TypeTag.AnyRef.asInstanceOf[TypeTag[T]]
- case ObjectTpe => TypeTag.Object.asInstanceOf[TypeTag[T]]
- case NothingTpe => TypeTag.Nothing.asInstanceOf[TypeTag[T]]
- case NullTpe => TypeTag.Null.asInstanceOf[TypeTag[T]]
- case _ => new TypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
- }
+ new TypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
def unapply[T](ttag: TypeTag[T]): Option[Type] = Some(ttag.tpe)
}
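Both hunks above delete the dispatch tables that canonicalized freshly created tags to the predefined primitive instances; apply now always builds a fresh impl. The predefined tags remain available as members, and type-level comparisons go through =:= rather than reference identity. A small sketch:

    import scala.reflect.runtime.universe._

    object PredefTagsDemo extends App {
      def isUnit(tag: TypeTag[_]): Boolean = tag.tpe =:= TypeTag.Unit.tpe
      println(isUnit(typeTag[Unit]))   // true
      println(isUnit(typeTag[String])) // false
    }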
diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala
index e5140f23e5..9d2d06da69 100644
--- a/src/reflect/scala/reflect/api/Types.scala
+++ b/src/reflect/scala/reflect/api/Types.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
@@ -50,7 +51,7 @@ package api
*
* @contentDiagram hideNodes "*Api"
*/
-trait Types extends ImplicitTags {
+trait Types {
self: Universe =>
/** The type of Scala types, and also Scala type signatures.
@@ -209,6 +210,12 @@ trait Types extends ImplicitTags {
/******************* helpers *******************/
+ /** Provides an alternate if type is NoType.
+ *
+ * @group Helpers
+ */
+ def orElse(alt: => Type): Type
+
/** Substitute symbols in `to` for corresponding occurrences of references to
* symbols `from` in this type.
*/
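The only addition to Types.scala besides the package clause is the orElse helper, together with the removal of the ImplicitTags mix-in (moved onto Universe below). Per its doc comment, orElse returns the receiver unless the receiver is NoType. A usage sketch, assuming a Scala version that includes it:

    import scala.reflect.runtime.universe._

    object OrElseDemo extends App {
      val present: Type = typeOf[List[Int]]
      println(present orElse typeOf[Nothing]) // List[Int] — receiver kept
      println(NoType  orElse typeOf[Nothing]) // Nothing   — alternative used for NoType
    }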
diff --git a/src/reflect/scala/reflect/api/Universe.scala b/src/reflect/scala/reflect/api/Universe.scala
index 4928b8bb38..cb629f9c5a 100644
--- a/src/reflect/scala/reflect/api/Universe.scala
+++ b/src/reflect/scala/reflect/api/Universe.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
@@ -68,6 +69,7 @@ abstract class Universe extends Symbols
with Exprs
with TypeTags
with TagInterop
+ with ImplicitTags
with StandardDefinitions
with StandardNames
with BuildUtils
@@ -92,5 +94,5 @@ abstract class Universe extends Symbols
*/
// implementation is hardwired to `scala.reflect.reify.Taggers`
// using the mechanism implemented in `scala.tools.reflect.FastTrack`
- def reify[T](expr: T): Expr[T] = ??? // macro
-} \ No newline at end of file
+ def reify[T](expr: T): Expr[T] = macro ???
+}
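`def reify[T](expr: T): Expr[T] = macro ???` replaces the old `= ??? // macro` placeholder: the method is now declared as a macro whose implementation the compiler wires up through FastTrack, rather than an ordinary method that would throw if ever called directly. Calling it is unchanged; a usage sketch:

    import scala.reflect.runtime.universe._

    object ReifyDemo extends App {
      val expr: Expr[Int] = reify { 1 + 1 }
      // The reified tree is an ordinary AST value:
      println(showRaw(expr.tree)) // Apply(Select(Literal(Constant(1)), ...), List(Literal(Constant(1))))
    }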
diff --git a/src/reflect/scala/reflect/api/package.scala b/src/reflect/scala/reflect/api/package.scala
index dbda84dd0e..a8f409e123 100644
--- a/src/reflect/scala/reflect/api/package.scala
+++ b/src/reflect/scala/reflect/api/package.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
import scala.reflect.api.{Universe => ApiUniverse}
@@ -42,6 +43,6 @@ package object api {
// implementation is hardwired into `scala.reflect.reify.Taggers`
// using the mechanism implemented in `scala.tools.reflect.FastTrack`
// todo. once we have implicit macros for tag generation, we can remove these anchors
- private[scala] def materializeWeakTypeTag[T](u: ApiUniverse): u.WeakTypeTag[T] = ??? // macro
- private[scala] def materializeTypeTag[T](u: ApiUniverse): u.TypeTag[T] = ??? // macro
+ private[scala] def materializeWeakTypeTag[T](u: ApiUniverse): u.WeakTypeTag[T] = macro ???
+ private[scala] def materializeTypeTag[T](u: ApiUniverse): u.TypeTag[T] = macro ???
} \ No newline at end of file
diff --git a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
index 73cc7fbbd6..74310e1c34 100644
--- a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
/** Additions to the type checker that can be added at
diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
index f9a026744c..73ca74f08e 100644
--- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import pickling.ByteCodecs
@@ -39,8 +40,7 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
// monomorphic one by introducing existentials, see SI-7009 for details
existentialAbstraction(throwableSym.typeParams, throwableSym.tpe)
}
- val throwsAnn = AnnotationInfo(appliedType(definitions.ThrowsClass, throwableTpe), List(Literal(Constant(throwableTpe))), Nil)
- withAnnotations(List(throwsAnn))
+ this withAnnotation AnnotationInfo(appliedType(ThrowsClass, throwableTpe), List(Literal(Constant(throwableTpe))), Nil)
}
/** Tests for, get, or remove an annotation */
@@ -121,25 +121,32 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
* must be `String`. This specialised class is used to encode Scala
* signatures for reasons of efficiency, both in term of class-file size
* and in term of compiler performance.
+ * Details about the storage format of pickles at the bytecode level (classfile annotations) can be found in SIP-10.
*/
case class ScalaSigBytes(bytes: Array[Byte]) extends ClassfileAnnotArg {
override def toString = (bytes map { byte => (byte & 0xff).toHexString }).mkString("[ ", " ", " ]")
- lazy val encodedBytes = ByteCodecs.encode(bytes) // TODO remove after migration to ASM-based GenJVM complete
- def isLong: Boolean = (encodedBytes.length > 65535) // TODO remove after migration to ASM-based GenJVM complete
lazy val sevenBitsMayBeZero: Array[Byte] = {
mapToNextModSevenBits(scala.reflect.internal.pickling.ByteCodecs.encode8to7(bytes))
}
+
+ /* In order to store a byte array (the pickle) using a bytecode-level annotation,
+ * the most compact representation is used (which happens to be string-constant and not byte array as one would expect).
+ * However, a String constant in a classfile annotation is limited to a maximum of 65535 characters.
+ * Method `fitsInOneString` tells us whether the pickle can be held by a single classfile-annotation of string-type.
+ * Otherwise an array of strings will be used.
+ */
def fitsInOneString: Boolean = {
+ // due to escaping, a zero byte in a classfile-annotation of string-type takes actually two characters.
val numZeros = (sevenBitsMayBeZero count { b => b == 0 })
- val res = (sevenBitsMayBeZero.length + numZeros) <= 65535
- assert(this.isLong == !res, "As things stand, can't just swap in `fitsInOneString()` for `isLong()`")
- res
+
+ (sevenBitsMayBeZero.length + numZeros) <= 65535
}
+
def sigAnnot: Type =
- if (this.isLong)
- definitions.ScalaLongSignatureAnnotation.tpe
- else
+ if (fitsInOneString)
definitions.ScalaSignatureAnnotation.tpe
+ else
+ definitions.ScalaLongSignatureAnnotation.tpe
private def mapToNextModSevenBits(src: Array[Byte]): Array[Byte] = {
var i = 0
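The rewritten ScalaSigBytes drops the isLong/encodedBytes remnants of the pre-ASM backend and documents the size check: a classfile string constant tops out at 65535 characters, and every zero byte in the 7-bit-encoded pickle costs two characters once escaped. The same arithmetic, as a standalone sketch outside the compiler:

    object PickleFits {
      // sevenBitsMayBeZero is the pickle after the 8-to-7 bit re-encoding.
      def fitsInOneString(sevenBitsMayBeZero: Array[Byte]): Boolean = {
        val numZeros = sevenBitsMayBeZero.count(_ == 0)
        sevenBitsMayBeZero.length + numZeros <= 65535
      }

      def main(args: Array[String]): Unit = {
        println(fitsInOneString(Array.fill[Byte](65000)(1))) // true: 65000 chars
        println(fitsInOneString(Array.fill[Byte](40000)(0))) // false: 80000 chars once zeros are escaped
      }
    }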
diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
index 9daf9504f1..e3498a95a6 100644
--- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
@@ -2,7 +2,8 @@
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
// todo implement in terms of BitSet
@@ -65,15 +66,14 @@ trait BaseTypeSeqs {
pending += i
try {
mergePrefixAndArgs(variants, Variance.Contravariant, lubDepth(variants)) match {
- case Some(tp0) =>
+ case NoType => typeError("no common type instance of base types "+(variants mkString ", and ")+" exists.")
+ case tp0 =>
pending(i) = false
elems(i) = tp0
tp0
- case None =>
- typeError(
- "no common type instance of base types "+(variants mkString ", and ")+" exists.")
}
- } catch {
+ }
+ catch {
case CyclicInheritance =>
typeError(
"computing the common type instance of base types "+(variants mkString ", and ")+" leads to a cycle.")
@@ -180,7 +180,7 @@ trait BaseTypeSeqs {
def nextRawElem(i: Int): Type = {
val j = index(i)
val pbts = pbtss(i)
- if (j < pbts.length) pbts.rawElem(j) else AnyClass.tpe
+ if (j < pbts.length) pbts.rawElem(j) else AnyTpe
}
var minSym: Symbol = NoSymbol
while (minSym != AnyClass) {
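Two unrelated cleanups share this hunk: AnyClass.tpe gives way to the cached AnyTpe, and the result of mergePrefixAndArgs is no longer wrapped in Option, with failure signalled by the NoType sentinel instead (matched first, so the catch-all case binds a real type). A toy illustration of that Option-to-sentinel rewrite (MiniType/NoTpe are made-up names):

    sealed trait MiniType
    case object NoTpe extends MiniType
    final case class Named(name: String) extends MiniType

    object SentinelDemo extends App {
      // Failure is encoded in-band as NoTpe rather than wrapped in Option.
      def merge(variants: List[MiniType]): MiniType = variants.distinct match {
        case single :: Nil => single
        case _             => NoTpe
      }

      def describe(variants: List[MiniType]): String = merge(variants) match {
        case NoTpe => "no common type instance exists"
        case tp    => s"merged to $tp"
      }

      println(describe(List(Named("A"), Named("A")))) // merged to Named(A)
      println(describe(List(Named("A"), Named("B")))) // no common type instance exists
    }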
diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala
index 175943d264..ece2d28be3 100644
--- a/src/reflect/scala/reflect/internal/BuildUtils.scala
+++ b/src/reflect/scala/reflect/internal/BuildUtils.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
trait BuildUtils { self: SymbolTable =>
diff --git a/src/reflect/scala/reflect/internal/CapturedVariables.scala b/src/reflect/scala/reflect/internal/CapturedVariables.scala
index c262c8474a..ef9646b80f 100644
--- a/src/reflect/scala/reflect/internal/CapturedVariables.scala
+++ b/src/reflect/scala/reflect/internal/CapturedVariables.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
import Flags._
@@ -29,7 +30,7 @@ trait CapturedVariables { self: SymbolTable =>
def refType(valueRef: Map[Symbol, Symbol], objectRefClass: Symbol) =
if (isPrimitiveValueClass(symClass) && symClass != UnitClass) valueRef(symClass).tpe
else if (erasedTypes) objectRefClass.tpe
- else appliedType(objectRefClass, tpe)
+ else appliedType(objectRefClass, tpe1)
if (vble.hasAnnotation(VolatileAttr)) refType(volatileRefClass, VolatileObjectRefClass)
else refType(refClass, ObjectRefClass)
}
diff --git a/src/reflect/scala/reflect/internal/Chars.scala b/src/reflect/scala/reflect/internal/Chars.scala
index 2d07092862..74413fdaba 100644
--- a/src/reflect/scala/reflect/internal/Chars.scala
+++ b/src/reflect/scala/reflect/internal/Chars.scala
@@ -2,7 +2,8 @@
* Copyright 2006-2013 LAMP/EPFL
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import scala.annotation.{ tailrec, switch }
diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
index faf61e5205..a7ce044780 100644
--- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala
+++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
@@ -3,13 +3,13 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import scala.annotation.switch
object ClassfileConstants {
-
final val JAVA_MAGIC = 0xCAFEBABE
final val JAVA_MAJOR_VERSION = 45
final val JAVA_MINOR_VERSION = 3
diff --git a/src/reflect/scala/reflect/internal/Constants.scala b/src/reflect/scala/reflect/internal/Constants.scala
index 5ed2f675b2..511b39b8c6 100644
--- a/src/reflect/scala/reflect/internal/Constants.scala
+++ b/src/reflect/scala/reflect/internal/Constants.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import java.lang.Integer.toOctalString
@@ -62,17 +63,17 @@ trait Constants extends api.Constants {
def isAnyVal = UnitTag <= tag && tag <= DoubleTag
def tpe: Type = tag match {
- case UnitTag => UnitClass.tpe
- case BooleanTag => BooleanClass.tpe
- case ByteTag => ByteClass.tpe
- case ShortTag => ShortClass.tpe
- case CharTag => CharClass.tpe
- case IntTag => IntClass.tpe
- case LongTag => LongClass.tpe
- case FloatTag => FloatClass.tpe
- case DoubleTag => DoubleClass.tpe
- case StringTag => StringClass.tpe
- case NullTag => NullClass.tpe
+ case UnitTag => UnitTpe
+ case BooleanTag => BooleanTpe
+ case ByteTag => ByteTpe
+ case ShortTag => ShortTpe
+ case CharTag => CharTpe
+ case IntTag => IntTpe
+ case LongTag => LongTpe
+ case FloatTag => FloatTpe
+ case DoubleTag => DoubleTpe
+ case StringTag => StringTpe
+ case NullTag => NullTpe
case ClazzTag => ClassType(typeValue)
case EnumTag => EnumType(symbolValue)
}
@@ -211,7 +212,7 @@ trait Constants extends api.Constants {
case '"' => "\\\""
case '\'' => "\\\'"
case '\\' => "\\\\"
- case _ => if (ch.isControl) "\\0" + toOctalString(ch) else String.valueOf(ch)
+ case _ => if (ch.isControl) "\\0" + toOctalString(ch.toInt) else String.valueOf(ch)
}
def escapedStringValue: String = {
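
The hunk above only adds an explicit `.toInt`, since `toOctalString` takes an `Int`; the escaping rule itself is unchanged. A minimal sketch of that rule, using a hypothetical helper rather than the compiler's `Constant` code:

import java.lang.Integer.toOctalString

// Hypothetical helper illustrating the escaping rule preserved by the hunk above:
// control characters become "\0" plus the octal value of the code point,
// ordinary characters pass through unchanged.
object EscapeSketch {
  def escapeChar(ch: Char): String = ch match {
    case '"'  => "\\\""
    case '\'' => "\\'"
    case '\\' => "\\\\"
    case _    => if (ch.isControl) "\\0" + toOctalString(ch.toInt) else String.valueOf(ch)
  }

  def main(args: Array[String]): Unit = {
    println(escapeChar('\u0007')) // \07
    println(escapeChar('a'))      // a
  }
}
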
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 5392daf674..4f2b7e2642 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -3,9 +3,11 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
+import scala.language.postfixOps
import scala.annotation.{ switch, meta }
import scala.collection.{ mutable, immutable }
import Flags._
@@ -109,8 +111,10 @@ trait Definitions extends api.StandardDefinitions {
/** Is symbol a numeric value class? */
def isNumericValueClass(sym: Symbol) = ScalaNumericValueClasses contains sym
- def isGetClass(sym: Symbol) =
- (sym.name == nme.getClass_) && flattensToEmpty(sym.paramss)
+ def isGetClass(sym: Symbol) = (
+ sym.name == nme.getClass_ // this condition is for performance only; this is called from `Typer#stabilize`.
+ && getClassMethods(sym)
+ )
lazy val UnitClass = valueClassSymbol(tpnme.Unit)
lazy val ByteClass = valueClassSymbol(tpnme.Byte)
@@ -125,15 +129,15 @@ trait Definitions extends api.StandardDefinitions {
lazy val Boolean_or = getMemberMethod(BooleanClass, nme.ZOR)
lazy val Boolean_not = getMemberMethod(BooleanClass, nme.UNARY_!)
- lazy val UnitTpe = UnitClass.toTypeConstructor
- lazy val ByteTpe = ByteClass.toTypeConstructor
- lazy val ShortTpe = ShortClass.toTypeConstructor
- lazy val CharTpe = CharClass.toTypeConstructor
- lazy val IntTpe = IntClass.toTypeConstructor
- lazy val LongTpe = LongClass.toTypeConstructor
- lazy val FloatTpe = FloatClass.toTypeConstructor
- lazy val DoubleTpe = DoubleClass.toTypeConstructor
- lazy val BooleanTpe = BooleanClass.toTypeConstructor
+ lazy val UnitTpe = UnitClass.tpe
+ lazy val ByteTpe = ByteClass.tpe
+ lazy val ShortTpe = ShortClass.tpe
+ lazy val CharTpe = CharClass.tpe
+ lazy val IntTpe = IntClass.tpe
+ lazy val LongTpe = LongClass.tpe
+ lazy val FloatTpe = FloatClass.tpe
+ lazy val DoubleTpe = DoubleClass.tpe
+ lazy val BooleanTpe = BooleanClass.tpe
lazy val ScalaNumericValueClasses = ScalaValueClasses filterNot Set[Symbol](UnitClass, BooleanClass)
lazy val ScalaValueClassesNoUnit = ScalaValueClasses filterNot (_ eq UnitClass)
@@ -176,16 +180,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val RuntimePackage = getPackage("scala.runtime")
lazy val RuntimePackageClass = RuntimePackage.moduleClass.asClass
- // convenient one-argument parameter lists
- lazy val anyparam = List(AnyClass.tpe)
- lazy val anyvalparam = List(AnyValClass.typeConstructor)
- lazy val anyrefparam = List(AnyRefClass.typeConstructor)
-
- // private parameter conveniences
- private def booltype = BooleanClass.tpe
- private def inttype = IntClass.tpe
- private def stringtype = StringClass.tpe
-
def javaTypeToValueClass(jtype: Class[_]): Symbol = jtype match {
case java.lang.Void.TYPE => UnitClass
case java.lang.Byte.TYPE => ByteClass
@@ -215,13 +209,17 @@ trait Definitions extends api.StandardDefinitions {
*/
def fullyInitializeSymbol(sym: Symbol): Symbol = {
sym.initialize
+ // Watch out for those darn raw types on method parameters
+ if (sym.owner.initialize.isJavaDefined)
+ sym.cookJavaRawInfo()
+
fullyInitializeType(sym.info)
fullyInitializeType(sym.tpe_*)
sym
}
def fullyInitializeType(tp: Type): Type = {
tp.typeParams foreach fullyInitializeSymbol
- tp.paramss.flatten foreach fullyInitializeSymbol
+ mforeach(tp.paramss)(fullyInitializeSymbol)
tp
}
def fullyInitializeScope(scope: Scope): Scope = {
@@ -250,9 +248,9 @@ trait Definitions extends api.StandardDefinitions {
/** Is this type equivalent to Any, AnyVal, or AnyRef? */
def isTrivialTopType(tp: Type) = (
- tp =:= AnyClass.tpe
- || tp =:= AnyValClass.tpe
- || tp =:= AnyRefClass.tpe
+ tp =:= AnyTpe
+ || tp =:= AnyValTpe
+ || tp =:= AnyRefTpe
)
private def fixupAsAnyTrait(tpe: Type): Type = tpe match {
@@ -260,7 +258,7 @@ trait Definitions extends api.StandardDefinitions {
if (parents.head.typeSymbol == AnyClass) tpe
else {
assert(parents.head.typeSymbol == ObjectClass, parents)
- ClassInfoType(AnyClass.tpe :: parents.tail, decls, clazz)
+ ClassInfoType(AnyTpe :: parents.tail, decls, clazz)
}
case PolyType(tparams, restpe) =>
PolyType(tparams, fixupAsAnyTrait(restpe))
@@ -268,11 +266,20 @@ trait Definitions extends api.StandardDefinitions {
// top types
lazy val AnyClass = enterNewClass(ScalaPackageClass, tpnme.Any, Nil, ABSTRACT)
- lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectClass.tpe)
+ lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectTpe)
lazy val ObjectClass = getRequiredClass(sn.Object.toString)
- lazy val AnyTpe = definitions.AnyClass.toTypeConstructor
- lazy val AnyRefTpe = definitions.AnyRefClass.toTypeConstructor
- lazy val ObjectTpe = definitions.ObjectClass.toTypeConstructor
+
+ // Cached types for core monomorphic classes
+ lazy val AnyRefTpe = AnyRefClass.tpe
+ lazy val AnyTpe = AnyClass.tpe
+ lazy val AnyValTpe = AnyValClass.tpe
+ lazy val BoxedUnitTpe = BoxedUnitClass.tpe
+ lazy val NothingTpe = NothingClass.tpe
+ lazy val NullTpe = NullClass.tpe
+ lazy val ObjectTpe = ObjectClass.tpe
+ lazy val SerializableTpe = SerializableClass.tpe
+ lazy val StringTpe = StringClass.tpe
+ lazy val ThrowableTpe = ThrowableClass.tpe
// Note: this is not the type alias AnyRef, it's a companion-like
// object used by the @specialize annotation.
@@ -281,12 +288,11 @@ trait Definitions extends api.StandardDefinitions {
def Predef_AnyRef = AnyRefModule
lazy val AnyValClass: ClassSymbol = (ScalaPackageClass.info member tpnme.AnyVal orElse {
- val anyval = enterNewClass(ScalaPackageClass, tpnme.AnyVal, AnyClass.tpe :: Nil, ABSTRACT)
+ val anyval = enterNewClass(ScalaPackageClass, tpnme.AnyVal, AnyTpe :: Nil, ABSTRACT)
val av_constr = anyval.newClassConstructor(NoPosition)
anyval.info.decls enter av_constr
anyval
}).asInstanceOf[ClassSymbol]
- lazy val AnyValTpe = definitions.AnyValClass.toTypeConstructor
def AnyVal_getClass = getMemberMethod(AnyValClass, nme.getClass_)
// bottom types
@@ -309,8 +315,6 @@ trait Definitions extends api.StandardDefinitions {
|| (that ne NothingClass) && (that isSubClass ObjectClass)
)
}
- lazy val NothingTpe = definitions.NothingClass.toTypeConstructor
- lazy val NullTpe = definitions.NullClass.toTypeConstructor
// exceptions and other throwables
lazy val ClassCastExceptionClass = requiredClass[ClassCastException]
@@ -322,7 +326,9 @@ trait Definitions extends api.StandardDefinitions {
lazy val ThrowableClass = getClassByName(sn.Throwable)
lazy val UninitializedErrorClass = requiredClass[UninitializedFieldError]
+ @deprecated("Same effect but more compact: `throw null`. Details in JVM spec, `athrow` instruction.", "2.11.0")
lazy val NPEConstructor = getMemberMethod(NullPointerExceptionClass, nme.CONSTRUCTOR) suchThat (_.paramss.flatten.isEmpty)
+
lazy val UninitializedFieldConstructor = UninitializedErrorClass.primaryConstructor
// fundamental reference classes
@@ -389,7 +395,7 @@ trait Definitions extends api.StandardDefinitions {
def delayedInitMethod = getMemberMethod(DelayedInitClass, nme.delayedInit)
lazy val TypeConstraintClass = requiredClass[scala.annotation.TypeConstraint]
- lazy val SingletonClass = enterNewClass(ScalaPackageClass, tpnme.Singleton, anyparam, ABSTRACT | TRAIT | FINAL)
+ lazy val SingletonClass = enterNewClass(ScalaPackageClass, tpnme.Singleton, AnyTpe :: Nil, ABSTRACT | TRAIT | FINAL)
lazy val SerializableClass = requiredClass[scala.Serializable]
lazy val JavaSerializableClass = requiredClass[java.io.Serializable] modifyInfo fixupAsAnyTrait
lazy val ComparableClass = requiredClass[java.lang.Comparable[_]] modifyInfo fixupAsAnyTrait
@@ -399,7 +405,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val RemoteInterfaceClass = requiredClass[java.rmi.Remote]
lazy val RemoteExceptionClass = requiredClass[java.rmi.RemoteException]
- lazy val ByNameParamClass = specialPolyClass(tpnme.BYNAME_PARAM_CLASS_NAME, COVARIANT)(_ => AnyClass.tpe)
+ lazy val ByNameParamClass = specialPolyClass(tpnme.BYNAME_PARAM_CLASS_NAME, COVARIANT)(_ => AnyTpe)
lazy val JavaRepeatedParamClass = specialPolyClass(tpnme.JAVA_REPEATED_PARAM_CLASS_NAME, COVARIANT)(tparam => arrayType(tparam.tpe))
lazy val RepeatedParamClass = specialPolyClass(tpnme.REPEATED_PARAM_CLASS_NAME, COVARIANT)(tparam => seqType(tparam.tpe))
@@ -423,24 +429,15 @@ trait Definitions extends api.StandardDefinitions {
case _ => false
}
- def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match {
- case TypeRef(_, RepeatedParamClass, arg :: Nil) => seqType(arg)
- case _ => tp
- }
-
- def seqToRepeated(tp: Type): Type = (tp baseType SeqClass) match {
- case TypeRef(_, SeqClass, arg :: Nil) => scalaRepeatedType(arg)
- case _ => tp
- }
-
- def isReferenceArray(tp: Type) = tp match {
- case TypeRef(_, ArrayClass, arg :: Nil) => arg <:< AnyRefClass.tpe
- case _ => false
- }
- def isArrayOfSymbol(tp: Type, elem: Symbol) = tp match {
- case TypeRef(_, ArrayClass, arg :: Nil) => arg.typeSymbol == elem
- case _ => false
- }
+ // wrapping and unwrapping
+ def dropByName(tp: Type): Type = elementExtract(ByNameParamClass, tp) orElse tp
+ def repeatedToSingle(tp: Type): Type = elementExtract(RepeatedParamClass, tp) orElse tp
+ def repeatedToSeq(tp: Type): Type = elementTransform(RepeatedParamClass, tp)(seqType) orElse tp
+ def seqToRepeated(tp: Type): Type = elementTransform(SeqClass, tp)(scalaRepeatedType) orElse tp
+ def isReferenceArray(tp: Type) = elementTest(ArrayClass, tp)(_ <:< AnyRefTpe)
+ def isArrayOfSymbol(tp: Type, elem: Symbol) = elementTest(ArrayClass, tp)(_.typeSymbol == elem)
+ def elementType(container: Symbol, tp: Type): Type = elementExtract(container, tp)
+ object ExprClassOf { def unapply(tp: Type): Option[Type] = elementExtractOption(ExprClass, tp) }
// collections classes
lazy val ConsClass = requiredClass[scala.collection.immutable.::[_]]
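
The new `elementExtract`/`elementTransform`/`elementTest` helpers factor out the pattern the removed methods repeated: "if `tp` is `Container[arg]`, work with `arg`, otherwise fall back". A sketch of the idea using runtime reflection (an assumption for illustration, not the compiler-cake helpers themselves):

import scala.reflect.runtime.universe._

// Sketch only: pull the single type argument out of Container[arg], returning
// None when the type does not have that shape.
object ElementExtractSketch {
  def elementExtract(container: Symbol, tp: Type): Option[Type] = tp match {
    case TypeRef(_, sym, arg :: Nil) if sym == container => Some(arg)
    case _                                                => None
  }

  def main(args: Array[String]): Unit = {
    val arraySym = typeOf[Array[_]].typeSymbol
    println(elementExtract(arraySym, typeOf[Array[String]])) // Some(String)
    println(elementExtract(arraySym, typeOf[List[String]]))  // None
  }
}
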
@@ -478,8 +475,8 @@ trait Definitions extends api.StandardDefinitions {
lazy val ReflectPackage = requiredModule[scala.reflect.`package`.type]
lazy val ReflectApiPackage = getPackageObjectIfDefined("scala.reflect.api") // defined in scala-reflect.jar, so we need to be careful
lazy val ReflectRuntimePackage = getPackageObjectIfDefined("scala.reflect.runtime") // defined in scala-reflect.jar, so we need to be careful
- def ReflectRuntimeUniverse = if (ReflectRuntimePackage != NoSymbol) getMemberValue(ReflectRuntimePackage, nme.universe) else NoSymbol
- def ReflectRuntimeCurrentMirror = if (ReflectRuntimePackage != NoSymbol) getMemberMethod(ReflectRuntimePackage, nme.currentMirror) else NoSymbol
+ def ReflectRuntimeUniverse = ReflectRuntimePackage.map(sym => getMemberValue(sym, nme.universe))
+ def ReflectRuntimeCurrentMirror = ReflectRuntimePackage.map(sym => getMemberMethod(sym, nme.currentMirror))
lazy val PartialManifestClass = getTypeMember(ReflectPackage, tpnme.ClassManifest)
lazy val PartialManifestModule = requiredModule[scala.reflect.ClassManifestFactory.type]
@@ -488,24 +485,31 @@ trait Definitions extends api.StandardDefinitions {
lazy val OptManifestClass = requiredClass[scala.reflect.OptManifest[_]]
lazy val NoManifest = requiredModule[scala.reflect.NoManifest.type]
+ lazy val TreesClass = getClassIfDefined("scala.reflect.api.Trees") // defined in scala-reflect.jar, so we need to be careful
+ lazy val TreesTreeType = TreesClass.map(sym => getTypeMember(sym, tpnme.Tree))
+ object TreeType {
+ def unapply(tpe: Type): Boolean = unapply(tpe.typeSymbol)
+ def unapply(sym: Symbol): Boolean = sym.overrideChain contains TreesTreeType
+ }
+
lazy val ExprsClass = getClassIfDefined("scala.reflect.api.Exprs") // defined in scala-reflect.jar, so we need to be careful
- lazy val ExprClass = if (ExprsClass != NoSymbol) getMemberClass(ExprsClass, tpnme.Expr) else NoSymbol
- def ExprSplice = if (ExprsClass != NoSymbol) getMemberMethod(ExprClass, nme.splice) else NoSymbol
- def ExprValue = if (ExprsClass != NoSymbol) getMemberMethod(ExprClass, nme.value) else NoSymbol
+ lazy val ExprClass = ExprsClass.map(sym => getMemberClass(sym, tpnme.Expr))
+ def ExprSplice = ExprClass.map(sym => getMemberMethod(sym, nme.splice))
+ def ExprValue = ExprClass.map(sym => getMemberMethod(sym, nme.value))
lazy val ClassTagModule = requiredModule[scala.reflect.ClassTag[_]]
lazy val ClassTagClass = requiredClass[scala.reflect.ClassTag[_]]
lazy val TypeTagsClass = getClassIfDefined("scala.reflect.api.TypeTags") // defined in scala-reflect.jar, so we need to be careful
- lazy val WeakTypeTagClass = if (TypeTagsClass != NoSymbol) getMemberClass(TypeTagsClass, tpnme.WeakTypeTag) else NoSymbol
- lazy val WeakTypeTagModule = if (TypeTagsClass != NoSymbol) getMemberModule(TypeTagsClass, nme.WeakTypeTag) else NoSymbol
- lazy val TypeTagClass = if (TypeTagsClass != NoSymbol) getMemberClass(TypeTagsClass, tpnme.TypeTag) else NoSymbol
- lazy val TypeTagModule = if (TypeTagsClass != NoSymbol) getMemberModule(TypeTagsClass, nme.TypeTag) else NoSymbol
+ lazy val WeakTypeTagClass = TypeTagsClass.map(sym => getMemberClass(sym, tpnme.WeakTypeTag))
+ lazy val WeakTypeTagModule = TypeTagsClass.map(sym => getMemberModule(sym, nme.WeakTypeTag))
+ lazy val TypeTagClass = TypeTagsClass.map(sym => getMemberClass(sym, tpnme.TypeTag))
+ lazy val TypeTagModule = TypeTagsClass.map(sym => getMemberModule(sym, nme.TypeTag))
def materializeClassTag = getMemberMethod(ReflectPackage, nme.materializeClassTag)
- def materializeWeakTypeTag = if (ReflectApiPackage != NoSymbol) getMemberMethod(ReflectApiPackage, nme.materializeWeakTypeTag) else NoSymbol
- def materializeTypeTag = if (ReflectApiPackage != NoSymbol) getMemberMethod(ReflectApiPackage, nme.materializeTypeTag) else NoSymbol
+ def materializeWeakTypeTag = ReflectApiPackage.map(sym => getMemberMethod(sym, nme.materializeWeakTypeTag))
+ def materializeTypeTag = ReflectApiPackage.map(sym => getMemberMethod(sym, nme.materializeTypeTag))
lazy val ApiUniverseClass = getClassIfDefined("scala.reflect.api.Universe") // defined in scala-reflect.jar, so we need to be careful
- def ApiUniverseReify = if (ApiUniverseClass != NoSymbol) getMemberMethod(ApiUniverseClass, nme.reify) else NoSymbol
+ def ApiUniverseReify = ApiUniverseClass.map(sym => getMemberMethod(sym, nme.reify))
lazy val JavaUniverseClass = getClassIfDefined("scala.reflect.api.JavaUniverse") // defined in scala-reflect.jar, so we need to be careful
lazy val MirrorClass = getClassIfDefined("scala.reflect.api.Mirror") // defined in scala-reflect.jar, so we need to be careful
@@ -513,14 +517,18 @@ trait Definitions extends api.StandardDefinitions {
lazy val TypeCreatorClass = getClassIfDefined("scala.reflect.api.TypeCreator") // defined in scala-reflect.jar, so we need to be careful
lazy val TreeCreatorClass = getClassIfDefined("scala.reflect.api.TreeCreator") // defined in scala-reflect.jar, so we need to be careful
- lazy val MacroContextClass = getClassIfDefined("scala.reflect.macros.Context") // defined in scala-reflect.jar, so we need to be careful
- def MacroContextPrefix = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.prefix) else NoSymbol
- def MacroContextPrefixType = if (MacroContextClass != NoSymbol) getTypeMember(MacroContextClass, tpnme.PrefixType) else NoSymbol
- def MacroContextUniverse = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.universe) else NoSymbol
- lazy val MacroImplAnnotation = requiredClass[scala.reflect.macros.internal.macroImpl]
+ lazy val MacroClass = getClassIfDefined("scala.reflect.macros.Macro") // defined in scala-reflect.jar, so we need to be careful
+ lazy val MacroContextClass = getClassIfDefined("scala.reflect.macros.Context") // defined in scala-reflect.jar, so we need to be careful
+ def MacroContextPrefix = MacroContextClass.map(sym => getMemberMethod(sym, nme.prefix))
+ def MacroContextPrefixType = MacroContextClass.map(sym => getTypeMember(sym, tpnme.PrefixType))
+ def MacroContextUniverse = MacroContextClass.map(sym => getMemberMethod(sym, nme.universe))
+ def MacroContextExprClass = MacroContextClass.map(sym => getTypeMember(sym, tpnme.Expr))
+ def MacroContextWeakTypeTagClass = MacroContextClass.map(sym => getTypeMember(sym, tpnme.WeakTypeTag))
+ def MacroContextTreeType = MacroContextClass.map(sym => getTypeMember(sym, tpnme.Tree))
+ lazy val MacroImplAnnotation = requiredClass[scala.reflect.macros.internal.macroImpl]
- lazy val StringContextClass = requiredClass[scala.StringContext]
- def StringContext_f = getMemberMethod(StringContextClass, nme.f)
+ lazy val StringContextClass = requiredClass[scala.StringContext]
+ def StringContext_f = getMemberMethod(StringContextClass, nme.f)
lazy val ScalaSignatureAnnotation = requiredClass[scala.reflect.ScalaSignature]
lazy val ScalaLongSignatureAnnotation = requiredClass[scala.reflect.ScalaLongSignature]
@@ -584,7 +592,7 @@ trait Definitions extends api.StandardDefinitions {
case BooleanClass => nme.wrapBooleanArray
case UnitClass => nme.wrapUnitArray
case _ =>
- if ((elemtp <:< AnyRefClass.tpe) && !isPhantomClass(elemtp.typeSymbol)) nme.wrapRefArray
+ if ((elemtp <:< AnyRefTpe) && !isPhantomClass(elemtp.typeSymbol)) nme.wrapRefArray
else nme.genericWrapArray
}
@@ -627,6 +635,12 @@ trait Definitions extends api.StandardDefinitions {
}
def isTupleType(tp: Type) = isTupleTypeDirect(tp.dealiasWiden)
+ def isMacroBundleType(tp: Type) = {
+ val isNonTrivial = tp != ErrorType && tp != NothingTpe && tp != NullTpe
+ val isMacroCompatible = MacroClass != NoSymbol && tp <:< MacroClass.tpe
+ isNonTrivial && isMacroCompatible
+ }
+
lazy val ProductRootClass: ClassSymbol = requiredClass[scala.Product]
def Product_productArity = getMemberMethod(ProductRootClass, nme.productArity)
def Product_productElement = getMemberMethod(ProductRootClass, nme.productElement)
@@ -670,11 +684,6 @@ trait Definitions extends api.StandardDefinitions {
(sym eq PartialFunctionClass) || (sym eq AbstractPartialFunctionClass)
}
- def elementType(container: Symbol, tp: Type): Type = tp match {
- case TypeRef(_, `container`, arg :: Nil) => arg
- case _ => NoType
- }
-
def arrayType(arg: Type) = appliedType(ArrayClass, arg)
def byNameType(arg: Type) = appliedType(ByNameParamClass, arg)
def iteratorOfType(tp: Type) = appliedType(IteratorClass, tp)
@@ -720,12 +729,12 @@ trait Definitions extends api.StandardDefinitions {
existentialAbstraction(clazz.unsafeTypeParams, clazz.tpe_*)
// members of class scala.Any
- lazy val Any_== = enterNewMethod(AnyClass, nme.EQ, anyparam, booltype, FINAL)
- lazy val Any_!= = enterNewMethod(AnyClass, nme.NE, anyparam, booltype, FINAL)
- lazy val Any_equals = enterNewMethod(AnyClass, nme.equals_, anyparam, booltype)
- lazy val Any_hashCode = enterNewMethod(AnyClass, nme.hashCode_, Nil, inttype)
- lazy val Any_toString = enterNewMethod(AnyClass, nme.toString_, Nil, stringtype)
- lazy val Any_## = enterNewMethod(AnyClass, nme.HASHHASH, Nil, inttype, FINAL)
+ lazy val Any_== = enterNewMethod(AnyClass, nme.EQ, AnyTpe :: Nil, BooleanTpe, FINAL)
+ lazy val Any_!= = enterNewMethod(AnyClass, nme.NE, AnyTpe :: Nil, BooleanTpe, FINAL)
+ lazy val Any_equals = enterNewMethod(AnyClass, nme.equals_, AnyTpe :: Nil, BooleanTpe)
+ lazy val Any_hashCode = enterNewMethod(AnyClass, nme.hashCode_, Nil, IntTpe)
+ lazy val Any_toString = enterNewMethod(AnyClass, nme.toString_, Nil, StringTpe)
+ lazy val Any_## = enterNewMethod(AnyClass, nme.HASHHASH, Nil, IntTpe, FINAL)
// Any_getClass requires special handling. The return type is determined on
// a per-call-site basis as if the function being called were actually:
@@ -737,9 +746,15 @@ trait Definitions extends api.StandardDefinitions {
// participation. At the "Any" level, the return type is Class[_] as it is in
// java.lang.Object. Java also special cases the return type.
lazy val Any_getClass = enterNewMethod(AnyClass, nme.getClass_, Nil, getMemberMethod(ObjectClass, nme.getClass_).tpe.resultType, DEFERRED)
- lazy val Any_isInstanceOf = newT1NullaryMethod(AnyClass, nme.isInstanceOf_, FINAL)(_ => booltype)
+ lazy val Any_isInstanceOf = newT1NullaryMethod(AnyClass, nme.isInstanceOf_, FINAL)(_ => BooleanTpe)
lazy val Any_asInstanceOf = newT1NullaryMethod(AnyClass, nme.asInstanceOf_, FINAL)(_.typeConstructor)
+ lazy val primitiveGetClassMethods = Set[Symbol](Any_getClass, AnyVal_getClass) ++ (
+ ScalaValueClasses map (_.tpe member nme.getClass_)
+ )
+
+ lazy val getClassMethods: Set[Symbol] = primitiveGetClassMethods + Object_getClass
+
// A type function from T => Class[U], used to determine the return
// type of getClass calls. The returned type is:
//
@@ -761,7 +776,7 @@ trait Definitions extends api.StandardDefinitions {
else {
val eparams = typeParamsToExistentials(ClassClass, ClassClass.typeParams)
val upperBound = (
- if (isPhantomClass(sym)) AnyClass.tpe
+ if (isPhantomClass(sym)) AnyTpe
else if (sym.isLocalClass) erasure.intersectionDominator(tp.parents)
else tp.widen
)
@@ -821,17 +836,17 @@ trait Definitions extends api.StandardDefinitions {
}
// members of class java.lang.{ Object, String }
- lazy val Object_## = enterNewMethod(ObjectClass, nme.HASHHASH, Nil, inttype, FINAL)
- lazy val Object_== = enterNewMethod(ObjectClass, nme.EQ, anyrefparam, booltype, FINAL)
- lazy val Object_!= = enterNewMethod(ObjectClass, nme.NE, anyrefparam, booltype, FINAL)
- lazy val Object_eq = enterNewMethod(ObjectClass, nme.eq, anyrefparam, booltype, FINAL)
- lazy val Object_ne = enterNewMethod(ObjectClass, nme.ne, anyrefparam, booltype, FINAL)
- lazy val Object_isInstanceOf = newT1NoParamsMethod(ObjectClass, nme.isInstanceOf_Ob, FINAL | SYNTHETIC | ARTIFACT)(_ => booltype)
+ lazy val Object_## = enterNewMethod(ObjectClass, nme.HASHHASH, Nil, IntTpe, FINAL)
+ lazy val Object_== = enterNewMethod(ObjectClass, nme.EQ, AnyRefTpe :: Nil, BooleanTpe, FINAL)
+ lazy val Object_!= = enterNewMethod(ObjectClass, nme.NE, AnyRefTpe :: Nil, BooleanTpe, FINAL)
+ lazy val Object_eq = enterNewMethod(ObjectClass, nme.eq, AnyRefTpe :: Nil, BooleanTpe, FINAL)
+ lazy val Object_ne = enterNewMethod(ObjectClass, nme.ne, AnyRefTpe :: Nil, BooleanTpe, FINAL)
+ lazy val Object_isInstanceOf = newT1NoParamsMethod(ObjectClass, nme.isInstanceOf_Ob, FINAL | SYNTHETIC | ARTIFACT)(_ => BooleanTpe)
lazy val Object_asInstanceOf = newT1NoParamsMethod(ObjectClass, nme.asInstanceOf_Ob, FINAL | SYNTHETIC | ARTIFACT)(_.typeConstructor)
lazy val Object_synchronized = newPolyMethod(1, ObjectClass, nme.synchronized_, FINAL)(tps =>
(Some(List(tps.head.typeConstructor)), tps.head.typeConstructor)
)
- lazy val String_+ = enterNewMethod(StringClass, nme.raw.PLUS, anyparam, stringtype, FINAL)
+ lazy val String_+ = enterNewMethod(StringClass, nme.raw.PLUS, AnyTpe :: Nil, StringTpe, FINAL)
def Object_getClass = getMemberMethod(ObjectClass, nme.getClass_)
def Object_clone = getMemberMethod(ObjectClass, nme.clone_)
@@ -1031,7 +1046,7 @@ trait Definitions extends api.StandardDefinitions {
private def specialPolyClass(name: TypeName, flags: Long)(parentFn: Symbol => Type): ClassSymbol = {
val clazz = enterNewClass(ScalaPackageClass, name, Nil)
val tparam = clazz.newSyntheticTypeParam("T0", flags)
- val parents = List(AnyRefClass.tpe, parentFn(tparam))
+ val parents = List(AnyRefTpe, parentFn(tparam))
clazz setInfo GenPolyType(List(tparam), ClassInfoType(parents, newScope, clazz))
}
diff --git a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
index 281a32caf6..073f124630 100644
--- a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
+++ b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import scala.collection.{ mutable, immutable }
@@ -31,4 +32,81 @@ trait ExistentialsAndSkolems {
}
(new Deskolemizer).typeSkolems
}
+
+ def isRawParameter(sym: Symbol) = // is it a type parameter leaked by a raw type?
+ sym.isTypeParameter && sym.owner.isJavaDefined
+
+ /** If we map a set of hidden symbols to their existential bounds, we
+ * have a problem: the bounds may themselves contain references to the
+ * hidden symbols. So this recursively calls existentialBound until
+ * the typeSymbol is not amongst the symbols being hidden.
+ */
+ private def existentialBoundsExcludingHidden(hidden: List[Symbol]): Map[Symbol, Type] = {
+ def safeBound(t: Type): Type =
+ if (hidden contains t.typeSymbol) safeBound(t.typeSymbol.existentialBound.bounds.hi) else t
+
+ def hiBound(s: Symbol): Type = safeBound(s.existentialBound.bounds.hi) match {
+ case tp @ RefinedType(parents, decls) =>
+ val parents1 = parents mapConserve safeBound
+ if (parents eq parents1) tp
+ else copyRefinedType(tp, parents1, decls)
+ case tp => tp
+ }
+
+ // Hanging onto lower bound in case anything interesting
+ // happens with it.
+ mapFrom(hidden)(s => s.existentialBound match {
+ case TypeBounds(lo, hi) => TypeBounds(lo, hiBound(s))
+ case _ => hiBound(s)
+ })
+ }
+
+ /** Given a set `rawSyms` of term- and type-symbols, and a type
+ * `tp`, produce a set of fresh type parameters and a type so that
+ * it can be abstracted to an existential type. Every type symbol
+ * `T` in `rawSyms` is mapped to a clone. Every term symbol `x` of
+ * type `T` in `rawSyms` is given an associated type symbol of the
+ * following form:
+ *
+ * type x.type <: T with Singleton
+ *
+ * The name of the type parameter is `x.type`, to produce nice
+ * diagnostics. The Singleton parent ensures that the type
+ * parameter is still seen as a stable type. Type symbols in
+ * rawSyms are fully replaced by the new symbols. Term symbols are
+ * also replaced, except for term symbols of an Ident tree, where
+ * only the type of the Ident is changed.
+ */
+ final def existentialTransform[T](rawSyms: List[Symbol], tp: Type, rawOwner: Symbol = NoSymbol)(creator: (List[Symbol], Type) => T): T = {
+ val allBounds = existentialBoundsExcludingHidden(rawSyms)
+ val typeParams: List[Symbol] = rawSyms map { sym =>
+ val name = sym.name match {
+ case x: TypeName => x
+ case x => tpnme.singletonName(x)
+ }
+ def rawOwner0 = rawOwner orElse abort(s"no owner provided for existential transform over raw parameter: $sym")
+ val bound = allBounds(sym)
+ val sowner = if (isRawParameter(sym)) rawOwner0 else sym.owner
+ val quantified = sowner.newExistential(name, sym.pos)
+
+ quantified setInfo bound.cloneInfo(quantified)
+ }
+ // Higher-kinded existentials are not yet supported, but this is
+ // tpeHK for when they are: "if a type constructor is expected/allowed,
+ // tpeHK must be called instead of tpe."
+ val typeParamTypes = typeParams map (_.tpeHK)
+ def doSubst(info: Type) = info.subst(rawSyms, typeParamTypes)
+
+ creator(typeParams map (_ modifyInfo doSubst), doSubst(tp))
+ }
+
+ /**
+ * Compute an existential type from hidden symbols `hidden` and type `tp`.
+ * @param hidden The symbols that will be existentially abstracted
+ * @param tp The original type
+ * @param rawOwner The owner for Java raw types.
+ */
+ final def packSymbols(hidden: List[Symbol], tp: Type, rawOwner: Symbol = NoSymbol): Type =
+ if (hidden.isEmpty) tp
+ else existentialTransform(hidden, tp, rawOwner)(existentialAbstraction)
}
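
For intuition about the shape `existentialTransform`/`packSymbols` produce: a term symbol `x` of type `T` hidden inside the packed type is replaced by an existentially quantified type parameter bounded by `T with Singleton` (the real parameter is named `x.type` for diagnostics, which user code cannot spell). A plain-Scala illustration with a hypothetical trait, not compiler code:

object PackedShape {
  trait T
  // The packed form of a type that mentioned a local value x of type T:
  // the fresh existential parameter stands in for x.type, and the Singleton
  // bound keeps it a stable type.
  def usesPacked(xs: List[x] forSome { type x <: T with Singleton }): Int = xs.size
}
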
diff --git a/src/reflect/scala/reflect/internal/FatalError.scala b/src/reflect/scala/reflect/internal/FatalError.scala
index a084fc24f3..08a9a635af 100644
--- a/src/reflect/scala/reflect/internal/FatalError.scala
+++ b/src/reflect/scala/reflect/internal/FatalError.scala
@@ -2,5 +2,6 @@
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-package scala.reflect.internal
+package scala
+package reflect.internal
case class FatalError(msg: String) extends Exception(msg)
diff --git a/src/reflect/scala/reflect/internal/FlagSets.scala b/src/reflect/scala/reflect/internal/FlagSets.scala
index 6a3b6870a0..961adb2c57 100644
--- a/src/reflect/scala/reflect/internal/FlagSets.scala
+++ b/src/reflect/scala/reflect/internal/FlagSets.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
import scala.language.implicitConversions
diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala
index fe46a0471e..b8e3407824 100644
--- a/src/reflect/scala/reflect/internal/Flags.scala
+++ b/src/reflect/scala/reflect/internal/Flags.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import scala.collection.{ mutable, immutable }
@@ -59,9 +60,9 @@ import scala.collection.{ mutable, immutable }
// 42: VBRIDGE
// 43: VARARGS
// 44: TRIEDCOOKING
-// 45:
-// 46:
-// 47:
+// 45: SYNCHRONIZED/M
+// 46: ARTIFACT
+// 47: DEFAULTMETHOD/M
// 48:
// 49:
// 50:
@@ -117,6 +118,7 @@ class ModifierFlags {
final val PRESUPER = 1L << 37 // value is evaluated before super call
final val DEFAULTINIT = 1L << 41 // symbol is initialized to the default value: used by -Xcheckinit
final val ARTIFACT = 1L << 46 // symbol should be ignored when typechecking; will be marked ACC_SYNTHETIC in bytecode
+ final val DEFAULTMETHOD = 1L << 47 // symbol is a java default method
/** Symbols which are marked ARTIFACT. (Expand this list?)
*
@@ -252,7 +254,7 @@ class Flags extends ModifierFlags {
*/
final val ExplicitFlags =
PRIVATE | PROTECTED | ABSTRACT | FINAL | SEALED |
- OVERRIDE | CASE | IMPLICIT | ABSOVERRIDE | LAZY
+ OVERRIDE | CASE | IMPLICIT | ABSOVERRIDE | LAZY | DEFAULTMETHOD
/** The two bridge flags */
final val BridgeFlags = BRIDGE | VBRIDGE
@@ -438,7 +440,7 @@ class Flags extends ModifierFlags {
case TRIEDCOOKING => "<triedcooking>" // (1L << 44)
case SYNCHRONIZED => "<synchronized>" // (1L << 45)
case ARTIFACT => "<artifact>" // (1L << 46)
- case 0x800000000000L => "" // (1L << 47)
+ case DEFAULTMETHOD => "<defaultmethod>" // (1L << 47)
case 0x1000000000000L => "" // (1L << 48)
case 0x2000000000000L => "" // (1L << 49)
case 0x4000000000000L => "" // (1L << 50)
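
The new DEFAULTMETHOD modifier is just another bit in the long flag word. A quick standalone sketch of how such bits are combined and tested (flag values copied from the patch, helper names hypothetical):

object FlagBits {
  // Bit positions as listed in the patch: ARTIFACT at 46, DEFAULTMETHOD at 47.
  final val ARTIFACT      = 1L << 46
  final val DEFAULTMETHOD = 1L << 47

  def hasFlag(flags: Long, mask: Long): Boolean = (flags & mask) != 0L

  def main(args: Array[String]): Unit = {
    val flags = ARTIFACT | DEFAULTMETHOD
    println(hasFlag(flags, DEFAULTMETHOD)) // true
    println(hasFlag(flags, 1L << 45))      // false: the SYNCHRONIZED bit is not set
  }
}
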
diff --git a/src/reflect/scala/reflect/internal/HasFlags.scala b/src/reflect/scala/reflect/internal/HasFlags.scala
index 6f8befd23e..420b5fef81 100644
--- a/src/reflect/scala/reflect/internal/HasFlags.scala
+++ b/src/reflect/scala/reflect/internal/HasFlags.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
import Flags._
diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala
index f736202e13..f8584ac9b0 100644
--- a/src/reflect/scala/reflect/internal/Importers.scala
+++ b/src/reflect/scala/reflect/internal/Importers.scala
@@ -1,21 +1,22 @@
-package scala.reflect
+package scala
+package reflect
package internal
import scala.collection.mutable.WeakHashMap
import scala.ref.WeakReference
// SI-6241: move importers to a mirror
-trait Importers extends api.Importers { self: SymbolTable =>
+trait Importers extends api.Importers { to: SymbolTable =>
def mkImporter(from0: api.Universe): Importer { val from: from0.type } = (
- if (self eq from0) {
+ if (to eq from0) {
new Importer {
val from = from0
- val reverse = this.asInstanceOf[from.Importer{ val from: self.type }]
- def importSymbol(sym: from.Symbol) = sym.asInstanceOf[self.Symbol]
- def importType(tpe: from.Type) = tpe.asInstanceOf[self.Type]
- def importTree(tree: from.Tree) = tree.asInstanceOf[self.Tree]
- def importPosition(pos: from.Position) = pos.asInstanceOf[self.Position]
+ val reverse = this.asInstanceOf[from.Importer{ val from: to.type }]
+ def importSymbol(their: from.Symbol) = their.asInstanceOf[to.Symbol]
+ def importType(their: from.Type) = their.asInstanceOf[to.Type]
+ def importTree(their: from.Tree) = their.asInstanceOf[to.Tree]
+ def importPosition(their: from.Position) = their.asInstanceOf[to.Position]
}
} else {
// todo. fix this loophole
@@ -28,8 +29,8 @@ trait Importers extends api.Importers { self: SymbolTable =>
val from: SymbolTable
- protected lazy val symMap = new Cache[from.Symbol, Symbol]()
- protected lazy val tpeMap = new Cache[from.Type, Type]()
+ protected lazy val symMap = new Cache[from.Symbol, to.Symbol]()
+ protected lazy val tpeMap = new Cache[from.Type, to.Type]()
protected class Cache[K <: AnyRef, V <: AnyRef] extends WeakHashMap[K, WeakReference[V]] {
def weakGet(key: K): Option[V] = this get key flatMap WeakReference.unapply
def weakUpdate(key: K, value: V) = this.update(key, WeakReference(value))
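
The symbol and type caches are weak on both sides, so the importer never pins another universe's symbols or types in memory. A self-contained sketch of the same shape (class name hypothetical):

import scala.collection.mutable.WeakHashMap
import scala.ref.WeakReference

// Weak keys (WeakHashMap) and weak values (WeakReference): an entry disappears
// once either side is no longer strongly reachable elsewhere.
class WeakCache[K <: AnyRef, V <: AnyRef] extends WeakHashMap[K, WeakReference[V]] {
  def weakGet(key: K): Option[V]         = this get key flatMap WeakReference.unapply
  def weakUpdate(key: K, value: V): Unit = this.update(key, WeakReference(value))
}
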
@@ -49,158 +50,162 @@ trait Importers extends api.Importers { self: SymbolTable =>
}
object reverse extends from.StandardImporter {
- val from: self.type = self
+ val from: to.type = to
// FIXME this and reverse should be constantly kept in sync
// not just synced once upon the first usage of reverse
- for ((fromsym, WeakReference(mysym)) <- StandardImporter.this.symMap) symMap += ((mysym, WeakReference(fromsym)))
- for ((fromtpe, WeakReference(mytpe)) <- StandardImporter.this.tpeMap) tpeMap += ((mytpe, WeakReference(fromtpe)))
+ for ((theirsym, WeakReference(mysym)) <- StandardImporter.this.symMap) symMap += ((mysym, WeakReference(theirsym)))
+ for ((theirtpe, WeakReference(mytpe)) <- StandardImporter.this.tpeMap) tpeMap += ((mytpe, WeakReference(theirtpe)))
}
- // todo. careful import of positions
- def importPosition(pos: from.Position): Position =
- pos.asInstanceOf[Position]
+ // ============== SYMBOLS ==============
+
+ protected def recreatedSymbolCompleter(my: to.Symbol, their: from.Symbol) = {
+ // we lock the symbol that is imported for a very short period of time
+ // i.e. only for when type parameters of the symbol are being imported
+ // the lock is used to communicate to the recursive importSymbol calls
+ // that type parameters need to be created from scratch
+ // because otherwise type parameters are imported by looking into owner.typeParams
+ // which is obviously unavailable while the completer is being created
+ try {
+ my setFlag Flags.LOCKED
+ val mytypeParams = their.typeParams map importSymbol
+ new LazyPolyType(mytypeParams) with FlagAgnosticCompleter {
+ override def complete(my: to.Symbol): Unit = {
+ val theirCore = their.info match {
+ case from.PolyType(_, core) => core
+ case core => core
+ }
+ my setInfo GenPolyType(mytypeParams, importType(theirCore))
+ my setAnnotations (their.annotations map importAnnotationInfo)
+ }
+ }
+ } finally {
+ my resetFlag Flags.LOCKED
+ }
+ }
+
+ protected def recreateSymbol(their: from.Symbol): to.Symbol = {
+ val myowner = importSymbol(their.owner)
+ val mypos = importPosition(their.pos)
+ val myname = importName(their.name)
+ val myflags = their.flags
+ def linkReferenced(my: TermSymbol, their: from.TermSymbol, op: from.Symbol => Symbol): Symbol = {
+ symMap.weakUpdate(their, my)
+ my.referenced = op(their.referenced)
+ my
+ }
+ val my = their match {
+ case their: from.MethodSymbol =>
+ linkReferenced(myowner.newMethod(myname.toTermName, mypos, myflags), their, importSymbol)
+ case their: from.ModuleSymbol =>
+ val ret = linkReferenced(myowner.newModuleSymbol(myname.toTermName, mypos, myflags), their, importSymbol)
+ ret.associatedFile = their.associatedFile
+ ret
+ case their: from.FreeTermSymbol =>
+ newFreeTermSymbol(myname.toTermName, their.value, their.flags, their.origin) setInfo importType(their.info)
+ case their: from.FreeTypeSymbol =>
+ newFreeTypeSymbol(myname.toTypeName, their.flags, their.origin)
+ case their: from.TermSymbol =>
+ linkReferenced(myowner.newValue(myname.toTermName, mypos, myflags), their, importSymbol)
+ case their: from.TypeSkolem =>
+ val origin = their.unpackLocation match {
+ case null => null
+ case theirloc: from.Tree => importTree(theirloc)
+ case theirloc: from.Symbol => importSymbol(theirloc)
+ }
+ myowner.newTypeSkolemSymbol(myname.toTypeName, origin, mypos, myflags)
+ case their: from.ModuleClassSymbol =>
+ val my = myowner.newModuleClass(myname.toTypeName, mypos, myflags)
+ symMap.weakUpdate(their, my)
+ my.sourceModule = importSymbol(their.sourceModule)
+ my
+ case their: from.ClassSymbol =>
+ val my = myowner.newClassSymbol(myname.toTypeName, mypos, myflags)
+ symMap.weakUpdate(their, my)
+ if (their.thisSym != their) {
+ my.typeOfThis = importType(their.typeOfThis)
+ my.thisSym setName importName(their.thisSym.name)
+ }
+ my.associatedFile = their.associatedFile
+ my
+ case their: from.TypeSymbol =>
+ myowner.newTypeSymbol(myname.toTypeName, mypos, myflags)
+ }
+ symMap.weakUpdate(their, my)
+ my setInfo recreatedSymbolCompleter(my, their)
+ }
- def importSymbol(sym0: from.Symbol): Symbol = {
- def doImport(sym: from.Symbol): Symbol =
- symMap weakGet sym match {
+ def importSymbol(their0: from.Symbol): Symbol = {
+ def cachedRecreateSymbol(their: from.Symbol): Symbol =
+ symMap weakGet their match {
case Some(result) => result
- case _ =>
- val myowner = importSymbol(sym.owner)
- val mypos = importPosition(sym.pos)
- val myname = importName(sym.name).toTermName
- val myflags = sym.flags
- def linkReferenced(mysym: TermSymbol, x: from.TermSymbol, op: from.Symbol => Symbol): Symbol = {
- symMap.weakUpdate(x, mysym)
- mysym.referenced = op(x.referenced)
- mysym
- }
- val mysym = sym match {
- case x: from.MethodSymbol =>
- linkReferenced(myowner.newMethod(myname, mypos, myflags), x, importSymbol)
- case x: from.ModuleSymbol =>
- linkReferenced(myowner.newModuleSymbol(myname, mypos, myflags), x, importSymbol)
- case x: from.FreeTermSymbol =>
- newFreeTermSymbol(importName(x.name).toTermName, x.value, x.flags, x.origin) setInfo importType(x.info)
- case x: from.FreeTypeSymbol =>
- newFreeTypeSymbol(importName(x.name).toTypeName, x.flags, x.origin)
- case x: from.TermSymbol =>
- linkReferenced(myowner.newValue(myname, mypos, myflags), x, importSymbol)
- case x: from.TypeSkolem =>
- val origin = x.unpackLocation match {
- case null => null
- case y: from.Tree => importTree(y)
- case y: from.Symbol => importSymbol(y)
- }
- myowner.newTypeSkolemSymbol(myname.toTypeName, origin, mypos, myflags)
- case x: from.ModuleClassSymbol =>
- val mysym = myowner.newModuleClass(myname.toTypeName, mypos, myflags)
- symMap.weakUpdate(x, mysym)
- mysym.sourceModule = importSymbol(x.sourceModule)
- mysym
- case x: from.ClassSymbol =>
- val mysym = myowner.newClassSymbol(myname.toTypeName, mypos, myflags)
- symMap.weakUpdate(x, mysym)
- if (sym.thisSym != sym) {
- mysym.typeOfThis = importType(sym.typeOfThis)
- mysym.thisSym setName importName(sym.thisSym.name)
- }
- mysym
- case x: from.TypeSymbol =>
- myowner.newTypeSymbol(myname.toTypeName, mypos, myflags)
- }
- symMap.weakUpdate(sym, mysym)
- mysym setFlag Flags.LOCKED
- mysym setInfo {
- val mytypeParams = sym.typeParams map importSymbol
- new LazyPolyType(mytypeParams) with FlagAgnosticCompleter {
- override def complete(s: Symbol) {
- val result = sym.info match {
- case from.PolyType(_, res) => res
- case result => result
- }
- s setInfo GenPolyType(mytypeParams, importType(result))
- s setAnnotations (sym.annotations map importAnnotationInfo)
- }
- }
- }
- mysym resetFlag Flags.LOCKED
- } // end doImport
+ case _ => recreateSymbol(their)
+ }
- def importOrRelink: Symbol = {
- val sym = sym0 // makes sym visible in the debugger
- if (sym == null)
+ def recreateOrRelink: Symbol = {
+ val their = their0 // makes their visible in the debugger
+ if (their == null)
null
- else if (sym == from.NoSymbol)
+ else if (their == from.NoSymbol)
NoSymbol
- else if (sym.isRoot)
+ else if (their.isRoot)
rootMirror.RootClass // !!! replace with actual mirror when we move importers to the mirror
else {
- val name = sym.name
- val owner = sym.owner
- var scope = if (owner.isClass && !owner.isRefinementClass) owner.info else from.NoType
- var existing = scope.decl(name)
- if (sym.isModuleClass)
- existing = existing.moduleClass
-
- if (!existing.exists) scope = from.NoType
-
- val myname = importName(name)
- val myowner = importSymbol(owner)
- val myscope = if (scope != from.NoType && !(myowner hasFlag Flags.LOCKED)) myowner.info else NoType
- var myexisting = if (myscope != NoType) myowner.info.decl(myname) else NoSymbol // cannot load myexisting in general case, because it creates cycles for methods
- if (sym.isModuleClass)
- myexisting = importSymbol(sym.sourceModule).moduleClass
-
- if (!sym.isOverloaded && myexisting.isOverloaded) {
- myexisting =
- if (sym.isMethod) {
- val localCopy = doImport(sym)
- myexisting filter (_.tpe matches localCopy.tpe)
- } else {
- myexisting filter (!_.isMethod)
+ val isModuleClass = their.isModuleClass
+ val isTparam = their.isTypeParameter && their.paramPos >= 0
+ val isOverloaded = their.isOverloaded
+
+ var theirscope = if (their.owner.isClass && !their.owner.isRefinementClass) their.owner.info else from.NoType
+ val theirexisting = if (isModuleClass) theirscope.decl(their.name).moduleClass else theirscope.decl(their.name)
+ if (!theirexisting.exists) theirscope = from.NoType
+
+ val myname = importName(their.name)
+ val myowner = importSymbol(their.owner)
+ val myscope = if (theirscope != from.NoType && !(myowner hasFlag Flags.LOCKED)) myowner.info else NoType
+ val myexisting = {
+ if (isModuleClass) importSymbol(their.sourceModule).moduleClass
+ else if (isTparam) (if (myowner hasFlag Flags.LOCKED) NoSymbol else myowner.typeParams(their.paramPos))
+ else if (isOverloaded) myowner.newOverloaded(myowner.thisType, their.alternatives map importSymbol)
+ else {
+ def disambiguate(my: Symbol) = {
+ val result =
+ if (their.isMethod) {
+ val localCopy = cachedRecreateSymbol(their)
+ my filter (_.tpe matches localCopy.tpe)
+ } else {
+ my filter (!_.isMethod)
+ }
+ assert(!result.isOverloaded,
+ "import failure: cannot determine unique overloaded method alternative from\n "+
+ (result.alternatives map (_.defString) mkString "\n")+"\n that matches "+their+":"+their.tpe)
+ result
}
- assert(!myexisting.isOverloaded,
- "import failure: cannot determine unique overloaded method alternative from\n "+
- (myexisting.alternatives map (_.defString) mkString "\n")+"\n that matches "+sym+":"+sym.tpe)
- }
- val mysym = {
- if (sym.isOverloaded) {
- myowner.newOverloaded(myowner.thisType, sym.alternatives map importSymbol)
- } else if (sym.isTypeParameter && sym.paramPos >= 0 && !(myowner hasFlag Flags.LOCKED)) {
- assert(myowner.typeParams.length > sym.paramPos,
- "import failure: cannot determine parameter "+sym+" (#"+sym.paramPos+") in "+
- myowner+typeParamsString(myowner.rawInfo)+"\n original symbol was: "+
- sym.owner+from.typeParamsString(sym.owner.info))
- myowner.typeParams(sym.paramPos)
- } else {
- if (myexisting != NoSymbol) {
- myexisting
- } else {
- val mysym = doImport(sym)
-
- if (myscope != NoType) {
- assert(myowner.info.decls.lookup(myname) == NoSymbol, myname+" "+myowner.info.decl(myname)+" "+myexisting)
- myowner.info.decls enter mysym
- }
-
- mysym
- }
+ val myexisting = if (myscope != NoType) myscope.decl(myname) else NoSymbol
+ if (myexisting.isOverloaded) disambiguate(myexisting)
+ else myexisting
}
}
- mysym
+ myexisting.orElse {
+ val my = cachedRecreateSymbol(their)
+ if (myscope != NoType) {
+ assert(myscope.decls.lookup(myname) == NoSymbol, myname+" "+myscope.decl(myname)+" "+myexisting)
+ myscope.decls enter my
+ }
+ my
+ }
}
- } // end importOrRelink
+ } // end recreateOrRelink
- val sym = sym0
- symMap.weakGet(sym) match {
+ val their = their0
+ symMap.weakGet(their) match {
case Some(result) => result
case None =>
pendingSyms += 1
try {
- val result = importOrRelink
- symMap.weakUpdate(sym, result)
+ val result = recreateOrRelink
+ symMap.weakUpdate(their, result)
result
} finally {
pendingSyms -= 1
@@ -209,69 +214,70 @@ trait Importers extends api.Importers { self: SymbolTable =>
}
}
- def importType(tpe: from.Type): Type = {
- def doImport(tpe: from.Type): Type = tpe match {
- case from.TypeRef(pre, sym, args) =>
- TypeRef(importType(pre), importSymbol(sym), args map importType)
- case from.ThisType(clazz) =>
- ThisType(importSymbol(clazz))
- case from.SingleType(pre, sym) =>
- SingleType(importType(pre), importSymbol(sym))
- case from.MethodType(params, restpe) =>
- MethodType(params map importSymbol, importType(restpe))
- case from.PolyType(tparams, restpe) =>
- PolyType(tparams map importSymbol, importType(restpe))
- case from.NullaryMethodType(restpe) =>
- NullaryMethodType(importType(restpe))
- case from.ConstantType(constant @ from.Constant(_)) =>
- ConstantType(importConstant(constant))
- case from.SuperType(thistpe, supertpe) =>
- SuperType(importType(thistpe), importType(supertpe))
- case from.TypeBounds(lo, hi) =>
- TypeBounds(importType(lo), importType(hi))
- case from.BoundedWildcardType(bounds) =>
- BoundedWildcardType(importTypeBounds(bounds))
- case from.ClassInfoType(parents, decls, clazz) =>
- val myclazz = importSymbol(clazz)
- val myscope = if (myclazz.isPackageClass) newPackageScope(myclazz) else newScope
- val myclazzTpe = ClassInfoType(parents map importType, myscope, myclazz)
- myclazz setInfo GenPolyType(myclazz.typeParams, myclazzTpe) // needed so that newly created symbols find their scope
- decls foreach importSymbol // will enter itself into myclazz
- myclazzTpe
- case from.RefinedType(parents, decls) =>
- RefinedType(parents map importType, importScope(decls), importSymbol(tpe.typeSymbol))
- case from.ExistentialType(tparams, restpe) =>
- newExistentialType(tparams map importSymbol, importType(restpe))
- case from.OverloadedType(pre, alts) =>
- OverloadedType(importType(pre), alts map importSymbol)
- case from.AntiPolyType(pre, targs) =>
- AntiPolyType(importType(pre), targs map importType)
- case x: from.TypeVar =>
- TypeVar(importType(x.origin), importTypeConstraint(x.constr), x.typeArgs map importType, x.params map importSymbol)
- case from.AnnotatedType(annots, tpe, selfsym) =>
- AnnotatedType(annots map importAnnotationInfo, importType(tpe), importSymbol(selfsym))
- case from.ErrorType =>
- ErrorType
- case from.WildcardType =>
- WildcardType
- case from.NoType =>
- NoType
- case from.NoPrefix =>
- NoPrefix
- case null =>
- null
- } // end doImport
-
- def importOrRelink: Type =
- doImport(tpe)
+ // ============== TYPES ==============
+
+ def recreateType(their: from.Type): Type = their match {
+ case from.TypeRef(pre, sym, args) =>
+ TypeRef(importType(pre), importSymbol(sym), args map importType)
+ case from.ThisType(clazz) =>
+ ThisType(importSymbol(clazz))
+ case from.SingleType(pre, sym) =>
+ SingleType(importType(pre), importSymbol(sym))
+ case from.MethodType(params, result) =>
+ MethodType(params map importSymbol, importType(result))
+ case from.PolyType(tparams, result) =>
+ PolyType(tparams map importSymbol, importType(result))
+ case from.NullaryMethodType(result) =>
+ NullaryMethodType(importType(result))
+ case from.ConstantType(constant @ from.Constant(_)) =>
+ ConstantType(importConstant(constant))
+ case from.SuperType(thistpe, supertpe) =>
+ SuperType(importType(thistpe), importType(supertpe))
+ case from.TypeBounds(lo, hi) =>
+ TypeBounds(importType(lo), importType(hi))
+ case from.BoundedWildcardType(bounds) =>
+ BoundedWildcardType(importType(bounds).asInstanceOf[TypeBounds])
+ case from.ClassInfoType(parents, decls, clazz) =>
+ val myclazz = importSymbol(clazz)
+ val myscope = if (myclazz.isPackageClass) newPackageScope(myclazz) else newScope
+ val myclazzTpe = ClassInfoType(parents map importType, myscope, myclazz)
+ myclazz setInfo GenPolyType(myclazz.typeParams, myclazzTpe) // needed so that newly created symbols find their scope
+ decls foreach importSymbol // will enter itself into myclazz
+ myclazzTpe
+ case from.RefinedType(parents, decls) =>
+ RefinedType(parents map importType, importScope(decls), importSymbol(their.typeSymbol))
+ case from.ExistentialType(tparams, result) =>
+ newExistentialType(tparams map importSymbol, importType(result))
+ case from.OverloadedType(pre, alts) =>
+ OverloadedType(importType(pre), alts map importSymbol)
+ case from.AntiPolyType(pre, targs) =>
+ AntiPolyType(importType(pre), targs map importType)
+ case their: from.TypeVar =>
+ val myconstr = new TypeConstraint(their.constr.loBounds map importType, their.constr.hiBounds map importType)
+ myconstr.inst = importType(their.constr.inst)
+ TypeVar(importType(their.origin), myconstr, their.typeArgs map importType, their.params map importSymbol)
+ case from.AnnotatedType(annots, result, selfsym) =>
+ AnnotatedType(annots map importAnnotationInfo, importType(result), importSymbol(selfsym))
+ case from.ErrorType =>
+ ErrorType
+ case from.WildcardType =>
+ WildcardType
+ case from.NoType =>
+ NoType
+ case from.NoPrefix =>
+ NoPrefix
+ case null =>
+ null
+ }
- tpeMap.weakGet(tpe) match {
+ def importType(their: from.Type): Type = {
+ tpeMap.weakGet(their) match {
case Some(result) => result
case None =>
pendingTpes += 1
try {
- val result = importOrRelink
- tpeMap.weakUpdate(tpe, result)
+ val result = recreateType(their)
+ tpeMap.weakUpdate(their, result)
result
} finally {
pendingTpes -= 1
@@ -280,7 +286,136 @@ trait Importers extends api.Importers { self: SymbolTable =>
}
}
- def importTypeBounds(bounds: from.TypeBounds) = importType(bounds).asInstanceOf[TypeBounds]
+ // ============== TREES ==============
+
+ def recreatedTreeCompleter(their: from.Tree, my: to.Tree): Unit = {
+ if (their.canHaveAttrs) {
+ if (my.hasSymbolField) my.symbol = importSymbol(their.symbol)
+ my.pos = importPosition(their.pos)
+ (their, my) match {
+ case (their: from.TypeTree, my: to.TypeTree) =>
+ if (their.wasEmpty) my.defineType(importType(their.tpe)) else my.setType(importType(their.tpe))
+ case (_, _) =>
+ my.tpe = importType(their.tpe)
+ }
+ }
+ }
+
+ def recreateTree(their: from.Tree): to.Tree = their match {
+ case from.ClassDef(mods, name, tparams, impl) =>
+ new ClassDef(importModifiers(mods), importName(name).toTypeName, tparams map importTypeDef, importTemplate(impl))
+ case from.PackageDef(pid, stats) =>
+ new PackageDef(importRefTree(pid), stats map importTree)
+ case from.ModuleDef(mods, name, impl) =>
+ new ModuleDef(importModifiers(mods), importName(name).toTermName, importTemplate(impl))
+ case from.emptyValDef =>
+ emptyValDef
+ case from.pendingSuperCall =>
+ pendingSuperCall
+ case from.ValDef(mods, name, tpt, rhs) =>
+ new ValDef(importModifiers(mods), importName(name).toTermName, importTree(tpt), importTree(rhs))
+ case from.DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ new DefDef(importModifiers(mods), importName(name).toTermName, tparams map importTypeDef, mmap(vparamss)(importValDef), importTree(tpt), importTree(rhs))
+ case from.TypeDef(mods, name, tparams, rhs) =>
+ new TypeDef(importModifiers(mods), importName(name).toTypeName, tparams map importTypeDef, importTree(rhs))
+ case from.LabelDef(name, params, rhs) =>
+ new LabelDef(importName(name).toTermName, params map importIdent, importTree(rhs))
+ case from.Import(expr, selectors) =>
+ new Import(importTree(expr), selectors map importImportSelector)
+ case from.Template(parents, self, body) =>
+ new Template(parents map importTree, importValDef(self), body map importTree)
+ case from.Block(stats, expr) =>
+ new Block(stats map importTree, importTree(expr))
+ case from.CaseDef(pat, guard, body) =>
+ new CaseDef(importTree(pat), importTree(guard), importTree(body))
+ case from.Alternative(trees) =>
+ new Alternative(trees map importTree)
+ case from.Star(elem) =>
+ new Star(importTree(elem))
+ case from.Bind(name, body) =>
+ new Bind(importName(name), importTree(body))
+ case from.UnApply(fun, args) =>
+ new UnApply(importTree(fun), args map importTree)
+ case from.ArrayValue(elemtpt, elems) =>
+ new ArrayValue(importTree(elemtpt), elems map importTree)
+ case from.Function(vparams, body) =>
+ new Function(vparams map importValDef, importTree(body))
+ case from.Assign(lhs, rhs) =>
+ new Assign(importTree(lhs), importTree(rhs))
+ case from.AssignOrNamedArg(lhs, rhs) =>
+ new AssignOrNamedArg(importTree(lhs), importTree(rhs))
+ case from.If(cond, thenp, elsep) =>
+ new If(importTree(cond), importTree(thenp), importTree(elsep))
+ case from.Match(selector, cases) =>
+ new Match(importTree(selector), cases map importCaseDef)
+ case from.Return(expr) =>
+ new Return(importTree(expr))
+ case from.Try(block, catches, finalizer) =>
+ new Try(importTree(block), catches map importCaseDef, importTree(finalizer))
+ case from.Throw(expr) =>
+ new Throw(importTree(expr))
+ case from.New(tpt) =>
+ new New(importTree(tpt))
+ case from.Typed(expr, tpt) =>
+ new Typed(importTree(expr), importTree(tpt))
+ case from.TypeApply(fun, args) =>
+ new TypeApply(importTree(fun), args map importTree)
+ case from.Apply(fun, args) => their match {
+ case _: from.ApplyToImplicitArgs =>
+ new ApplyToImplicitArgs(importTree(fun), args map importTree)
+ case _: from.ApplyImplicitView =>
+ new ApplyImplicitView(importTree(fun), args map importTree)
+ case _ =>
+ new Apply(importTree(fun), args map importTree)
+ }
+ case from.ApplyDynamic(qual, args) =>
+ new ApplyDynamic(importTree(qual), args map importTree)
+ case from.Super(qual, mix) =>
+ new Super(importTree(qual), importName(mix).toTypeName)
+ case from.This(qual) =>
+ new This(importName(qual).toTypeName)
+ case from.Select(qual, name) =>
+ new Select(importTree(qual), importName(name))
+ case from.Ident(name) =>
+ new Ident(importName(name))
+ case from.ReferenceToBoxed(ident) =>
+ new ReferenceToBoxed(importTree(ident) match { case ident: Ident => ident })
+ case from.Literal(constant @ from.Constant(_)) =>
+ new Literal(importConstant(constant))
+ case theirtt @ from.TypeTree() =>
+ val mytt = TypeTree()
+ if (theirtt.original != null) mytt.setOriginal(importTree(theirtt.original))
+ mytt
+ case from.Annotated(annot, arg) =>
+ new Annotated(importTree(annot), importTree(arg))
+ case from.SingletonTypeTree(ref) =>
+ new SingletonTypeTree(importTree(ref))
+ case from.SelectFromTypeTree(qual, name) =>
+ new SelectFromTypeTree(importTree(qual), importName(name).toTypeName)
+ case from.CompoundTypeTree(templ) =>
+ new CompoundTypeTree(importTemplate(templ))
+ case from.AppliedTypeTree(tpt, args) =>
+ new AppliedTypeTree(importTree(tpt), args map importTree)
+ case from.TypeBoundsTree(lo, hi) =>
+ new TypeBoundsTree(importTree(lo), importTree(hi))
+ case from.ExistentialTypeTree(tpt, whereClauses) =>
+ new ExistentialTypeTree(importTree(tpt), whereClauses map importTree)
+ case from.EmptyTree =>
+ EmptyTree
+ case null =>
+ null
+ }
+
+ def importTree(their: from.Tree): Tree = {
+ val my = recreateTree(their)
+ if (my != null) {
+ addFixup(recreatedTreeCompleter(their, my))
+ tryFixup()
+ }
+ my
+ }
+
+ // ============== MISCELLANEOUS ==============
def importAnnotationInfo(ann: from.AnnotationInfo): AnnotationInfo = {
val atp1 = importType(ann.atp)
@@ -301,11 +436,9 @@ trait Importers extends api.Importers { self: SymbolTable =>
NestedAnnotArg(importAnnotationInfo(annInfo))
}
- def importTypeConstraint(constr: from.TypeConstraint): TypeConstraint = {
- val result = new TypeConstraint(constr.loBounds map importType, constr.hiBounds map importType)
- result.inst = importType(constr.inst)
- result
- }
+ // todo. careful import of positions
+ def importPosition(their: from.Position): to.Position =
+ their.asInstanceOf[Position]
// !!! todo: override to cater for PackageScopes
def importScope(decls: from.Scope): Scope =
@@ -313,138 +446,12 @@ trait Importers extends api.Importers { self: SymbolTable =>
def importName(name: from.Name): Name =
if (name.isTypeName) newTypeName(name.toString) else newTermName(name.toString)
- def importTypeName(name: from.TypeName): TypeName = importName(name).toTypeName
def importModifiers(mods: from.Modifiers): Modifiers =
new Modifiers(mods.flags, importName(mods.privateWithin), mods.annotations map importTree)
def importImportSelector(sel: from.ImportSelector): ImportSelector =
new ImportSelector(importName(sel.name), sel.namePos, if (sel.rename != null) importName(sel.rename) else null, sel.renamePos)
-
- def importTree(tree: from.Tree): Tree = {
- val mytree = tree match {
- case from.ClassDef(mods, name, tparams, impl) =>
- new ClassDef(importModifiers(mods), importName(name).toTypeName, tparams map importTypeDef, importTemplate(impl))
- case from.PackageDef(pid, stats) =>
- new PackageDef(importRefTree(pid), stats map importTree)
- case from.ModuleDef(mods, name, impl) =>
- new ModuleDef(importModifiers(mods), importName(name).toTermName, importTemplate(impl))
- case from.emptyValDef =>
- emptyValDef
- case from.pendingSuperCall =>
- pendingSuperCall
- case from.ValDef(mods, name, tpt, rhs) =>
- new ValDef(importModifiers(mods), importName(name).toTermName, importTree(tpt), importTree(rhs))
- case from.DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- new DefDef(importModifiers(mods), importName(name).toTermName, tparams map importTypeDef, mmap(vparamss)(importValDef), importTree(tpt), importTree(rhs))
- case from.TypeDef(mods, name, tparams, rhs) =>
- new TypeDef(importModifiers(mods), importName(name).toTypeName, tparams map importTypeDef, importTree(rhs))
- case from.LabelDef(name, params, rhs) =>
- new LabelDef(importName(name).toTermName, params map importIdent, importTree(rhs))
- case from.Import(expr, selectors) =>
- new Import(importTree(expr), selectors map importImportSelector)
- case from.Template(parents, self, body) =>
- new Template(parents map importTree, importValDef(self), body map importTree)
- case from.Block(stats, expr) =>
- new Block(stats map importTree, importTree(expr))
- case from.CaseDef(pat, guard, body) =>
- new CaseDef(importTree(pat), importTree(guard), importTree(body))
- case from.Alternative(trees) =>
- new Alternative(trees map importTree)
- case from.Star(elem) =>
- new Star(importTree(elem))
- case from.Bind(name, body) =>
- new Bind(importName(name), importTree(body))
- case from.UnApply(fun, args) =>
- new UnApply(importTree(fun), args map importTree)
- case from.ArrayValue(elemtpt ,elems) =>
- new ArrayValue(importTree(elemtpt), elems map importTree)
- case from.Function(vparams, body) =>
- new Function(vparams map importValDef, importTree(body))
- case from.Assign(lhs, rhs) =>
- new Assign(importTree(lhs), importTree(rhs))
- case from.AssignOrNamedArg(lhs, rhs) =>
- new AssignOrNamedArg(importTree(lhs), importTree(rhs))
- case from.If(cond, thenp, elsep) =>
- new If(importTree(cond), importTree(thenp), importTree(elsep))
- case from.Match(selector, cases) =>
- new Match(importTree(selector), cases map importCaseDef)
- case from.Return(expr) =>
- new Return(importTree(expr))
- case from.Try(block, catches, finalizer) =>
- new Try(importTree(block), catches map importCaseDef, importTree(finalizer))
- case from.Throw(expr) =>
- new Throw(importTree(expr))
- case from.New(tpt) =>
- new New(importTree(tpt))
- case from.Typed(expr, tpt) =>
- new Typed(importTree(expr), importTree(tpt))
- case from.TypeApply(fun, args) =>
- new TypeApply(importTree(fun), args map importTree)
- case from.Apply(fun, args) => tree match {
- case _: from.ApplyToImplicitArgs =>
- new ApplyToImplicitArgs(importTree(fun), args map importTree)
- case _: from.ApplyImplicitView =>
- new ApplyImplicitView(importTree(fun), args map importTree)
- case _ =>
- new Apply(importTree(fun), args map importTree)
- }
- case from.ApplyDynamic(qual, args) =>
- new ApplyDynamic(importTree(qual), args map importTree)
- case from.Super(qual, mix) =>
- new Super(importTree(qual), importTypeName(mix))
- case from.This(qual) =>
- new This(importName(qual).toTypeName)
- case from.Select(qual, name) =>
- new Select(importTree(qual), importName(name))
- case from.Ident(name) =>
- new Ident(importName(name))
- case from.ReferenceToBoxed(ident) =>
- new ReferenceToBoxed(importTree(ident) match { case ident: Ident => ident })
- case from.Literal(constant @ from.Constant(_)) =>
- new Literal(importConstant(constant))
- case from.TypeTree() =>
- new TypeTree()
- case from.Annotated(annot, arg) =>
- new Annotated(importTree(annot), importTree(arg))
- case from.SingletonTypeTree(ref) =>
- new SingletonTypeTree(importTree(ref))
- case from.SelectFromTypeTree(qual, name) =>
- new SelectFromTypeTree(importTree(qual), importName(name).toTypeName)
- case from.CompoundTypeTree(templ) =>
- new CompoundTypeTree(importTemplate(templ))
- case from.AppliedTypeTree(tpt, args) =>
- new AppliedTypeTree(importTree(tpt), args map importTree)
- case from.TypeBoundsTree(lo, hi) =>
- new TypeBoundsTree(importTree(lo), importTree(hi))
- case from.ExistentialTypeTree(tpt, whereClauses) =>
- new ExistentialTypeTree(importTree(tpt), whereClauses map importTree)
- case from.EmptyTree =>
- EmptyTree
- case null =>
- null
- }
- addFixup({
- if (mytree != null) {
- val mysym = if (tree.hasSymbolField) importSymbol(tree.symbol) else NoSymbol
- val mytpe = importType(tree.tpe)
-
- mytree match {
- case mytt: TypeTree =>
- val tt = tree.asInstanceOf[from.TypeTree]
- if (mytree.hasSymbolField) mytt.symbol = mysym
- if (tt.wasEmpty) mytt.defineType(mytpe) else mytt.setType(mytpe)
- if (tt.original != null) mytt.setOriginal(importTree(tt.original))
- case _ =>
- if (mytree.hasSymbolField) mytree.symbol = importSymbol(tree.symbol)
- mytree setType importType(tree.tpe)
- }
- }
- })
- tryFixup()
- mytree
- }
-
def importValDef(tree: from.ValDef): ValDef = importTree(tree).asInstanceOf[ValDef]
def importTypeDef(tree: from.TypeDef): TypeDef = importTree(tree).asInstanceOf[TypeDef]
def importTemplate(tree: from.Template): Template = importTree(tree).asInstanceOf[Template]
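Aside (not part of the patch): the rewritten importer splits tree recreation from the symbol/type fixups queued by addFixup and flushed by tryFixup. A minimal sketch of how an importer is obtained and driven from the public reflection API, assuming the 2.10-era entry point Universe.mkImporter:

    import scala.reflect.runtime.{ universe => ru }

    // Import a tree from some other universe `cu` (e.g. a macro context's universe)
    // into the runtime universe; mkImporter returns an Importer whose `from` is `cu`.
    def importIntoRuntime(cu: scala.reflect.api.Universe)(tree: cu.Tree): ru.Tree = {
      val importer = ru.mkImporter(cu) // Importer { val from: cu.type }
      importer.importTree(tree)
    }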
diff --git a/src/reflect/scala/reflect/internal/InfoTransformers.scala b/src/reflect/scala/reflect/internal/InfoTransformers.scala
index 4e84a29fd0..3814259e22 100644
--- a/src/reflect/scala/reflect/internal/InfoTransformers.scala
+++ b/src/reflect/scala/reflect/internal/InfoTransformers.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
trait InfoTransformers {
diff --git a/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala b/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala
new file mode 100644
index 0000000000..fb1cdb34e1
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala
@@ -0,0 +1,47 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+package scala
+package reflect
+package internal
+
+import scala.language.implicitConversions
+import java.lang.{ Class => jClass }
+import java.lang.annotation.{ Annotation => jAnnotation }
+import java.lang.reflect.{
+ Member => jMember, Constructor => jConstructor, Method => jMethod,
+ AnnotatedElement => jAnnotatedElement, Type => jType,
+ TypeVariable => jTypeVariable
+}
+
+/** This class tries to abstract over some of the duplication
+ * in java.lang.reflect.{ Method, Constructor }.
+ */
+class JMethodOrConstructor(val member: jMember with jAnnotatedElement) {
+ def isVarArgs: Boolean = member match {
+ case m: jMethod => m.isVarArgs
+ case m: jConstructor[_] => m.isVarArgs
+ }
+ def typeParams: Array[_ <: jTypeVariable[_]] = member match {
+ case m: jMethod => m.getTypeParameters
+ case m: jConstructor[_] => m.getTypeParameters
+ }
+ def paramTypes: Array[jType] = member match {
+ case m: jMethod => m.getGenericParameterTypes
+ case m: jConstructor[_] => m.getGenericParameterTypes
+ }
+ def paramAnnotations: Array[Array[jAnnotation]] = member match {
+ case m: jMethod => m.getParameterAnnotations
+ case m: jConstructor[_] => m.getParameterAnnotations
+ }
+ def resultType: jType = member match {
+ case m: jMethod => m.getGenericReturnType
+ case m: jConstructor[_] => classOf[Unit]
+ }
+}
+
+object JMethodOrConstructor {
+ implicit def liftMethodToJmoc(m: jMethod): JMethodOrConstructor = new JMethodOrConstructor(m)
+ implicit def liftConstructorToJmoc(m: jConstructor[_]): JMethodOrConstructor = new JMethodOrConstructor(m)
+}
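Aside: a standalone illustration of the duplication this class papers over, using only JDK reflection (java.lang.reflect.Method and Constructor expose the same accessors but share no useful common supertype):

    import java.lang.reflect.{ Constructor => jConstructor, Method => jMethod }

    object JmocDemo extends App {
      // Without an abstraction like JMethodOrConstructor, every accessor needs this match.
      def genericParamTypes(m: AnyRef) = m match {
        case m: jMethod         => m.getGenericParameterTypes.toList
        case c: jConstructor[_] => c.getGenericParameterTypes.toList
        case _                  => Nil
      }
      println(genericParamTypes(classOf[String].getMethod("substring", classOf[Int])))
      println(genericParamTypes(classOf[String].getConstructor(classOf[Array[Char]])))
    }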
diff --git a/src/reflect/scala/reflect/internal/JavaAccFlags.scala b/src/reflect/scala/reflect/internal/JavaAccFlags.scala
new file mode 100644
index 0000000000..0a33b8cf0d
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/JavaAccFlags.scala
@@ -0,0 +1,84 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+package scala
+package reflect
+package internal
+
+import java.lang.{ Class => jClass }
+import java.lang.reflect.{ Member => jMember, Constructor => jConstructor, Field => jField, Method => jMethod }
+import JavaAccFlags._
+import ClassfileConstants._
+
+/** A value class which encodes the access_flags (JVMS 4.1)
+ * for a field, method, or class. The low 16 bits are the same
+ * as those returned by java.lang.reflect.Member#getModifiers
+ * and found in the bytecode.
+ *
+ * The high bits encode whether the access flags are directly
+ * associated with a class, constructor, field, or method.
+ */
+final class JavaAccFlags private (val coded: Int) extends AnyVal {
+ private def has(mask: Int) = (flags & mask) != 0
+ private def flagCarrierId = coded >>> 16
+ private def flags = coded & 0xFFFF
+
+ def isAbstract = has(JAVA_ACC_ABSTRACT)
+ def isAnnotation = has(JAVA_ACC_ANNOTATION)
+ def isBridge = has(JAVA_ACC_BRIDGE)
+ def isEnum = has(JAVA_ACC_ENUM)
+ def isFinal = has(JAVA_ACC_FINAL)
+ def isInterface = has(JAVA_ACC_INTERFACE)
+ def isNative = has(JAVA_ACC_NATIVE)
+ def isPrivate = has(JAVA_ACC_PRIVATE)
+ def isProtected = has(JAVA_ACC_PROTECTED)
+ def isPublic = has(JAVA_ACC_PUBLIC)
+ def isStatic = has(JAVA_ACC_STATIC)
+ def isStrictFp = has(JAVA_ACC_STRICT)
+ def isSuper = has(JAVA_ACC_SUPER)
+ def isSynchronized = has(JAVA_ACC_SYNCHRONIZED)
+ def isSynthetic = has(JAVA_ACC_SYNTHETIC)
+ def isTransient = has(JAVA_ACC_TRANSIENT)
+ def isVarargs = has(JAVA_ACC_VARARGS)
+ def isVolatile = has(JAVA_ACC_VOLATILE)
+
+ /** Do these flags describe a member which has either protected or package access?
+ * Such access in java is encoded in scala as protected[foo] or private[foo], where
+ * `foo` is the defining package.
+ */
+ def hasPackageAccessBoundary = !has(JAVA_ACC_PRIVATE | JAVA_ACC_PUBLIC) // equivalently, allows protected or package level access
+ def isPackageProtected = !has(JAVA_ACC_PRIVATE | JAVA_ACC_PROTECTED | JAVA_ACC_PUBLIC)
+
+ def toJavaFlags: Int = flags
+ def toScalaFlags: Long = flagCarrierId match {
+ case Method | Constructor => FlagTranslation methodFlags flags
+ case Class => FlagTranslation classFlags flags
+ case _ => FlagTranslation fieldFlags flags
+ }
+}
+
+object JavaAccFlags {
+ private val Unknown = 0
+ private val Class = 1
+ private val Field = 2
+ private val Method = 3
+ private val Constructor = 4
+
+ private def create(flagCarrier: Int, access_flags: Int): JavaAccFlags =
+ new JavaAccFlags((flagCarrier << 16) | (access_flags & 0xFFFF))
+
+ def classFlags(flags: Int): JavaAccFlags = create(Class, flags)
+ def methodFlags(flags: Int): JavaAccFlags = create(Method, flags)
+ def fieldFlags(flags: Int): JavaAccFlags = create(Field, flags)
+ def constructorFlags(flags: Int): JavaAccFlags = create(Constructor, flags)
+
+ def apply(access_flags: Int): JavaAccFlags = create(Unknown, access_flags)
+ def apply(clazz: jClass[_]): JavaAccFlags = classFlags(clazz.getModifiers)
+ def apply(member: jMember): JavaAccFlags = member match {
+ case x: jConstructor[_] => constructorFlags(x.getModifiers)
+ case x: jMethod => methodFlags(x.getModifiers)
+ case x: jField => fieldFlags(x.getModifiers)
+ case _ => apply(member.getModifiers)
+ }
+}
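Aside: the low 16 bits wrapped here are the same ones java.lang.reflect.Modifier decodes, which makes the value class easy to sanity-check (illustrative sketch, JDK only):

    import java.lang.reflect.Modifier

    object AccFlagsDemo extends App {
      val mods = classOf[String].getModifiers
      println(Modifier.isPublic(mods)) // true  -- JavaAccFlags(classOf[String]).isPublic agrees
      println(Modifier.isFinal(mods))  // true  -- as does .isFinal
      // hasPackageAccessBoundary corresponds to: neither ACC_PRIVATE nor ACC_PUBLIC set,
      // i.e. Java default (package) or protected access.
    }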
diff --git a/src/reflect/scala/reflect/internal/Kinds.scala b/src/reflect/scala/reflect/internal/Kinds.scala
index 3c49aef05a..46a95c7d26 100644
--- a/src/reflect/scala/reflect/internal/Kinds.scala
+++ b/src/reflect/scala/reflect/internal/Kinds.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import scala.collection.{ mutable, immutable }
@@ -229,4 +230,180 @@ trait Kinds {
}
}
}
+
+ /**
+ * The data structure describing the kind of a given type.
+ *
+ * Proper types are represented using ProperTypeKind.
+ *
+ * Type constructors are represented using TypeConKind.
+ */
+ abstract class Kind {
+ import Kind.StringState
+ def description: String
+ def order: Int
+ def bounds: TypeBounds
+
+ /** Scala syntax notation of this kind.
+ * Proper types are expressed as A.
+ * Type constructors are expressed as F[k1 >: lo <: hi, k2, ...] where k1, k2, ... are parameter kinds.
+ * If bounds exist at any level, the type variable names are preserved. Otherwise,
+ * prescribed letters are used for each level: A, F, X, Y, Z.
+ */
+ def scalaNotation: String
+
+ /** Kind notation used in http://adriaanm.github.com/files/higher.pdf.
+ * Proper types are expressed as *.
+ * Type constructors are expressed as * -> *(lo, hi) -(+)-> *.
+ */
+ def starNotation: String
+
+ /** Contains bounds either as part of itself or its arguments.
+ */
+ def hasBounds: Boolean = !bounds.isEmptyBounds
+
+ private[internal] def buildState(sym: Symbol, v: Variance)(s: StringState): StringState
+ }
+ object Kind {
+ private[internal] sealed trait ScalaNotation
+ private[internal] sealed case class Head(order: Int, n: Option[Int], alias: Option[String]) extends ScalaNotation {
+ override def toString: String = {
+ alias getOrElse {
+ typeAlias(order) + n.map(_.toString).getOrElse("")
+ }
+ }
+ private def typeAlias(x: Int): String =
+ x match {
+ case 0 => "A"
+ case 1 => "F"
+ case 2 => "X"
+ case 3 => "Y"
+ case 4 => "Z"
+ case n if n < 12 => ('O'.toInt - 5 + n).toChar.toString
+ case _ => "V"
+ }
+ }
+ private[internal] sealed case class Text(value: String) extends ScalaNotation {
+ override def toString: String = value
+ }
+ private[internal] case class StringState(tokens: Seq[ScalaNotation]) {
+ override def toString: String = tokens.mkString
+ def append(value: String): StringState = StringState(tokens :+ Text(value))
+ def appendHead(order: Int, sym: Symbol): StringState = {
+ val n = countByOrder(order) + 1
+ val alias = if (sym eq NoSymbol) None
+ else Some(sym.nameString)
+ StringState(tokens :+ Head(order, Some(n), alias))
+ }
+ def countByOrder(o: Int): Int = tokens count {
+ case Head(`o`, _, _) => true
+ case t => false
+ }
+ // Replace Head(o, Some(1), a) with Head(o, None, a) if countByOrder(o) <= 1, so F1[A] becomes F[A]
+ def removeOnes: StringState = {
+ val maxOrder = (tokens map {
+ case Head(o, _, _) => o
+ case _ => 0
+ }).max
+ StringState((tokens /: (0 to maxOrder)) { (ts: Seq[ScalaNotation], o: Int) =>
+ if (countByOrder(o) <= 1)
+ ts map {
+ case Head(`o`, _, a) => Head(o, None, a)
+ case t => t
+ }
+ else ts
+ })
+ }
+ // Replace Head(o, n, Some(_)) with Head(o, n, None), so F[F] becomes F[A].
+ def removeAlias: StringState = {
+ StringState(tokens map {
+ case Head(o, n, Some(_)) => Head(o, n, None)
+ case t => t
+ })
+ }
+ }
+ private[internal] object StringState {
+ def empty: StringState = StringState(Seq())
+ }
+ }
+ class ProperTypeKind(val bounds: TypeBounds) extends Kind {
+ import Kind.StringState
+ val description: String = "This is a proper type."
+ val order = 0
+ private[internal] def buildState(sym: Symbol, v: Variance)(s: StringState): StringState = {
+ s.append(v.symbolicString).appendHead(order, sym).append(bounds.scalaNotation(_.toString))
+ }
+ def scalaNotation: String = Kind.Head(order, None, None) + bounds.scalaNotation(_.toString)
+ def starNotation: String = "*" + bounds.starNotation(_.toString)
+ }
+ object ProperTypeKind {
+ def apply: ProperTypeKind = this(TypeBounds.empty)
+ def apply(bounds: TypeBounds): ProperTypeKind = new ProperTypeKind(bounds)
+ def unapply(ptk: ProperTypeKind): Some[TypeBounds] = Some(ptk.bounds)
+ }
+
+ class TypeConKind(val bounds: TypeBounds, val args: Seq[TypeConKind.Argument]) extends Kind {
+ import Kind.StringState
+ val order = (args map {_.kind.order} max) + 1
+ def description: String =
+ if (order == 1) "This is a type constructor: a 1st-order-kinded type."
+ else "This is a type constructor that takes type constructor(s): a higher-kinded type."
+ override def hasBounds: Boolean = super.hasBounds || args.exists(_.kind.hasBounds)
+ def scalaNotation: String = {
+ val s = buildState(NoSymbol, Variance.Invariant)(StringState.empty).removeOnes
+ val s2 = if (hasBounds) s
+ else s.removeAlias
+ s2.toString
+ }
+ private[internal] def buildState(sym: Symbol, v: Variance)(s0: StringState): StringState = {
+ var s: StringState = s0
+ s = s.append(v.symbolicString).appendHead(order, sym).append("[")
+ args.zipWithIndex foreach { case (arg, i) =>
+ s = arg.kind.buildState(arg.sym, arg.variance)(s)
+ if (i != args.size - 1) {
+ s = s.append(",")
+ }
+ }
+ s = s.append("]").append(bounds.scalaNotation(_.toString))
+ s
+ }
+ def starNotation: String = {
+ import Variance._
+ (args map { arg =>
+ (if (arg.kind.order == 0) arg.kind.starNotation
+ else "(" + arg.kind.starNotation + ")") +
+ (if (arg.variance == Invariant) " -> "
+ else " -(" + arg.variance.symbolicString + ")-> ")
+ }).mkString + "*" + bounds.starNotation(_.toString)
+ }
+ }
+ object TypeConKind {
+ def apply(args: Seq[TypeConKind.Argument]): TypeConKind = this(TypeBounds.empty, args)
+ def apply(bounds: TypeBounds, args: Seq[TypeConKind.Argument]): TypeConKind = new TypeConKind(bounds, args)
+ def unapply(tck: TypeConKind): Some[(TypeBounds, Seq[TypeConKind.Argument])] = Some(tck.bounds, tck.args)
+ case class Argument(variance: Variance, kind: Kind)(val sym: Symbol) {}
+ }
+
+ /**
+ * Starting from a Symbol (sym) or a Type (tpe), infer the kind that classifies it (sym.tpeHK/tpe).
+ */
+ object inferKind {
+ import TypeConKind.Argument
+
+ abstract class InferKind {
+ protected def infer(tpe: Type, owner: Symbol, topLevel: Boolean): Kind
+ protected def infer(sym: Symbol, topLevel: Boolean): Kind = infer(sym.tpeHK, sym.owner, topLevel)
+ def apply(sym: Symbol): Kind = infer(sym, true)
+ def apply(tpe: Type, owner: Symbol): Kind = infer(tpe, owner, true)
+ }
+
+ def apply(pre: Type): InferKind = new InferKind {
+ protected def infer(tpe: Type, owner: Symbol, topLevel: Boolean): Kind = {
+ val bounds = if (topLevel) TypeBounds.empty
+ else tpe.asSeenFrom(pre, owner).bounds
+ if(!tpe.isHigherKinded) ProperTypeKind(bounds)
+ else TypeConKind(bounds, tpe.typeParams map { p => Argument(p.variance, infer(p, false))(p) })
+ }
+ }
+ }
}
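Aside: a few familiar types rendered in the two notations described above (illustrative; the letter choices A/F/X come from the typeAlias table in Kind.Head):

    // Int      proper type             scala notation: A          star notation: *
    // List     1st-order constructor   scala notation: F[A]       star notation: * -> *
    // Either   1st-order, two params   scala notation: F[A1,A2]   star notation: * -> * -> *
    // Functor  higher-kinded           scala notation: X[F[A]]    star notation: (* -> *) -> *
    trait Functor[F[_]] // F has kind * -> *, so Functor itself has kind (* -> *) -> *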
diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala
index 81d7619f22..bf38c3bf1e 100644
--- a/src/reflect/scala/reflect/internal/Mirrors.scala
+++ b/src/reflect/scala/reflect/internal/Mirrors.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import Flags._
@@ -35,7 +36,7 @@ trait Mirrors extends api.Mirrors {
else definitions.findNamedMember(segs.tail, RootClass.info member segs.head)
}
- /** Todo: organize similar to mkStatic in reflect.Base */
+ /** Todo: organize similar to mkStatic in scala.reflect.Base */
private def getModuleOrClass(path: Name, len: Int): Symbol = {
val point = path lastPos('.', len - 1)
val owner =
diff --git a/src/reflect/scala/reflect/internal/MissingRequirementError.scala b/src/reflect/scala/reflect/internal/MissingRequirementError.scala
index 48203caa83..66dbf535d7 100644
--- a/src/reflect/scala/reflect/internal/MissingRequirementError.scala
+++ b/src/reflect/scala/reflect/internal/MissingRequirementError.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
class MissingRequirementError private (msg: String) extends FatalError(msg) {
diff --git a/src/reflect/scala/reflect/internal/Mode.scala b/src/reflect/scala/reflect/internal/Mode.scala
index 850e3b5669..027e3a340a 100644
--- a/src/reflect/scala/reflect/internal/Mode.scala
+++ b/src/reflect/scala/reflect/internal/Mode.scala
@@ -3,9 +3,12 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
+import scala.language.implicitConversions
+
object Mode {
private implicit def liftIntBitsToMode(bits: Int): Mode = apply(bits)
def apply(bits: Int): Mode = new Mode(bits)
@@ -45,35 +48,10 @@ object Mode {
*/
final val TAPPmode: Mode = 0x080
- /** SUPERCONSTRmode is set for the super
- * in a superclass constructor call super.<init>.
- */
- final val SUPERCONSTRmode: Mode = 0x100
-
- /** SNDTRYmode indicates that an application is typed for the 2nd time.
- * In that case functions may no longer be coerced with implicit views.
- */
- final val SNDTRYmode: Mode = 0x200
-
/** LHSmode is set for the left-hand side of an assignment.
*/
final val LHSmode: Mode = 0x400
- /** STARmode is set when star patterns are allowed.
- * (This was formerly called REGPATmode.)
- */
- final val STARmode: Mode = 0x1000
-
- /** ALTmode is set when we are under a pattern alternative.
- */
- final val ALTmode: Mode = 0x2000
-
- /** HKmode is set when we are typing a higher-kinded type.
- * adapt should then check kind-arity based on the prototypical type's
- * kind arity. Type arguments should not be inferred.
- */
- final val HKmode: Mode = 0x4000 // @M: could also use POLYmode | TAPPmode
-
/** BYVALmode is set when we are typing an expression
* that occurs in a by-value position. An expression e1 is in by-value
* position within expression e2 iff it will be reduced to a value at that
@@ -87,15 +65,15 @@ object Mode {
*/
final val TYPEPATmode: Mode = 0x10000
- /** RETmode is set when we are typing a return expression.
- */
- final val RETmode: Mode = 0x20000
-
- final private val StickyModes: Mode = EXPRmode | PATTERNmode | TYPEmode | ALTmode
+ private val StickyModes: Mode = EXPRmode | PATTERNmode | TYPEmode
+ private val StickyModesForFun: Mode = StickyModes | SCCmode
+ final val MonoQualifierModes: Mode = EXPRmode | QUALmode
+ final val PolyQualifierModes: Mode = EXPRmode | QUALmode | POLYmode
+ final val OperatorModes: Mode = EXPRmode | POLYmode | TAPPmode | FUNmode
/** Translates a mask of mode flags into something readable.
*/
- private val modeNameMap = Map[Int, String](
+ private val modeNameMap = Map[Int, String]( // TODO why duplicate the bitmasks here, rather than just referring to this.EXPRmode etc?
(1 << 0) -> "EXPRmode",
(1 << 1) -> "PATTERNmode",
(1 << 2) -> "TYPEmode",
@@ -104,13 +82,13 @@ object Mode {
(1 << 5) -> "POLYmode",
(1 << 6) -> "QUALmode",
(1 << 7) -> "TAPPmode",
- (1 << 8) -> "SUPERCONSTRmode",
- (1 << 9) -> "SNDTRYmode",
+ (1 << 8) -> "<>", // formerly SUPERCONSTRmode
+ (1 << 9) -> "<>", // formerly SNDTRYmode
(1 << 10) -> "LHSmode",
- (1 << 11) -> "<DOES NOT EXIST mode>",
- (1 << 12) -> "STARmode",
- (1 << 13) -> "ALTmode",
- (1 << 14) -> "HKmode",
+ (1 << 11) -> "<>",
+ (1 << 12) -> "<>", // formerly STARmode
+ (1 << 13) -> "<>", // formerly ALTmode
+ (1 << 14) -> "<>", // formerly HKmode
(1 << 15) -> "BYVALmode",
(1 << 16) -> "TYPEPATmode"
).map({ case (k, v) => Mode(k) -> v })
@@ -118,32 +96,47 @@ object Mode {
import Mode._
final class Mode private (val bits: Int) extends AnyVal {
- def &(other: Mode): Mode = new Mode(bits & other.bits)
- def |(other: Mode): Mode = new Mode(bits | other.bits)
+ def &(other: Mode): Mode = new Mode(bits & other.bits)
+ def |(other: Mode): Mode = new Mode(bits | other.bits)
def &~(other: Mode): Mode = new Mode(bits & ~(other.bits))
- def onlySticky = this & Mode.StickyModes
- def forFunMode = this & (Mode.StickyModes | SCCmode) | FUNmode | POLYmode | BYVALmode
- def forTypeMode =
- if (inAny(PATTERNmode | TYPEPATmode)) TYPEmode | TYPEPATmode
- else TYPEmode
-
- def inAll(required: Mode) = (this & required) == required
- def inAny(required: Mode) = (this & required) !=NOmode
- def inNone(prohibited: Mode) = (this & prohibited) == NOmode
- def inHKMode = inAll(HKmode)
- def inFunMode = inAll(FUNmode)
- def inPolyMode = inAll(POLYmode)
- def inPatternMode = inAll(PATTERNmode)
- def inExprMode = inAll(EXPRmode)
- def inByValMode = inAll(BYVALmode)
- def inRetMode = inAll(RETmode)
-
- def inPatternNotFunMode = inPatternMode && !inFunMode
- def inExprModeOr(others: Mode) = inAny(EXPRmode | others)
- def inExprModeButNot(prohibited: Mode) = inAll(EXPRmode) && inNone(prohibited)
+ def onlyTypePat = this & TYPEPATmode
+ def onlySticky = this & Mode.StickyModes
+ def forFunMode = this & Mode.StickyModesForFun | FUNmode | POLYmode | BYVALmode
+ def forTypeMode = if (typingPatternOrTypePat) TYPEmode | TYPEPATmode else TYPEmode
+
+ def inAll(required: Mode) = (this & required) == required
+ def inAny(required: Mode) = (this & required) != NOmode
+ def inNone(prohibited: Mode) = (this & prohibited) == NOmode
+
+ /** True if this mode matches every mode in the 'all' Mode,
+ * and no modes in the 'none' Mode.
+ */
+ def in(all: Mode = NOmode, none: Mode = NOmode) = inAll(all) && inNone(none)
+
+ def inByValMode = inAll(BYVALmode)
+ def inExprMode = inAll(EXPRmode)
+ def inFunMode = inAll(FUNmode)
+ def inPatternMode = inAll(PATTERNmode)
+ def inPolyMode = inAll(POLYmode)
+ def inQualMode = inAll(QUALmode)
+ def inSccMode = inAll(SCCmode)
+ def inTappMode = inAll(TAPPmode)
+ def inTypeMode = inAll(TYPEmode)
+
+ def typingExprByValue = inAll(EXPRmode | BYVALmode)
+ def typingExprFun = inAll(EXPRmode | FUNmode)
+ def typingExprNotFun = in(all = EXPRmode, none = FUNmode)
+ def typingExprNotFunNotLhs = in(all = EXPRmode, none = FUNmode | LHSmode)
+ def typingExprNotLhs = in(all = EXPRmode, none = LHSmode)
+ def typingExprNotValue = in(all = EXPRmode, none = BYVALmode)
+ def typingMonoExprByValue = in(all = EXPRmode | BYVALmode, none = POLYmode)
+ def typingConstructorPattern = inAll(PATTERNmode | FUNmode)
+ def typingPatternNotConstructor = in(all = PATTERNmode, none = FUNmode)
+ def typingPatternOrTypePat = inAny(PATTERNmode | TYPEPATmode)
+ def typingTypeByValue = inAll(TYPEmode | BYVALmode)
override def toString =
- if (bits == 0) "NOmode"
- else (modeNameMap filterKeys inAll).values.toList.sorted mkString " "
+ if (this == NOmode) "NOmode"
+ else (modeNameMap filterKeys inAll).values.toList.sorted mkString "-"
}
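Aside: the new in(all, none) combinator is a thin wrapper over two bit tests. A minimal standalone sketch (bit positions other than EXPRmode/LHSmode are assumptions, not taken from this diff):

    object ModeSketch extends App {
      val EXPRmode = 1 << 0   // confirmed by the name map above
      val FUNmode  = 1 << 4   // assumed position, for illustration only
      val LHSmode  = 1 << 10  // 0x400, as above

      def inAll(mode: Int, required: Int)    = (mode & required) == required
      def inNone(mode: Int, prohibited: Int) = (mode & prohibited) == 0
      def in(mode: Int, all: Int, none: Int) = inAll(mode, all) && inNone(mode, none)

      val m = EXPRmode | LHSmode
      println(in(m, all = EXPRmode, none = FUNmode))           // true:  typingExprNotFun
      println(in(m, all = EXPRmode, none = FUNmode | LHSmode)) // false: typingExprNotFunNotLhs
    }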
diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala
index b8141d25f5..ed5b92565d 100644
--- a/src/reflect/scala/reflect/internal/Names.scala
+++ b/src/reflect/scala/reflect/internal/Names.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import scala.io.Codec
diff --git a/src/reflect/scala/reflect/internal/Phase.scala b/src/reflect/scala/reflect/internal/Phase.scala
index c0f4232724..450c90ed56 100644
--- a/src/reflect/scala/reflect/internal/Phase.scala
+++ b/src/reflect/scala/reflect/internal/Phase.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
abstract class Phase(val prev: Phase) {
diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala
index 3cf4f4a1df..eef8159ad4 100644
--- a/src/reflect/scala/reflect/internal/Positions.scala
+++ b/src/reflect/scala/reflect/internal/Positions.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
import util._
diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala
index 5b80889225..1603029340 100644
--- a/src/reflect/scala/reflect/internal/Printers.scala
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -5,7 +5,8 @@
// todo. we need to unify this prettyprinter with NodePrinters
-package scala.reflect
+package scala
+package reflect
package internal
import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
@@ -421,13 +422,20 @@ trait Printers extends api.Printers { self: SymbolTable =>
print(tp); printRow(args, "[", ", ", "]")
case TypeBoundsTree(lo, hi) =>
- printOpt(" >: ", lo); printOpt(" <: ", hi)
+ // Avoid printing noisy empty typebounds everywhere
+ // Untyped empty bounds are not printed by printOpt,
+ // but after they are typed we have to exclude Nothing/Any.
+ if ((lo.tpe eq null) || !(lo.tpe =:= definitions.NothingTpe))
+ printOpt(" >: ", lo)
+
+ if ((hi.tpe eq null) || !(hi.tpe =:= definitions.AnyTpe))
+ printOpt(" <: ", hi)
case ExistentialTypeTree(tpt, whereClauses) =>
print(tpt)
printColumn(whereClauses, " forSome { ", ";", "}")
-// SelectFromArray is no longer visible in reflect.internal.
+// SelectFromArray is no longer visible in scala.reflect.internal.
// eliminated until we figure out what we will do with both Printers and
// SelectFromArray.
// case SelectFromArray(qualifier, name, _) =>
diff --git a/src/reflect/scala/reflect/internal/PrivateWithin.scala b/src/reflect/scala/reflect/internal/PrivateWithin.scala
index 9b99b94b41..996f9c13bb 100644
--- a/src/reflect/scala/reflect/internal/PrivateWithin.scala
+++ b/src/reflect/scala/reflect/internal/PrivateWithin.scala
@@ -1,23 +1,27 @@
-package scala.reflect
+package scala
+package reflect
package internal
-import ClassfileConstants._
+import java.lang.{ Class => jClass }
+import java.lang.reflect.{ Member => jMember }
trait PrivateWithin {
self: SymbolTable =>
- def importPrivateWithinFromJavaFlags(sym: Symbol, jflags: Int): Symbol = {
- if ((jflags & (JAVA_ACC_PRIVATE | JAVA_ACC_PROTECTED | JAVA_ACC_PUBLIC)) == 0)
- // See ticket #1687 for an example of when topLevelClass is NoSymbol: it
- // apparently occurs when processing v45.3 bytecode.
- if (sym.enclosingTopLevelClass != NoSymbol)
- sym.privateWithin = sym.enclosingTopLevelClass.owner
-
- // protected in java means package protected. #3946
- if ((jflags & JAVA_ACC_PROTECTED) != 0)
- if (sym.enclosingTopLevelClass != NoSymbol)
- sym.privateWithin = sym.enclosingTopLevelClass.owner
-
- sym
+ def propagatePackageBoundary(c: jClass[_], syms: Symbol*): Unit =
+ propagatePackageBoundary(JavaAccFlags(c), syms: _*)
+ def propagatePackageBoundary(m: jMember, syms: Symbol*): Unit =
+ propagatePackageBoundary(JavaAccFlags(m), syms: _*)
+ def propagatePackageBoundary(jflags: JavaAccFlags, syms: Symbol*) {
+ if (jflags.hasPackageAccessBoundary)
+ syms foreach setPackageAccessBoundary
}
-} \ No newline at end of file
+
+ // protected in java means package protected. #3946
+ // See ticket #1687 for an example of when the enclosing top level class is NoSymbol;
+ // it apparently occurs when processing v45.3 bytecode.
+ def setPackageAccessBoundary(sym: Symbol): Symbol = (
+ if (sym.enclosingTopLevelClass eq NoSymbol) sym
+ else sym setPrivateWithin sym.enclosingTopLevelClass.owner
+ )
+}
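Aside: a "package access boundary" means the member is neither private nor public, i.e. Java default (package) or protected access, which Scala models as private[pkg]/protected[pkg]. A JDK-only sketch of the same test:

    import java.lang.reflect.Modifier

    object BoundaryDemo extends App {
      // Mirrors JavaAccFlags.hasPackageAccessBoundary, but on plain modifier ints.
      def hasPackageAccessBoundary(mods: Int) =
        !Modifier.isPrivate(mods) && !Modifier.isPublic(mods)

      // String.value is private, so no boundary; a package-private or protected
      // member of one of your own classes would print true.
      println(hasPackageAccessBoundary(classOf[String].getDeclaredField("value").getModifiers)) // false
    }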
diff --git a/src/reflect/scala/reflect/internal/Required.scala b/src/reflect/scala/reflect/internal/Required.scala
index 93383f5376..14db252a16 100644
--- a/src/reflect/scala/reflect/internal/Required.scala
+++ b/src/reflect/scala/reflect/internal/Required.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
import settings.MutableSettings
diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala
index 850c497d4b..8d20c8e546 100644
--- a/src/reflect/scala/reflect/internal/Scopes.scala
+++ b/src/reflect/scala/reflect/internal/Scopes.scala
@@ -3,9 +3,12 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
+import scala.annotation.tailrec
+
trait Scopes extends api.Scopes { self: SymbolTable =>
/** An ADT to represent the results of symbol name lookups.
@@ -65,6 +68,11 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
/** a cache for all elements, to be used by symbol iterator.
*/
private var elemsCache: List[Symbol] = null
+ private var cachedSize = -1
+ private def flushElemsCache() {
+ elemsCache = null
+ cachedSize = -1
+ }
/** size and mask of hash tables
* todo: make hashtables grow?
@@ -86,6 +94,12 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
/** the number of entries in this scope */
override def size: Int = {
+ if (cachedSize < 0)
+ cachedSize = directSize
+
+ cachedSize
+ }
+ private def directSize: Int = {
var s = 0
var e = elems
while (e ne null) {
@@ -98,7 +112,7 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
/** enter a scope entry
*/
protected def enterEntry(e: ScopeEntry) {
- elemsCache = null
+ flushElemsCache()
if (hashtable ne null)
enterInHash(e)
else if (size >= MIN_HASH)
@@ -192,7 +206,7 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
e1.tail = e.tail
}
}
- elemsCache = null
+ flushElemsCache()
}
/** remove symbol */
@@ -304,16 +318,43 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
e
}
+ /** TODO - we can test this more efficiently than checking isSubScope
+ * in both directions. However the size test might be enough to quickly
+ * rule out most failures.
+ */
+ def isSameScope(other: Scope) = (
+ (size == other.size) // optimization - size is cached
+ && (this isSubScope other)
+ && (other isSubScope this)
+ )
+
+ def isSubScope(other: Scope) = {
+ def scopeContainsSym(sym: Symbol): Boolean = {
+ @tailrec def entryContainsSym(e: ScopeEntry): Boolean = e match {
+ case null => false
+ case _ =>
+ val comparableInfo = sym.info.substThis(sym.owner, e.sym.owner)
+ (e.sym.info =:= comparableInfo) || entryContainsSym(lookupNextEntry(e))
+ }
+ entryContainsSym(this lookupEntry sym.name)
+ }
+ other.toList forall scopeContainsSym
+ }
+
/** Return all symbols as a list in the order they were entered in this scope.
*/
override def toList: List[Symbol] = {
if (elemsCache eq null) {
- elemsCache = Nil
+ var symbols: List[Symbol] = Nil
+ var count = 0
var e = elems
while ((e ne null) && e.owner == this) {
- elemsCache = e.sym :: elemsCache
+ count += 1
+ symbols ::= e.sym
e = e.next
}
+ elemsCache = symbols
+ cachedSize = count
}
elemsCache
}
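Aside: isSameScope is containment checked in both directions, with the newly cached size as a cheap early-out. The shape of the check, sketched with plain Sets rather than compiler Scopes:

    object ScopeSketch extends App {
      def isSubScope(a: Set[String], b: Set[String])  = b forall a.contains
      def isSameScope(a: Set[String], b: Set[String]) =
        a.size == b.size && isSubScope(a, b) && isSubScope(b, a) // size test is the fast path

      println(isSameScope(Set("x", "y"), Set("y", "x"))) // true
      println(isSameScope(Set("x"), Set("x", "y")))      // false
    }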
diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala
index 6c5bbc9774..9eb66db01e 100644
--- a/src/reflect/scala/reflect/internal/StdAttachments.scala
+++ b/src/reflect/scala/reflect/internal/StdAttachments.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
trait StdAttachments {
diff --git a/src/reflect/scala/reflect/internal/StdCreators.scala b/src/reflect/scala/reflect/internal/StdCreators.scala
index 5e5e4f9043..a0084dc95c 100644
--- a/src/reflect/scala/reflect/internal/StdCreators.scala
+++ b/src/reflect/scala/reflect/internal/StdCreators.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
import scala.reflect.api.{TreeCreator, TypeCreator}
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index 4fd86aa8b1..81fffc833c 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -3,10 +3,12 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import java.security.MessageDigest
+import java.util.UUID.randomUUID
import Chars.isOperatorPart
import scala.annotation.switch
import scala.language.implicitConversions
@@ -289,6 +291,9 @@ trait StdNames {
val FAKE_LOCAL_THIS: NameType = "this$"
val LAZY_LOCAL: NameType = "$lzy"
val LAZY_SLOW_SUFFIX: NameType = "$lzycompute"
+ val MACRO_INVOKER_PACKAGE: NameType = "scala.reflect.macros.synthetic"
+ // TODO: if I use dollars in MACRO_INVOKER_SUFFIX, as in "$Invoker$", then Scala reflection fails to load implementations
+ val MACRO_INVOKER_SUFFIX: NameType = "Invoker"
val UNIVERSE_BUILD_PREFIX: NameType = "$u.build."
val UNIVERSE_PREFIX: NameType = "$u."
val UNIVERSE_SHORT: NameType = "$u"
@@ -359,8 +364,8 @@ trait StdNames {
* be sure to retain the extra dollars.
*/
def unexpandedName(name: Name): Name = name lastIndexOf "$$" match {
- case -1 => name
- case idx0 =>
+ case 0 | -1 => name
+ case idx0 =>
// Sketchville - We've found $$ but if it's part of $$$ or $$$$
// or something we need to keep the bonus dollars, so e.g. foo$$$outer
// has an original name of $outer.
@@ -583,12 +588,14 @@ trait StdNames {
val box: NameType = "box"
val build : NameType = "build"
val bytes: NameType = "bytes"
+ val c: NameType = "c"
val canEqual_ : NameType = "canEqual"
val checkInitialized: NameType = "checkInitialized"
val classOf: NameType = "classOf"
val clone_ : NameType = "clone"
val conforms: NameType = "conforms"
val copy: NameType = "copy"
+ val create: NameType = "create"
val currentMirror: NameType = "currentMirror"
val delayedInit: NameType = "delayedInit"
val delayedInitArg: NameType = "delayedInit$body"
@@ -697,6 +704,7 @@ trait StdNames {
val view_ : NameType = "view"
val wait_ : NameType = "wait"
val withFilter: NameType = "withFilter"
+ val zero: NameType = "zero"
// unencoded operators
object raw {
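Aside: the unexpandedName change above leaves names whose "$$" sits at index 0 untouched. Expected behaviour, based on the comment in that hunk and the usual prefix$$member expansion scheme (illustrative):

    // unexpandedName("Foo$$bippy")  == "bippy"    // ordinary expanded name
    // unexpandedName("foo$$$outer") == "$outer"   // bonus dollars are retained
    // unexpandedName("$$bar")       == "$$bar"    // "$$" at index 0: returned unchanged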
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index 336c2748c6..2ae9f81a09 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import scala.annotation.elidable
@@ -47,6 +48,7 @@ abstract class SymbolTable extends macros.Universe
def log(msg: => AnyRef): Unit
def warning(msg: String): Unit = Console.err.println(msg)
+ def inform(msg: String): Unit = Console.err.println(msg)
def globalError(msg: String): Unit = abort(msg)
def abort(msg: String): Nothing = throw new FatalError(supplementErrorMessage(msg))
@@ -124,6 +126,8 @@ abstract class SymbolTable extends macros.Universe
val global: SymbolTable.this.type = SymbolTable.this
} with util.TraceSymbolActivity
+ val treeInfo: TreeInfo { val global: SymbolTable.this.type }
+
/** Check that the executing thread is the compiler thread. No-op here,
* overridden in interactive.Global. */
@elidable(elidable.WARNING)
@@ -303,27 +307,20 @@ abstract class SymbolTable extends macros.Universe
}
object perRunCaches {
- import java.lang.ref.WeakReference
import scala.collection.generic.Clearable
// Weak references so the garbage collector will take care of
// letting us know when a cache is really out of commission.
- private val caches = mutable.HashSet[WeakReference[Clearable]]()
+ private val caches = WeakHashSet[Clearable]()
def recordCache[T <: Clearable](cache: T): T = {
- caches += new WeakReference(cache)
+ caches += cache
cache
}
def clearAll() = {
debuglog("Clearing " + caches.size + " caches.")
- caches foreach { ref =>
- val cache = ref.get()
- if (cache == null)
- caches -= ref
- else
- cache.clear()
- }
+ caches foreach (_.clear)
}
def newWeakMap[K, V]() = recordCache(mutable.WeakHashMap[K, V]())
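Aside: per-run caches are now registered in a weak set of Clearable, so dead caches vanish on their own and live ones are cleared between runs. A minimal sketch of the record/clear protocol (a plain mutable.Set stands in for the internal WeakHashSet, so this version does not drop dead caches automatically):

    import scala.collection.generic.Clearable
    import scala.collection.mutable

    object PerRunCachesSketch extends App {
      private val caches = mutable.Set[Clearable]()
      def recordCache[T <: Clearable](cache: T): T = { caches += cache; cache }
      def clearAll(): Unit = caches foreach (_.clear())

      val seen = recordCache(mutable.HashSet[String]())
      seen += "entry"
      clearAll()
      println(seen.isEmpty) // true -- every recorded cache was cleared
    }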
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 8ef2805529..2da9fa1cca 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import scala.collection.{ mutable, immutable }
@@ -91,6 +92,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def isExistential: Boolean = this.isExistentiallyBound
def isParamWithDefault: Boolean = this.hasDefault
+ // `isByNameParam` is only true for a call-by-name parameter of a *method*,
+ // an argument of the primary constructor seen in the class body is excluded by `isValueParameter`
def isByNameParam: Boolean = this.isValueParameter && (this hasFlag BYNAMEPARAM)
def isImplementationArtifact: Boolean = (this hasFlag BRIDGE) || (this hasFlag VBRIDGE) || (this hasFlag ARTIFACT)
def isJava: Boolean = isJavaDefined
@@ -234,7 +237,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Static constructor with info set. */
def newStaticConstructor(pos: Position): MethodSymbol =
- newConstructor(pos, STATIC) setInfo UnitClass.tpe
+ newConstructor(pos, STATIC) setInfo UnitTpe
/** Instance constructor with info set. */
def newClassConstructor(pos: Position): MethodSymbol =
@@ -266,7 +269,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
val newName = nme.moduleVarName(accessor.name.toTermName)
val newFlags = MODULEVAR | ( if (this.isClass) PrivateLocal | SYNTHETIC else 0 )
val newInfo = accessor.tpe.finalResultType
- val mval = newVariable(newName, accessor.pos.focus, newFlags) addAnnotation VolatileAttr
+ val mval = newVariable(newName, accessor.pos.focus, newFlags.toLong) addAnnotation VolatileAttr
if (this.isClass)
mval setInfoAndEnter newInfo
@@ -624,6 +627,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize
(flags & mask) != 0
}
+ def hasFlag(mask: Int): Boolean = hasFlag(mask.toLong)
+
/** Does symbol have ALL the flags in `mask` set? */
final def hasAllFlags(mask: Long): Boolean = {
if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize
@@ -659,8 +664,29 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def hasGetter = isTerm && nme.isLocalName(name)
+ /** A little explanation for this confusing situation.
+ * Nested modules which have no static owner when ModuleDefs
+ * are eliminated (refchecks) are given the lateMETHOD flag,
+ * which makes them appear as methods after refchecks.
+ * Here's an example where one can see all four of FF FT TF TT
+ * for (isStatic, isMethod) at various phases.
+ *
+ * trait A1 { case class Quux() }
+ * object A2 extends A1 { object Flax }
+ * // -- namer object Quux in trait A1
+ * // -M flatten object Quux in trait A1
+ * // S- flatten object Flax in object A2
+ * // -M posterasure object Quux in trait A1
+ * // -M jvm object Quux in trait A1
+ * // SM jvm object Quux in object A2
+ *
+ * So "isModuleNotMethod" exists not for its achievement in
+ * brevity, but to encapsulate the relevant condition.
+ */
+ def isModuleNotMethod = isModule && !isMethod
+ def isStaticModule = isModuleNotMethod && isStatic
+
final def isInitializedToDefault = !isType && hasAllFlags(DEFAULTINIT | ACCESSOR)
- final def isStaticModule = isModule && isStatic && !isMethod
final def isThisSym = isTerm && owner.thisSym == this
final def isError = hasFlag(IS_ERROR)
final def isErroneous = isError || isInitialized && tpe_*.isErroneous
@@ -766,8 +792,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Is this symbol an accessor method for outer? */
final def isOuterField = isArtifact && (unexpandedName == nme.OUTER_LOCAL)
- /** Does this symbol denote a stable value? */
- def isStable = false
+ /** Does this symbol denote a stable value, ignoring volatility?
+ *
+ * Stability and volatility are checked separately to allow volatile paths in patterns that amount to equality checks. SI-6815
+ */
+ final def isStable = isTerm && !isMutable && !(hasFlag(BYNAMEPARAM)) && (!isMethod || hasStableFlag)
+ final def hasVolatileType = tpe.isVolatile && !hasAnnotation(uncheckedStableClass)
/** Does this symbol denote the primary constructor of its enclosing class? */
final def isPrimaryConstructor =
@@ -864,16 +894,23 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Is this a term symbol only defined in a refinement (so that it needs
* to be accessed by reflection)?
*/
- def isOnlyRefinementMember: Boolean = (
- isTerm && // type members are not affected
- owner.isRefinementClass && // owner must be a refinement class
- (owner.info decl name) == this && // symbol must be explicitly declared in the refinement (not synthesized from glb)
- !isOverridingSymbol && // symbol must not override a symbol in a base class
- !isConstant // symbol must not be a constant. Question: Can we exclude @inline methods as well?
+ def isOnlyRefinementMember = (
+ isTerm // Type members are unaffected
+ && owner.isRefinementClass // owner must be a refinement class
+ && isPossibleInRefinement // any overridden symbols must also have refinement class owners
+ && !isConstant // Must not be a constant. Question: Can we exclude @inline methods as well?
+ && isDeclaredByOwner // Must be explicitly declared in the refinement (not synthesized from glb)
)
+ // "(owner.info decl name) == this" is inadequate, because "name" might
+ // be overloaded in owner - and this might be an overloaded symbol.
+ // TODO - make this cheaper and see where else we should be doing something similar.
+ private def isDeclaredByOwner = (owner.info decl name).alternatives exists (alternatives contains _)
final def isStructuralRefinementMember = owner.isStructuralRefinement && isPossibleInRefinement && isPublic
- final def isPossibleInRefinement = !isConstructor && !isOverridingSymbol
+ final def isPossibleInRefinement = (
+ !isConstructor
+ && allOverriddenSymbols.forall(_.owner.isRefinementClass) // this includes allOverriddenSymbols.isEmpty
+ )
/** A member of class `base` is incomplete if
* (1) it is declared deferred or
@@ -887,9 +924,23 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
supersym == NoSymbol || supersym.isIncompleteIn(base)
}
- // Does not always work if the rawInfo is a SourcefileLoader, see comment
- // in "def coreClassesFirst" in Global.
- def exists = !isTopLevel || { rawInfo.load(this); rawInfo != NoType }
+ def exists: Boolean = !isTopLevel || {
+ val isSourceLoader = rawInfo match {
+ case sl: SymLoader => sl.fromSource
+ case _ => false
+ }
+ def warnIfSourceLoader() {
+ if (isSourceLoader)
+ // Predef is completed early due to its autoimport; we used to get here when type checking its
+ // parent LowPriorityImplicits. See comment in c5441dc for more elaboration.
+ // Since the fix for SI-7335 Predef parents must be defined in Predef.scala, and we should not
+ // get here anymore.
+ devWarning(s"calling Symbol#exists with sourcefile based symbol loader may give incorrect results.");
+ }
+
+ rawInfo load this
+ rawInfo != NoType || { warnIfSourceLoader(); false }
+ }
final def isInitialized: Boolean =
validTo != NoPeriod
@@ -1096,12 +1147,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
new TermSymbol(this, pos, name) initFlags newFlags
final def newTermSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): TermSymbol = {
- if ((newFlags & METHOD) != 0)
- createMethodSymbol(name, pos, newFlags)
- else if ((newFlags & PACKAGE) != 0)
+ // Package before Module, Module before Method, or we might grab the wrong guy.
+ if ((newFlags & PACKAGE) != 0)
createPackageSymbol(name, pos, newFlags | PackageFlags)
else if ((newFlags & MODULE) != 0)
createModuleSymbol(name, pos, newFlags)
+ else if ((newFlags & METHOD) != 0)
+ createMethodSymbol(name, pos, newFlags)
else if ((newFlags & PARAM) != 0)
createValueParameterSymbol(name, pos, newFlags)
else
@@ -1461,10 +1513,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* This is done in checkAccessible and overriding checks in refchecks
* We can't do this on class loading because it would result in infinite cycles.
*/
- def cookJavaRawInfo(): Unit = {
+ def cookJavaRawInfo(): this.type = {
// only try once...
- if (this hasFlag TRIEDCOOKING)
- return
+ if (phase.erasedTypes || (this hasFlag TRIEDCOOKING))
+ return this
this setFlag TRIEDCOOKING
info // force the current info
@@ -1472,6 +1524,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
this modifyInfo rawToExistential
else if (isOverloaded)
alternatives withFilter (_.isJavaDefined) foreach (_ modifyInfo rawToExistential)
+
+ this
}
/** The logic approximately boils down to finding the most recent phase
@@ -1577,7 +1631,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def makeSerializable() {
info match {
case ci @ ClassInfoType(_, _, _) =>
- setInfo(ci.copy(parents = ci.parents :+ SerializableClass.tpe))
+ setInfo(ci.copy(parents = ci.parents :+ SerializableTpe))
case i =>
abort("Only ClassInfoTypes can be made serializable: "+ i)
}
@@ -2550,13 +2604,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def isMixinConstructor = name == nme.MIXIN_CONSTRUCTOR
override def isConstructor = nme.isConstructorName(name)
- override def isPackageObject = isModule && (name == nme.PACKAGE)
- override def isStable = !isUnstable
- private def isUnstable = (
- isMutable
- || (hasFlag(METHOD | BYNAMEPARAM) && !hasFlag(STABLE))
- || (tpe.isVolatile && !hasAnnotation(uncheckedStableClass))
- )
+ override def isPackageObject = isModule && (name == nme.PACKAGE)
// The name in comments is what it is being disambiguated from.
// TODO - rescue CAPTURED from BYNAMEPARAM so we can see all the names.
@@ -2621,32 +2669,20 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
/** change name by appending $$<fully-qualified-name-of-class `base`>
- * Do the same for any accessed symbols or setters/getters.
- * If the accessor to be renamed is overriding a base symbol, enter
- * a cloned symbol with the original name but without ACCESSOR flag.
+ * Do the same for any accessed symbols or setters/getters
*/
override def expandName(base: Symbol) {
- def expand(sym: Symbol) {
- if ((sym eq NoSymbol) || (sym hasFlag EXPANDEDNAME)) () // skip
- else sym setFlag EXPANDEDNAME setName nme.expandedName(sym.name.toTermName, base)
- }
- def cloneAndExpand(accessor: Symbol) {
- val clone = accessor.cloneSymbol(accessor.owner, (accessor.flags | ARTIFACT) & ~ACCESSOR)
- expand(accessor)
- log(s"Expanded overriding accessor to $accessor, but cloned $clone to preserve override")
- accessor.owner.info.decls enter clone
- }
- def expandAccessor(accessor: Symbol) {
- if (accessor.isOverridingSymbol) cloneAndExpand(accessor) else expand(accessor)
- }
- if (hasAccessorFlag && !isDeferred) {
- expand(accessed)
- }
- else if (hasGetter) {
- expandAccessor(getter(owner))
- expandAccessor(setter(owner))
+ if (!hasFlag(EXPANDEDNAME)) {
+ setFlag(EXPANDEDNAME)
+ if (hasAccessorFlag && !isDeferred) {
+ accessed.expandName(base)
+ }
+ else if (hasGetter) {
+ getter(owner).expandName(base)
+ setter(owner).expandName(base)
+ }
+ name = nme.expandedName(name.toTermName, base)
}
- expand(this)
}
}
implicit val TermSymbolTag = ClassTag[TermSymbol](classOf[TermSymbol])
@@ -2841,18 +2877,19 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
tpeCache
}
override def typeConstructor: Type = {
- maybeUpdateTyconCache()
+ if (tyconCacheNeedsUpdate)
+ setTyconCache(newTypeRef(Nil))
tyconCache
}
override def tpeHK: Type = typeConstructor
- private def maybeUpdateTyconCache() {
- if ((tyconCache eq null) || tyconRunId != currentRunId) {
- tyconCache = newTypeRef(Nil)
- tyconRunId = currentRunId
- }
- assert(tyconCache ne null)
+ private def tyconCacheNeedsUpdate = (tyconCache eq null) || tyconRunId != currentRunId
+ private def setTyconCache(tycon: Type) {
+ tyconCache = tycon
+ tyconRunId = currentRunId
+ assert(tyconCache ne null, this)
}
+
private def maybeUpdateTypeCache() {
if (tpePeriod != currentPeriod) {
if (isValid(tpePeriod))
@@ -2869,10 +2906,15 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
tpePeriod = currentPeriod
tpeCache = NoType // cycle marker
+ val noTypeParams = phase.erasedTypes && this != ArrayClass || unsafeTypeParams.isEmpty
tpeCache = newTypeRef(
- if (phase.erasedTypes && this != ArrayClass || unsafeTypeParams.isEmpty) Nil
+ if (noTypeParams) Nil
else unsafeTypeParams map (_.typeConstructor)
)
+ // Avoid carrying around different types in tyconCache and tpeCache
+ // for monomorphic types.
+ if (noTypeParams && tyconCacheNeedsUpdate)
+ setTyconCache(tpeCache)
}
override def info_=(tp: Type) {
@@ -2927,7 +2969,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// a type symbol bound by an existential type, for instance the T in
// List[T] forSome { type T }
override def isExistentialSkolem = this hasFlag EXISTENTIAL
- override def isGADTSkolem = this hasAllFlags GADT_SKOLEM_FLAGS
+ override def isGADTSkolem = this hasAllFlags GADT_SKOLEM_FLAGS.toLong
override def isTypeSkolem = this hasFlag PARAM
override def isAbstractType = this hasFlag DEFERRED
@@ -3025,8 +3067,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* returned, otherwise, `NoSymbol` is returned.
*/
protected final def companionModule0: Symbol =
- flatOwnerInfo.decl(name.toTermName).suchThat(
- sym => sym.isModule && (sym isCoDefinedWith this) && !sym.isMethod)
+ flatOwnerInfo.decl(name.toTermName).suchThat(sym => sym.isModuleNotMethod && (sym isCoDefinedWith this))
override def companionModule = companionModule0
override def companionSymbol = companionModule0
@@ -3372,6 +3413,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def mapParamss[T](sym: Symbol)(f: Symbol => T): List[List[T]] = mmap(sym.info.paramss)(f)
+ def existingSymbols(syms: List[Symbol]): List[Symbol] =
+ syms filter (s => (s ne null) && (s ne NoSymbol))
+
/** Return closest enclosing method, unless shadowed by an enclosing class. */
// TODO Move back to ExplicitOuter when the other call site is removed.
// no use of closures here in the interest of speed.
@@ -3404,7 +3448,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
@tailrec private[scala] final
def allSymbolsHaveOwner(syms: List[Symbol], owner: Symbol): Boolean = syms match {
case sym :: rest => sym.owner == owner && allSymbolsHaveOwner(rest, owner)
- case _ => true
+ case _ => true
}
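Aside: a source-level reading of the new isStable definition (illustrative, not exhaustive; volatility is tracked separately via hasVolatileType, which is what makes the SI-6815 pattern cases work):

    class StableDemo {
      val a = 1            // stable: a term, not mutable, not by-name, not a method
      var b = 2            // not stable: mutable
      def c = 3            // not stable: a method without the STABLE flag
      def d(x: => Int) = x // within d, the by-name parameter x is not stable
    }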
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index d3e486311e..b75fd72526 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
abstract class TreeGen extends macros.TreeBuilder {
@@ -136,22 +137,20 @@ abstract class TreeGen extends macros.TreeBuilder {
/** Replaces tree type with a stable type if possible */
def stabilize(tree: Tree): Tree = stableTypeFor(tree) match {
- case Some(tp) => tree setType tp
- case _ => tree
+ case NoType => tree
+ case tp => tree setType tp
}
/** Computes stable type for a tree if possible */
- def stableTypeFor(tree: Tree): Option[Type] = tree match {
- case This(_) if tree.symbol != null && !tree.symbol.isError =>
- Some(ThisType(tree.symbol))
- case Ident(_) if tree.symbol.isStable =>
- Some(singleType(tree.symbol.owner.thisType, tree.symbol))
- case Select(qual, _) if ((tree.symbol ne null) && (qual.tpe ne null)) && // turned assert into guard for #4064
- tree.symbol.isStable && qual.tpe.isStable =>
- Some(singleType(qual.tpe, tree.symbol))
- case _ =>
- None
- }
+ def stableTypeFor(tree: Tree): Type = (
+ if (!treeInfo.admitsTypeSelection(tree)) NoType
+ else tree match {
+ case This(_) => ThisType(tree.symbol)
+ case Ident(_) => singleType(tree.symbol.owner.thisType, tree.symbol)
+ case Select(qual, _) => singleType(qual.tpe, tree.symbol)
+ case _ => NoType
+ }
+ )
/** Builds a reference with stable type to given symbol */
def mkAttributedStableRef(pre: Type, sym: Symbol): Tree =
@@ -228,7 +227,7 @@ abstract class TreeGen extends macros.TreeBuilder {
/** Cast `tree` to `pt`, unless tpe is a subtype of pt, or pt is Unit. */
def maybeMkAsInstanceOf(tree: Tree, pt: Type, tpe: Type, beforeRefChecks: Boolean = false): Tree =
- if ((pt == UnitClass.tpe) || (tpe <:< pt)) tree
+ if ((pt == UnitTpe) || (tpe <:< pt)) tree
else atPos(tree.pos)(mkAsInstanceOf(tree, pt, any = true, wrapInApply = !beforeRefChecks))
/** Apparently we smuggle a Type around as a Literal(Constant(tp))
@@ -254,7 +253,7 @@ abstract class TreeGen extends macros.TreeBuilder {
* which is appropriate to the given Type.
*/
def mkZero(tp: Type): Tree = tp.typeSymbol match {
- case NothingClass => mkMethodCall(Predef_???, Nil) setType NothingClass.tpe
+ case NothingClass => mkMethodCall(Predef_???, Nil) setType NothingTpe
case _ => Literal(mkConstantZero(tp)) setType tp
}
@@ -298,4 +297,9 @@ abstract class TreeGen extends macros.TreeBuilder {
def mkPackageDef(packageName: String, stats: List[Tree]): PackageDef = {
PackageDef(mkUnattributedRef(newTermName(packageName)), stats)
}
+
+ def mkSeqApply(arg: Tree): Apply = {
+ val factory = Select(gen.mkAttributedRef(SeqModule), nme.apply)
+ Apply(factory, List(arg))
+ }
}
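Illustrative aside (not part of the patch; names are invented): the TreeGen hunks above make stableTypeFor return NoType instead of Option[Type] and route the check through treeInfo.admitsTypeSelection. At the source level, the kinds of references that admit a stable/singleton type look like this runnable sketch:

    object StableRefDemo {
      object O {
        val x: String = "stable"   // a val of a non-volatile type is a stable member
        var v: String = "mutable"  // a var is not a stable member
      }
      val a: O.x.type = O.x        // ok: O.x is a path, so the singleton type O.x.type is legal
      // val b: O.v.type = O.v     // rejected: a stable identifier is required, but O.v is a var
      def main(args: Array[String]): Unit = println(a)
    }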
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index b1f58814c7..3a8d3fd460 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import Flags._
@@ -17,7 +18,7 @@ abstract class TreeInfo {
val global: SymbolTable
import global._
- import definitions.{ isVarArgsList, isCastSymbol, ThrowableClass, TupleClass, MacroContextClass, MacroContextPrefixType }
+ import definitions.{ isTupleSymbol, isVarArgsList, isCastSymbol, ThrowableClass, TupleClass, MacroContextClass, MacroContextPrefixType, uncheckedStableClass }
/* Does not seem to be used. Not sure what it does anyway.
def isOwnerDefinition(tree: Tree): Boolean = tree match {
@@ -65,6 +66,80 @@ abstract class TreeInfo {
false
}
+ /** Is `tree` a path, defined as follows? (Spec: 3.1 Paths)
+ *
+ * - The empty path ε (which cannot be written explicitly in user programs).
+ * - C.this, where C references a class.
+ * - p.x where p is a path and x is a stable member of p.
+ * - C.super.x or C.super[M].x where C references a class
+ * and x references a stable member of the super class or designated parent class M of C.
+ *
+ * NOTE: Trees with errors are (mostly) excluded.
+ *
+ * Path ::= StableId | [id ‘.’] this
+ *
+ */
+ def isPath(tree: Tree, allowVolatile: Boolean): Boolean =
+ tree match {
+ // Super is not technically a path.
+ // However, syntactically, it can only occur nested in a Select.
+ // This gives a nicer definition of isStableIdentifier that's equivalent to the spec's.
+ // must consider Literal(_) a path for typedSingletonTypeTree
+ case EmptyTree | Literal(_) => true
+ case This(_) | Super(_, _) => symOk(tree.symbol)
+ case _ => isStableIdentifier(tree, allowVolatile)
+ }
+
+ /** Is `tree` a stable identifier, a path which ends in an identifier?
+ *
+ * StableId ::= id
+ * | Path ‘.’ id
+ * | [id ’.’] ‘super’ [‘[’ id ‘]’] ‘.’ id
+ */
+ def isStableIdentifier(tree: Tree, allowVolatile: Boolean): Boolean =
+ tree match {
+ case Ident(_) => symOk(tree.symbol) && tree.symbol.isStable && !tree.symbol.hasVolatileType // TODO SPEC: not required by spec
+ case Select(qual, _) => isStableMemberOf(tree.symbol, qual, allowVolatile) && isPath(qual, allowVolatile)
+ case Apply(Select(free @ Ident(_), nme.apply), _) if free.symbol.name endsWith nme.REIFY_FREE_VALUE_SUFFIX =>
+ // see a detailed explanation of this trick in `GenSymbols.reifyFreeTerm`
+ free.symbol.hasStableFlag && isPath(free, allowVolatile)
+ case _ => false
+ }
+
+ private def symOk(sym: Symbol) = sym != null && !sym.isError && sym != NoSymbol
+ private def typeOk(tp: Type) = tp != null && ! tp.isError
+
+ /** Assuming `sym` is a member of `tree`, is it a "stable member"?
+ *
+ * Stable members are packages or members introduced
+ * by object definitions or by value definitions of non-volatile types (§3.6).
+ */
+ def isStableMemberOf(sym: Symbol, tree: Tree, allowVolatile: Boolean): Boolean = (
+ symOk(sym) && (!sym.isTerm || (sym.isStable && (allowVolatile || !sym.hasVolatileType))) &&
+ typeOk(tree.tpe) && (allowVolatile || !hasVolatileType(tree)) && !definitions.isByNameParamType(tree.tpe)
+ )
+
+ /** Is `tree`'s type volatile? (Ignored if its symbol has the @uncheckedStable annotation.)
+ */
+ def hasVolatileType(tree: Tree): Boolean =
+ symOk(tree.symbol) && tree.tpe.isVolatile && !tree.symbol.hasAnnotation(uncheckedStableClass)
+
+ /** Is `tree` either a non-volatile type,
+ * or a path that does not include any of:
+ * - a reference to a mutable variable/field
+ * - a reference to a by-name parameter
+ * - a member selection on a volatile type (Spec: 3.6 Volatile Types)?
+ *
+ * Such a tree is a suitable target for type selection.
+ */
+ def admitsTypeSelection(tree: Tree): Boolean = isPath(tree, allowVolatile = false)
+
+ /** Is `tree` admissible as a stable identifier pattern (8.1.5 Stable Identifier Patterns)?
+ *
+ * We disregard volatility, as it's irrelevant in patterns (SI-6815)
+ */
+ def isStableIdentifierPattern(tree: Tree): Boolean = isStableIdentifier(tree, allowVolatile = true)
+
// TODO SI-5304 tighten this up so we don't elide side effect in module loads
def isQualifierSafeToElide(tree: Tree): Boolean = isExprSafeToInline(tree)
@@ -202,16 +277,16 @@ abstract class TreeInfo {
* same object?
*/
def isSelfConstrCall(tree: Tree): Boolean = tree match {
- case Applied(Ident(nme.CONSTRUCTOR), _, _) => true
+ case Applied(Ident(nme.CONSTRUCTOR), _, _) => true
case Applied(Select(This(_), nme.CONSTRUCTOR), _, _) => true
- case _ => false
+ case _ => false
}
/** Is tree a super constructor call?
*/
def isSuperConstrCall(tree: Tree): Boolean = tree match {
case Applied(Select(Super(_, _), nme.CONSTRUCTOR), _, _) => true
- case _ => false
+ case _ => false
}
/**
@@ -401,8 +476,15 @@ abstract class TreeInfo {
/** Is this argument node of the form <expr> : _* ?
*/
def isWildcardStarArg(tree: Tree): Boolean = tree match {
- case Typed(_, Ident(tpnme.WILDCARD_STAR)) => true
- case _ => false
+ case WildcardStarArg(_) => true
+ case _ => false
+ }
+
+ object WildcardStarArg {
+ def unapply(tree: Typed): Option[Tree] = tree match {
+ case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => Some(expr)
+ case _ => None
+ }
}
/** If this tree has type parameters, those. Otherwise Nil.
@@ -465,9 +547,9 @@ abstract class TreeInfo {
/** Does this CaseDef catch Throwable? */
def catchesThrowable(cdef: CaseDef) = (
cdef.guard.isEmpty && (unbind(cdef.pat) match {
- case Ident(nme.WILDCARD) => true
- case i@Ident(name) => hasNoSymbol(i)
- case _ => false
+ case Ident(nme.WILDCARD) => true
+ case i@Ident(name) => hasNoSymbol(i)
+ case _ => false
})
)
@@ -524,6 +606,20 @@ abstract class TreeInfo {
case _ => false
}
+ /**
+ * {{{
+ * //------------------------ => effectivePatternArity(args)
+ * case Extractor(a) => 1
+ * case Extractor(a, b) => 2
+ * case Extractor((a, b)) => 2
+ * case Extractor(a @ (b, c)) => 2
+ * }}}
+ */
+ def effectivePatternArity(args: List[Tree]): Int = (args.map(unbind) match {
+ case Apply(fun, xs) :: Nil if isTupleSymbol(fun.symbol) => xs
+ case xs => xs
+ }).length
+
// used in the symbols for labeldefs and valdefs emitted by the pattern matcher
// tailcalls, cps,... use this flag combination to detect translated matches
@@ -663,20 +759,9 @@ abstract class TreeInfo {
unapply(dissectApplied(tree))
}
- /** Does list of trees start with a definition of
- * a class of module with given name (ignoring imports)
- */
- def firstDefinesClassOrObject(trees: List[Tree], name: Name): Boolean = trees match {
- case Import(_, _) :: xs => firstDefinesClassOrObject(xs, name)
- case Annotated(_, tree1) :: Nil => firstDefinesClassOrObject(List(tree1), name)
- case ModuleDef(_, `name`, _) :: Nil => true
- case ClassDef(_, `name`, _, _) :: Nil => true
- case _ => false
- }
-
-
/** Is this file the body of a compilation unit which should not
- * have Predef imported?
+ * have Predef imported? This is the case iff the first import in the
+ * unit explicitly refers to Predef.
*/
def noPredefImportForUnit(body: Tree) = {
// Top-level definition whose leading imports include Predef.
@@ -685,13 +770,7 @@ abstract class TreeInfo {
case Import(expr, _) => isReferenceToPredef(expr)
case _ => false
}
- // Compilation unit is class or object 'name' in package 'scala'
- def isUnitInScala(tree: Tree, name: Name) = tree match {
- case PackageDef(Ident(nme.scala_), defs) => firstDefinesClassOrObject(defs, name)
- case _ => false
- }
-
- isUnitInScala(body, nme.Predef) || isLeadingPredefImport(body)
+ isLeadingPredefImport(body)
}
def isAbsTypeDef(tree: Tree) = tree match {
@@ -757,8 +836,18 @@ abstract class TreeInfo {
}
def unapply(tree: Tree) = refPart(tree) match {
- case ref: RefTree => Some((ref.qualifier.symbol, ref.symbol, dissectApplied(tree).targs))
- case _ => None
+ case ref: RefTree => {
+ val qual = ref.qualifier
+ val isBundle = definitions.isMacroBundleType(qual.tpe)
+ val owner =
+ if (isBundle) qual.tpe.typeSymbol
+ else {
+ val sym = if (qual.hasSymbolField) qual.symbol else NoSymbol
+ if (sym.isModule) sym.moduleClass else sym
+ }
+ Some((isBundle, owner, ref.symbol, dissectApplied(tree).targs))
+ }
+ case _ => None
}
}
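Illustrative aside (not part of the patch): the new WildcardStarArg extractor above matches the `expr: _*` argument shape. A minimal runnable example of that surface syntax:

    object WildcardStarDemo {
      def sum(xs: Int*): Int = xs.sum
      def main(args: Array[String]): Unit = {
        val ys = Seq(1, 2, 3)
        // `ys: _*` parses to Typed(expr, Ident(tpnme.WILDCARD_STAR)),
        // the shape recognized by isWildcardStarArg / WildcardStarArg.unapply.
        println(sum(ys: _*))
      }
    }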
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 410bc738e2..8781423a6d 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import Flags._
@@ -252,6 +253,19 @@ trait Trees extends api.Trees { self: SymbolTable =>
def name: Name
}
+ object RefTree extends RefTreeExtractor {
+ def apply(qualifier: Tree, name: Name): RefTree = qualifier match {
+ case EmptyTree =>
+ Ident(name)
+ case qual if qual.isTerm =>
+ Select(qual, name)
+ case qual if qual.isType =>
+ assert(name.isTypeName, s"qual = $qual, name = $name")
+ SelectFromTypeTree(qual, name.toTypeName)
+ }
+ def unapply(refTree: RefTree): Option[(Tree, Name)] = Some((refTree.qualifier, refTree.name))
+ }
+
abstract class DefTree extends SymTree with NameTree with DefTreeApi {
def name: Name
override def isDef = true
@@ -943,6 +957,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
if (flags1 == flags) this
else Modifiers(flags1, privateWithin, annotations) setPositions positions
}
+ def | (flag: Int): Modifiers = this | flag.toLong
def | (flag: Long): Modifiers = {
val flags1 = flags | flag
if (flags1 == flags) this
@@ -1491,6 +1506,11 @@ trait Trees extends api.Trees { self: SymbolTable =>
/** Substitute symbols in `from` with symbols in `to`. Returns a new
* tree using the new symbols and whose Ident and Select nodes are
* name-consistent with the new symbols.
+ *
+ * Note: This is currently a destructive operation on the original Tree.
+ * Trees currently assigned a symbol in `from` will be assigned the new symbols
+ * without copying, and trees that define symbols with an `info` that refer
+ * a symbol in `from` will have a new type assigned.
*/
class TreeSymSubstituter(from: List[Symbol], to: List[Symbol]) extends Transformer {
val symSubst = new SubstSymMap(from, to)
@@ -1644,6 +1664,21 @@ trait Trees extends api.Trees { self: SymbolTable =>
sys.error("Not a ClassDef: " + t + "/" + t.getClass)
}
+ def copyModuleDef(tree: Tree)(
+ mods: Modifiers = null,
+ name: Name = null,
+ impl: Template = null
+ ): ModuleDef = tree match {
+ case ModuleDef(mods0, name0, impl0) =>
+ treeCopy.ModuleDef(tree,
+ if (mods eq null) mods0 else mods,
+ if (name eq null) name0 else name,
+ if (impl eq null) impl0 else impl
+ )
+ case t =>
+ sys.error("Not a ModuleDef: " + t + "/" + t.getClass)
+ }
+
def deriveDefDef(ddef: Tree)(applyToRhs: Tree => Tree): DefDef = ddef match {
case DefDef(mods0, name0, tparams0, vparamss0, tpt0, rhs0) =>
treeCopy.DefDef(ddef, mods0, name0, tparams0, vparamss0, tpt0, applyToRhs(rhs0))
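Illustrative aside (not part of the patch; names are invented): copyModuleDef above follows the existing copyClassDef convention of using null defaults to mean "keep the original field". A self-contained sketch of that convention:

    object CopyWithDefaultsDemo {
      final case class Mod(name: String, impl: String)
      // a null argument keeps the corresponding field, mirroring copyModuleDef's defaults
      def copyMod(m: Mod)(name: String = null, impl: String = null): Mod =
        Mod(if (name eq null) m.name else name, if (impl eq null) m.impl else impl)
      def main(args: Array[String]): Unit =
        println(copyMod(Mod("A", "old"))(impl = "new"))   // Mod(A,new)
    }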
diff --git a/src/reflect/scala/reflect/internal/TypeDebugging.scala b/src/reflect/scala/reflect/internal/TypeDebugging.scala
index d437b1b058..71f84ab557 100644
--- a/src/reflect/scala/reflect/internal/TypeDebugging.scala
+++ b/src/reflect/scala/reflect/internal/TypeDebugging.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.reflect
+package scala
+package reflect
package internal
trait TypeDebugging {
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index e3b34b83e9..967146a130 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import scala.collection.{ mutable, immutable, generic }
@@ -106,6 +107,18 @@ trait Types
protected val enableTypeVarExperimentals = settings.Xexperimental.value
+ /** Caching the most recent map has a 75-90% hit rate. */
+ private object substTypeMapCache {
+ private[this] var cached: SubstTypeMap = new SubstTypeMap(Nil, Nil)
+
+ def apply(from: List[Symbol], to: List[Type]): SubstTypeMap = {
+ if ((cached.from ne from) || (cached.to ne to))
+ cached = new SubstTypeMap(from, to)
+
+ cached
+ }
+ }
+
/** The current skolemization level, needed for the algorithms
* in isSameType, isSubType that do constraint solving under a prefix.
*/
@@ -394,7 +407,7 @@ trait Types
/** For a class with nonEmpty parents, the first parent.
* Otherwise some specific fixed top type.
*/
- def firstParent = if (parents.nonEmpty) parents.head else ObjectClass.tpe
+ def firstParent = if (parents.nonEmpty) parents.head else ObjectTpe
/** For a typeref or single-type, the prefix of the normalized type (@see normalize).
* NoType for all other types. */
@@ -697,8 +710,7 @@ trait Types
* symbols `from` in this type.
*/
def subst(from: List[Symbol], to: List[Type]): Type =
- if (from.isEmpty) this
- else new SubstTypeMap(from, to) apply this
+ if (from.isEmpty) this else substTypeMapCache(from, to)(this)
/** Substitute symbols `to` for occurrences of symbols `from` in this type.
*
@@ -742,6 +754,8 @@ trait Types
def map[T](f: Type => T): List[T] = collect(Type.this) map f
}
+ @inline final def orElse(alt: => Type): Type = if (this ne NoType) this else alt
+
/** Returns optionally first type (in a preorder traversal) which satisfies predicate `p`,
* or None if none exists.
*/
@@ -871,7 +885,7 @@ trait Types
def baseTypeSeqDepth: Int = 1
/** The list of all baseclasses of this type (including its own typeSymbol)
- * in reverse linearization order, starting with the class itself and ending
+ * in linearization order, starting with the class itself and ending
* in class Any.
*/
def baseClasses: List[Symbol] = List()
@@ -958,7 +972,7 @@ trait Types
var sym: Symbol = NoSymbol
var e: ScopeEntry = decls.lookupEntry(name)
while (e ne null) {
- if (!e.sym.hasFlag(excludedFlags)) {
+ if (!e.sym.hasFlag(excludedFlags.toLong)) {
if (sym == NoSymbol) sym = e.sym
else {
if (alts.isEmpty) alts = sym :: Nil
@@ -1063,6 +1077,14 @@ trait Types
continue = false
val bcs0 = baseClasses
var bcs = bcs0
+ // omit PRIVATE LOCALS unless selector class is contained in class owning the def.
+ def admitPrivateLocal(owner: Symbol): Boolean = {
+ val selectorClass = this match {
+ case tt: ThisType => tt.sym // SI-7507 the first base class is not necessarily the selector class.
+ case _ => bcs0.head
+ }
+ selectorClass.hasTransOwner(owner)
+ }
while (!bcs.isEmpty) {
val decls = bcs.head.info.decls
var entry = decls.lookupEntry(name)
@@ -1072,11 +1094,11 @@ trait Types
if ((flags & required) == required) {
val excl = flags & excluded
if (excl == 0L &&
- (// omit PRIVATE LOCALS unless selector class is contained in class owning the def.
+ (
(bcs eq bcs0) ||
(flags & PrivateLocal) != PrivateLocal ||
- (bcs0.head.hasTransOwner(bcs.head)))) {
- if (name.isTypeName || stableOnly && sym.isStable) {
+ admitPrivateLocal(bcs.head))) {
+ if (name.isTypeName || (stableOnly && sym.isStable && !sym.hasVolatileType)) {
if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
return sym
} else if (member eq NoSymbol) {
@@ -1353,7 +1375,7 @@ trait Types
// more precise conceptually, but causes cyclic errors: (paramss exists (_ contains sym))
override def isImmediatelyDependent = (sym ne NoSymbol) && (sym.owner.isMethod && sym.isValueParameter)
- override def isVolatile : Boolean = underlying.isVolatile && !sym.isStable
+ override def isVolatile : Boolean = underlying.isVolatile && (sym.hasVolatileType || !sym.isStable)
/*
override def narrow: Type = {
if (phase.erasedTypes) this
@@ -1432,22 +1454,36 @@ trait Types
case TypeBounds(_, _) => that <:< this
case _ => lo <:< that && that <:< hi
}
- private def lowerString = if (emptyLowerBound) "" else " >: " + lo
- private def upperString = if (emptyUpperBound) "" else " <: " + hi
private def emptyLowerBound = typeIsNothing(lo) || lo.isWildcard
private def emptyUpperBound = typeIsAny(hi) || hi.isWildcard
def isEmptyBounds = emptyLowerBound && emptyUpperBound
- override def safeToString = lowerString + upperString
+ override def safeToString = scalaNotation(_.toString)
+
+ /** Bounds notation used in Scala syntax.
+ * For example +This <: scala.collection.generic.Sorted[K,This].
+ */
+ private[internal] def scalaNotation(typeString: Type => String): String = {
+ (if (emptyLowerBound) "" else " >: " + typeString(lo)) +
+ (if (emptyUpperBound) "" else " <: " + typeString(hi))
+ }
+ /** Bounds notation used in http://adriaanm.github.com/files/higher.pdf.
+ * For example *(scala.collection.generic.Sorted[K,This]).
+ */
+ private[internal] def starNotation(typeString: Type => String): String = {
+ if (emptyLowerBound && emptyUpperBound) ""
+ else if (emptyLowerBound) "(" + typeString(hi) + ")"
+ else "(%s, %s)" format (typeString(lo), typeString(hi))
+ }
override def kind = "TypeBoundsType"
}
final class UniqueTypeBounds(lo: Type, hi: Type) extends TypeBounds(lo, hi)
object TypeBounds extends TypeBoundsExtractor {
- def empty: TypeBounds = apply(NothingClass.tpe, AnyClass.tpe)
- def upper(hi: Type): TypeBounds = apply(NothingClass.tpe, hi)
- def lower(lo: Type): TypeBounds = apply(lo, AnyClass.tpe)
+ def empty: TypeBounds = apply(NothingTpe, AnyTpe)
+ def upper(hi: Type): TypeBounds = apply(NothingTpe, hi)
+ def lower(lo: Type): TypeBounds = apply(lo, AnyTpe)
def apply(lo: Type, hi: Type): TypeBounds = {
unique(new UniqueTypeBounds(lo, hi)).asInstanceOf[TypeBounds]
}
@@ -2419,6 +2455,7 @@ trait Types
else
super.prefixString
)
+ def copy(pre: Type = this.pre, sym: Symbol = this.sym, args: List[Type] = this.args) = TypeRef(pre, sym, args)
override def kind = "TypeRef"
}
@@ -2455,7 +2492,7 @@ trait Types
if (!isValidForBaseClasses(period)) {
tpe.parentsCache = tpe.thisInfo.parents map tpe.transform
} else if (tpe.parentsCache == null) { // seems this can happen if things are corrupted enough, see #2641
- tpe.parentsCache = List(AnyClass.tpe)
+ tpe.parentsCache = List(AnyTpe)
}
}
}
@@ -2956,12 +2993,14 @@ trait Types
/** The variable's skolemization level */
val level = skolemizationLevel
- /** Two occurrences of a higher-kinded typevar, e.g. `?CC[Int]` and `?CC[String]`, correspond to
- * ''two instances'' of `TypeVar` that share the ''same'' `TypeConstraint`.
+ /** Applies this TypeVar to type arguments, if arity matches.
*
- * `constr` for `?CC` only tracks type constructors anyway,
- * so when `?CC[Int] <:< List[Int]` and `?CC[String] <:< Iterable[String]`
- * `?CC's` hibounds contains List and Iterable.
+ * Different applications of the same type constructor variable `?CC`,
+ * e.g. `?CC[Int]` and `?CC[String]`, are modeled as distinct instances of `TypeVar`
+ * that share a `TypeConstraint`, so that the comparisons `?CC[Int] <:< List[Int]`
+ * and `?CC[String] <:< Iterable[String]` result in `?CC` being upper-bounded by `List` and `Iterable`.
+ *
+ * Applying the wrong number of type args results in a TypeVar whose instance is set to `ErrorType`.
*/
def applyArgs(newArgs: List[Type]): TypeVar = (
if (newArgs.isEmpty && typeArgs.isEmpty)
@@ -2971,7 +3010,7 @@ trait Types
TypeVar.trace("applyArgs", "In " + originLocation + ", apply args " + newArgs.mkString(", ") + " to " + originName)(tv)
}
else
- throw new Error("Invalid type application in TypeVar: " + params + ", " + newArgs)
+ TypeVar(typeSymbol).setInst(ErrorType)
)
// newArgs.length may differ from args.length (could've been empty before)
//
@@ -3001,16 +3040,17 @@ trait Types
// <region name="constraint mutators + undoLog">
// invariant: before mutating constr, save old state in undoLog
// (undoLog is used to reset constraints to avoid piling up unrelated ones)
- def setInst(tp: Type) {
+ def setInst(tp: Type): this.type = {
if (tp eq this) {
log(s"TypeVar cycle: called setInst passing $this to itself.")
- return
+ return this
}
undoLog record this
// if we were compared against later typeskolems, repack the existential,
// because skolems are only compatible if they were created at the same level
val res = if (shouldRepackType) repackExistential(tp) else tp
constr.inst = TypeVar.trace("setInst", "In " + originLocation + ", " + originName + "=" + res)(res)
+ this
}
def addLoBound(tp: Type, isNumericBound: Boolean = false) {
@@ -3059,7 +3099,7 @@ trait Types
else lhs <:< rhs
}
- /** Simple case: type arguments can be ignored, because either this typevar has
+ /* Simple case: type arguments can be ignored, because either this typevar has
* no type parameters, or we are comparing to Any/Nothing.
*
* The latter condition is needed because HK unification is limited to constraints of the shape
@@ -3086,7 +3126,7 @@ trait Types
} else false
}
- /** Full case: involving a check of the form
+ /* Full case: involving a check of the form
* {{{
* TC1[T1,..., TN] <: TC2[T'1,...,T'N]
* }}}
@@ -3098,10 +3138,9 @@ trait Types
sameLength(typeArgs, tp.typeArgs) && {
val lhs = if (isLowerBound) tp.typeArgs else typeArgs
val rhs = if (isLowerBound) typeArgs else tp.typeArgs
- // this is a higher-kinded type var with same arity as tp.
- // side effect: adds the type constructor itself as a bound
- addBound(tp.typeConstructor)
- isSubArgs(lhs, rhs, params, AnyDepth)
+ // This is a higher-kinded type var with the same arity as tp.
+ // If the argument check succeeds (see SI-7517), the side effect adds the type constructor itself as a bound.
+ isSubArgs(lhs, rhs, params, AnyDepth) && { addBound(tp.typeConstructor); true }
}
}
// The type with which we can successfully unify can be hidden
@@ -3394,7 +3433,7 @@ trait Types
/** Rebind symbol `sym` to an overriding member in type `pre`. */
private def rebind(pre: Type, sym: Symbol): Symbol = {
if (!sym.isOverridableMember || sym.owner == pre.typeSymbol) sym
- else pre.nonPrivateMember(sym.name).suchThat(sym => sym.isType || sym.isStable) orElse sym
+ else pre.nonPrivateMember(sym.name).suchThat(sym => sym.isType || (sym.isStable && !sym.hasVolatileType)) orElse sym
}
/** Convert a `super` prefix to a this-type if `sym` is abstract or final. */
@@ -3423,7 +3462,7 @@ trait Types
/** the canonical creator for a refined type with a given scope */
def refinedType(parents: List[Type], owner: Symbol, decls: Scope, pos: Position): Type = {
if (phase.erasedTypes)
- if (parents.isEmpty) ObjectClass.tpe else parents.head
+ if (parents.isEmpty) ObjectTpe else parents.head
else {
val clazz = owner.newRefinementClass(pos)
val result = RefinedType(parents, decls, clazz)
@@ -3648,13 +3687,13 @@ trait Types
// Hash consing --------------------------------------------------------------
private val initialUniquesCapacity = 4096
- private var uniques: util.HashSet[Type] = _
+ private var uniques: util.WeakHashSet[Type] = _
private var uniqueRunId = NoRunId
protected def unique[T <: Type](tp: T): T = {
if (Statistics.canEnable) Statistics.incCounter(rawTypeCount)
if (uniqueRunId != currentRunId) {
- uniques = util.HashSet[Type]("uniques", initialUniquesCapacity)
+ uniques = util.WeakHashSet[Type](initialUniquesCapacity)
perRunCaches.recordCache(uniques)
uniqueRunId = currentRunId
}
@@ -3680,6 +3719,43 @@ trait Types
object unwrapToStableClass extends ClassUnwrapper(existential = false) { }
object unwrapWrapperTypes extends TypeUnwrapper(true, true, true, true) { }
+ def elementExtract(container: Symbol, tp: Type): Type = {
+ assert(!container.isAliasType, container)
+ unwrapWrapperTypes(tp baseType container).dealiasWiden match {
+ case TypeRef(_, `container`, arg :: Nil) => arg
+ case _ => NoType
+ }
+ }
+ def elementExtractOption(container: Symbol, tp: Type): Option[Type] = {
+ elementExtract(container, tp) match {
+ case NoType => None
+ case tp => Some(tp)
+ }
+ }
+ def elementTest(container: Symbol, tp: Type)(f: Type => Boolean): Boolean = {
+ elementExtract(container, tp) match {
+ case NoType => false
+ case tp => f(tp)
+ }
+ }
+ def elementTransform(container: Symbol, tp: Type)(f: Type => Type): Type = {
+ elementExtract(container, tp) match {
+ case NoType => NoType
+ case tp => f(tp)
+ }
+ }
+
+ def transparentShallowTransform(container: Symbol, tp: Type)(f: Type => Type): Type = {
+ def loop(tp: Type): Type = tp match {
+ case tp @ AnnotatedType(_, underlying, _) => tp.copy(underlying = loop(underlying))
+ case tp @ ExistentialType(_, underlying) => tp.copy(underlying = loop(underlying))
+ case tp @ PolyType(_, resultType) => tp.copy(resultType = loop(resultType))
+ case tp @ NullaryMethodType(resultType) => tp.copy(resultType = loop(resultType))
+ case tp => elementTransform(container, tp)(el => appliedType(container, f(el))).orElse(f(tp))
+ }
+ loop(tp)
+ }
+
/** Repack existential types, otherwise they sometimes get unpacked in the
* wrong location (type inference comes up with an unexpected skolem)
*/
@@ -3884,9 +3960,14 @@ trait Types
}
}
- def normalizePlus(tp: Type) =
+ def normalizePlus(tp: Type) = (
if (isRawType(tp)) rawToExistential(tp)
- else tp.normalize
+ else tp.normalize match {
+ // Unify the two representations of module classes
+ case st @ SingleType(_, sym) if sym.isModule => st.underlying.normalize
+ case _ => tp.normalize
+ }
+ )
/*
todo: change to:
@@ -3948,6 +4029,16 @@ trait Types
case _ => false
}
+ def isExistentialType(tp: Type): Boolean = tp.dealias match {
+ case ExistentialType(_, _) => true
+ case _ => false
+ }
+
+ def isImplicitMethodType(tp: Type) = tp match {
+ case mt: MethodType => mt.isImplicit
+ case _ => false
+ }
+
/** This is defined and named as it is because the goal is to exclude source
* level types which are not value types (e.g. MethodType) without excluding
* necessary internal types such as WildcardType. There are also non-value
@@ -4085,7 +4176,7 @@ trait Types
val info1 = tp1.memberInfo(sym1)
val info2 = tp2.memberInfo(sym2).substThis(tp2.typeSymbol, tp1)
//System.out.println("specializes "+tp1+"."+sym1+":"+info1+sym1.locationString+" AND "+tp2+"."+sym2+":"+info2)//DEBUG
- ( sym2.isTerm && isSubType(info1, info2, depth) && (!sym2.isStable || sym1.isStable)
+ ( sym2.isTerm && isSubType(info1, info2, depth) && (!sym2.isStable || sym1.isStable) && (!sym1.hasVolatileType || sym2.hasVolatileType)
|| sym2.isAbstractType && {
val memberTp1 = tp1.memberType(sym1)
// println("kinds conform? "+(memberTp1, tp1, sym2, kindsConform(List(sym2), List(memberTp1), tp2, sym2.owner)))
@@ -4289,12 +4380,11 @@ trait Types
/** Compute lub (if `variance == Covariant`) or glb (if `variance == Contravariant`) of given list
* of types `tps`. All types in `tps` are typerefs or singletypes
* with the same symbol.
- * Return `Some(x)` if the computation succeeds with result `x`.
- * Return `None` if the computation fails.
+ * Return `x` if the computation succeeds with result `x`.
+ * Return `NoType` if the computation fails.
*/
- def mergePrefixAndArgs(tps: List[Type], variance: Variance, depth: Int): Option[Type] = tps match {
- case List(tp) =>
- Some(tp)
+ def mergePrefixAndArgs(tps: List[Type], variance: Variance, depth: Int): Type = tps match {
+ case tp :: Nil => tp
case TypeRef(_, sym, _) :: rest =>
val pres = tps map (_.prefix) // prefix normalizes automatically
val pre = if (variance.isPositive) lub(pres, depth) else glb(pres, depth)
@@ -4306,12 +4396,13 @@ trait Types
// if argss contain one value type and some other type, the lub is Object
// if argss contain several reference types, the lub is an array over lub of argtypes
if (argss exists typeListIsEmpty) {
- None // something is wrong: an array without a type arg.
- } else {
+ NoType // something is wrong: an array without a type arg.
+ }
+ else {
val args = argss map (_.head)
- if (args.tail forall (_ =:= args.head)) Some(typeRef(pre, sym, List(args.head)))
- else if (args exists (arg => isPrimitiveValueClass(arg.typeSymbol))) Some(ObjectClass.tpe)
- else Some(typeRef(pre, sym, List(lub(args))))
+ if (args.tail forall (_ =:= args.head)) typeRef(pre, sym, List(args.head))
+ else if (args exists (arg => isPrimitiveValueClass(arg.typeSymbol))) ObjectTpe
+ else typeRef(pre, sym, List(lub(args)))
}
}
else transposeSafe(argss) match {
@@ -4319,8 +4410,8 @@ trait Types
// transpose freaked out because of irregular argss
// catching just in case (shouldn't happen, but also doesn't cost us)
// [JZ] It happens: see SI-5683.
- debuglog("transposed irregular matrix!?" +(tps, argss))
- None
+ debuglog(s"transposed irregular matrix!? tps=$tps argss=$argss")
+ NoType
case Some(argsst) =>
val args = map2(sym.typeParams, argsst) { (tparam, as0) =>
val as = as0.distinct
@@ -4351,22 +4442,22 @@ trait Types
}
}
}
- if (args contains NoType) None
- else Some(existentialAbstraction(capturedParams.toList, typeRef(pre, sym, args)))
+ if (args contains NoType) NoType
+ else existentialAbstraction(capturedParams.toList, typeRef(pre, sym, args))
}
} catch {
- case ex: MalformedType => None
+ case ex: MalformedType => NoType
}
case SingleType(_, sym) :: rest =>
val pres = tps map (_.prefix)
val pre = if (variance.isPositive) lub(pres, depth) else glb(pres, depth)
- try {
- Some(singleType(pre, sym))
- } catch {
- case ex: MalformedType => None
- }
+ try singleType(pre, sym)
+ catch { case ex: MalformedType => NoType }
case ExistentialType(tparams, quantified) :: rest =>
- mergePrefixAndArgs(quantified :: rest, variance, depth) map (existentialAbstraction(tparams, _))
+ mergePrefixAndArgs(quantified :: rest, variance, depth) match {
+ case NoType => NoType
+ case tpe => existentialAbstraction(tparams, tpe)
+ }
case _ =>
abort(s"mergePrefixAndArgs($tps, $variance, $depth): unsupported tps")
}
@@ -4424,11 +4515,11 @@ trait Types
/** Perform operation `p` on arguments `tp1`, `arg2` and print trace of computation. */
protected def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = {
- Console.println(indent + tp1 + " " + op + " " + arg2 + "?" /* + "("+tp1.getClass+","+arg2.getClass+")"*/)
+ inform(indent + tp1 + " " + op + " " + arg2 + "?" /* + "("+tp1.getClass+","+arg2.getClass+")"*/)
indent = indent + " "
val result = p(tp1, arg2)
indent = indent stripSuffix " "
- Console.println(indent + result)
+ inform(indent + result)
result
}
@@ -4449,18 +4540,18 @@ trait Types
}
def isUnboundedGeneric(tp: Type) = tp match {
- case t @ TypeRef(_, sym, _) => sym.isAbstractType && !(t <:< AnyRefClass.tpe)
+ case t @ TypeRef(_, sym, _) => sym.isAbstractType && !(t <:< AnyRefTpe)
case _ => false
}
def isBoundedGeneric(tp: Type) = tp match {
- case TypeRef(_, sym, _) if sym.isAbstractType => (tp <:< AnyRefClass.tpe)
+ case TypeRef(_, sym, _) if sym.isAbstractType => (tp <:< AnyRefTpe)
case TypeRef(_, sym, _) => !isPrimitiveValueClass(sym)
case _ => false
}
// Add serializable to a list of parents, unless one of them already is
def addSerializable(ps: Type*): List[Type] = (
if (ps exists typeIsSubTypeOfSerializable) ps.toList
- else (ps :+ SerializableClass.tpe).toList
+ else (ps :+ SerializableTpe).toList
)
/** Members of the given class, other than those inherited
@@ -4473,7 +4564,7 @@ trait Types
def importableMembers(pre: Type): Scope = pre.members filter isImportable
def objToAny(tp: Type): Type =
- if (!phase.erasedTypes && tp.typeSymbol == ObjectClass) AnyClass.tpe
+ if (!phase.erasedTypes && tp.typeSymbol == ObjectClass) AnyTpe
else tp
val shorthands = Set(
@@ -4497,7 +4588,7 @@ trait Types
private[scala] val typeHasAnnotations = (tp: Type) => tp.annotations.nonEmpty
private[scala] val boundsContainType = (bounds: TypeBounds, tp: Type) => bounds containsType tp
private[scala] val typeListIsEmpty = (ts: List[Type]) => ts.isEmpty
- private[scala] val typeIsSubTypeOfSerializable = (tp: Type) => tp <:< SerializableClass.tpe
+ private[scala] val typeIsSubTypeOfSerializable = (tp: Type) => tp <:< SerializableTpe
private[scala] val typeIsNothing = (tp: Type) => tp.typeSymbolDirect eq NothingClass
private[scala] val typeIsAny = (tp: Type) => tp.typeSymbolDirect eq AnyClass
private[scala] val typeIsHigherKinded = (tp: Type) => tp.isHigherKinded
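Illustrative aside (not part of the patch; names are invented): several hunks above (Type#orElse, elementExtract, mergePrefixAndArgs) replace Option[Type] results with NoType used as a sentinel plus a by-name orElse. The pattern in isolation:

    object SentinelOrElseDemo {
      sealed trait Ty {
        // evaluate the alternative only when this is the sentinel, as Type#orElse does with NoType
        final def orElse(alt: => Ty): Ty = if (this eq NoTy) alt else this
      }
      case object NoTy  extends Ty
      case object IntTy extends Ty
      def main(args: Array[String]): Unit = {
        println(NoTy orElse IntTy)   // IntTy: the sentinel triggers the fallback
        println(IntTy orElse NoTy)   // IntTy: a real result short-circuits, alt is never evaluated
      }
    }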
diff --git a/src/reflect/scala/reflect/internal/Variance.scala b/src/reflect/scala/reflect/internal/Variance.scala
index 007d56eb35..3480161567 100644
--- a/src/reflect/scala/reflect/internal/Variance.scala
+++ b/src/reflect/scala/reflect/internal/Variance.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.reflect
+package scala
+package reflect
package internal
import Variance._
diff --git a/src/reflect/scala/reflect/internal/Variances.scala b/src/reflect/scala/reflect/internal/Variances.scala
index 716e49b303..78df3c9617 100644
--- a/src/reflect/scala/reflect/internal/Variances.scala
+++ b/src/reflect/scala/reflect/internal/Variances.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import Variance._
diff --git a/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala b/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
index 058ff61fbf..2c9f909629 100644
--- a/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
+++ b/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
package annotations
diff --git a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala
index 367a3b8b19..eb266e8125 100644
--- a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala
+++ b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala
@@ -5,7 +5,8 @@
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.reflect.internal.pickling
+package scala
+package reflect.internal.pickling
object ByteCodecs {
@@ -127,7 +128,7 @@ object ByteCodecs {
var j = 0
val dstlen = (srclen * 7 + 7) / 8
while (i + 7 < srclen) {
- var out: Int = src(i)
+ var out: Int = src(i).toInt
var in: Byte = src(i + 1)
src(j) = (out | (in & 0x01) << 7).toByte
out = in >>> 1
@@ -152,7 +153,7 @@ object ByteCodecs {
j += 7
}
if (i < srclen) {
- var out: Int = src(i)
+ var out: Int = src(i).toInt
if (i + 1 < srclen) {
var in: Byte = src(i + 1)
src(j) = (out | (in & 0x01) << 7).toByte; j += 1
@@ -195,10 +196,10 @@ object ByteCodecs {
*
* Sometimes returns (length+1) of the decoded array. Example:
*
- * scala> val enc = reflect.generic.ByteCodecs.encode(Array(1,2,3))
+ * scala> val enc = scala.reflect.generic.ByteCodecs.encode(Array(1,2,3))
* enc: Array[Byte] = Array(2, 5, 13, 1)
*
- * scala> reflect.generic.ByteCodecs.decode(enc)
+ * scala> scala.reflect.generic.ByteCodecs.decode(enc)
* res43: Int = 4
*
* scala> enc
diff --git a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala
index c9dfb7fe71..a814256f8e 100644
--- a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala
+++ b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
package pickling
@@ -93,7 +94,7 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
/** Read a byte */
def readByte(): Int = {
- val x = bytes(readIndex); readIndex += 1; x
+ val x = bytes(readIndex).toInt; readIndex += 1; x
}
/** Read a natural number in big endian format, base 128.
@@ -104,7 +105,7 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
var b = 0L
var x = 0L
do {
- b = readByte()
+ b = readByte().toLong
x = (x << 7) + (b & 0x7f)
} while ((b & 0x80) != 0L)
x
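Illustrative aside (not part of the patch; names are invented): the readNat/readLongNat loop touched above decodes a big-endian, base-128 natural number (7 payload bits per byte, high bit set on all but the last byte). A runnable round-trip sketch of that encoding:

    object Base128Demo {
      def write(x: Long): List[Int] = {
        def loop(v: Long, acc: List[Int]): List[Int] =
          if (v == 0 && acc.nonEmpty) acc
          else loop(v >>> 7, ((v & 0x7f).toInt | (if (acc.isEmpty) 0 else 0x80)) :: acc)
        loop(x, Nil)
      }
      // the decoder is the same shift-and-add loop as PickleBuffer.readNat above
      def read(bytes: List[Int]): Long =
        bytes.foldLeft(0L)((x, b) => (x << 7) + (b & 0x7f))
      def main(args: Array[String]): Unit = {
        val n = 300L
        println(write(n).map(b => f"$b%02x"))   // List(82, 2c)
        println(read(write(n)))                 // 300
      }
    }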
diff --git a/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala
index 3722c77aa2..ce0ceec688 100644
--- a/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala
+++ b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
package pickling
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
index c940d863f7..6cffdbc193 100644
--- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
package pickling
@@ -851,7 +852,8 @@ abstract class UnPickler {
private val p = phase
override def complete(sym: Symbol) : Unit = try {
val tp = at(i, () => readType(sym.isTerm)) // after NMT_TRANSITION, revert `() => readType(sym.isTerm)` to `readType`
- enteringPhase(p) (sym setInfo tp)
+ if (p ne null)
+ enteringPhase(p) (sym setInfo tp)
if (currentRunId != definedAtRunId)
sym.setInfo(adaptToNewRunMap(tp))
}
diff --git a/src/reflect/scala/reflect/internal/settings/AbsSettings.scala b/src/reflect/scala/reflect/internal/settings/AbsSettings.scala
index a6fb4187ca..859f703d97 100644
--- a/src/reflect/scala/reflect/internal/settings/AbsSettings.scala
+++ b/src/reflect/scala/reflect/internal/settings/AbsSettings.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.reflect.internal
+package scala
+package reflect.internal
package settings
/** A Settings abstraction boiled out of the original highly mutable Settings
diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
index 68f9fc8e83..e21e95903b 100644
--- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
+++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
@@ -4,7 +4,8 @@
*/
// $Id$
-package scala.reflect.internal
+package scala
+package reflect.internal
package settings
/** A mutable Settings object.
diff --git a/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala b/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala
index e5ddd8f359..f879960407 100644
--- a/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala
+++ b/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
package tpe
diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
index 921d2e3d66..1d3c6b0f23 100644
--- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
+++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
@@ -1,8 +1,10 @@
-package scala.reflect
+package scala
+package reflect
package internal
package tpe
-import scala.collection.{ mutable }
+import scala.collection.mutable
+import scala.annotation.tailrec
import util.Statistics
import Variance._
@@ -11,7 +13,7 @@ private[internal] trait GlbLubs {
import definitions._
import TypesStats._
- private final val printLubs = sys.props contains "scalac.debug.lub"
+ private final val printLubs = scala.sys.props contains "scalac.debug.lub"
/** In case anyone wants to turn off lub verification without reverting anything. */
private final val verifyLubs = true
@@ -86,7 +88,7 @@ private[internal] trait GlbLubs {
case _ => tp
}
// pretypes is a tail-recursion-preserving accumulator.
- @annotation.tailrec def loop(pretypes: List[Type], tsBts: List[List[Type]]): List[Type] = {
+ @tailrec def loop(pretypes: List[Type], tsBts: List[List[Type]]): List[Type] = {
lubListDepth += 1
if (tsBts.isEmpty || (tsBts exists typeListIsEmpty)) pretypes.reverse
@@ -123,8 +125,8 @@ private[internal] trait GlbLubs {
}
val tails = tsBts map (_.tail)
mergePrefixAndArgs(elimSub(ts1, depth) map elimHigherOrderTypeParam, Covariant, depth) match {
- case Some(tp) => loop(tp :: pretypes, tails)
- case _ => loop(pretypes, tails)
+ case NoType => loop(pretypes, tails)
+ case tp => loop(tp :: pretypes, tails)
}
}
else {
@@ -214,17 +216,40 @@ private[internal] trait GlbLubs {
(strippedTypes, quantified)
}
- def weakLub(ts: List[Type]) =
- if (ts.nonEmpty && (ts forall isNumericValueType)) (numericLub(ts), true)
- else if (ts exists typeHasAnnotations)
- (annotationsLub(lub(ts map (_.withoutAnnotations)), ts), true)
- else (lub(ts), false)
+ /** Does this set of types have the same weak lub as
+ * it does regular lub? This is exposed so lub callers
+ * can discover whether the trees they are typing
+ * may require further adaptation. It may return false
+ * negatives, but it will not return false positives.
+ */
+ def sameWeakLubAsLub(tps: List[Type]) = tps match {
+ case Nil => true
+ case tp :: Nil => !typeHasAnnotations(tp)
+ case tps => !(tps exists typeHasAnnotations) && !(tps forall isNumericValueType)
+ }
+
+ /** If the arguments are all numeric value types, the numeric
+ * lub according to the weak conformance spec. If any argument
+ * has type annotations, take the lub of the unannotated type
+ * and call the analyzerPlugin method annotationsLub so it can
+ * be further altered. Otherwise, the regular lub.
+ */
+ def weakLub(tps: List[Type]): Type = (
+ if (tps.isEmpty)
+ NothingTpe
+ else if (tps forall isNumericValueType)
+ numericLub(tps)
+ else if (tps exists typeHasAnnotations)
+ annotationsLub(lub(tps map (_.withoutAnnotations)), tps)
+ else
+ lub(tps)
+ )
def numericLub(ts: List[Type]) =
ts reduceLeft ((t1, t2) =>
if (isNumericSubType(t1, t2)) t2
else if (isNumericSubType(t2, t1)) t1
- else IntClass.tpe)
+ else IntTpe)
private val lubResults = new mutable.HashMap[(Int, List[Type]), Type]
private val glbResults = new mutable.HashMap[(Int, List[Type]), Type]
@@ -247,9 +272,9 @@ private[internal] trait GlbLubs {
}
def lub(ts: List[Type]): Type = ts match {
- case List() => NothingClass.tpe
- case List(t) => t
- case _ =>
+ case Nil => NothingTpe
+ case t :: Nil => t
+ case _ =>
if (Statistics.canEnable) Statistics.incCounter(lubCount)
val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null
try {
@@ -276,7 +301,7 @@ private[internal] trait GlbLubs {
/** The least upper bound wrt <:< of a list of types */
protected[internal] def lub(ts: List[Type], depth: Int): Type = {
def lub0(ts0: List[Type]): Type = elimSub(ts0, depth) match {
- case List() => NothingClass.tpe
+ case List() => NothingTpe
case List(t) => t
case ts @ PolyType(tparams, _) :: _ =>
val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
@@ -291,12 +316,12 @@ private[internal] trait GlbLubs {
case ts @ AnnotatedType(annots, tpe, _) :: rest =>
annotationsLub(lub0(ts map (_.withoutAnnotations)), ts)
case ts =>
- lubResults get (depth, ts) match {
+ lubResults get ((depth, ts)) match {
case Some(lubType) =>
lubType
case None =>
- lubResults((depth, ts)) = AnyClass.tpe
- val res = if (depth < 0) AnyClass.tpe else lub1(ts)
+ lubResults((depth, ts)) = AnyTpe
+ val res = if (depth < 0) AnyTpe else lub1(ts)
lubResults((depth, ts)) = res
res
}
@@ -412,7 +437,7 @@ private[internal] trait GlbLubs {
/** The greatest lower bound of a list of types (as determined by `<:<`). */
def glb(ts: List[Type]): Type = elimSuper(ts) match {
- case List() => AnyClass.tpe
+ case List() => AnyTpe
case List(t) => t
case ts0 =>
if (Statistics.canEnable) Statistics.incCounter(lubCount)
@@ -427,7 +452,7 @@ private[internal] trait GlbLubs {
}
protected[internal] def glb(ts: List[Type], depth: Int): Type = elimSuper(ts) match {
- case List() => AnyClass.tpe
+ case List() => AnyTpe
case List(t) => t
case ts0 => glbNorm(ts0, depth)
}
@@ -436,7 +461,7 @@ private[internal] trait GlbLubs {
* with regard to `elimSuper`. */
protected def glbNorm(ts: List[Type], depth: Int): Type = {
def glb0(ts0: List[Type]): Type = ts0 match {
- case List() => AnyClass.tpe
+ case List() => AnyTpe
case List(t) => t
case ts @ PolyType(tparams, _) :: _ =>
val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
@@ -449,12 +474,12 @@ private[internal] trait GlbLubs {
case ts @ TypeBounds(_, _) :: rest =>
TypeBounds(lub(ts map (_.bounds.lo), depth), glb(ts map (_.bounds.hi), depth))
case ts =>
- glbResults get (depth, ts) match {
+ glbResults get ((depth, ts)) match {
case Some(glbType) =>
glbType
case _ =>
- glbResults((depth, ts)) = NothingClass.tpe
- val res = if (depth < 0) NothingClass.tpe else glb1(ts)
+ glbResults((depth, ts)) = NothingTpe
+ val res = if (depth < 0) NothingTpe else glb1(ts)
glbResults((depth, ts)) = res
res
}
@@ -531,8 +556,8 @@ private[internal] trait GlbLubs {
existentialAbstraction(tparams, glbType)
} catch {
case GlbFailure =>
- if (ts forall (t => NullClass.tpe <:< t)) NullClass.tpe
- else NothingClass.tpe
+ if (ts forall (t => NullTpe <:< t)) NullTpe
+ else NothingTpe
}
}
// if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG
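Illustrative aside (not part of the patch; names are invented): the reworked weakLub above handles three cases: all-numeric arguments (numeric lub under weak conformance), annotated arguments (annotationsLub), and everything else (the regular lub). How weak conformance shows up at the surface, under standard Scala 2 inference:

    object WeakLubDemo {
      def main(args: Array[String]): Unit = {
        val numeric = List(1, 2L)    // Int and Long are numeric value types: the weak lub is Long
        val asLong: Long = numeric.head
        val mixed = List(1, "one")   // not all numeric: the ordinary lub applies, here Any
        val asAny: Any = mixed.head
        println((asLong, asAny))
      }
    }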
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
index d408857cf3..63f17dff34 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
@@ -1,10 +1,10 @@
-package scala.reflect
+package scala
+package reflect
package internal
package tpe
import scala.collection.{ mutable }
-import Flags._
-import util.Statistics
+import util.{ Statistics, TriState }
import scala.annotation.tailrec
trait TypeComparers {
@@ -57,9 +57,12 @@ trait TypeComparers {
} else
false
- private def equalSymsAndPrefixes(sym1: Symbol, pre1: Type, sym2: Symbol, pre2: Type): Boolean =
- if (sym1 == sym2) sym1.hasPackageFlag || sym1.owner.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2
- else (sym1.name == sym2.name) && isUnifiable(pre1, pre2)
+ private def equalSymsAndPrefixes(sym1: Symbol, pre1: Type, sym2: Symbol, pre2: Type): Boolean = (
+ if (sym1 == sym2)
+ sym1.hasPackageFlag || sym1.owner.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2
+ else
+ (sym1.name == sym2.name) && isUnifiable(pre1, pre2)
+ )
def isDifferentType(tp1: Type, tp2: Type): Boolean = try {
@@ -115,185 +118,118 @@ trait TypeComparers {
// if (subsametypeRecursions == 0) undoLog.clear()
}
- private def isSameType1(tp1: Type, tp2: Type): Boolean = {
- if ((tp1 eq tp2) ||
- (tp1 eq ErrorType) || (tp1 eq WildcardType) ||
- (tp2 eq ErrorType) || (tp2 eq WildcardType))
- true
- else if ((tp1 eq NoType) || (tp2 eq NoType))
- false
- else if (tp1 eq NoPrefix) // !! I do not see how this would be warranted by the spec
- tp2.typeSymbol.isPackageClass
- else if (tp2 eq NoPrefix) // !! I do not see how this would be warranted by the spec
- tp1.typeSymbol.isPackageClass
- else {
- isSameType2(tp1, tp2) || {
- val tp1n = normalizePlus(tp1)
- val tp2n = normalizePlus(tp2)
- ((tp1n ne tp1) || (tp2n ne tp2)) && isSameType(tp1n, tp2n)
- }
+ // @pre: at least one argument has annotations
+ private def sameAnnotatedTypes(tp1: Type, tp2: Type) = (
+ annotationsConform(tp1, tp2)
+ && annotationsConform(tp2, tp1)
+ && (tp1.withoutAnnotations =:= tp2.withoutAnnotations)
+ )
+ // We flush out any AnnotatedTypes before calling isSameType2 because
+ // unlike most other subclasses of Type, we have to allow for equivalence of any
+ // combination of { tp1, tp2 } { is, is not } an AnnotatedType - this is because the
+ // logic of "annotationsConform" is arbitrary and unknown.
+ private def isSameType1(tp1: Type, tp2: Type): Boolean = typeRelationPreCheck(tp1, tp2) match {
+ case state if state.isKnown => state.booleanValue
+ case _ if typeHasAnnotations(tp1) || typeHasAnnotations(tp2) => sameAnnotatedTypes(tp1, tp2)
+ case _ => isSameType2(tp1, tp2)
+ }
+
+ private def isSameHKTypes(tp1: Type, tp2: Type) = (
+ tp1.isHigherKinded
+ && tp2.isHigherKinded
+ && (tp1.normalize =:= tp2.normalize)
+ )
+ private def isSameTypeRef(tr1: TypeRef, tr2: TypeRef) = (
+ equalSymsAndPrefixes(tr1.sym, tr1.pre, tr2.sym, tr2.pre)
+ && (isSameHKTypes(tr1, tr2) || isSameTypes(tr1.args, tr2.args))
+ )
+
+ private def isSameSingletonType(tp1: SingletonType, tp2: SingletonType): Boolean = {
+ // We don't use dealiasWiden here because we are looking for the SAME type,
+ // and widening leads to a less specific type. The logic is along the lines of
+ // dealiasAndFollowUnderlyingAsLongAsTheTypeIsEquivalent. This method is only
+ // called after a surface comparison has failed, so if chaseDealiasedUnderlying
+ // does not produce a type other than tp1 and tp2, return false.
+ @tailrec def chaseDealiasedUnderlying(tp: Type): Type = tp.underlying.dealias match {
+ case next: SingletonType if tp ne next => chaseDealiasedUnderlying(next)
+ case _ => tp
}
+ val origin1 = chaseDealiasedUnderlying(tp1)
+ val origin2 = chaseDealiasedUnderlying(tp2)
+ ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2)
+ }
+
+ private def isSameMethodType(mt1: MethodType, mt2: MethodType) = (
+ isSameTypes(mt1.paramTypes, mt2.paramTypes)
+ && (mt1.resultType =:= mt2.resultType.substSym(mt2.params, mt1.params))
+ && (mt1.isImplicit == mt2.isImplicit)
+ )
+
+ private def equalTypeParamsAndResult(tparams1: List[Symbol], res1: Type, tparams2: List[Symbol], res2: Type) = {
+ def subst(info: Type) = info.substSym(tparams2, tparams1)
+ // corresponds does not check length of two sequences before checking the predicate,
+ // but SubstMap assumes it has been checked (SI-2956)
+ ( sameLength(tparams1, tparams2)
+ && (tparams1 corresponds tparams2)((p1, p2) => p1.info =:= subst(p2.info))
+ && (res1 =:= subst(res2))
+ )
}
def isSameType2(tp1: Type, tp2: Type): Boolean = {
- tp1 match {
- case tr1: TypeRef =>
- tp2 match {
- case tr2: TypeRef =>
- return (equalSymsAndPrefixes(tr1.sym, tr1.pre, tr2.sym, tr2.pre) &&
- ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
- isSameTypes(tr1.args, tr2.args))) ||
- ((tr1.pre, tr2.pre) match {
- case (tv @ TypeVar(_,_), _) => tv.registerTypeSelection(tr1.sym, tr2)
- case (_, tv @ TypeVar(_,_)) => tv.registerTypeSelection(tr2.sym, tr1)
- case _ => false
- })
- case _: SingleType =>
- return isSameType2(tp2, tp1) // put singleton type on the left, caught below
- case _ =>
- }
- case tt1: ThisType =>
- tp2 match {
- case tt2: ThisType =>
- if (tt1.sym == tt2.sym) return true
- case _ =>
- }
- case st1: SingleType =>
- tp2 match {
- case st2: SingleType =>
- if (equalSymsAndPrefixes(st1.sym, st1.pre, st2.sym, st2.pre)) return true
- case TypeRef(pre2, sym2, Nil) =>
- if (sym2.isModuleClass && equalSymsAndPrefixes(st1.sym, st1.pre, sym2.sourceModule, pre2)) return true
- case _ =>
- }
- case ct1: ConstantType =>
- tp2 match {
- case ct2: ConstantType =>
- return (ct1.value == ct2.value)
- case _ =>
- }
- case rt1: RefinedType =>
- tp2 match {
- case rt2: RefinedType => //
- def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall {
- sym2 =>
- var e1 = s1.lookupEntry(sym2.name)
- (e1 ne null) && {
- val substSym = sym2.info.substThis(sym2.owner, e1.sym.owner)
- var isEqual = false
- while (!isEqual && (e1 ne null)) {
- isEqual = e1.sym.info =:= substSym
- e1 = s1.lookupNextEntry(e1)
- }
- isEqual
- }
- }
- //Console.println("is same? " + tp1 + " " + tp2 + " " + tp1.typeSymbol.owner + " " + tp2.typeSymbol.owner)//DEBUG
- return isSameTypes(rt1.parents, rt2.parents) && {
- val decls1 = rt1.decls
- val decls2 = rt2.decls
- isSubScope(decls1, decls2) && isSubScope(decls2, decls1)
- }
- case _ =>
- }
- case mt1: MethodType =>
- tp2 match {
- case mt2: MethodType =>
- return isSameTypes(mt1.paramTypes, mt2.paramTypes) &&
- mt1.resultType =:= mt2.resultType.substSym(mt2.params, mt1.params) &&
- mt1.isImplicit == mt2.isImplicit
- // note: no case NullaryMethodType(restpe) => return mt1.params.isEmpty && mt1.resultType =:= restpe
- case _ =>
- }
- case NullaryMethodType(restpe1) =>
- tp2 match {
- // note: no case mt2: MethodType => return mt2.params.isEmpty && restpe =:= mt2.resultType
- case NullaryMethodType(restpe2) =>
- return restpe1 =:= restpe2
- case _ =>
- }
- case PolyType(tparams1, res1) =>
- tp2 match {
- case PolyType(tparams2, res2) =>
- // assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
- // @M looks like it might suffer from same problem as #2210
- return (
- (sameLength(tparams1, tparams2)) && // corresponds does not check length of two sequences before checking the predicate
- (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
- res1 =:= res2.substSym(tparams2, tparams1)
- )
- case _ =>
- }
- case ExistentialType(tparams1, res1) =>
- tp2 match {
- case ExistentialType(tparams2, res2) =>
- // @M looks like it might suffer from same problem as #2210
- return (
- // corresponds does not check length of two sequences before checking the predicate -- faster & needed to avoid crasher in #2956
- sameLength(tparams1, tparams2) &&
- (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
- res1 =:= res2.substSym(tparams2, tparams1)
- )
- case _ =>
- }
- case TypeBounds(lo1, hi1) =>
- tp2 match {
- case TypeBounds(lo2, hi2) =>
- return lo1 =:= lo2 && hi1 =:= hi2
- case _ =>
- }
- case BoundedWildcardType(bounds) =>
- return bounds containsType tp2
- case _ =>
- }
- tp2 match {
- case BoundedWildcardType(bounds) =>
- return bounds containsType tp1
- case _ =>
- }
- tp1 match {
- case tv @ TypeVar(_,_) =>
- return tv.registerTypeEquality(tp2, typeVarLHS = true)
- case _ =>
- }
- tp2 match {
- case tv @ TypeVar(_,_) =>
- return tv.registerTypeEquality(tp1, typeVarLHS = false)
- case _ =>
- }
- tp1 match {
- case _: AnnotatedType =>
- return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
- case _ =>
+ def retry(lhs: Type, rhs: Type) = ((lhs ne tp1) || (rhs ne tp2)) && isSameType(lhs, rhs)
+
+ /* Here we highlight those unfortunate type-like constructs which
+ * are hidden bundles of mutable state, cruising the type system picking
+ * up any type constraints naive enough to get into their hot rods.
+ */
+ def mutateNonTypeConstructs(lhs: Type, rhs: Type) = lhs match {
+ case BoundedWildcardType(bounds) => bounds containsType rhs
+ case tv @ TypeVar(_, _) => tv.registerTypeEquality(rhs, typeVarLHS = lhs eq tp1)
+ case TypeRef(tv @ TypeVar(_, _), sym, _) => tv.registerTypeSelection(sym, rhs)
+ case _ => false
}
- tp2 match {
- case _: AnnotatedType =>
- return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
- case _ =>
+ /* SingletonType receives this additional scrutiny because there are
+ * a variety of Types which must be treated as equivalent even if they
+ * arrive in different guises. For instance, object Foo in the following
+ * might appear in (at least) the four given below.
+ *
+ * package pkg { object Foo ; type Bar = Foo.type }
+ *
+ * ModuleClassTypeRef(pkg.type, Foo: ModuleClassSymbol, Nil)
+ * ThisType(Foo: ModuleClassSymbol)
+ * SingleType(pkg.type, Foo: ModuleSymbol)
+ * AliasTypeRef(NoPrefix, sym: AliasSymbol, Nil) where sym.info is one of the above
+ */
+ def sameSingletonType = tp1 match {
+ case tp1: SingletonType => tp2 match {
+ case tp2: SingletonType => isSameSingletonType(tp1, tp2)
+ case _ => false
+ }
+ case _ => false
}
- tp1 match {
- case _: SingletonType =>
- tp2 match {
- case _: SingletonType =>
- def chaseDealiasedUnderlying(tp: Type): Type = {
- var origin = tp
- var next = origin.underlying.dealias
- while (next.isInstanceOf[SingletonType]) {
- assert(origin ne next, origin)
- origin = next
- next = origin.underlying.dealias
- }
- origin
- }
- val origin1 = chaseDealiasedUnderlying(tp1)
- val origin2 = chaseDealiasedUnderlying(tp2)
- ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2)
- case _ =>
- false
- }
- case _ =>
- false
+ /* Those false cases certainly are ugly. There's a proposed SIP to deuglify it.
+ * https://docs.google.com/a/improving.org/document/d/1onPrzSqyDpHScc9PS_hpxJwa3FlPtthxw-bAuuEe8uA
+ */
+ def sameTypeAndSameCaseClass = tp1 match {
+ case tp1: TypeRef => tp2 match { case tp2: TypeRef => isSameTypeRef(tp1, tp2) ; case _ => false }
+ case tp1: MethodType => tp2 match { case tp2: MethodType => isSameMethodType(tp1, tp2) ; case _ => false }
+ case RefinedType(ps1, decls1) => tp2 match { case RefinedType(ps2, decls2) => isSameTypes(ps1, ps2) && (decls1 isSameScope decls2) ; case _ => false }
+ case SingleType(pre1, sym1) => tp2 match { case SingleType(pre2, sym2) => equalSymsAndPrefixes(sym1, pre1, sym2, pre2) ; case _ => false }
+ case PolyType(ps1, res1) => tp2 match { case PolyType(ps2, res2) => equalTypeParamsAndResult(ps1, res1, ps2, res2) ; case _ => false }
+ case ExistentialType(qs1, res1) => tp2 match { case ExistentialType(qs2, res2) => equalTypeParamsAndResult(qs1, res1, qs2, res2) ; case _ => false }
+ case ThisType(sym1) => tp2 match { case ThisType(sym2) => sym1 == sym2 ; case _ => false }
+ case ConstantType(c1) => tp2 match { case ConstantType(c2) => c1 == c2 ; case _ => false }
+ case NullaryMethodType(res1) => tp2 match { case NullaryMethodType(res2) => res1 =:= res2 ; case _ => false }
+ case TypeBounds(lo1, hi1) => tp2 match { case TypeBounds(lo2, hi2) => lo1 =:= lo2 && hi1 =:= hi2 ; case _ => false }
+ case _ => false
}
+
+ ( sameTypeAndSameCaseClass
+ || sameSingletonType
+ || mutateNonTypeConstructs(tp1, tp2)
+ || mutateNonTypeConstructs(tp2, tp1)
+ || retry(normalizePlus(tp1), normalizePlus(tp2))
+ )
}
def isSubType(tp1: Type, tp2: Type): Boolean = isSubType(tp1, tp2, AnyDepth)
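
The refactored isSameType above ends with retry(normalizePlus(tp1), normalizePlus(tp2)); the reference-inequality guard inside retry is what keeps that fallback from recursing forever when normalization is a no-op. A minimal standalone sketch of the same guard, using made-up types rather than the compiler's Type (illustrative only):

object RetryGuardSketch {
  final case class Ty(name: String) {
    // Hypothetical normalizer: returns `this` (the same instance) when there is nothing to do.
    def normalized: Ty = if (name.endsWith(".type")) Ty(name.stripSuffix(".type")) else this
  }

  def isSame(tp1: Ty, tp2: Ty): Boolean = {
    // Recurse only when normalization handed us at least one new instance.
    def retry(lhs: Ty, rhs: Ty) = ((lhs ne tp1) || (rhs ne tp2)) && isSame(lhs, rhs)
    (tp1 == tp2) || retry(tp1.normalized, tp2.normalized)
  }

  def main(args: Array[String]): Unit = {
    println(isSame(Ty("Foo.type"), Ty("Foo")))  // true, found after one retry
    println(isSame(Ty("Foo"), Ty("Bar")))       // false, and the guard prevents looping
  }
}
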
@@ -333,12 +269,12 @@ trait TypeComparers {
else
try {
pendingSubTypes += p
- isSubType2(tp1, tp2, depth)
+ isSubType1(tp1, tp2, depth)
} finally {
pendingSubTypes -= p
}
} else {
- isSubType2(tp1, tp2, depth)
+ isSubType1(tp1, tp2, depth)
}
} finally if (!result) undoLog.undoTo(before)
@@ -351,6 +287,39 @@ trait TypeComparers {
// if (subsametypeRecursions == 0) undoLog.clear()
}
+ /** Check whether the subtype or type equivalence relationship
+   * between the arguments is predetermined. Returns a tri-state
+ * value: True means the arguments are always sub/same types,
+ * False means they never are, and Unknown means the caller
+ * will have to figure things out.
+ */
+ private def typeRelationPreCheck(tp1: Type, tp2: Type): TriState = {
+ def isTrue = (
+ (tp1 eq tp2)
+ || isErrorOrWildcard(tp1)
+ || isErrorOrWildcard(tp2)
+ || (tp1 eq NoPrefix) && tp2.typeSymbol.isPackageClass // !! I do not see how this would be warranted by the spec
+ || (tp2 eq NoPrefix) && tp1.typeSymbol.isPackageClass // !! I do not see how this would be warranted by the spec
+ )
+ // isFalse, assuming !isTrue
+ def isFalse = (
+ (tp1 eq NoType)
+ || (tp2 eq NoType)
+ || (tp1 eq NoPrefix)
+ || (tp2 eq NoPrefix)
+ )
+
+ if (isTrue) TriState.True
+ else if (isFalse) TriState.False
+ else TriState.Unknown
+ }
+
+ private def isSubType1(tp1: Type, tp2: Type, depth: Int): Boolean = typeRelationPreCheck(tp1, tp2) match {
+ case state if state.isKnown => state.booleanValue
+ case _ if typeHasAnnotations(tp1) || typeHasAnnotations(tp2) => annotationsConform(tp1, tp2) && (tp1.withoutAnnotations <:< tp2.withoutAnnotations)
+ case _ => isSubType2(tp1, tp2, depth)
+ }
+
private def isPolySubType(tp1: PolyType, tp2: PolyType): Boolean = {
val PolyType(tparams1, res1) = tp1
val PolyType(tparams2, res2) = tp2
@@ -389,12 +358,13 @@ trait TypeComparers {
/** Does type `tp1` conform to `tp2`? */
private def isSubType2(tp1: Type, tp2: Type, depth: Int): Boolean = {
- if ((tp1 eq tp2) || isErrorOrWildcard(tp1) || isErrorOrWildcard(tp2)) return true
- if ((tp1 eq NoType) || (tp2 eq NoType)) return false
- if (tp1 eq NoPrefix) return (tp2 eq NoPrefix) || tp2.typeSymbol.isPackageClass // !! I do not see how the "isPackageClass" would be warranted by the spec
- if (tp2 eq NoPrefix) return tp1.typeSymbol.isPackageClass
- if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2)) return tp1 =:= tp2
- if (tp1.isHigherKinded || tp2.isHigherKinded) return isHKSubType(tp1, tp2, depth)
+ def retry(lhs: Type, rhs: Type) = ((lhs ne tp1) || (rhs ne tp2)) && isSubType(lhs, rhs, depth)
+
+ if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2))
+ return (tp1 =:= tp2) || retry(tp1.underlying, tp2)
+
+ if (tp1.isHigherKinded || tp2.isHigherKinded)
+ return isHKSubType(tp1, tp2, depth)
/* First try, on the right:
* - unwrap Annotated types, BoundedWildcardTypes,
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
index b0feb0a7fb..123c296f95 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
package tpe
@@ -87,8 +88,8 @@ private[internal] trait TypeConstraints {
/** @PP: Unable to see why these apparently constant types should need vals
* in every TypeConstraint, I lifted them out.
*/
- private lazy val numericLoBound = IntClass.tpe
- private lazy val numericHiBound = intersectionType(List(ByteClass.tpe, CharClass.tpe), ScalaPackageClass)
+ private lazy val numericLoBound = IntTpe
+ private lazy val numericHiBound = intersectionType(List(ByteTpe, CharTpe), ScalaPackageClass)
/** A class expressing upper and lower bounds constraints of type variables,
* as well as their instantiations.
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
index 0f9db31ec1..bebc419c7c 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
package tpe
@@ -29,10 +30,10 @@ private[internal] trait TypeMaps {
def apply(tp: Type): Type = {
tp match {
case TypeRef(_, SingletonClass, _) =>
- AnyClass.tpe
+ AnyTpe
case tp1 @ RefinedType(parents, decls) =>
parents filter (_.typeSymbol != SingletonClass) match {
- case Nil => AnyClass.tpe
+ case Nil => AnyTpe
case p :: Nil if decls.isEmpty => mapOver(p)
case ps => mapOver(copyRefinedType(tp1, ps, decls))
}
@@ -326,7 +327,7 @@ private[internal] trait TypeMaps {
private var expanded = immutable.Set[Symbol]()
def apply(tp: Type): Type = tp match {
case TypeRef(pre, sym, List()) if isRawIfWithoutArgs(sym) =>
- if (expanded contains sym) AnyRefClass.tpe
+ if (expanded contains sym) AnyRefTpe
else try {
expanded += sym
val eparams = mapOver(typeParamsToExistentials(sym))
@@ -525,15 +526,39 @@ private[internal] trait TypeMaps {
val TypeRef(_, rhsSym, rhsArgs) = rhs
require(lhsSym.safeOwner == rhsSym, s"$lhsSym is not a type parameter of $rhsSym")
- // Find the type parameter position; we'll use the corresponding argument
- val argIndex = rhsSym.typeParams indexOf lhsSym
-
- if (argIndex >= 0 && argIndex < rhsArgs.length) // @M! don't just replace the whole thing, might be followed by type application
- appliedType(rhsArgs(argIndex), lhsArgs mapConserve this)
- else if (rhsSym.tpe_*.parents exists typeIsErroneous) // don't be too zealous with the exceptions, see #2641
+ // Find the type parameter position; we'll use the corresponding argument.
+ // Why are we checking by name rather than by equality? Because for
+ // reasons which aren't yet fully clear, we can arrive here holding a type
+ // parameter whose owner is rhsSym, and which shares the name of an actual
+ // type parameter of rhsSym, but which is not among the type parameters of
+ // rhsSym. One can see examples of it at SI-4365.
+ val argIndex = rhsSym.typeParams indexWhere (lhsSym.name == _.name)
+ // don't be too zealous with the exceptions, see #2641
+ if (argIndex < 0 && rhs.parents.exists(typeIsErroneous))
ErrorType
- else
- abort(s"something is wrong: cannot make sense of type application\n $lhs\n $rhs")
+ else {
+ // It's easy to get here when working on hardcore type machinery (not to
+ // mention when not doing so, see above) so let's provide a standout error.
+ def own_s(s: Symbol) = s.nameString + " in " + s.safeOwner.nameString
+ def explain =
+ sm"""| sought ${own_s(lhsSym)}
+ | classSym ${own_s(rhsSym)}
+ | tparams ${rhsSym.typeParams map own_s mkString ", "}
+ |"""
+
+ if (argIndex < 0)
+ abort(s"Something is wrong: cannot find $lhs in applied type $rhs\n" + explain)
+ else {
+ val targ = rhsArgs(argIndex)
+ // @M! don't just replace the whole thing, might be followed by type application
+ val result = appliedType(targ, lhsArgs mapConserve this)
+ def msg = s"Created $result, though could not find ${own_s(lhsSym)} among tparams of ${own_s(rhsSym)}"
+ if (!rhsSym.typeParams.contains(lhsSym))
+ devWarning(s"Inconsistent tparam/owner views: had to fall back on names\n$msg\n$explain")
+
+ result
+ }
+ }
}
// 0) @pre: `classParam` is a class type parameter
@@ -769,8 +794,7 @@ private[internal] trait TypeMaps {
}
/** A map to implement the `subst` method. */
- class SubstTypeMap(from: List[Symbol], to: List[Type])
- extends SubstMap(from, to) {
+ class SubstTypeMap(val from: List[Symbol], val to: List[Type]) extends SubstMap(from, to) {
protected def toType(fromtp: Type, tp: Type) = tp
override def mapOver(tree: Tree, giveup: () => Nothing): Tree = {
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala
index c86383e9e3..16929cca0f 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
package tpe
diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala
index b8a8e4d0c0..580ada8254 100644
--- a/src/reflect/scala/reflect/internal/transform/Erasure.scala
+++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
package transform
@@ -54,9 +55,13 @@ trait Erasure {
}
}
+  /** Arrays, despite their finality, may turn up as refined type parents,
+ * e.g. with "tagged types" like Array[Int] with T.
+ */
protected def unboundedGenericArrayLevel(tp: Type): Int = tp match {
- case GenericArray(level, core) if !(core <:< AnyRefClass.tpe) => level
- case _ => 0
+ case GenericArray(level, core) if !(core <:< AnyRefTpe) => level
+ case RefinedType(ps, _) if ps.nonEmpty => logResult(s"Unbounded generic level for $tp is")(ps map unboundedGenericArrayLevel max)
+ case _ => 0
}
// @M #2585 when generating a java generic signature that includes
@@ -105,9 +110,6 @@ trait Erasure {
}
abstract class ErasureMap extends TypeMap {
- private lazy val ObjectArray = arrayType(ObjectClass.tpe)
- private lazy val ErasedObject = erasedTypeRef(ObjectClass)
-
def mergeParents(parents: List[Type]): Type
def eraseNormalClassRef(pre: Type, clazz: Symbol): Type =
@@ -122,11 +124,11 @@ trait Erasure {
apply(st.supertype)
case tref @ TypeRef(pre, sym, args) =>
if (sym == ArrayClass)
- if (unboundedGenericArrayLevel(tp) == 1) ObjectClass.tpe
- else if (args.head.typeSymbol.isBottomClass) ObjectArray
+ if (unboundedGenericArrayLevel(tp) == 1) ObjectTpe
+ else if (args.head.typeSymbol.isBottomClass) arrayType(ObjectTpe)
else typeRef(apply(pre), sym, args map applyInArray)
- else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass) ErasedObject
- else if (sym == UnitClass) erasedTypeRef(BoxedUnitClass)
+ else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass) ObjectTpe
+ else if (sym == UnitClass) BoxedUnitTpe
else if (sym.isRefinementClass) apply(mergeParents(tp.parents))
else if (sym.isDerivedValueClass) eraseDerivedValueClassRef(tref)
else if (sym.isClass) eraseNormalClassRef(pre, sym)
@@ -138,7 +140,7 @@ trait Erasure {
case mt @ MethodType(params, restpe) =>
MethodType(
cloneSymbolsAndModify(params, ErasureMap.this),
- if (restpe.typeSymbol == UnitClass) erasedTypeRef(UnitClass)
+ if (restpe.typeSymbol == UnitClass) UnitTpe
// this replaces each typeref that refers to an argument
// by the type `p.tpe` of the actual argument p (p in params)
else apply(mt.resultType(mt.paramTypes)))
@@ -149,7 +151,7 @@ trait Erasure {
case ClassInfoType(parents, decls, clazz) =>
ClassInfoType(
if (clazz == ObjectClass || isPrimitiveValueClass(clazz)) Nil
- else if (clazz == ArrayClass) List(ErasedObject)
+ else if (clazz == ArrayClass) ObjectTpe :: Nil
else removeLaterObjects(parents map this),
decls, clazz)
case _ =>
@@ -252,7 +254,7 @@ trait Erasure {
* An intersection such as `Object with Trait` erases to Object.
*/
def mergeParents(parents: List[Type]): Type =
- if (parents.isEmpty) ObjectClass.tpe
+ if (parents.isEmpty) ObjectTpe
else parents.head
}
@@ -300,7 +302,7 @@ trait Erasure {
* - Otherwise, the dominator is the first element of the span.
*/
def intersectionDominator(parents: List[Type]): Type = {
- if (parents.isEmpty) ObjectClass.tpe
+ if (parents.isEmpty) ObjectTpe
else {
val psyms = parents map (_.typeSymbol)
if (psyms contains ArrayClass) {
@@ -322,10 +324,6 @@ trait Erasure {
}
}
- /** Type reference after erasure */
- def erasedTypeRef(sym: Symbol): Type =
- typeRef(erasure(sym)(sym.owner.tpe), sym, Nil)
-
/** The symbol's erased info. This is the type's erasure, except for the following symbols:
*
* - For $asInstanceOf : [T]T
@@ -353,8 +351,7 @@ trait Erasure {
else if (sym.name == nme.update)
(tp: @unchecked) match {
case MethodType(List(index, tvar), restpe) =>
- MethodType(List(index.cloneSymbol.setInfo(specialErasure(sym)(index.tpe)), tvar),
- erasedTypeRef(UnitClass))
+ MethodType(List(index.cloneSymbol.setInfo(specialErasure(sym)(index.tpe)), tvar), UnitTpe)
}
else specialErasure(sym)(tp)
} else if (
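
The unboundedGenericArrayLevel change above now also looks through RefinedType parents, motivated by "tagged types" such as Array[Int] with T. A plain-Scala illustration of that pattern (the Tagged and Meters names are invented for this sketch and are not part of the change):

object TaggedArraySketch {
  trait Tagged[U]
  type @@[T, U] = T with Tagged[U]

  trait Meters

  // The cast is erased: at runtime the value is still a plain int[].
  def tagMeters(xs: Array[Int]): Array[Int] @@ Meters =
    xs.asInstanceOf[Array[Int] @@ Meters]

  def main(args: Array[String]): Unit = {
    val distances: Array[Int] @@ Meters = tagMeters(Array(1, 2, 3))
    // The refinement exists only at the type level, which is why erasure has to
    // look through refined parents when deciding whether an array is unbounded generic.
    println(distances.getClass)  // class [I
    println(distances.length)    // 3
  }
}
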
diff --git a/src/reflect/scala/reflect/internal/transform/RefChecks.scala b/src/reflect/scala/reflect/internal/transform/RefChecks.scala
index d6108ab665..4ca114e781 100644
--- a/src/reflect/scala/reflect/internal/transform/RefChecks.scala
+++ b/src/reflect/scala/reflect/internal/transform/RefChecks.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
package transform
@@ -10,4 +11,4 @@ trait RefChecks {
def transformInfo(sym: Symbol, tp: Type): Type =
if (sym.isModule && !sym.isStatic) NullaryMethodType(tp)
else tp
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/internal/transform/Transforms.scala b/src/reflect/scala/reflect/internal/transform/Transforms.scala
index 71cc80895d..fa185db22f 100644
--- a/src/reflect/scala/reflect/internal/transform/Transforms.scala
+++ b/src/reflect/scala/reflect/internal/transform/Transforms.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
package transform
diff --git a/src/reflect/scala/reflect/internal/transform/UnCurry.scala b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
index 32d3171b26..abea8bed9f 100644
--- a/src/reflect/scala/reflect/internal/transform/UnCurry.scala
+++ b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
package transform
@@ -25,7 +26,14 @@ trait UnCurry {
val tp = expandAlias(tp0)
tp match {
case MethodType(params, MethodType(params1, restpe)) =>
- apply(MethodType(params ::: params1, restpe))
+ // This transformation is described in UnCurryTransformer.dependentParamTypeErasure
+ val packSymbolsMap = new TypeMap {
+ // Wrapping in a TypeMap to reuse the code that opts for a fast path if the function is an identity.
+ def apply(tp: Type): Type = packSymbols(params, tp)
+ }
+ val existentiallyAbstractedParam1s = packSymbolsMap.mapOver(params1)
+ val substitutedResult = restpe.substSym(params1, existentiallyAbstractedParam1s)
+ apply(MethodType(params ::: existentiallyAbstractedParam1s, substitutedResult))
case MethodType(params, ExistentialType(tparams, restpe @ MethodType(_, _))) =>
abort("unexpected curried method types with intervening existential")
case MethodType(h :: t, restpe) if h.isImplicit =>
@@ -38,7 +46,7 @@ trait UnCurry {
apply(seqType(arg))
case TypeRef(pre, JavaRepeatedParamClass, arg :: Nil) =>
apply(arrayType(
- if (isUnboundedGeneric(arg)) ObjectClass.tpe else arg))
+ if (isUnboundedGeneric(arg)) ObjectTpe else arg))
case _ =>
expandAlias(mapOver(tp))
}
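
The UnCurry change above merges curried parameter lists, existentially abstracting any parameter type that refers to a parameter from an earlier list. The kind of signature involved looks like this (a plain-Scala illustration, unrelated to this build):

object DependentParamSketch {
  class Graph {
    class Node
  }

  // The second parameter's type depends on the first parameter. When UnCurry
  // flattens the two lists into one, `g.Node` can no longer refer to `g`
  // directly, so it is packed into an existential (roughly x.Node forSome { val x: Graph }).
  def firstNode(g: Graph)(n: g.Node): g.Node = n

  def main(args: Array[String]): Unit = {
    val g = new Graph
    val n = new g.Node
    println(firstNode(g)(n))
  }
}
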
diff --git a/src/compiler/scala/tools/nsc/util/AbstractFileClassLoader.scala b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala
index b204c39e9c..10a8b4c812 100644
--- a/src/compiler/scala/tools/nsc/util/AbstractFileClassLoader.scala
+++ b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala
@@ -2,10 +2,10 @@
* Copyright 2005-2013 LAMP/EPFL
*/
-package scala.tools.nsc
-package util
+package scala
+package reflect.internal.util
-import scala.tools.nsc.io.AbstractFile
+import scala.reflect.io.AbstractFile
import java.security.cert.Certificate
import java.security.{ ProtectionDomain, CodeSource }
import java.net.{ URL, URLConnection, URLStreamHandler }
diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala
index d6fca9d186..e127d577e1 100644
--- a/src/reflect/scala/reflect/internal/util/Collections.scala
+++ b/src/reflect/scala/reflect/internal/util/Collections.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
import scala.collection.{ mutable, immutable }
import scala.annotation.tailrec
@@ -208,3 +209,5 @@ trait Collections {
case _: IllegalArgumentException => None
}
}
+
+object Collections extends Collections
diff --git a/src/reflect/scala/reflect/internal/util/HashSet.scala b/src/reflect/scala/reflect/internal/util/HashSet.scala
index 74b6a54c6e..b4178e055d 100644
--- a/src/reflect/scala/reflect/internal/util/HashSet.scala
+++ b/src/reflect/scala/reflect/internal/util/HashSet.scala
@@ -3,7 +3,9 @@
* @author Martin Odersky
*/
-package scala.reflect.internal.util
+package scala
+package reflect
+package internal.util
object HashSet {
def apply[T >: Null <: AnyRef](initialCapacity: Int): HashSet[T] = this("No Label", initialCapacity)
diff --git a/src/reflect/scala/reflect/internal/util/Origins.scala b/src/reflect/scala/reflect/internal/util/Origins.scala
index a2b9e24ebc..2eb4fa29d5 100644
--- a/src/reflect/scala/reflect/internal/util/Origins.scala
+++ b/src/reflect/scala/reflect/internal/util/Origins.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.reflect
+package scala
+package reflect
package internal.util
import scala.collection.{ mutable, immutable }
diff --git a/src/reflect/scala/reflect/internal/util/Position.scala b/src/reflect/scala/reflect/internal/util/Position.scala
index bb8c9e9b26..ddd0a64675 100644
--- a/src/reflect/scala/reflect/internal/util/Position.scala
+++ b/src/reflect/scala/reflect/internal/util/Position.scala
@@ -4,7 +4,8 @@
*
*/
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
import scala.reflect.ClassTag
import scala.reflect.internal.FatalError
@@ -20,18 +21,12 @@ object Position {
else if (posIn.isDefined) posIn.inUltimateSource(posIn.source)
else posIn
)
- def file = pos.source.file
- def prefix = if (shortenFile) file.name else file.path
+ val prefix = if (shortenFile) pos.sourceName else pos.sourcePath
pos match {
case FakePos(fmsg) => fmsg+" "+msg
case NoPosition => msg
- case _ =>
- List(
- "%s:%s: %s".format(prefix, pos.line, msg),
- pos.lineContent.stripLineEnd,
- " " * (pos.column - 1) + "^"
- ) mkString "\n"
+ case _ => "%s:%s: %s\n%s\n%s".format(prefix, pos.line, msg, pos.lineContent, pos.lineCarat)
}
}
}
@@ -206,12 +201,39 @@ abstract class Position extends scala.reflect.api.Position { self =>
def column: Int = throw new UnsupportedOperationException("Position.column")
+ /** A line with a ^ padded with the right number of spaces.
+ */
+ def lineCarat: String = " " * (column - 1) + "^"
+
+ /** The line of code and the corresponding carat pointing line, trimmed
+ * to the maximum specified width, with the trimmed versions oriented
+ * around the point to give maximum context.
+ */
+ def lineWithCarat(maxWidth: Int): (String, String) = {
+ val radius = maxWidth / 2
+ var start = scala.math.max(column - radius, 0)
+ var result = lineContent drop start take maxWidth
+
+ if (result.length < maxWidth) {
+ result = lineContent takeRight maxWidth
+ start = lineContent.length - result.length
+ }
+
+ (result, lineCarat drop start take maxWidth)
+ }
+
/** Convert this to a position around `point` that spans a single source line */
def toSingleLine: Position = this
- def lineContent: String =
- if (isDefined) source.lineToString(line - 1)
- else "NO_LINE"
+ /** The source code corresponding to the range, if this is a range position.
+ * Otherwise the empty string.
+ */
+ def sourceCode = ""
+ def sourceName = "<none>"
+ def sourcePath = "<none>"
+ def lineContent = "<none>"
+ def lengthInChars = 0
+ def lengthInLines = 0
/** Map this position to a position in an original source
* file. If the SourceFile is a normal SourceFile, simply
@@ -240,7 +262,10 @@ class OffsetPosition(override val source: SourceFile, override val point: Int) e
override def withPoint(off: Int) = new OffsetPosition(source, off)
override def withSource(source: SourceFile, shift: Int) = new OffsetPosition(source, point + shift)
- override def line: Int = source.offsetToLine(point) + 1
+ override def line = source.offsetToLine(point) + 1
+ override def sourceName = source.file.name
+ override def sourcePath = source.file.path
+ override def lineContent = source.lineToString(line - 1)
override def column: Int = {
var idx = source.lineToOffset(source.offsetToLine(point))
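
The new lineCarat and lineWithCarat members above trim the source line and the caret line in lockstep, keeping the window centred on the error column where possible. A standalone sketch of the same windowing arithmetic (names are ours, no compiler types involved):

object CaretWindowSketch {
  def caretLine(column: Int): String = " " * (column - 1) + "^"

  def window(lineContent: String, column: Int, maxWidth: Int): (String, String) = {
    val radius = maxWidth / 2
    var start  = math.max(column - radius, 0)
    var result = lineContent.drop(start).take(maxWidth)

    if (result.length < maxWidth) {        // ran off the right edge: snap the window to it
      result = lineContent.takeRight(maxWidth)
      start  = lineContent.length - result.length
    }
    (result, caretLine(column).drop(start).take(maxWidth))
  }

  def main(args: Array[String]): Unit = {
    val (code, caret) = window("val result = someVeryLongExpression(arg1, arg2, arg3)", column = 14, maxWidth = 20)
    println(code)   // a 20-character slice of the line
    println(caret)  // the ^ stays aligned under the character at column 14
  }
}
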
diff --git a/src/reflect/scala/reflect/internal/util/RangePosition.scala b/src/reflect/scala/reflect/internal/util/RangePosition.scala
index 3712aa0a52..0d09a53cd9 100644
--- a/src/reflect/scala/reflect/internal/util/RangePosition.scala
+++ b/src/reflect/scala/reflect/internal/util/RangePosition.scala
@@ -3,12 +3,13 @@
* @author Paul Phillips
*/
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
/** new for position ranges */
class RangePosition(source: SourceFile, override val start: Int, point: Int, override val end: Int)
extends OffsetPosition(source, point) {
- if (start > end) sys.error("bad position: "+show)
+ if (start > end) scala.sys.error("bad position: "+show)
override def isRange: Boolean = true
override def isOpaqueRange: Boolean = true
override def startOrPoint: Int = start
diff --git a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala
index 3899ef24c7..a7fd787dfc 100644
--- a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
+++ b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala
@@ -3,8 +3,8 @@
* @author Paul Phillips
*/
-package scala.tools.nsc
-package util
+package scala
+package reflect.internal.util
import java.lang.{ ClassLoader => JClassLoader }
import java.lang.reflect.{ Constructor, Modifier, Method }
@@ -49,7 +49,7 @@ trait ScalaClassLoader extends JClassLoader {
/** The actual bytes for a class file, or an empty array if it can't be found. */
def classBytes(className: String): Array[Byte] = classAsStream(className) match {
case null => Array()
- case stream => io.Streamable.bytes(stream)
+ case stream => scala.reflect.io.Streamable.bytes(stream)
}
/** An InputStream representing the given class name, or null if not found. */
diff --git a/src/reflect/scala/reflect/internal/util/Set.scala b/src/reflect/scala/reflect/internal/util/Set.scala
index 57e5e0c0b9..75dcfaa59b 100644
--- a/src/reflect/scala/reflect/internal/util/Set.scala
+++ b/src/reflect/scala/reflect/internal/util/Set.scala
@@ -2,7 +2,8 @@
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
/** A common class for lightweight sets.
*/
diff --git a/src/reflect/scala/reflect/internal/util/SourceFile.scala b/src/reflect/scala/reflect/internal/util/SourceFile.scala
index dd2a6e21f1..ea4c9a9b68 100644
--- a/src/reflect/scala/reflect/internal/util/SourceFile.scala
+++ b/src/reflect/scala/reflect/internal/util/SourceFile.scala
@@ -4,7 +4,8 @@
*/
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
import scala.reflect.io.{ AbstractFile, VirtualFile }
import scala.collection.mutable.ArrayBuffer
diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala
index 0fa798058d..b583137059 100644
--- a/src/reflect/scala/reflect/internal/util/Statistics.scala
+++ b/src/reflect/scala/reflect/internal/util/Statistics.scala
@@ -1,4 +1,5 @@
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
import scala.collection.mutable
@@ -102,8 +103,8 @@ quant)
for ((_, q) <- qs if q.underlying == q;
r <- q :: q.children.toList if r.prefix.nonEmpty) yield r
- private def showPercent(x: Double, base: Double) =
- if (base == 0) "" else f" (${x / base * 100}%2.1f%)"
+ private def showPercent(x: Long, base: Long) =
+ if (base == 0) "" else f" (${x.toDouble / base.toDouble * 100}%2.1f%%)"
/** The base trait for quantities.
* Quantities with non-empty prefix are printed in the statistics info.
@@ -155,7 +156,7 @@ quant)
value = value0 + underlying.value - uvalue0
}
override def toString =
- value + showPercent(value, underlying.value)
+ value + showPercent(value.toLong, underlying.value.toLong)
}
class Timer(val prefix: String, val phases: Seq[String]) extends Quantity {
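
The showPercent fix above does two things: the literal percent sign is now escaped as %% inside the f interpolator, and the Long counters are explicitly converted to Double before dividing. A one-line standalone check of that formatting:

object PercentFormatSketch {
  def main(args: Array[String]): Unit = {
    val x    = 21L
    val base = 42L
    // A literal percent sign must be written %% in an f interpolator,
    // and the Long values are converted to Double before dividing.
    println(f"${x.toDouble / base.toDouble * 100}%2.1f%%")  // prints 50.0%
  }
}
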
diff --git a/src/reflect/scala/reflect/internal/util/StringOps.scala b/src/reflect/scala/reflect/internal/util/StringOps.scala
index 93bbfdd273..4d98a344d8 100644
--- a/src/reflect/scala/reflect/internal/util/StringOps.scala
+++ b/src/reflect/scala/reflect/internal/util/StringOps.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
/** This object provides utility methods to extract elements
* from Strings.
diff --git a/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala b/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala
index 9259c5abf1..e622e78d57 100644
--- a/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala
+++ b/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
package util
diff --git a/src/reflect/scala/reflect/internal/util/TableDef.scala b/src/reflect/scala/reflect/internal/util/TableDef.scala
index d57c59757d..1626da2c93 100644
--- a/src/reflect/scala/reflect/internal/util/TableDef.scala
+++ b/src/reflect/scala/reflect/internal/util/TableDef.scala
@@ -1,4 +1,5 @@
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
import TableDef._
import scala.language.postfixOps
diff --git a/src/reflect/scala/reflect/internal/util/ThreeValues.scala b/src/reflect/scala/reflect/internal/util/ThreeValues.scala
index f89bd9e199..18410510cb 100644
--- a/src/reflect/scala/reflect/internal/util/ThreeValues.scala
+++ b/src/reflect/scala/reflect/internal/util/ThreeValues.scala
@@ -1,4 +1,5 @@
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
/** A simple three value type for booleans with an unknown value */
object ThreeValues {
diff --git a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
index 632890d600..97cc19952c 100644
--- a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
+++ b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
@@ -1,4 +1,5 @@
-package scala.reflect.internal
+package scala
+package reflect.internal
package util
import scala.collection.{ mutable, immutable }
diff --git a/src/reflect/scala/reflect/internal/util/TriState.scala b/src/reflect/scala/reflect/internal/util/TriState.scala
new file mode 100644
index 0000000000..c7a35d4637
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/TriState.scala
@@ -0,0 +1,26 @@
+package scala
+package reflect
+package internal
+package util
+
+import TriState._
+
+/** A simple true/false/unknown value, for those days when
+ * true and false don't quite partition the space.
+ */
+final class TriState private (val value: Int) extends AnyVal {
+ def isKnown = this != Unknown
+ def booleanValue = this match {
+ case True => true
+ case False => false
+ case _ => sys.error("Not a Boolean value")
+ }
+}
+
+object TriState {
+ implicit def booleanToTriState(b: Boolean): TriState = if (b) True else False
+
+ val Unknown = new TriState(-1)
+ val False = new TriState(0)
+ val True = new TriState(1)
+}
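
A small usage sketch for the TriState just added, mirroring how typeRelationPreCheck's callers consume it in TypeComparers above. It assumes scala.reflect.internal.util.TriState is on the classpath; preCheck and slowCompare are invented for the example:

import scala.reflect.internal.util.TriState

object TriStateSketch {
  // A cheap pre-check: equal strings are definitely "same", empty strings
  // definitely are not, and anything else is left to the slow path.
  def preCheck(a: String, b: String): TriState =
    if (a == b) TriState.True
    else if (a.isEmpty || b.isEmpty) TriState.False
    else TriState.Unknown

  def slowCompare(a: String, b: String): Boolean = a.equalsIgnoreCase(b)

  def compare(a: String, b: String): Boolean = preCheck(a, b) match {
    case state if state.isKnown => state.booleanValue
    case _                      => slowCompare(a, b)
  }

  def main(args: Array[String]): Unit = {
    println(compare("Foo", "Foo"))  // true, via the pre-check
    println(compare("Foo", ""))     // false, via the pre-check
    println(compare("Foo", "foo"))  // true, via the slow path
  }
}
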
diff --git a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
index 41e74f80e9..9b792a3f43 100644
--- a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
+++ b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
@@ -1,58 +1,430 @@
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
-import scala.collection.mutable
+import java.lang.ref.{WeakReference, ReferenceQueue}
+import scala.annotation.tailrec
import scala.collection.generic.Clearable
-import scala.runtime.AbstractFunction1
+import scala.collection.mutable.{Set => mSet}
-/** A bare-bones implementation of a mutable `Set` that uses weak references
- * to hold the elements.
+/**
+ * A HashSet where the elements are stored weakly. Elements in this set are eligible for GC if no other
+ * hard references are associated with them. Its primary use case is as a canonical reference
+ * identity holder (aka "hash-consing") via findEntryOrUpdate
*
- * This implementation offers only add/remove/test operations,
- * therefore it does not fulfill the contract of Scala collection sets.
+ * This Set implementation cannot hold null. Any attempt to put a null in it will result in a NullPointerException
+ *
+ * This set implementation is not in general thread-safe without external concurrency control. However, it behaves
+ * properly when GC concurrently collects elements in this set.
*/
-class WeakHashSet[T <: AnyRef] extends AbstractFunction1[T, Boolean] with Clearable {
- private val underlying = mutable.HashSet[WeakReferenceWithEquals[T]]()
+final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: Double) extends Set[A] with Function1[A, Boolean] with mSet[A] {
+
+ import WeakHashSet._
+
+ def this() = this(initialCapacity = WeakHashSet.defaultInitialCapacity, loadFactor = WeakHashSet.defaultLoadFactor)
+
+ type This = WeakHashSet[A]
+
+ /**
+   * Queue of Entries that hold elements scheduled for GC.
+   * The removeStaleEntries() method works through the queue to remove
+   * stale entries from the table.
+ */
+ private[this] val queue = new ReferenceQueue[A]
+
+ /**
+ * the number of elements in this set
+ */
+ private[this] var count = 0
+
+ /**
+   * From the specified initial capacity, compute the capacity we'll actually use:
+   * the next power of two equal to or greater than that initial capacity.
+ */
+ private def computeCapacity = {
+ if (initialCapacity < 0) throw new IllegalArgumentException("initial capacity cannot be less than 0");
+ var candidate = 1
+ while (candidate < initialCapacity) {
+ candidate *= 2
+ }
+ candidate
+ }
+
+ /**
+ * the underlying table of entries which is an array of Entry linked lists
+ */
+ private[this] var table = new Array[Entry[A]](computeCapacity)
+
+ /**
+ * the limit at which we'll increase the size of the hash table
+ */
+ var threshhold = computeThreshHold
+
+ private[this] def computeThreshHold: Int = (table.size * loadFactor).ceil.toInt
- /** Add the given element to this set. */
- def +=(elem: T): this.type = {
- underlying += new WeakReferenceWithEquals(elem)
- this
+ /**
+ * find the bucket associated with an elements's hash code
+ */
+ private[this] def bucketFor(hash: Int): Int = {
+ // spread the bits around to try to avoid accidental collisions using the
+ // same algorithm as java.util.HashMap
+ var h = hash
+ h ^= h >>> 20 ^ h >>> 12
+ h ^= h >>> 7 ^ h >>> 4
+
+    // This computes h % table.length, taking advantage of the fact that
+    // table.length is a power of 2: if table.length in binary is 100000..
+    // (a 1 followed by n 0s) then table.length - 1 is 111.. (n 1s).
+    // In other words, the mask keeps only the last n bits of the hash.
+ h & (table.length - 1)
}
- /** Remove the given element from this set. */
- def -=(elem: T): this.type = {
- underlying -= new WeakReferenceWithEquals(elem)
- this
+ /**
+ * remove a single entry from a linked list in a given bucket
+ */
+ private[this] def remove(bucket: Int, prevEntry: Entry[A], entry: Entry[A]) {
+ prevEntry match {
+ case null => table(bucket) = entry.tail
+ case _ => prevEntry.tail = entry.tail
+ }
+ count -= 1
}
- /** Does the given element belong to this set? */
- def contains(elem: T): Boolean =
- underlying.contains(new WeakReferenceWithEquals(elem))
+ /**
+ * remove entries associated with elements that have been gc'ed
+ */
+ private[this] def removeStaleEntries() {
+ def poll(): Entry[A] = queue.poll().asInstanceOf[Entry[A]]
- /** Does the given element belong to this set? */
- def apply(elem: T): Boolean = contains(elem)
+ @tailrec
+ def queueLoop {
+ val stale = poll()
+ if (stale != null) {
+ val bucket = bucketFor(stale.hash)
- /** Return the number of elements in this set, including reclaimed elements. */
- def size = underlying.size
+ @tailrec
+ def linkedListLoop(prevEntry: Entry[A], entry: Entry[A]): Unit = if (stale eq entry) remove(bucket, prevEntry, entry)
+ else if (entry != null) linkedListLoop(entry, entry.tail)
- /** Remove all elements in this set. */
- def clear() = underlying.clear()
-}
+ linkedListLoop(null, table(bucket))
-/** A WeakReference implementation that implements equals and hashCode by
- * delegating to the referent.
- */
-class WeakReferenceWithEquals[T <: AnyRef](ref: T) {
- def get(): T = underlying.get()
+ queueLoop
+ }
+ }
+
+ queueLoop
+ }
+
+ /**
+ * Double the size of the internal table
+ */
+ private[this] def resize() {
+ val oldTable = table
+ table = new Array[Entry[A]](oldTable.size * 2)
+ threshhold = computeThreshHold
+
+ @tailrec
+ def tableLoop(oldBucket: Int): Unit = if (oldBucket < oldTable.size) {
+ @tailrec
+ def linkedListLoop(entry: Entry[A]): Unit = entry match {
+ case null => ()
+ case _ => {
+ val bucket = bucketFor(entry.hash)
+ val oldNext = entry.tail
+ entry.tail = table(bucket)
+ table(bucket) = entry
+ linkedListLoop(oldNext)
+ }
+ }
+ linkedListLoop(oldTable(oldBucket))
+
+ tableLoop(oldBucket + 1)
+ }
+ tableLoop(0)
+ }
+
+ // from scala.reflect.internal.Set, find an element or null if it isn't contained
+ override def findEntry(elem: A): A = elem match {
+ case null => throw new NullPointerException("WeakHashSet cannot hold nulls")
+ case _ => {
+ removeStaleEntries()
+ val hash = elem.hashCode
+ val bucket = bucketFor(hash)
+
+ @tailrec
+ def linkedListLoop(entry: Entry[A]): A = entry match {
+ case null => null.asInstanceOf[A]
+ case _ => {
+ val entryElem = entry.get
+ if (elem == entryElem) entryElem
+ else linkedListLoop(entry.tail)
+ }
+ }
+
+ linkedListLoop(table(bucket))
+ }
+ }
+ // add an element to this set unless it's already in there and return the element
+ def findEntryOrUpdate(elem: A): A = elem match {
+ case null => throw new NullPointerException("WeakHashSet cannot hold nulls")
+ case _ => {
+ removeStaleEntries()
+ val hash = elem.hashCode
+ val bucket = bucketFor(hash)
+ val oldHead = table(bucket)
+
+ def add() = {
+ table(bucket) = new Entry(elem, hash, oldHead, queue)
+ count += 1
+ if (count > threshhold) resize()
+ elem
+ }
+
+ @tailrec
+ def linkedListLoop(entry: Entry[A]): A = entry match {
+ case null => add()
+ case _ => {
+ val entryElem = entry.get
+ if (elem == entryElem) entryElem
+ else linkedListLoop(entry.tail)
+ }
+ }
+
+ linkedListLoop(oldHead)
+ }
+ }
+
+ // add an element to this set unless it's already in there and return this set
+ override def +(elem: A): this.type = elem match {
+ case null => throw new NullPointerException("WeakHashSet cannot hold nulls")
+ case _ => {
+ removeStaleEntries()
+ val hash = elem.hashCode
+ val bucket = bucketFor(hash)
+ val oldHead = table(bucket)
+
+ def add() {
+ table(bucket) = new Entry(elem, hash, oldHead, queue)
+ count += 1
+ if (count > threshhold) resize()
+ }
+
+ @tailrec
+ def linkedListLoop(entry: Entry[A]): Unit = entry match {
+ case null => add()
+ case _ if (elem == entry.get) => ()
+ case _ => linkedListLoop(entry.tail)
+ }
+
+ linkedListLoop(oldHead)
+ this
+ }
+ }
+
+ def +=(elem: A) = this + elem
+
+  // from scala.reflect.internal.Set
+ override def addEntry(x: A) { this += x }
+
+ // remove an element from this set and return this set
+ override def -(elem: A): this.type = elem match {
+ case null => this
+ case _ => {
+ removeStaleEntries()
+ val bucket = bucketFor(elem.hashCode)
+
+ @tailrec
+ def linkedListLoop(prevEntry: Entry[A], entry: Entry[A]): Unit = entry match {
+ case null => ()
+ case _ if (elem == entry.get) => remove(bucket, prevEntry, entry)
+ case _ => linkedListLoop(entry, entry.tail)
+ }
- override val hashCode = ref.hashCode
+ linkedListLoop(null, table(bucket))
+ this
+ }
+ }
+
+ def -=(elem: A) = this - elem
+
+ // empty this set
+ override def clear(): Unit = {
+ table = new Array[Entry[A]](table.size)
+ threshhold = computeThreshHold
+ count = 0
+
+ // drain the queue - doesn't do anything because we're throwing away all the values anyway
+ @tailrec def queueLoop(): Unit = if (queue.poll() != null) queueLoop()
+ queueLoop()
+ }
+
+  // a fresh, empty set with the same initial capacity and load factor
+ override def empty: This = new WeakHashSet[A](initialCapacity, loadFactor)
+
+ // the number of elements in this set
+ override def size: Int = {
+ removeStaleEntries()
+ count
+ }
+
+ override def apply(x: A): Boolean = this contains x
+
+ override def foreach[U](f: A => U): Unit = iterator foreach f
+
+ override def toList(): List[A] = iterator.toList
+
+ // Iterator over all the elements in this set in no particular order
+ override def iterator: Iterator[A] = {
+ removeStaleEntries()
+
+ new Iterator[A] {
+
+ /**
+ * the bucket currently being examined. Initially it's set past the last bucket and will be decremented
+ */
+ private[this] var currentBucket: Int = table.size
+
+ /**
+ * the entry that was last examined
+ */
+ private[this] var entry: Entry[A] = null
+
+ /**
+ * the element that will be the result of the next call to next()
+ */
+ private[this] var lookaheadelement: A = null.asInstanceOf[A]
+
+ @tailrec
+ def hasNext: Boolean = {
+ while (entry == null && currentBucket > 0) {
+ currentBucket -= 1
+ entry = table(currentBucket)
+ }
+
+ if (entry == null) false
+ else {
+ lookaheadelement = entry.get
+ if (lookaheadelement == null) {
+ // element null means the weakref has been cleared since we last did a removeStaleEntries(), move to the next entry
+ entry = entry.tail
+ hasNext
+ } else {
+ true
+ }
+ }
+ }
- override def equals(other: Any): Boolean = other match {
- case wf: WeakReferenceWithEquals[_] =>
- underlying.get() == wf.get()
- case _ =>
- false
+ def next(): A = if (lookaheadelement == null)
+ throw new IndexOutOfBoundsException("next on an empty iterator")
+ else {
+ val result = lookaheadelement
+ lookaheadelement = null.asInstanceOf[A]
+ entry = entry.tail
+ result
+ }
+ }
}
- private val underlying = new java.lang.ref.WeakReference(ref)
+ /**
+ * Diagnostic information about the internals of this set. Not normally
+ * needed by ordinary code, but may be useful for diagnosing performance problems
+ */
+ private[util] class Diagnostics {
+ /**
+ * Verify that the internal structure of this hash set is fully consistent.
+ * Throws an assertion error on any problem. In order for it to be reliable
+ * the entries must be stable. If any are garbage collected during validation
+ * then an assertion may inappropriately fire.
+ */
+ def fullyValidate {
+ var computedCount = 0
+ var bucket = 0
+ while (bucket < table.size) {
+ var entry = table(bucket)
+ while (entry != null) {
+          assert(entry.get != null, s"$entry had a null value, indicating that gc activity was happening during diagnostic validation or that a null value was inserted")
+ computedCount += 1
+ val cachedHash = entry.hash
+ val realHash = entry.get.hashCode
+ assert(cachedHash == realHash, s"for $entry cached hash was $cachedHash but should have been $realHash")
+ val computedBucket = bucketFor(realHash)
+ assert(computedBucket == bucket, s"for $entry the computed bucket was $computedBucket but should have been $bucket")
+
+ entry = entry.tail
+ }
+
+ bucket += 1
+ }
+
+ assert(computedCount == count, s"The computed count was $computedCount but should have been $count")
+ }
+
+ /**
+ * Produces a diagnostic dump of the table that underlies this hash set.
+ */
+ def dump = table.deep
+
+ /**
+ * Number of buckets that hold collisions. Useful for diagnosing performance issues.
+ */
+ def collisionBucketsCount: Int =
+ (table filter (entry => entry != null && entry.tail != null)).size
+
+ /**
+ * Number of buckets that are occupied in this hash table.
+ */
+ def fullBucketsCount: Int =
+ (table filter (entry => entry != null)).size
+
+ /**
+ * Number of buckets in the table
+ */
+ def bucketsCount: Int = table.size
+
+ /**
+ * Number of buckets that don't hold anything
+ */
+ def emptyBucketsCount = bucketsCount - fullBucketsCount
+
+ /**
+ * Number of elements that are in collision. Useful for diagnosing performance issues.
+ */
+ def collisionsCount = size - (fullBucketsCount - collisionBucketsCount)
+
+ /**
+ * A map from a count of elements to the number of buckets with that count
+ */
+ def elementCountDistribution = table map linkedListSize groupBy identity map {case (size, list) => (size, list.size)}
+
+ private def linkedListSize(entry: Entry[A]) = {
+ var e = entry
+ var count = 0
+ while (e != null) {
+ count += 1
+ e = e.tail
+ }
+ count
+ }
+ }
+
+ private[util] def diagnostics = new Diagnostics
}
+
+/**
+ * Companion object for WeakHashSet
+ */
+object WeakHashSet {
+ /**
+ * A single entry in a WeakHashSet. It's a WeakReference plus a cached hash code and
+ * a link to the next Entry in the same bucket
+ */
+ private class Entry[A](element: A, val hash:Int, var tail: Entry[A], queue: ReferenceQueue[A]) extends WeakReference[A](element, queue)
+
+ val defaultInitialCapacity = 16
+ val defaultLoadFactor = .75
+
+  def apply[A <: AnyRef](initialCapacity: Int = WeakHashSet.defaultInitialCapacity, loadFactor: Double = WeakHashSet.defaultLoadFactor) = new WeakHashSet[A](initialCapacity, loadFactor)
+} \ No newline at end of file
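
A hedged usage sketch for the rewritten WeakHashSet, showing the hash-consing use case the class comment describes. It assumes scala.reflect.internal.util.WeakHashSet is on the classpath; the Name case class is invented for the example:

import scala.reflect.internal.util.WeakHashSet

object HashConsingSketch {
  final case class Name(value: String)  // equal by value, distinct by identity

  def main(args: Array[String]): Unit = {
    val interned = new WeakHashSet[Name]()

    val a = interned.findEntryOrUpdate(Name("foo"))  // first occurrence is stored
    val b = interned.findEntryOrUpdate(Name("foo"))  // an equal value yields the stored instance

    println(a eq b)         // true: both refer to the canonical entry
    println(interned.size)  // 1

    // Entries are held through WeakReferences, so once no hard reference to the
    // canonical Name("foo") remains, the GC may reclaim it and a later operation
    // on the set will drop the stale entry.
  }
}
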
diff --git a/src/reflect/scala/reflect/internal/util/package.scala b/src/reflect/scala/reflect/internal/util/package.scala
index 1ca57b81ed..49164d366c 100644
--- a/src/reflect/scala/reflect/internal/util/package.scala
+++ b/src/reflect/scala/reflect/internal/util/package.scala
@@ -2,6 +2,8 @@ package scala
package reflect
package internal
+import scala.language.existentials // SI-6541
+
package object util {
import StringOps.longestCommonPrefix
@@ -25,11 +27,10 @@ package object util {
if (isModule)
(name split '$' filterNot (_ == "")).last + "$"
- else if (isAnon) {
- val parents = clazz.getSuperclass :: clazz.getInterfaces.toList
- parents map (c => shortClass(c)) mkString " with "
- }
- else shortenName(name)
+ else if (isAnon)
+ clazz.getSuperclass :: clazz.getInterfaces.toList map (c => shortClass(c)) mkString " with "
+ else
+ shortenName(name)
}
/**
* Adds the `sm` String interpolator to a [[scala.StringContext]].
diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala
index 8b69efc749..ac1159b2ac 100644
--- a/src/reflect/scala/reflect/io/AbstractFile.scala
+++ b/src/reflect/scala/reflect/io/AbstractFile.scala
@@ -4,13 +4,15 @@
*/
-package scala.reflect
+package scala
+package reflect
package io
import java.io.{ FileOutputStream, IOException, InputStream, OutputStream, BufferedOutputStream, ByteArrayOutputStream }
import java.io.{ File => JFile }
import java.net.URL
import scala.collection.mutable.ArrayBuffer
+import scala.reflect.internal.util.Statistics
/**
* An abstraction over files for use in the reflection/compiler libraries.
@@ -112,7 +114,10 @@ abstract class AbstractFile extends Iterable[AbstractFile] {
def underlyingSource: Option[AbstractFile] = None
/** Does this abstract file denote an existing file? */
- def exists: Boolean = (file eq null) || file.exists
+ def exists: Boolean = {
+ if (Statistics.canEnable) Statistics.incCounter(IOStats.fileExistsCount)
+ (file eq null) || file.exists
+ }
/** Does this abstract file represent something which can contain classfiles? */
def isClassContainer = isDirectory || (file != null && (extension == "jar" || extension == "zip"))
diff --git a/src/reflect/scala/reflect/io/Directory.scala b/src/reflect/scala/reflect/io/Directory.scala
index 4bf9ed8a36..2b965e6d69 100644
--- a/src/reflect/scala/reflect/io/Directory.scala
+++ b/src/reflect/scala/reflect/io/Directory.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.reflect
+package scala
+package reflect
package io
import java.io.{ File => JFile }
@@ -14,7 +15,7 @@ import java.io.{ File => JFile }
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
object Directory {
- import scala.util.Properties.{ userHome, userDir }
+ import scala.util.Properties.userDir
private def normalizePath(s: String) = Some(apply(Path(s).normalize))
def Current: Option[Directory] = if (userDir == "") None else normalizePath(userDir)
diff --git a/src/reflect/scala/reflect/io/File.scala b/src/reflect/scala/reflect/io/File.scala
index 64651dcfbd..a9c6807e88 100644
--- a/src/reflect/scala/reflect/io/File.scala
+++ b/src/reflect/scala/reflect/io/File.scala
@@ -7,13 +7,16 @@
\* */
-package scala.reflect
+package scala
+package reflect
package io
import java.io.{
FileInputStream, FileOutputStream, BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter,
- BufferedInputStream, BufferedOutputStream, IOException, PrintStream, PrintWriter, Closeable => JCloseable }
-import java.io.{ File => JFile }
+ BufferedInputStream, BufferedOutputStream, IOException, PrintStream, PrintWriter, Closeable => JCloseable,
+ File => JFile
+}
+
import java.nio.channels.{ Channel, FileChannel }
import scala.io.Codec
import scala.language.{reflectiveCalls, implicitConversions}
diff --git a/src/reflect/scala/reflect/io/FileOperationException.scala b/src/reflect/scala/reflect/io/FileOperationException.scala
index 13a1322798..fdfe0234e0 100644
--- a/src/reflect/scala/reflect/io/FileOperationException.scala
+++ b/src/reflect/scala/reflect/io/FileOperationException.scala
@@ -7,7 +7,8 @@
\* */
-package scala.reflect
+package scala
+package reflect
package io
/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
case class FileOperationException(msg: String) extends RuntimeException(msg)
diff --git a/src/reflect/scala/reflect/io/IOStats.scala b/src/reflect/scala/reflect/io/IOStats.scala
new file mode 100644
index 0000000000..71f8be330d
--- /dev/null
+++ b/src/reflect/scala/reflect/io/IOStats.scala
@@ -0,0 +1,32 @@
+package scala
+package reflect.io
+
+import scala.reflect.internal.util.Statistics
+
+// Due to limitations in the Statistics machinery, these are only
+// reported if this patch is applied.
+//
+// --- a/src/reflect/scala/reflect/internal/util/Statistics.scala
+// +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala
+// @@ -109,7 +109,7 @@ quant)
+// * Quantities with non-empty prefix are printed in the statistics info.
+// */
+// trait Quantity {
+// - if (enabled && prefix.nonEmpty) {
+// + if (prefix.nonEmpty) {
+// val key = s"${if (underlying != this) underlying.prefix else ""}/$prefix"
+// qs(key) = this
+// }
+// @@ -243,7 +243,7 @@ quant)
+// *
+// * to remove all Statistics code from build
+// */
+// - final val canEnable = _enabled
+// + final val canEnable = true // _enabled
+//
+// We can't commit this patch as-is, because the first diff above reverts a fix for an IDE memory leak.
+private[io] object IOStats {
+ val fileExistsCount = Statistics.newCounter("# File.exists calls")
+ val fileIsDirectoryCount = Statistics.newCounter("# File.isDirectory calls")
+ val fileIsFileCount = Statistics.newCounter("# File.isFile calls")
+}
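
The counters above follow the usual Statistics idiom: each increment is guarded by Statistics.canEnable, a constant, so the instrumentation can be optimized away when statistics support is compiled out. A minimal sketch of that idiom with a hypothetical counter (assumes scala-reflect on the classpath; nothing is reported unless statistics are enabled):

import scala.reflect.internal.util.Statistics

object IOStatsSketch {
  // Hypothetical counter, analogous to IOStats.fileExistsCount above.
  val myExistsCount = Statistics.newCounter("# hypothetical File.exists calls")

  def checkedExists(f: java.io.File): Boolean = {
    if (Statistics.canEnable) Statistics.incCounter(myExistsCount)
    f.exists()
  }

  def main(args: Array[String]): Unit = {
    println(checkedExists(new java.io.File(".")))  // true, and the counter ticks if enabled
  }
}
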
diff --git a/src/reflect/scala/reflect/io/NoAbstractFile.scala b/src/reflect/scala/reflect/io/NoAbstractFile.scala
index 2c59fd8aae..a4e869ed41 100644
--- a/src/reflect/scala/reflect/io/NoAbstractFile.scala
+++ b/src/reflect/scala/reflect/io/NoAbstractFile.scala
@@ -3,15 +3,15 @@
* @author Paul Phillips
*/
-package scala.reflect
+package scala
+package reflect
package io
import java.io.InputStream
-import java.io.{ File => JFile }
/** A distinguished object so you can avoid both null
* and Option.
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
object NoAbstractFile extends AbstractFile {
@@ -19,7 +19,7 @@ object NoAbstractFile extends AbstractFile {
def container: AbstractFile = this
def create(): Unit = ???
def delete(): Unit = ???
- def file: JFile = null
+ def file: java.io.File = null
def input: InputStream = null
def isDirectory: Boolean = false
override def isVirtual: Boolean = true
diff --git a/src/reflect/scala/reflect/io/Path.scala b/src/reflect/scala/reflect/io/Path.scala
index 44fb41a1cd..15fce953f2 100644
--- a/src/reflect/scala/reflect/io/Path.scala
+++ b/src/reflect/scala/reflect/io/Path.scala
@@ -3,16 +3,17 @@
* @author Paul Phillips
*/
-package scala.reflect
+package scala
+package reflect
package io
import java.io.{
FileInputStream, FileOutputStream, BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter,
- BufferedInputStream, BufferedOutputStream, RandomAccessFile }
-import java.io.{ File => JFile }
+ BufferedInputStream, BufferedOutputStream, RandomAccessFile, File => JFile }
import java.net.{ URI, URL }
import scala.util.Random.alphanumeric
import scala.language.implicitConversions
+import scala.reflect.internal.util.Statistics
/** An abstraction for filesystem paths. The differences between
* Path, File, and Directory are primarily to communicate intent.
@@ -57,8 +58,18 @@ object Path {
def apply(path: String): Path = apply(new JFile(path))
def apply(jfile: JFile): Path = try {
- if (jfile.isFile) new File(jfile)
- else if (jfile.isDirectory) new Directory(jfile)
+ def isFile = {
+ if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsFileCount)
+ jfile.isFile
+ }
+
+ def isDirectory = {
+ if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsDirectoryCount)
+ jfile.isDirectory
+ }
+
+ if (isFile) new File(jfile)
+ else if (isDirectory) new Directory(jfile)
else new Path(jfile)
} catch { case ex: SecurityException => new Path(jfile) }
@@ -187,10 +198,19 @@ class Path private[io] (val jfile: JFile) {
// Boolean tests
def canRead = jfile.canRead()
def canWrite = jfile.canWrite()
- def exists = try jfile.exists() catch { case ex: SecurityException => false }
+ def exists = {
+ if (Statistics.canEnable) Statistics.incCounter(IOStats.fileExistsCount)
+ try jfile.exists() catch { case ex: SecurityException => false }
+ }
- def isFile = try jfile.isFile() catch { case ex: SecurityException => false }
- def isDirectory = try jfile.isDirectory() catch { case ex: SecurityException => false }
+ def isFile = {
+ if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsFileCount)
+ try jfile.isFile() catch { case ex: SecurityException => false }
+ }
+ def isDirectory = {
+ if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsDirectoryCount)
+ try jfile.isDirectory() catch { case ex: SecurityException => jfile.getPath == "." }
+ }
def isAbsolute = jfile.isAbsolute()
def isEmpty = path.length == 0
diff --git a/src/reflect/scala/reflect/io/PlainFile.scala b/src/reflect/scala/reflect/io/PlainFile.scala
index 31df78f995..8f24d84488 100644
--- a/src/reflect/scala/reflect/io/PlainFile.scala
+++ b/src/reflect/scala/reflect/io/PlainFile.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package io
import java.io.{ FileInputStream, FileOutputStream, IOException }
@@ -56,8 +57,14 @@ class PlainFile(val givenPath: Path) extends AbstractFile {
/** Returns all abstract subfiles of this abstract directory. */
def iterator: Iterator[AbstractFile] = {
+ // Optimization: Assume that the file was not deleted and did not have permissions changed
+ // between the call to `list` and the iteration. This saves a call to `exists`.
+ def existsFast(path: Path) = path match {
+ case (_: Directory | _: io.File) => true
+ case _ => path.exists
+ }
if (!isDirectory) Iterator.empty
- else givenPath.toDirectory.list filter (_.exists) map (new PlainFile(_))
+ else givenPath.toDirectory.list filter existsFast map (new PlainFile(_))
}
/**
diff --git a/src/reflect/scala/reflect/io/Streamable.scala b/src/reflect/scala/reflect/io/Streamable.scala
index 1d51ad7f54..aa47947672 100644
--- a/src/reflect/scala/reflect/io/Streamable.scala
+++ b/src/reflect/scala/reflect/io/Streamable.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.reflect
+package scala
+package reflect
package io
import java.net.{ URI, URL }
diff --git a/src/reflect/scala/reflect/io/VirtualDirectory.scala b/src/reflect/scala/reflect/io/VirtualDirectory.scala
index 210167e5c6..aa6ceaa09f 100644
--- a/src/reflect/scala/reflect/io/VirtualDirectory.scala
+++ b/src/reflect/scala/reflect/io/VirtualDirectory.scala
@@ -2,7 +2,8 @@
* Copyright 2005-2013 LAMP/EPFL
*/
-package scala.reflect
+package scala
+package reflect
package io
import scala.collection.mutable
diff --git a/src/reflect/scala/reflect/io/VirtualFile.scala b/src/reflect/scala/reflect/io/VirtualFile.scala
index 8cc83b6a50..45f38db745 100644
--- a/src/reflect/scala/reflect/io/VirtualFile.scala
+++ b/src/reflect/scala/reflect/io/VirtualFile.scala
@@ -3,11 +3,11 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package io
-import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, InputStream, OutputStream }
-import java.io.{ File => JFile }
+import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, InputStream, OutputStream, File => JFile }
/** This class implements an in-memory file.
*
@@ -60,9 +60,13 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF
/** @inheritdoc */
override def isVirtual: Boolean = true
+ // private var _lastModified: Long = 0
+ // _lastModified
+
/** Returns the time that this abstract file was last modified. */
- private var _lastModified: Long = 0
- def lastModified: Long = _lastModified
+ // !!! Except it doesn't - it's private and never set - so I replaced it
+ // with constant 0 to save the field.
+ def lastModified: Long = 0
/** Returns all abstract subfiles of this abstract directory. */
def iterator: Iterator[AbstractFile] = {
diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala
index 11d04538e9..eabf1dcbab 100644
--- a/src/reflect/scala/reflect/io/ZipArchive.scala
+++ b/src/reflect/scala/reflect/io/ZipArchive.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.reflect
+package scala
+package reflect
package io
import java.net.URL
@@ -267,8 +268,8 @@ final class ManifestResources(val url: URL) extends ZipArchive(null) {
def input = url.openStream()
def lastModified =
try url.openConnection().getLastModified()
- catch { case _: IOException => 0 }
-
+ catch { case _: IOException => 0 }
+
override def canEqual(other: Any) = other.isInstanceOf[ManifestResources]
override def hashCode() = url.hashCode
override def equals(that: Any) = that match {
diff --git a/src/reflect/scala/reflect/macros/Aliases.scala b/src/reflect/scala/reflect/macros/Aliases.scala
index 92d76f4370..9e05f343e6 100644
--- a/src/reflect/scala/reflect/macros/Aliases.scala
+++ b/src/reflect/scala/reflect/macros/Aliases.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
diff --git a/src/reflect/scala/reflect/macros/Attachments.scala b/src/reflect/scala/reflect/macros/Attachments.scala
index eeb87fafcc..c1ab269268 100644
--- a/src/reflect/scala/reflect/macros/Attachments.scala
+++ b/src/reflect/scala/reflect/macros/Attachments.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
diff --git a/src/reflect/scala/reflect/macros/Context.scala b/src/reflect/scala/reflect/macros/Context.scala
index f4a4631e53..434b7c1b9c 100644
--- a/src/reflect/scala/reflect/macros/Context.scala
+++ b/src/reflect/scala/reflect/macros/Context.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
// todo. introduce context hierarchy
diff --git a/src/reflect/scala/reflect/macros/Enclosures.scala b/src/reflect/scala/reflect/macros/Enclosures.scala
index fd91333dae..d6ba5f39cd 100644
--- a/src/reflect/scala/reflect/macros/Enclosures.scala
+++ b/src/reflect/scala/reflect/macros/Enclosures.scala
@@ -1,6 +1,9 @@
-package scala.reflect
+package scala
+package reflect
package macros
+import scala.language.existentials // SI-6541
+
/**
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
*
@@ -42,13 +45,17 @@ trait Enclosures {
*/
def enclosingMacros: List[Context]
- /** Types along with corresponding trees for which implicit arguments are currently searched.
+ /** Information about one of the currently considered implicit candidates.
+ * Candidates are used in plural form, because implicit parameters may themselves have implicit parameters,
+ * hence implicit searches can recursively trigger other implicit searches.
+ *
* Can be useful to get information about an application with an implicit parameter that is materialized during current macro expansion.
+ * If we're in an implicit macro being expanded, it's included in this list.
*
* Unlike `openImplicits`, this is a val, which means that it gets initialized when the context is created
* and always stays the same regardless of whatever happens during macro expansion.
*/
- def enclosingImplicits: List[(Type, Tree)]
+ def enclosingImplicits: List[ImplicitCandidate]
/** Tries to guess a position for the enclosing application.
* But that is simple, right? Just dereference `pos` of `macroApplication`? Not really.
@@ -100,4 +107,4 @@ trait Enclosures {
*/
case class EnclosureException(expected: Class[_], enclosingTrees: List[Tree])
extends Exception(s"Couldn't find a tree of type $expected among enclosing trees $enclosingTrees")
-} \ No newline at end of file
+}
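
With this change `enclosingImplicits` no longer returns bare `(Type, Tree)` pairs but `ImplicitCandidate` values that also carry the candidate's prefix and symbol. A hedged sketch of a macro implementation that walks the new list (the `ImplicitTrace`/`impl` names are illustrative, and a matching macro def such as `def trace: Unit = macro ImplicitTrace.impl` would be needed to invoke it):

    import scala.reflect.macros.Context

    object ImplicitTrace {
      def impl(c: Context): c.Expr[Unit] = {
        c.enclosingImplicits foreach { candidate =>
          // pre/sym describe the candidate itself; pt/tree describe the search it takes part in
          c.info(c.enclosingPosition,
            s"trying ${candidate.sym} (prefix ${candidate.pre}) for expected type ${candidate.pt}",
            force = false)
        }
        c.literalUnit
      }
    }
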
diff --git a/src/reflect/scala/reflect/macros/Evals.scala b/src/reflect/scala/reflect/macros/Evals.scala
index 37680c219b..70b2ab58d4 100644
--- a/src/reflect/scala/reflect/macros/Evals.scala
+++ b/src/reflect/scala/reflect/macros/Evals.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
@@ -54,4 +55,4 @@ trait Evals {
* refers to a runtime value `x`, which is unknown at compile time.
*/
def eval[T](expr: Expr[T]): T
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/macros/ExprUtils.scala b/src/reflect/scala/reflect/macros/ExprUtils.scala
index 458cde9692..af11bd6efc 100644
--- a/src/reflect/scala/reflect/macros/ExprUtils.scala
+++ b/src/reflect/scala/reflect/macros/ExprUtils.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
diff --git a/src/reflect/scala/reflect/macros/FrontEnds.scala b/src/reflect/scala/reflect/macros/FrontEnds.scala
index 67b24088b5..6abd8c335b 100644
--- a/src/reflect/scala/reflect/macros/FrontEnds.scala
+++ b/src/reflect/scala/reflect/macros/FrontEnds.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
@@ -44,4 +45,4 @@ trait FrontEnds {
* Use `enclosingPosition` if you're in doubt what position to pass to `pos`.
*/
def abort(pos: Position, msg: String): Nothing
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/macros/Infrastructure.scala b/src/reflect/scala/reflect/macros/Infrastructure.scala
index 99706e84fe..eb63fb7b7f 100644
--- a/src/reflect/scala/reflect/macros/Infrastructure.scala
+++ b/src/reflect/scala/reflect/macros/Infrastructure.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
@@ -22,4 +23,4 @@ trait Infrastructure {
/** Exposes current classpath. */
def classPath: List[java.net.URL]
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/macros/Macro.scala b/src/reflect/scala/reflect/macros/Macro.scala
new file mode 100644
index 0000000000..44bedf483d
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/Macro.scala
@@ -0,0 +1,39 @@
+package scala.reflect
+package macros
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * Traditionally macro implementations are defined as methods,
+ * but this trait provides an alternative way of encoding macro impls as
+ * bundles, traits which extend `scala.reflect.macros.Macro`.
+ *
+ * Instead of:
+ *
+ * def impl[T: c.WeakTypeTag](c: Context)(x: c.Expr[Int]) = ...
+ *
+ * One can write:
+ *
+ * trait Impl extends Macro {
+ * def apply[T: c.WeakTypeTag](x: c.Expr[Int]) = ...
+ * }
+ *
+ * Without changing anything else at all.
+ *
+ * This language feature is useful in itself when macro implementations
+ * are complex and need to be modularized. The state-of-the-art technique for addressing this need is quite heavyweight:
+ * http://docs.scala-lang.org/overviews/macros/overview.html#writing_bigger_macros.
+ *
+ * However, the utility of this approach to writing macros isn't limited to convenience.
+ * When a macro implementation becomes not just a function, but a full-fledged module,
+ * it can define callbacks that will be called by the compiler upon interesting events.
+ * In subsequent commits I will add support for programmable type inference.
+ */
+trait Macro {
+ /** The context to be used by the macro implementation.
+ *
+ * Vanilla macro implementations have to carry it in their signatures; however, when a macro is a full-fledged module,
+ * it can define the context next to the implementation, which makes the implementation signature more lightweight.
+ */
+ val c: Context
+}
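
A hedged, side-by-side sketch of the two encodings the scaladoc above contrasts, written against the experimental `Macro` trait added in this commit (the `Lib`/`hello` names are illustrative, and the wiring of a bundle to a macro def is not shown because it is still in flux at this point in the series):

    import scala.reflect.macros.{ Context, Macro }

    object Lib {
      // traditional encoding: the context travels through the signature
      def helloImpl(c: Context): c.Expr[String] = c.literal("hello")

      // bundle encoding: the context is a member inherited from Macro,
      // so the implementation's own signature stays lightweight
      trait HelloBundle extends Macro {
        def hello: c.Expr[String] = c.literal("hello")
      }
    }
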
diff --git a/src/reflect/scala/reflect/macros/Names.scala b/src/reflect/scala/reflect/macros/Names.scala
index 7e2ac5e02d..8773175561 100644
--- a/src/reflect/scala/reflect/macros/Names.scala
+++ b/src/reflect/scala/reflect/macros/Names.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
diff --git a/src/reflect/scala/reflect/macros/Parsers.scala b/src/reflect/scala/reflect/macros/Parsers.scala
index b4b93da3fa..3b25309614 100644
--- a/src/reflect/scala/reflect/macros/Parsers.scala
+++ b/src/reflect/scala/reflect/macros/Parsers.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
diff --git a/src/reflect/scala/reflect/macros/Reifiers.scala b/src/reflect/scala/reflect/macros/Reifiers.scala
index 1eae3e3fce..6ebd2db730 100644
--- a/src/reflect/scala/reflect/macros/Reifiers.scala
+++ b/src/reflect/scala/reflect/macros/Reifiers.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
diff --git a/src/reflect/scala/reflect/macros/Synthetics.scala b/src/reflect/scala/reflect/macros/Synthetics.scala
index 14c6c930b3..5e422ee89f 100644
--- a/src/reflect/scala/reflect/macros/Synthetics.scala
+++ b/src/reflect/scala/reflect/macros/Synthetics.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
diff --git a/src/reflect/scala/reflect/macros/TreeBuilder.scala b/src/reflect/scala/reflect/macros/TreeBuilder.scala
index 19230010e6..427b4f70d1 100644
--- a/src/reflect/scala/reflect/macros/TreeBuilder.scala
+++ b/src/reflect/scala/reflect/macros/TreeBuilder.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
diff --git a/src/reflect/scala/reflect/macros/Typers.scala b/src/reflect/scala/reflect/macros/Typers.scala
index 09a2373205..d7aec9b3ef 100644
--- a/src/reflect/scala/reflect/macros/Typers.scala
+++ b/src/reflect/scala/reflect/macros/Typers.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
@@ -10,8 +11,6 @@ package macros
trait Typers {
self: Context =>
- import universe._
-
/** Contexts that represent macros in-flight, including the current one. Very much like a stack trace, but for macros only.
* Can be useful for interoperating with other macros and for imposing compiler-friendly limits on macro expansion.
*
@@ -24,13 +23,26 @@ trait Typers {
*/
def openMacros: List[Context]
- /** Types along with corresponding trees for which implicit arguments are currently searched.
+ /** Information about one of the currently considered implicit candidates.
+ * Candidates are used in plural form, because implicit parameters may themselves have implicit parameters,
+ * hence implicit searches can recursively trigger other implicit searches.
+ *
+ * `pre` and `sym` provide information about the candidate itself.
+ * `pt` and `tree` store the parameters of the implicit search the candidate is participating in.
+ */
+ case class ImplicitCandidate(pre: Type, sym: Symbol, pt: Type, tree: Tree)
+
+ /** Information about one of the currently considered implicit candidates.
+ * Candidates are used in plural form, because implicit parameters may themselves have implicit parameters,
+ * hence implicit searches can recursively trigger other implicit searches.
+ *
* Can be useful to get information about an application with an implicit parameter that is materialized during current macro expansion.
+ * If we're in an implicit macro being expanded, it's included in this list.
*
* Unlike `enclosingImplicits`, this is a def, which means that it gets recalculated on every invocation,
* so it might change depending on what is going on during macro expansion.
*/
- def openImplicits: List[(Type, Tree)]
+ def openImplicits: List[ImplicitCandidate]
/** Typechecks the provided tree against the expected type `pt` in the macro callsite context.
*
@@ -45,7 +57,7 @@ trait Typers {
*
* @throws [[scala.reflect.macros.TypecheckException]]
*/
- def typeCheck(tree: Tree, pt: Type = WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree
+ def typeCheck(tree: Tree, pt: Type = universe.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree
/** Infers an implicit value of the expected type `pt` in the macro callsite context.
* Optional `pos` parameter provides a position that will be associated with the implicit search.
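
The last hunk drops the wildcard import of `universe._` and spells the default expected type out as `universe.WildcardType`. A minimal sketch of calling `typeCheck` under the new signature (the `TypecheckProbe` object and the parsed snippet are illustrative):

    import scala.reflect.macros.Context

    object TypecheckProbe {
      def impl(c: Context): c.Expr[Unit] = {
        val tree  = c.parse("List(1, 2, 3).map(_ + 1)")
        // pt defaults to universe.WildcardType; silent = true yields EmptyTree on failure
        val typed = c.typeCheck(tree, silent = true)
        if (typed.isEmpty) c.abort(c.enclosingPosition, "snippet does not typecheck")
        c.info(c.enclosingPosition, s"typechecked to ${typed.tpe}", force = false)
        c.literalUnit
      }
    }
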
diff --git a/src/reflect/scala/reflect/macros/Universe.scala b/src/reflect/scala/reflect/macros/Universe.scala
index 31f3192a85..d1d90f53c9 100644
--- a/src/reflect/scala/reflect/macros/Universe.scala
+++ b/src/reflect/scala/reflect/macros/Universe.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
diff --git a/src/reflect/scala/reflect/macros/package.scala b/src/reflect/scala/reflect/macros/package.scala
index 21d189bb25..2e2e8e79f8 100644
--- a/src/reflect/scala/reflect/macros/package.scala
+++ b/src/reflect/scala/reflect/macros/package.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
/**
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index f324f5145a..16405a88b4 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package runtime
import scala.ref.WeakReference
@@ -6,14 +7,14 @@ import scala.collection.mutable.WeakHashMap
import java.lang.{Class => jClass, Package => jPackage}
import java.lang.reflect.{
- Method => jMethod, Constructor => jConstructor, Modifier => jModifier, Field => jField,
+ Method => jMethod, Constructor => jConstructor, Field => jField,
Member => jMember, Type => jType, TypeVariable => jTypeVariable, Array => jArray,
+ AccessibleObject => jAccessibleObject,
GenericDeclaration, GenericArrayType, ParameterizedType, WildcardType, AnnotatedElement }
import java.lang.annotation.{Annotation => jAnnotation}
import java.io.IOException
-import internal.MissingRequirementError
+import scala.reflect.internal.{ MissingRequirementError, JavaAccFlags, JMethodOrConstructor }
import internal.pickling.ByteCodecs
-import internal.ClassfileConstants._
import internal.pickling.UnPickler
import scala.collection.mutable.{ HashMap, ListBuffer }
import internal.Flags._
@@ -88,12 +89,12 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
// ----------- Caching ------------------------------------------------------------------
- private val classCache = new TwoWayCache[jClass[_], ClassSymbol]
- private val packageCache = new TwoWayCache[Package, ModuleSymbol]
- private val methodCache = new TwoWayCache[jMethod, MethodSymbol]
+ private val classCache = new TwoWayCache[jClass[_], ClassSymbol]
+ private val packageCache = new TwoWayCache[Package, ModuleSymbol]
+ private val methodCache = new TwoWayCache[jMethod, MethodSymbol]
private val constructorCache = new TwoWayCache[jConstructor[_], MethodSymbol]
- private val fieldCache = new TwoWayCache[jField, TermSymbol]
- private val tparamCache = new TwoWayCache[jTypeVariable[_ <: GenericDeclaration], TypeSymbol]
+ private val fieldCache = new TwoWayCache[jField, TermSymbol]
+ private val tparamCache = new TwoWayCache[jTypeVariable[_ <: GenericDeclaration], TypeSymbol]
private[runtime] def toScala[J: HasJavaClass, S](cache: TwoWayCache[J, S], key: J)(body: (JavaMirror, J) => S): S =
cache.toScala(key){
@@ -101,38 +102,35 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
body(mirrorDefining(jclazz), key)
}
- private implicit val classHasJavaClass: HasJavaClass[jClass[_]] =
- new HasJavaClass(identity)
- private implicit val methHasJavaClass: HasJavaClass[jMethod]
- = new HasJavaClass(_.getDeclaringClass)
- private implicit val fieldHasJavaClass: HasJavaClass[jField] =
- new HasJavaClass(_.getDeclaringClass)
- private implicit val constrHasJavaClass: HasJavaClass[jConstructor[_]] =
- new HasJavaClass(_.getDeclaringClass)
+ private implicit val classHasJavaClass: HasJavaClass[jClass[_]] = new HasJavaClass(identity)
+ private implicit val methHasJavaClass: HasJavaClass[jMethod] = new HasJavaClass(_.getDeclaringClass)
+ private implicit val fieldHasJavaClass: HasJavaClass[jField] = new HasJavaClass(_.getDeclaringClass)
+ private implicit val constrHasJavaClass: HasJavaClass[jConstructor[_]] = new HasJavaClass(_.getDeclaringClass)
private implicit val tparamHasJavaClass: HasJavaClass[jTypeVariable[_ <: GenericDeclaration]] =
new HasJavaClass ( (tparam: jTypeVariable[_ <: GenericDeclaration]) => {
tparam.getGenericDeclaration match {
- case jclazz: jClass[_] => jclazz
- case jmeth: jMethod => jmeth.getDeclaringClass
+ case jclazz: jClass[_] => jclazz
+ case jmeth: jMethod => jmeth.getDeclaringClass
case jconstr: jConstructor[_] => jconstr.getDeclaringClass
}
})
// ----------- Implementations of mirror operations and classes -------------------
- private def ErrorInnerClass(sym: Symbol) = throw new ScalaReflectionException(s"$sym is an inner class, use reflectClass on an InstanceMirror to obtain its ClassMirror")
- private def ErrorInnerModule(sym: Symbol) = throw new ScalaReflectionException(s"$sym is an inner module, use reflectModule on an InstanceMirror to obtain its ModuleMirror")
- private def ErrorStaticClass(sym: Symbol) = throw new ScalaReflectionException(s"$sym is a static class, use reflectClass on a RuntimeMirror to obtain its ClassMirror")
- private def ErrorStaticModule(sym: Symbol) = throw new ScalaReflectionException(s"$sym is a static module, use reflectModule on a RuntimeMirror to obtain its ModuleMirror")
- private def ErrorNotMember(sym: Symbol, owner: Symbol) = throw new ScalaReflectionException(s"expected a member of $owner, you provided ${sym.kindString} ${sym.fullName}")
- private def ErrorNotField(sym: Symbol) = throw new ScalaReflectionException(s"expected a field or an accessor method symbol, you provided $sym")
- private def ErrorNonExistentField(sym: Symbol) = throw new ScalaReflectionException(
+ private def abort(msg: String) = throw new ScalaReflectionException(msg)
+
+ private def ErrorInnerClass(sym: Symbol) = abort(s"$sym is an inner class, use reflectClass on an InstanceMirror to obtain its ClassMirror")
+ private def ErrorInnerModule(sym: Symbol) = abort(s"$sym is an inner module, use reflectModule on an InstanceMirror to obtain its ModuleMirror")
+ private def ErrorStaticClass(sym: Symbol) = abort(s"$sym is a static class, use reflectClass on a RuntimeMirror to obtain its ClassMirror")
+ private def ErrorStaticModule(sym: Symbol) = abort(s"$sym is a static module, use reflectModule on a RuntimeMirror to obtain its ModuleMirror")
+ private def ErrorNotMember(sym: Symbol, owner: Symbol) = abort(s"expected a member of $owner, you provided ${sym.kindString} ${sym.fullName}")
+ private def ErrorNotField(sym: Symbol) = abort(s"expected a field or an accessor method symbol, you provided $sym")
+ private def ErrorNotConstructor(sym: Symbol, owner: Symbol) = abort(s"expected a constructor of $owner, you provided $sym")
+ private def ErrorFree(member: Symbol, freeType: Symbol) = abort(s"cannot reflect ${member.kindString} ${member.name}, because it's a member of a weak type ${freeType.name}")
+ private def ErrorNonExistentField(sym: Symbol) = abort(
sm"""Scala field ${sym.name} isn't represented as a Java field, neither it has a Java accessor method
|note that private parameters of class constructors don't get mapped onto fields and/or accessors,
|unless they are used outside of their declaring constructors.""")
- private def ErrorSetImmutableField(sym: Symbol) = throw new ScalaReflectionException(s"cannot set an immutable field ${sym.name}")
- private def ErrorNotConstructor(sym: Symbol, owner: Symbol) = throw new ScalaReflectionException(s"expected a constructor of $owner, you provided $sym")
- private def ErrorFree(member: Symbol, freeType: Symbol) = throw new ScalaReflectionException(s"cannot reflect ${member.kindString} ${member.name}, because it's a member of a weak type ${freeType.name}")
/** Helper functions for extracting typed values from a (Class[_], Any)
* representing an annotation argument.
@@ -210,7 +208,6 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
case _ => body
}
}
-
private def checkMemberOf(sym: Symbol, owner: ClassSymbol) {
if (sym.owner == AnyClass || sym.owner == AnyRefClass || sym.owner == ObjectClass) {
// do nothing
@@ -236,8 +233,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
if (staticClazz.isPrimitive) staticClazz else dynamicClazz
}
- private class JavaInstanceMirror[T: ClassTag](val instance: T)
- extends InstanceMirror {
+ private class JavaInstanceMirror[T: ClassTag](val instance: T) extends InstanceMirror {
def symbol = thisMirror.classSymbol(preciseClass(instance))
def reflectField(field: TermSymbol): FieldMirror = {
checkMemberOf(field, symbol)
@@ -269,14 +265,11 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private class JavaFieldMirror(val receiver: Any, val symbol: TermSymbol)
extends FieldMirror {
- lazy val jfield = {
- val jfield = fieldToJava(symbol)
- if (!jfield.isAccessible) jfield.setAccessible(true)
- jfield
- }
- def get = jfield.get(receiver)
+ lazy val jfield = ensureAccessible(fieldToJava(symbol))
+ def get = jfield get receiver
def set(value: Any) = {
- if (!symbol.isMutable) ErrorSetImmutableField(symbol)
+ // it appears useful to be able to set values of vals, therefore I'm disabling this check
+ // if (!symbol.isMutable) ErrorSetImmutableField(symbol)
jfield.set(receiver, value)
}
def bind(newReceiver: Any) = new JavaFieldMirror(newReceiver, symbol)
@@ -338,15 +331,8 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
}
}
}
-
- private abstract class JavaMethodMirror(val symbol: MethodSymbol)
- extends MethodMirror {
- lazy val jmeth = {
- val jmeth = methodToJava(symbol)
- if (!jmeth.isAccessible) jmeth.setAccessible(true)
- jmeth
- }
-
+ private abstract class JavaMethodMirror(val symbol: MethodSymbol) extends MethodMirror {
+ lazy val jmeth = ensureAccessible(methodToJava(symbol))
def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args.asInstanceOf[Seq[AnyRef]]: _*)
def jinvoke(jmeth: jMethod, receiver: Any, args: Seq[Any]): Any = {
@@ -418,13 +404,13 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
if (!perfectMatch && !varargMatch) {
val n_arguments = if (isVarArgsList(params)) s"${params.length - 1} or more" else s"${params.length}"
val s_arguments = if (params.length == 1 && !isVarArgsList(params)) "argument" else "arguments"
- throw new ScalaReflectionException(s"${showMethodSig(symbol)} takes $n_arguments $s_arguments")
+ abort(s"${showMethodSig(symbol)} takes $n_arguments $s_arguments")
}
def objReceiver = receiver.asInstanceOf[AnyRef]
def objArg0 = args(0).asInstanceOf[AnyRef]
def objArgs = args.asInstanceOf[Seq[AnyRef]]
- def fail(msg: String) = throw new ScalaReflectionException(msg + ", it cannot be invoked with mirrors")
+ def fail(msg: String) = abort(msg + ", it cannot be invoked with mirrors")
def invokePrimitiveMethod = {
val jmeths = classOf[BoxesRunTime].getDeclaredMethods.filter(_.getName == nme.primitiveMethodName(symbol.name).toString)
@@ -464,14 +450,10 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
extends MethodMirror {
def bind(newReceiver: Any) = new JavaConstructorMirror(newReceiver.asInstanceOf[AnyRef], symbol)
override val receiver = outer
- lazy val jconstr = {
- val jconstr = constructorToJava(symbol)
- if (!jconstr.isAccessible) jconstr.setAccessible(true)
- jconstr
- }
+ lazy val jconstr = ensureAccessible(constructorToJava(symbol))
def apply(args: Any*): Any = {
if (symbol.owner == ArrayClass)
- throw new ScalaReflectionException("Cannot instantiate arrays with mirrors. Consider using `scala.reflect.ClassTag(<class of element>).newArray(<length>)` instead")
+ abort("Cannot instantiate arrays with mirrors. Consider using `scala.reflect.ClassTag(<class of element>).newArray(<length>)` instead")
val effectiveArgs =
if (outer == null) args.asInstanceOf[Seq[AnyRef]]
@@ -532,7 +514,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
}
def javaClass(path: String): jClass[_] =
- Class.forName(path, true, classLoader)
+ jClass.forName(path, true, classLoader)
/** Does `path` correspond to a Java class with that fully qualified name in the current class loader? */
def tryJavaClass(path: String): Option[jClass[_]] = (
@@ -599,15 +581,10 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
loadBytes[Array[String]]("scala.reflect.ScalaLongSignature") match {
case Some(slsig) =>
info(s"unpickling Scala $clazz and $module with long Scala signature")
- val byteSegments = slsig map (_.getBytes)
- val lens = byteSegments map ByteCodecs.decode
- val bytes = Array.ofDim[Byte](lens.sum)
- var len = 0
- for ((bs, l) <- byteSegments zip lens) {
- bs.copyToArray(bytes, len, l)
- len += l
- }
- unpickler.unpickle(bytes, 0, clazz, module, jclazz.getName)
+ val encoded = slsig flatMap (_.getBytes)
+ val len = ByteCodecs.decode(encoded)
+ val decoded = encoded.take(len)
+ unpickler.unpickle(decoded, 0, clazz, module, jclazz.getName)
case None =>
// class does not have a Scala signature; it's a Java class
info("translating reflection info for Java " + jclazz) //debug
@@ -655,13 +632,22 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
sym setAnnotations (jann.getAnnotations map JavaAnnotationProxy).toList
// SI-7065: we're not using getGenericExceptionTypes here to be consistent with ClassfileParser
val jexTpes = jann match {
- case jm: jMethod => jm.getExceptionTypes.toList
+ case jm: jMethod => jm.getExceptionTypes.toList
case jconstr: jConstructor[_] => jconstr.getExceptionTypes.toList
- case _ => Nil
+ case _ => Nil
}
jexTpes foreach (jexTpe => sym.addThrowsAnnotation(classSymbol(jexTpe)))
}
+ private implicit class jClassOps(val clazz: jClass[_]) {
+ def javaFlags: JavaAccFlags = JavaAccFlags(clazz)
+ def scalaFlags: Long = javaFlags.toScalaFlags
+ }
+ private implicit class jMemberOps(val member: jMember) {
+ def javaFlags: JavaAccFlags = JavaAccFlags(member)
+ def scalaFlags: Long = javaFlags.toScalaFlags
+ }
+
/**
* A completer that fills in the types of a Scala class and its companion object
* by copying corresponding type info from a Java class. This completer is used
@@ -681,14 +667,14 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
override def load(sym: Symbol): Unit = {
debugInfo("completing from Java " + sym + "/" + clazz.fullName)//debug
assert(sym == clazz || (module != NoSymbol && (sym == module || sym == module.moduleClass)), sym)
- val flags = toScalaClassFlags(jclazz.getModifiers)
+ val flags = jclazz.scalaFlags
clazz setFlag (flags | JAVA)
if (module != NoSymbol) {
module setFlag (flags & PRIVATE | JAVA)
module.moduleClass setFlag (flags & PRIVATE | JAVA)
}
- relatedSymbols foreach (importPrivateWithinFromJavaFlags(_, jclazz.getModifiers))
+ propagatePackageBoundary(jclazz, relatedSymbols: _*)
copyAnnotations(clazz, jclazz)
// to do: annotations to set also for module?
@@ -710,7 +696,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
val parents = try {
parentsLevel += 1
val jsuperclazz = jclazz.getGenericSuperclass
- val superclazz = if (jsuperclazz == null) AnyClass.tpe else typeToScala(jsuperclazz)
+ val superclazz = if (jsuperclazz == null) AnyTpe else typeToScala(jsuperclazz)
superclazz :: (jclazz.getGenericInterfaces.toList map typeToScala)
} finally {
parentsLevel -= 1
@@ -720,28 +706,21 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
module.moduleClass setInfo new ClassInfoType(List(), newScope, module.moduleClass)
}
- def enter(sym: Symbol, mods: Int) =
- (if (jModifier.isStatic(mods)) module.moduleClass else clazz).info.decls enter sym
+ def enter(sym: Symbol, mods: JavaAccFlags) =
+ ( if (mods.isStatic) module.moduleClass else clazz ).info.decls enter sym
for (jinner <- jclazz.getDeclaredClasses)
jclassAsScala(jinner) // inner class is entered as a side-effect
// no need to call enter explicitly
- pendingLoadActions = { () =>
-
- for (jfield <- jclazz.getDeclaredFields)
- enter(jfieldAsScala(jfield), jfield.getModifiers)
-
- for (jmeth <- jclazz.getDeclaredMethods)
- enter(jmethodAsScala(jmeth), jmeth.getModifiers)
-
- for (jconstr <- jclazz.getConstructors)
- enter(jconstrAsScala(jconstr), jconstr.getModifiers)
-
- } :: pendingLoadActions
+ pendingLoadActions ::= { () =>
+ jclazz.getDeclaredFields foreach (f => enter(jfieldAsScala(f), f.javaFlags))
+ jclazz.getDeclaredMethods foreach (m => enter(jmethodAsScala(m), m.javaFlags))
+ jclazz.getConstructors foreach (c => enter(jconstrAsScala(c), c.javaFlags))
+ }
if (parentsLevel == 0) {
- while (!pendingLoadActions.isEmpty) {
+ while (pendingLoadActions.nonEmpty) {
val item = pendingLoadActions.head
pendingLoadActions = pendingLoadActions.tail
item()
@@ -760,8 +739,8 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
* If Java modifiers `mods` contain STATIC, return the module class
* of the companion module of `clazz`, otherwise the class `clazz` itself.
*/
- private def followStatic(clazz: Symbol, mods: Int) =
- if (jModifier.isStatic(mods)) clazz.companionModule.moduleClass else clazz
+ private def followStatic(clazz: Symbol, mods: JavaAccFlags) =
+ if (mods.isStatic) clazz.companionModule.moduleClass else clazz
/** Methods which need to be treated with care
* because they either are getSimpleName or call getSimpleName:
@@ -797,7 +776,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
if (jclazz.isMemberClass) {
val jEnclosingClass = jclazz.getEnclosingClass
val sEnclosingClass = classToScala(jEnclosingClass)
- followStatic(sEnclosingClass, jclazz.getModifiers)
+ followStatic(sEnclosingClass, jclazz.javaFlags)
} else if (jclazz.isLocalClass0) {
val jEnclosingMethod = jclazz.getEnclosingMethod
if (jEnclosingMethod != null) {
@@ -825,7 +804,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
* The Scala owner of the Scala symbol corresponding to the Java member `jmember`
*/
private def sOwner(jmember: jMember): Symbol = {
- followStatic(classToScala(jmember.getDeclaringClass), jmember.getModifiers)
+ followStatic(classToScala(jmember.getDeclaringClass), jmember.javaFlags)
}
/**
@@ -866,7 +845,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private def methodToScala1(jmeth: jMethod): MethodSymbol = {
val jOwner = jmeth.getDeclaringClass
val preOwner = classToScala(jOwner)
- val owner = followStatic(preOwner, jmeth.getModifiers)
+ val owner = followStatic(preOwner, jmeth.javaFlags)
(lookup(owner, jmeth.getName) suchThat (erasesTo(_, jmeth)) orElse jmethodAsScala(jmeth))
.asMethod
}
@@ -880,7 +859,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
toScala(constructorCache, jconstr)(_ constructorToScala1 _)
private def constructorToScala1(jconstr: jConstructor[_]): MethodSymbol = {
- val owner = followStatic(classToScala(jconstr.getDeclaringClass), jconstr.getModifiers)
+ val owner = followStatic(classToScala(jconstr.getDeclaringClass), jconstr.javaFlags)
(lookup(owner, jconstr.getName) suchThat (erasesTo(_, jconstr)) orElse jconstrAsScala(jconstr))
.asMethod
}
@@ -988,8 +967,8 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
javaTypeToValueClass(jclazz) orElse lookupClass
assert (cls.isType,
- sm"""${if (cls == NoSymbol) "not a type: symbol" else "no symbol could be"}
- | loaded from $jclazz in $owner with name $simpleName and classloader $classLoader""")
+ (if (cls != NoSymbol) s"not a type: symbol $cls" else "no symbol could be") +
+ s" loaded from $jclazz in $owner with name $simpleName and classloader $classLoader")
cls.asClass
}
@@ -1018,6 +997,10 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
case jmeth: jMethod => methodToScala(jmeth)
case jconstr: jConstructor[_] => constructorToScala(jconstr)
}
+ def reflectMemberToScala(m: jMember): Symbol = m match {
+ case x: GenericDeclaration => genericDeclarationToScala(x)
+ case x: jField => jfieldAsScala(x)
+ }
/**
* Given some Java type arguments, a corresponding list of Scala types, plus potentially
@@ -1092,10 +1075,11 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private def jfieldAsScala1(jfield: jField): TermSymbol = {
val field = sOwner(jfield)
- .newValue(newTermName(jfield.getName), NoPosition, toScalaFieldFlags(jfield.getModifiers))
+ .newValue(newTermName(jfield.getName), NoPosition, jfield.scalaFlags)
.setInfo(typeToScala(jfield.getGenericType))
- fieldCache enter (jfield, field)
- importPrivateWithinFromJavaFlags(field, jfield.getModifiers)
+
+ fieldCache.enter(jfield, field)
+ propagatePackageBoundary(jfield, field)
copyAnnotations(field, jfield)
field
}
@@ -1115,16 +1099,19 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private def jmethodAsScala1(jmeth: jMethod): MethodSymbol = {
val clazz = sOwner(jmeth)
- val meth = clazz.newMethod(newTermName(jmeth.getName), NoPosition, toScalaMethodFlags(jmeth.getModifiers))
+ val meth = clazz.newMethod(newTermName(jmeth.getName), NoPosition, jmeth.scalaFlags)
methodCache enter (jmeth, meth)
val tparams = jmeth.getTypeParameters.toList map createTypeParameter
val paramtpes = jmeth.getGenericParameterTypes.toList map typeToScala
val resulttpe = typeToScala(jmeth.getGenericReturnType)
setMethType(meth, tparams, paramtpes, resulttpe)
- importPrivateWithinFromJavaFlags(meth, jmeth.getModifiers)
+ propagatePackageBoundary(jmeth.javaFlags, meth)
copyAnnotations(meth, jmeth)
- if ((jmeth.getModifiers & JAVA_ACC_VARARGS) != 0) meth.setInfo(arrayToRepeated(meth.info))
- meth
+
+ if (jmeth.javaFlags.isVarargs)
+ meth modifyInfo arrayToRepeated
+ else
+ meth
}
/**
@@ -1139,13 +1126,13 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private def jconstrAsScala1(jconstr: jConstructor[_]): MethodSymbol = {
// [Martin] Note: I know there's a lot of duplication wrt jmethodAsScala, but don't think it's worth it to factor this out.
val clazz = sOwner(jconstr)
- val constr = clazz.newConstructor(NoPosition, toScalaMethodFlags(jconstr.getModifiers))
+ val constr = clazz.newConstructor(NoPosition, jconstr.scalaFlags)
constructorCache enter (jconstr, constr)
val tparams = jconstr.getTypeParameters.toList map createTypeParameter
val paramtpes = jconstr.getGenericParameterTypes.toList map typeToScala
setMethType(constr, tparams, paramtpes, clazz.tpe_*)
constr setInfo GenPolyType(tparams, MethodType(clazz.newSyntheticValueParams(paramtpes), clazz.tpe))
- importPrivateWithinFromJavaFlags(constr, jconstr.getModifiers)
+ propagatePackageBoundary(jconstr.javaFlags, constr)
copyAnnotations(constr, jconstr)
constr
}
@@ -1170,13 +1157,15 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
else if (clazz.isTopLevel)
javaClass(clazz.javaClassName)
else if (clazz.owner.isClass) {
- val childOfClass = !clazz.owner.isModuleClass
- val childOfTopLevel = clazz.owner.isTopLevel
+ val childOfClass = !clazz.owner.isModuleClass
+ val childOfTopLevel = clazz.owner.isTopLevel
val childOfTopLevelObject = clazz.owner.isModuleClass && childOfTopLevel
// suggested in https://issues.scala-lang.org/browse/SI-4023?focusedCommentId=54759#comment-54759
var ownerClazz = classToJava(clazz.owner.asClass)
- if (childOfTopLevelObject) ownerClazz = Class.forName(ownerClazz.getName stripSuffix "$", true, ownerClazz.getClassLoader)
+ if (childOfTopLevelObject)
+ ownerClazz = jClass.forName(ownerClazz.getName stripSuffix "$", true, ownerClazz.getClassLoader)
+
val ownerChildren = ownerClazz.getDeclaredClasses
var fullNameOfJavaClass = ownerClazz.getName
@@ -1241,11 +1230,11 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
* Pre: Scala type is already transformed to Java level.
*/
def typeToJavaClass(tpe: Type): jClass[_] = tpe match {
- case ExistentialType(_, rtpe) => typeToJavaClass(rtpe)
- case TypeRef(_, ArrayClass, List(elemtpe)) => jArrayClass(typeToJavaClass(elemtpe))
- case TypeRef(_, sym: ClassSymbol, _) => classToJava(sym.asClass)
+ case ExistentialType(_, rtpe) => typeToJavaClass(rtpe)
+ case TypeRef(_, ArrayClass, List(elemtpe)) => jArrayClass(typeToJavaClass(elemtpe))
+ case TypeRef(_, sym: ClassSymbol, _) => classToJava(sym.asClass)
case tpe @ TypeRef(_, sym: AliasTypeSymbol, _) => typeToJavaClass(tpe.dealias)
- case _ => throw new NoClassDefFoundError("no Java class corresponding to "+tpe+" found")
+ case _ => throw new NoClassDefFoundError("no Java class corresponding to "+tpe+" found")
}
}
@@ -1285,14 +1274,12 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
if (name.isTermName && !owner.isEmptyPackageClass)
return mirror.makeScalaPackage(
if (owner.isRootSymbol) name.toString else owner.fullName+"."+name)
- syntheticCoreClasses get (owner.fullName, name) match {
- case Some(tsym) =>
- // synthetic core classes are only present in root mirrors
- // because Definitions.scala, which initializes and enters them, only affects rootMirror
- // therefore we need to enter them manually for non-root mirrors
- if (mirror ne thisUniverse.rootMirror) owner.info.decls enter tsym
- return tsym
- case None =>
+ syntheticCoreClasses get ((owner.fullName, name)) foreach { tsym =>
+ // synthetic core classes are only present in root mirrors
+ // because Definitions.scala, which initializes and enters them, only affects rootMirror
+ // therefore we need to enter them manually for non-root mirrors
+ if (mirror ne thisUniverse.rootMirror) owner.info.decls enter tsym
+ return tsym
}
}
info("*** missing: "+name+"/"+name.isTermName+"/"+owner+"/"+owner.hasPackageFlag+"/"+owner.info.decls.getClass)
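
The three duplicated `setAccessible` blocks for fields, methods and constructors collapse into calls to an `ensureAccessible` helper (note the new `AccessibleObject => jAccessibleObject` import). Since the helper itself is defined outside these hunks, here is a minimal sketch of its assumed shape:

    import java.lang.reflect.AccessibleObject

    // Assumed shape of the shared helper: force accessibility once, return the member unchanged.
    def ensureAccessible[T <: AccessibleObject](obj: T): T = {
      if (!obj.isAccessible) obj.setAccessible(true)
      obj
    }
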
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
index 4d90afcdc3..06a7db6289 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package runtime
/** An implementation of [[scala.reflect.api.Universe]] for runtime reflection using JVM classloaders.
@@ -9,16 +10,21 @@ package runtime
*/
class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.SymbolTable { self =>
- def inform(msg: String): Unit = log(msg)
+ override def inform(msg: String): Unit = log(msg)
def picklerPhase = internal.SomePhase
lazy val settings = new Settings
private val isLogging = sys.props contains "scala.debug.reflect"
- def log(msg: => AnyRef): Unit = if (isLogging) Console.err.println("[reflect] " + msg)
+ def log(msg: => AnyRef): Unit = if (isLogging) Console.err.println("[reflect] " + msg)
type TreeCopier = InternalTreeCopierOps
def newStrictTreeCopier: TreeCopier = new StrictTreeCopier
def newLazyTreeCopier: TreeCopier = new LazyTreeCopier
+ // can't put this in runtime.Trees since that's mixed with Global in ReflectGlobal, which has the definition from internal.Trees
+ object treeInfo extends {
+ val global: JavaUniverse.this.type = JavaUniverse.this
+ } with internal.TreeInfo
+
init()
}
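
The new `treeInfo` member uses Scala's early-definitions syntax so that `global` is assigned before `internal.TreeInfo`'s body is initialized. A tiny standalone sketch of that pattern, with illustrative names:

    trait Component {
      val universe: String                      // needed while the trait body initializes
      val label = s"component of $universe"
    }

    object Host {
      // early definition: `universe` is set before Component's body runs,
      // so `label` sees the real value instead of null
      object component extends { val universe = "JavaUniverse" } with Component
    }
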
diff --git a/src/reflect/scala/reflect/runtime/ReflectSetup.scala b/src/reflect/scala/reflect/runtime/ReflectSetup.scala
index 6e28fc8520..84f159be00 100644
--- a/src/reflect/scala/reflect/runtime/ReflectSetup.scala
+++ b/src/reflect/scala/reflect/runtime/ReflectSetup.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package runtime
import internal.{SomePhase, NoPhase, Phase, TreeGen}
diff --git a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
index aebaea40af..2db9706007 100644
--- a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
+++ b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
@@ -3,10 +3,12 @@
* @author Paul Phillips
*/
-package scala.reflect.runtime
+package scala
+package reflect.runtime
import java.lang.{Class => jClass}
import java.lang.reflect.{ Method, InvocationTargetException, UndeclaredThrowableException }
+import scala.reflect.internal.util.AbstractFileClassLoader
/** A few java-reflection oriented utility functions useful during reflection bootstrapping.
*/
@@ -28,21 +30,12 @@ private[scala] object ReflectionUtils {
case ex if pf isDefinedAt unwrapThrowable(ex) => pf(unwrapThrowable(ex))
}
- private def systemProperties: Iterator[(String, String)] = {
- import scala.collection.JavaConverters._
- System.getProperties.asScala.iterator
- }
-
- private def inferBootClasspath: String = (
- systemProperties find (_._1 endsWith ".boot.class.path") map (_._2) getOrElse ""
- )
-
def show(cl: ClassLoader): String = {
import scala.language.reflectiveCalls
def isAbstractFileClassLoader(clazz: Class[_]): Boolean = {
if (clazz == null) return false
- if (clazz.getName == "scala.tools.nsc.interpreter.AbstractFileClassLoader") return true
+ if (clazz == classOf[AbstractFileClassLoader]) return true
isAbstractFileClassLoader(clazz.getSuperclass)
}
def inferClasspath(cl: ClassLoader): String = cl match {
@@ -51,7 +44,8 @@ private[scala] object ReflectionUtils {
case cl if cl != null && isAbstractFileClassLoader(cl.getClass) =>
cl.asInstanceOf[{val root: scala.reflect.io.AbstractFile}].root.canonicalPath
case null =>
- inferBootClasspath
+ val loadBootCp = (flavor: String) => scala.util.Properties.propOrNone(flavor + ".boot.class.path")
+ loadBootCp("sun") orElse loadBootCp("java") getOrElse "<unknown>"
case _ =>
"<unknown>"
}
diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala
index 6714bae1e0..a14eafff24 100644
--- a/src/reflect/scala/reflect/runtime/Settings.scala
+++ b/src/reflect/scala/reflect/runtime/Settings.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package runtime
import scala.reflect.internal.settings.MutableSettings
diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
index ea14e8ad43..815cc0c885 100644
--- a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package runtime
import internal.Flags
diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala
index ade7a4a21a..bcd4d16cde 100644
--- a/src/reflect/scala/reflect/runtime/SymbolTable.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package runtime
import scala.reflect.internal.Flags._
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
index 7b280e59b9..132470b2e7 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package runtime
// SI-6240: test thread-safety, make trees synchronized as well
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
index 1154927279..98cad45db1 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package runtime
import scala.reflect.io.AbstractFile
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
index a3e7c28ca4..f4b02c5bcd 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package runtime
import scala.collection.mutable.WeakHashMap
diff --git a/src/reflect/scala/reflect/runtime/TwoWayCache.scala b/src/reflect/scala/reflect/runtime/TwoWayCache.scala
index 05debcba65..181ca6014a 100644
--- a/src/reflect/scala/reflect/runtime/TwoWayCache.scala
+++ b/src/reflect/scala/reflect/runtime/TwoWayCache.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package runtime
import scala.collection.mutable.WeakHashMap
diff --git a/src/reflect/scala/reflect/runtime/package.scala b/src/reflect/scala/reflect/runtime/package.scala
index eadbc0c52e..41c1310e17 100644
--- a/src/reflect/scala/reflect/runtime/package.scala
+++ b/src/reflect/scala/reflect/runtime/package.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
/** Entry points into runtime reflection.
* See [[scala.reflect.api.package the overview page]] for details on how to use them.
@@ -20,7 +21,7 @@ package object runtime {
*/
// implementation hardwired to the `currentMirror` method below
// using the mechanism implemented in `scala.tools.reflect.FastTrack`
- def currentMirror: universe.Mirror = ??? // macro
+ def currentMirror: universe.Mirror = macro ???
}
package runtime {
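
Usage of `currentMirror` is unchanged by the `macro ???` spelling; the call is still intercepted by the compiler's fast track, as the comment above notes. A small reminder of how it is used from client code:

    import scala.reflect.runtime.{ currentMirror, universe => ru }

    object MirrorDemo extends App {
      // currentMirror expands at the call site into a mirror for the caller's classloader
      val listSym: ru.ClassSymbol = currentMirror.staticClass("scala.collection.immutable.List")
      println(listSym.fullName)
    }
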
diff --git a/src/repl/scala/tools/nsc/MainGenericRunner.scala b/src/repl/scala/tools/nsc/MainGenericRunner.scala
index 9e87b6ba55..43f0ea1256 100644
--- a/src/repl/scala/tools/nsc/MainGenericRunner.scala
+++ b/src/repl/scala/tools/nsc/MainGenericRunner.scala
@@ -3,7 +3,8 @@
* @author Lex Spoon
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
import io.{ File }
import util.{ ClassPath, ScalaClassLoader }
diff --git a/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
index 55182a4f95..9353215e1e 100644
--- a/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
@@ -95,4 +95,19 @@ trait ExprTyper {
}
finally typeOfExpressionDepth -= 1
}
+
+ // This only works for proper types.
+ def typeOfTypeString(typeString: String): Type = {
+ def asProperType(): Option[Type] = {
+ val name = freshInternalVarName()
+ val line = "def %s: %s = ???" format (name, typeString)
+ interpretSynthetic(line) match {
+ case IR.Success =>
+ val sym0 = symbolOfTerm(name)
+ Some(sym0.asMethod.returnType)
+ case _ => None
+ }
+ }
+ beSilentDuring(asProperType()) getOrElse NoType
+ }
}
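
The helper interprets a synthetic `def <fresh>: <typeString> = ???` and reads the declared type back off the resulting symbol, which is why it only works for proper types. A standalone analogue of the trick, sketched with a runtime ToolBox instead of the interpreter (assumes scala-compiler on the classpath; only the "typecheck a synthetic tree and read its type back" idea is carried over):

    import scala.reflect.runtime.{ currentMirror, universe => ru }
    import scala.tools.reflect.ToolBox

    object TypeFromString {
      private val tb = currentMirror.mkToolBox()
      // Ascribe ??? with the requested type and read the tree's type back.
      def apply(typeString: String): ru.Type =
        tb.typeCheck(tb.parse(s"??? : ($typeString)")).tpe
    }

    // TypeFromString("List[Int]")   // the type List[Int]
    // TypeFromString("List")        // fails: List is a type constructor, not a proper type
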
diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
index df28e428ce..8ec8b2ed5f 100644
--- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
@@ -3,7 +3,8 @@
* @author Alexander Spoon
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package interpreter
import Predef.{ println => _, _ }
@@ -221,6 +222,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
shCommand,
nullary("silent", "disable/enable automatic printing of results", verbosity),
cmd("type", "[-v] <expr>", "display the type of an expression without evaluating it", typeCommand),
+ cmd("kind", "[-v] <expr>", "display the kind of expression's type", kindCommand),
nullary("warnings", "show the suppressed warnings from the most recent line which had any", warningsCommand)
)
@@ -253,16 +255,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
}
- private def findToolsJar() = {
- val jdkPath = Directory(jdkHome)
- val jar = jdkPath / "lib" / "tools.jar" toFile
+ private def findToolsJar() = PathResolver.SupplementalLocations.platformTools
- if (jar isFile)
- Some(jar)
- else if (jdkPath.isDirectory)
- jdkPath.deepFiles find (_.name == "tools.jar")
- else None
- }
private def addToolsJarToLoader() = {
val cl = findToolsJar() match {
case Some(tools) => ScalaClassLoader.fromURLs(Seq(tools.toURL), intp.classLoader)
@@ -286,9 +280,15 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
// Still todo: modules.
private def typeCommand(line0: String): Result = {
line0.trim match {
- case "" => ":type [-v] <expression>"
- case s if s startsWith "-v " => intp.typeCommandInternal(s stripPrefix "-v " trim, verbose = true)
- case s => intp.typeCommandInternal(s, verbose = false)
+ case "" => ":type [-v] <expression>"
+ case s => intp.typeCommandInternal(s stripPrefix "-v " trim, verbose = s startsWith "-v ")
+ }
+ }
+
+ private def kindCommand(expr: String): Result = {
+ expr.trim match {
+ case "" => ":kind [-v] <expression>"
+ case s => intp.kindCommandInternal(s stripPrefix "-v " trim, verbose = s startsWith "-v ")
}
}
@@ -642,10 +642,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
private def loopPostInit() {
- in match {
- case x: JLineReader => x.consoleReader.postInit
- case _ =>
- }
// Bind intp somewhere out of the regular namespace where
// we can get at it in generated code.
intp.quietBind(NamedParam[IMain]("$intp", intp)(tagOfIMain, classTag[IMain]))
@@ -661,6 +657,11 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
unleashAndSetPhase()
asyncMessage(power.banner)
}
+ // SI-7418 Now, and only now, can we enable TAB completion.
+ in match {
+ case x: JLineReader => x.consoleReader.postInit
+ case _ =>
+ }
}
def process(settings: Settings): Boolean = savingContextLoader {
this.settings = settings
diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala
index 4ba81b634a..3a71930383 100644
--- a/src/repl/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package interpreter
import Predef.{ println => _, _ }
@@ -29,6 +30,7 @@ import java.util.concurrent.Future
import scala.reflect.runtime.{ universe => ru }
import scala.reflect.{ ClassTag, classTag }
import StdReplTags._
+import scala.language.implicitConversions
/** An interpreter for Scala code.
*
@@ -555,7 +557,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
@throws(classOf[ScriptException])
def compile(script: String): CompiledScript = {
if (!bound) {
- quietBind("engine", this.asInstanceOf[ScriptEngine])
+ quietBind("engine" -> this.asInstanceOf[ScriptEngine])
bound = true
}
val cat = code + script
@@ -595,11 +597,19 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
private class WrappedRequest(val req: Request) extends CompiledScript {
var recorded = false
+ /** In Java we would have to wrap any checked exception in the declared
+ * ScriptException. Runtime exceptions and errors would be ok and would
+ * not need to be caught. So let us do the same in Scala: catch and
+ * wrap any checked exception, and let runtime exceptions and errors
+ * escape. We could have wrapped runtime exceptions in ScriptException
+ * just like other exceptions, but this is a deliberate choice.
+ */
@throws(classOf[ScriptException])
def eval(context: ScriptContext): Object = {
val result = req.lineRep.evalEither match {
+ case Left(e: RuntimeException) => throw e
case Left(e: Exception) => throw new ScriptException(e)
- case Left(_) => throw new ScriptException("run-time error")
+ case Left(e) => throw e
case Right(result) => result.asInstanceOf[Object]
}
if (!recorded) {
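
The new scaladoc on `eval` spells out the wrapping policy; a minimal standalone sketch of that policy as a generic helper (the `wrapChecked` name is illustrative):

    import javax.script.ScriptException

    def wrapChecked[A](body: => A): A =
      try body
      catch {
        case e: RuntimeException => throw e                      // runtime exceptions escape as-is
        case e: Exception        => throw new ScriptException(e) // checked exceptions get wrapped
        // Errors are not Exceptions, so they propagate untouched
      }
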
@@ -897,7 +907,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
val preamble = """
|object %s {
| %s
- | val %s: String = %s {
+ | lazy val %s: String = %s {
| %s
| (""
""".stripMargin.format(
diff --git a/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
index 19fa562234..8b8b668c9f 100644
--- a/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
+++ b/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
@@ -15,7 +15,7 @@ import scala.reflect.internal.util.StringOps.longestCommonPrefix
class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput {
val global: intp.global.type = intp.global
import global._
- import definitions.{ PredefModule, AnyClass, AnyRefClass, ScalaPackage, JavaLangPackage }
+ import definitions._
import rootMirror.{ RootClass, getModuleIfDefined }
type ExecResult = Any
import intp.{ debugging }
@@ -39,7 +39,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
// for some reason any's members don't show up in subclasses, which
// we need so 5.<tab> offers asInstanceOf etc.
- private def anyMembers = AnyClass.tpe.nonPrivateMembers
+ private def anyMembers = AnyTpe.nonPrivateMembers
def anyRefMethodsToShow = Set("isInstanceOf", "asInstanceOf", "toString")
def tos(sym: Symbol): String = sym.decodedName
@@ -152,7 +152,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
}
// not for completion but for excluding
- object anyref extends TypeMemberCompletion(AnyRefClass.tpe) { }
+ object anyref extends TypeMemberCompletion(AnyRefTpe) { }
// the unqualified vals/defs/etc visible in the repl
object ids extends CompletionAware {
diff --git a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
index a895944c15..ef6f4c2920 100644
--- a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
+++ b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package interpreter
import java.lang.{ ClassLoader => JavaClassLoader, Iterable => JIterable }
@@ -234,8 +235,6 @@ class JavapClass(
}
class JavapTool7 extends JavapTool {
-
- import JavapTool._
type Task = {
def call(): Boolean // true = ok
//def run(args: Array[String]): Int // all args
@@ -606,7 +605,7 @@ object JavapClass {
// s = "f" and $line.$read$$etc$#f is what we're after,
// ignoring any #member (except take # as filter on #apply)
orElse (intp flatMap (_ translateEnclosingClass k) map ((_, Some(k), filter, true)))
- getOrElse (k, member, filter, false))
+ getOrElse ((k, member, filter, false)))
}
/** Find the classnames of anonfuns associated with k,
* where k may be an available class or a symbol in scope.
diff --git a/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala
index 4bba27b714..12d6ee5112 100644
--- a/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala
+++ b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala
@@ -3,7 +3,9 @@
* @author Paul Phillips
*/
-package scala.tools.nsc
+package scala
+package tools
+package nsc
package interpreter
import scala.collection.{ mutable, immutable }
diff --git a/src/repl/scala/tools/nsc/interpreter/Naming.scala b/src/repl/scala/tools/nsc/interpreter/Naming.scala
index 57f3675ada..7f577b3a8b 100644
--- a/src/repl/scala/tools/nsc/interpreter/Naming.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Naming.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package interpreter
import scala.util.Properties.lineSeparator
diff --git a/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala b/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala
index 2d0917d91f..6634dc6944 100644
--- a/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala
+++ b/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala
@@ -3,7 +3,8 @@
* @author Stepan Koltsov
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package interpreter
import java.io.{ BufferedReader }
diff --git a/src/repl/scala/tools/nsc/interpreter/package.scala b/src/repl/scala/tools/nsc/interpreter/package.scala
index 52a085080b..f82c38f5e7 100644
--- a/src/repl/scala/tools/nsc/interpreter/package.scala
+++ b/src/repl/scala/tools/nsc/interpreter/package.scala
@@ -10,6 +10,7 @@ import scala.reflect.{ classTag, ClassTag }
import scala.reflect.runtime.{ universe => ru }
import scala.reflect.{ClassTag, classTag}
import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse}
+import scala.util.control.Exception.catching
/** The main REPL related classes and values are as follows.
* In addition to standard compiler classes Global and Settings, there are:
@@ -127,6 +128,47 @@ package object interpreter extends ReplConfig with ReplStrings {
""
}
+ def kindCommandInternal(expr: String, verbose: Boolean): Unit = {
+ val catcher = catching(classOf[MissingRequirementError],
+ classOf[ScalaReflectionException])
+ def typeFromTypeString: Option[ClassSymbol] = catcher opt {
+ exprTyper.typeOfTypeString(expr).typeSymbol.asClass
+ }
+ def typeFromNameTreatedAsTerm: Option[ClassSymbol] = catcher opt {
+ val moduleClass = exprTyper.typeOfExpression(expr).typeSymbol
+ moduleClass.linkedClassOfClass.asClass
+ }
+ def typeFromFullName: Option[ClassSymbol] = catcher opt {
+ intp.global.rootMirror.staticClass(expr)
+ }
+ def typeOfTerm: Option[TypeSymbol] = replInfo(symbolOfLine(expr)).typeSymbol match {
+ case sym: TypeSymbol => Some(sym)
+ case _ => None
+ }
+ (typeFromTypeString orElse typeFromNameTreatedAsTerm orElse typeFromFullName orElse typeOfTerm) foreach { sym =>
+ val (kind, tpe) = exitingTyper {
+ val tpe = sym.tpeHK
+ (intp.global.inferKind(NoPrefix)(tpe, sym.owner), tpe)
+ }
+ echoKind(tpe, kind, verbose)
+ }
+ }
+
+ def echoKind(tpe: Type, kind: Kind, verbose: Boolean) {
+ def typeString(tpe: Type): String = {
+ tpe match {
+ case TypeRef(_, sym, _) => typeString(sym.typeSignature)
+ case RefinedType(_, _) => tpe.toString
+ case _ => tpe.typeSymbol.fullName
+ }
+ }
+ printAfterTyper(typeString(tpe) + "'s kind is " + kind.scalaNotation)
+ if (verbose) {
+ echo(kind.starNotation)
+ echo(kind.description)
+ }
+ }
+
/** TODO -
* -n normalize
* -l label with case class parameter names
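
The fallback chain in `kindCommandInternal` leans on `scala.util.control.Exception.catching` (hence the new import) to turn lookups that may throw into `Option`s that compose with `orElse`. A tiny standalone illustration of that pattern:

    import scala.util.control.Exception.catching

    val parsed  = catching(classOf[NumberFormatException]) opt "42".toInt    // Some(42)
    val garbage = catching(classOf[NumberFormatException]) opt "abc".toInt   // None
    val result  = parsed orElse garbage getOrElse 0                          // 42
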
diff --git a/src/scalacheck/org/scalacheck/Arbitrary.scala b/src/scalacheck/org/scalacheck/Arbitrary.scala
index 8c43cdaafe..db4163c8af 100644
--- a/src/scalacheck/org/scalacheck/Arbitrary.scala
+++ b/src/scalacheck/org/scalacheck/Arbitrary.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
@@ -10,49 +10,44 @@
package org.scalacheck
import util.{FreqMap,Buildable}
-import scala.reflect.ClassTag
sealed abstract class Arbitrary[T] {
val arbitrary: Gen[T]
}
-/** Defines implicit <code>Arbitrary</code> instances for common types.
+/** Defines implicit [[org.scalacheck.Arbitrary]] instances for common types.
* <p>
* ScalaCheck
- * uses implicit <code>Arbitrary</code> instances when creating properties
- * out of functions with the <code>Prop.property</code> method, and when
- * the <code>Arbitrary.arbitrary</code> method is used. For example, the
+ * uses implicit [[org.scalacheck.Arbitrary]] instances when creating properties
+ * out of functions with the `Prop.property` method, and when
+ * the `Arbitrary.arbitrary` method is used. For example, the
* following code requires that there exists an implicit
- * <code>Arbitrary[MyClass]</code> instance:
+ * `Arbitrary[MyClass]` instance:
* </p>
*
- * <p>
- * <code>
- * val myProp = Prop.forAll { myClass: MyClass =&gt;<br />
- * ...<br />
- * }<br />
+ * {{{
+ * val myProp = Prop.forAll { myClass: MyClass =>
+ * ...
+ * }
*
* val myGen = Arbitrary.arbitrary[MyClass]
- * </code>
- * </p>
+ * }}}
*
* <p>
* The required implicit definition could look like this:
* </p>
*
- * <p>
- * <code>
+ * {{{
* implicit val arbMyClass: Arbitrary[MyClass] = Arbitrary(...)
- * </code>
- * </p>
+ * }}}
*
* <p>
- * The factory method <code>Arbitrary(...)</code> takes a generator of type
- * <code>Gen[T]</code> and returns an instance of <code>Arbitrary[T]</code>.
+ * The factory method `Arbitrary(...)` takes a generator of type
+ * `Gen[T]` and returns an instance of `Arbitrary[T]`.
* </p>
*
* <p>
- * The <code>Arbitrary</code> module defines implicit <code>Arbitrary</code>
+ * The `Arbitrary` module defines implicit [[org.scalacheck.Arbitrary]]
* instances for common types, for convenient use in your properties and
* generators.
* </p>
@@ -93,7 +88,7 @@ object Arbitrary {
/** Arbitrary instance of Long */
implicit lazy val arbLong: Arbitrary[Long] = Arbitrary(
- Gen.chooseNum(Long.MinValue / 2, Long.MaxValue / 2)
+ Gen.chooseNum(Long.MinValue, Long.MaxValue)
)
/** Arbitrary instance of Float */
@@ -182,7 +177,13 @@ object Arbitrary {
mc <- mcGen
limit <- value(if(mc == UNLIMITED) 0 else math.max(x.abs.toString.length - mc.getPrecision, 0))
scale <- Gen.chooseNum(Int.MinValue + limit , Int.MaxValue)
- } yield BigDecimal(x, scale, mc)
+ } yield {
+ try {
+ BigDecimal(x, scale, mc)
+ } catch {
+ case ae: java.lang.ArithmeticException => BigDecimal(x, scale, UNLIMITED) // Handle the case where scale/precision conflict
+ }
+ }
Arbitrary(bdGen)
}
@@ -213,23 +214,43 @@ object Arbitrary {
))
}
- /** Arbitrary instance of test params */
+ /** Arbitrary instance of test params
+ * @deprecated (in 1.10.0) Use `arbTestParameters` instead.
+ */
+ @deprecated("Use 'arbTestParameters' instead", "1.10.0")
implicit lazy val arbTestParams: Arbitrary[Test.Params] =
Arbitrary(for {
minSuccTests <- choose(10,200)
- maxDiscardRatio <- choose(0.2f,10f)
- minSize <- choose(0,500)
+ maxDiscTests <- choose(100,500)
+ mnSize <- choose(0,500)
sizeDiff <- choose(0,500)
- maxSize <- choose(minSize, minSize + sizeDiff)
+ mxSize <- choose(mnSize, mnSize + sizeDiff)
ws <- choose(1,4)
} yield Test.Params(
minSuccessfulTests = minSuccTests,
- maxDiscardRatio = maxDiscardRatio,
- minSize = minSize,
- maxSize = maxSize,
+ maxDiscardedTests = maxDiscTests,
+ minSize = mnSize,
+ maxSize = mxSize,
workers = ws
))
+ /** Arbitrary instance of test parameters */
+ implicit lazy val arbTestParameters: Arbitrary[Test.Parameters] =
+ Arbitrary(for {
+ _minSuccTests <- choose(10,200)
+ _maxDiscardRatio <- choose(0.2f,10f)
+ _minSize <- choose(0,500)
+ sizeDiff <- choose(0,500)
+ _maxSize <- choose(_minSize, _minSize + sizeDiff)
+ _workers <- choose(1,4)
+ } yield new Test.Parameters.Default {
+ override val minSuccessfulTests = _minSuccTests
+ override val maxDiscardRatio = _maxDiscardRatio
+ override val minSize = _minSize
+ override val maxSize = _maxSize
+ override val workers = _workers
+ })
+
/** Arbitrary instance of gen params */
implicit lazy val arbGenParams: Arbitrary[Gen.Params] =
Arbitrary(for {
@@ -278,7 +299,7 @@ object Arbitrary {
): Arbitrary[C[T]] = Arbitrary(containerOf[C,T](arbitrary[T]))
/** Arbitrary instance of any array. */
- implicit def arbArray[T](implicit a: Arbitrary[T], c: ClassTag[T]
+ implicit def arbArray[T](implicit a: Arbitrary[T], c: ClassManifest[T]
): Arbitrary[Array[T]] = Arbitrary(containerOf[Array,T](arbitrary[T]))
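
With `arbArray` now taking a `ClassManifest` instead of a `ClassTag`, array generation still resolves automatically for concrete element types, since the compiler materializes the manifest; a minimal sketch (identifiers other than the ScalaCheck ones are made up):

    import org.scalacheck.{ Arbitrary, Gen }

    // ClassManifest[Int] is supplied implicitly, so arbArray[Int] resolves
    val intArrays: Gen[Array[Int]] = Arbitrary.arbitrary[Array[Int]]

    // Generic code has to carry the manifest along as a context bound
    def arrayGen[T: Arbitrary: ClassManifest]: Gen[Array[T]] = Arbitrary.arbitrary[Array[T]]
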
diff --git a/src/scalacheck/org/scalacheck/Arg.scala b/src/scalacheck/org/scalacheck/Arg.scala
index 8959211f09..4961c78a26 100644
--- a/src/scalacheck/org/scalacheck/Arg.scala
+++ b/src/scalacheck/org/scalacheck/Arg.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
diff --git a/src/scalacheck/org/scalacheck/Commands.scala b/src/scalacheck/org/scalacheck/Commands.scala
index 2acc460b5e..604b68cb36 100644
--- a/src/scalacheck/org/scalacheck/Commands.scala
+++ b/src/scalacheck/org/scalacheck/Commands.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
@@ -53,7 +53,7 @@ trait Commands extends Prop {
* takes the current abstract state as parameter and returns a boolean
* that says if the precondition is fulfilled or not. You can add several
* conditions to the precondition list */
- val preConditions = new scala.collection.mutable.ListBuffer[State => Boolean]
+ val preConditions = new collection.mutable.ListBuffer[State => Boolean]
/** Returns all postconditions merged into a single function */
def postCondition: (State,State,Any) => Prop = (s0,s1,r) => all(postConditions.map(_.apply(s0,s1,r)): _*)
@@ -65,7 +65,7 @@ trait Commands extends Prop {
* method. The postcondition function should return a Boolean (or
* a Prop instance) that says if the condition holds or not. You can add several
* conditions to the postConditions list. */
- val postConditions = new scala.collection.mutable.ListBuffer[(State,State,Any) => Prop]
+ val postConditions = new collection.mutable.ListBuffer[(State,State,Any) => Prop]
}
/** A command that binds its result for later use */
@@ -87,6 +87,11 @@ trait Commands extends Prop {
private val bindings = new scala.collection.mutable.ListBuffer[(State,Any)]
+ private def initState() = {
+ bindings.clear()
+ initialState()
+ }
+
private def genCmds: Gen[Cmds] = {
def sizedCmds(s: State)(sz: Int): Gen[Cmds] =
if(sz <= 0) value(Cmds(Nil, Nil)) else for {
diff --git a/src/scalacheck/org/scalacheck/ConsoleReporter.scala b/src/scalacheck/org/scalacheck/ConsoleReporter.scala
index 93f1dc222e..d565322d99 100644
--- a/src/scalacheck/org/scalacheck/ConsoleReporter.scala
+++ b/src/scalacheck/org/scalacheck/ConsoleReporter.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
diff --git a/src/scalacheck/org/scalacheck/Gen.scala b/src/scalacheck/org/scalacheck/Gen.scala
index 64bb61c2d3..aec67159f1 100644
--- a/src/scalacheck/org/scalacheck/Gen.scala
+++ b/src/scalacheck/org/scalacheck/Gen.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
@@ -23,7 +23,7 @@ object Choose {
implicit val chooseLong: Choose[Long] = new Choose[Long] {
def choose(low: Long, high: Long) =
- if(low > high || (high-low < 0)) fail
+ if (low > high) fail
else parameterized(prms => value(prms.choose(low,high)))
}
@@ -204,9 +204,17 @@ object Gen {
/** @throws IllegalArgumentException if l is greater than h, or if
* the range between l and h doesn't fit in a Long. */
def choose(l: Long, h: Long): Long = {
- val d = h-l
- if (d < 0) throw new IllegalArgumentException("Invalid range")
- else l + math.abs(rng.nextLong % (d+1))
+ if (h < l) throw new IllegalArgumentException("Invalid range")
+ val d = h - l + 1
+ if (d <= 0) {
+ var n = rng.nextLong
+ while (n < l || n > h) {
+ n = rng.nextLong
+ }
+ n
+ } else {
+ l + math.abs(rng.nextLong % d)
+ }
}
/** @throws IllegalArgumentException if l is greater than h, or if
@@ -225,8 +233,11 @@ object Gen {
def apply(p: Gen.Params) = g(p)
}
- /* Convenience method for using the <code>frequency</code> method like this:
- * <code>frequency((1, "foo"), (3, "bar"))</code> */
+ /* Convenience method for using the `frequency` method like this:
+ * {{{
+ * frequency((1, "foo"), (3, "bar"))
+ * }}}
+ */
implicit def freqTuple[T](t: (Int, T)): (Int, Gen[T]) = (t._1, value(t._2))
@@ -308,9 +319,9 @@ object Gen {
//// List Generators ////
/** Generates a container of any type for which there exists an implicit
- * <code>Buildable</code> instance. The elements in the container will
+ * [[org.scalacheck.util.Buildable]] instance. The elements in the container will
* be generated by the given generator. The size of the generated container
- * is given by <code>n</code>. */
+ * is given by `n`. */
def containerOfN[C[_],T](n: Int, g: Gen[T])(implicit b: Buildable[T,C]
): Gen[C[T]] = sequence[C,T](new Iterable[Gen[T]] {
def iterator = new Iterator[Gen[T]] {
@@ -321,14 +332,14 @@ object Gen {
})
/** Generates a container of any type for which there exists an implicit
- * <code>Buildable</code> instance. The elements in the container will
- * be generated by the given generator. The size of the container is
+ * [[org.scalacheck.util.Buildable]] instance. The elements in the container
+ * will be generated by the given generator. The size of the container is
* bounded by the size parameter used when generating values. */
def containerOf[C[_],T](g: Gen[T])(implicit b: Buildable[T,C]): Gen[C[T]] =
sized(size => for(n <- choose(0,size); c <- containerOfN[C,T](n,g)) yield c)
/** Generates a non-empty container of any type for which there exists an
- * implicit <code>Buildable</code> instance. The elements in the container
+ * implicit [[org.scalacheck.util.Buildable]] instance. The elements in the container
* will be generated by the given generator. The size of the container is
* bounded by the size parameter used when generating values. */
def containerOf1[C[_],T](g: Gen[T])(implicit b: Buildable[T,C]): Gen[C[T]] =
@@ -336,16 +347,16 @@ object Gen {
/** Generates a list of random length. The maximum length depends on the
* size parameter. This method is equal to calling
- * <code>containerOf[List,T](g)</code>. */
+ * `containerOf[List,T](g)`. */
def listOf[T](g: => Gen[T]) = containerOf[List,T](g)
/** Generates a non-empty list of random length. The maximum length depends
* on the size parameter. This method is equal to calling
- * <code>containerOf1[List,T](g)</code>. */
+ * `containerOf1[List,T](g)`. */
def listOf1[T](g: => Gen[T]) = containerOf1[List,T](g)
/** Generates a list of the given length. This method is equal to calling
- * <code>containerOfN[List,T](n,g)</code>. */
+ * `containerOfN[List,T](n,g)`. */
def listOfN[T](n: Int, g: Gen[T]) = containerOfN[List,T](n,g)
/** A generator that picks a random number of elements from a list */
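
The rewritten `choose(l: Long, h: Long)` no longer fails when the range does not fit in a `Long`: if the width `h - l + 1` overflows to a non-positive number, it falls back to redrawing `nextLong` until a value lands in range. A small sketch making the overflow that triggers that branch explicit:

    val l = Long.MinValue
    val h = Long.MaxValue
    val d = h - l + 1   // wraps around to 0, so the modulo path cannot be used
    assert(d <= 0)      // the new code then samples rng.nextLong until l <= n && n <= h
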
diff --git a/src/scalacheck/org/scalacheck/Pretty.scala b/src/scalacheck/org/scalacheck/Pretty.scala
index eeb5936086..3e8f6de5f6 100644
--- a/src/scalacheck/org/scalacheck/Pretty.scala
+++ b/src/scalacheck/org/scalacheck/Pretty.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
@@ -96,7 +96,7 @@ object Pretty {
}
implicit def prettyTestRes(res: Test.Result) = Pretty { prms =>
- def labels(ls: scala.collection.immutable.Set[String]) =
+ def labels(ls: collection.immutable.Set[String]) =
if(ls.isEmpty) ""
else "> Labels of failing property: " / ls.mkString("\n")
val s = res.status match {
diff --git a/src/scalacheck/org/scalacheck/Prop.scala b/src/scalacheck/org/scalacheck/Prop.scala
index dfd85a832a..38e00f260f 100644
--- a/src/scalacheck/org/scalacheck/Prop.scala
+++ b/src/scalacheck/org/scalacheck/Prop.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
@@ -31,16 +31,27 @@ trait Prop {
/** Convenience method that checks this property with the given parameters
* and reports the result on the console. If you need to get the results
- * from the test use the <code>check</code> methods in <code>Test</code>
- * instead. */
+ * from the test use the `check` methods in [[org.scalacheck.Test]]
+ * instead.
+ * @deprecated (in 1.10.0) Use `check(Test.Parameters)` instead.
+ */
+ @deprecated("Use 'check(Test.Parameters)' instead", "1.10.0")
def check(prms: Test.Params): Unit = Test.check(
prms copy (testCallback = ConsoleReporter(1) chain prms.testCallback), this
)
+ /** Convenience method that checks this property with the given parameters
+ * and reports the result on the console. If you need to get the results
+ * from the test use the `check` methods in [[org.scalacheck.Test]]
+ * instead. */
+ def check(prms: Test.Parameters): Unit = Test.check(
+ prms copy (_testCallback = ConsoleReporter(1) chain prms.testCallback), this
+ )
+
/** Convenience method that checks this property and reports the
* result on the console. If you need to get the results from the test use
- * the <code>check</code> methods in <code>Test</code> instead. */
- def check: Unit = check(Test.Params())
+ * the `check` methods in [[org.scalacheck.Test]] instead. */
+ def check: Unit = check(Test.Parameters.default)
/** The logic for main, separated out to make it easier to
* avoid System.exit calls. Returns exit code.
@@ -60,7 +71,7 @@ trait Prop {
/** Whether main should call System.exit with an exit code.
* Defaults to true; override to change.
*/
- def mainCallsExit = false
+ def mainCallsExit = true
/** Convenience method that makes it possible to use this property
* as an application that checks itself on execution */
@@ -274,20 +285,51 @@ object Prop {
def apply(r: Result): Prop = Prop(prms => r)
+ def apply(b: Boolean): Prop = if(b) proved else falsified
- // Implicit defs
+ // Implicits
+
+ /** A collection of property operators on [[Any]] values.
+ * Import [[Prop.AnyOperators]] to make the operators available. */
class ExtendedAny[T <% Pretty](x: => T) {
+ /** See [[Prop.imply]] */
def imply(f: PartialFunction[T,Prop]) = Prop.imply(x,f)
+ /** See [[Prop.iff]] */
def iff(f: PartialFunction[T,Prop]) = Prop.iff(x,f)
- def throws[U <: Throwable](c: Class[U]) = Prop.throws(x, c)
+ @deprecated("Use 'Prop.throws' instead", "1.10.1")
+ def throws[U <: Throwable](c: Class[U]): Prop = Prop.throws(c)(x)
+ /** See [[Prop.?=]] */
def ?=(y: T) = Prop.?=(x, y)
+ /** See [[Prop.=?]] */
def =?(y: T) = Prop.=?(x, y)
}
+ /** A collection of property operators on [[Boolean]] values.
+ * Import [[Prop.BooleanOperators]] to make the operators available. */
+ class ExtendedBoolean(b: => Boolean) {
+ /** See [[Prop.==>]] */
+ def ==>(p: => Prop) = Prop(b) ==> p
+ }
+
+ /** Implicit method that makes a number of property operators on values of
+ * type [[Any]] available in the current scope. See [[Prop.ExtendedAny]] for
+ * documentation on the operators. */
+ @deprecated("Use 'Prop.AnyOperators' instead", "1.10.1")
implicit def extendedAny[T <% Pretty](x: => T) = new ExtendedAny[T](x)
- implicit def propBoolean(b: Boolean): Prop = if(b) proved else falsified
+ /** Implicit method that makes a number of property operators on values of
+ * type [[Any]] available in the current scope. See [[Prop.ExtendedAny]] for
+ * documentation on the operators. */
+ implicit def AnyOperators[T <% Pretty](x: => T) = new ExtendedAny[T](x)
+
+ /** Implicit method that makes a number of property operators on boolean
+ * values available in the current scope. See [[Prop.ExtendedBoolean]] for
+ * documentation on the operators. */
+ implicit def BooleanOperators(b: => Boolean) = new ExtendedBoolean(b)
+
+ /** Implicit conversion of Boolean values to Prop values. */
+ implicit def propBoolean(b: Boolean): Prop = Prop(b)
// Private support functions
@@ -318,6 +360,9 @@ object Prop {
/** A property that denotes an exception */
lazy val exception: Prop = exception(null)
+ /** Create a property that compares two values. If the values aren't equal,
+ * the property will fail and report that the first value doesn't match the
+ * expected (second) value. */
def ?=[T](x: T, y: T)(implicit pp: T => Pretty): Prop =
if(x == y) proved else falsified :| {
val exp = Pretty.pretty[T](y, Pretty.Params(0))
@@ -325,6 +370,9 @@ object Prop {
"Expected "+exp+" but got "+act
}
+ /** Create a property that compares two values. If the values aren't equal,
+ * the property will fail and report that the second value doesn't match the
+ * expected (first) value. */
def =?[T](x: T, y: T)(implicit pp: T => Pretty): Prop = ?=(y, x)
/** A property that depends on the generator size */
@@ -340,7 +388,7 @@ object Prop {
secure(if(f.isDefinedAt(x)) f(x) else undecided)
/** Property holds only if the given partial function is defined at
- * <code>x</code>, and returns a property that holds */
+ * `x`, and returns a property that holds */
def iff[T](x: T, f: PartialFunction[T,Prop]): Prop =
secure(if(f.isDefinedAt(x)) f(x) else falsified)
@@ -365,9 +413,16 @@ object Prop {
def noneFailing[T](gs: Seq[Gen[T]]) = all(gs.map(_ !== fail):_*)
/** A property that holds if the given statement throws an exception
+ * of the specified type
+ * @deprecated (in 1.10.1) Use `throws(...): Boolean` instead.
+ */
+ @deprecated("Use 'throws(...): Boolean' instead", "1.10.1")
+ def throws[T <: Throwable](x: => Any, c: Class[T]): Prop = throws(c)(x)
+
+ /** Returns true if the given statement throws an exception
* of the specified type */
- def throws[T <: Throwable](x: => Any, c: Class[T]) =
- try { x; falsified } catch { case e if c.isInstance(e) => proved }
+ def throws[T <: Throwable](c: Class[T])(x: => Any): Boolean =
+ try { x; false } catch { case e if c.isInstance(e) => true }
/** Collect data for presentation in test report */
def collect[T, P <% Prop](f: T => P): T => Prop = t => Prop { prms =>
@@ -390,7 +445,7 @@ object Prop {
/** Wraps and protects a property */
def secure[P <% Prop](p: => P): Prop =
- try { p: Prop } catch { case e => exception(e) }
+ try { p: Prop } catch { case e: Throwable => exception(e) }
/** Existential quantifier for an explicit generator. */
def exists[A,P](f: A => P)(implicit
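
`throws` is now curried and returns a `Boolean`, with the old `(x, c)` shape kept as a deprecated forwarder, and the `Boolean => Prop` view is spelled out via `Prop(b)`. A hedged usage sketch of the new form:

    import org.scalacheck.Prop

    val denom = 0
    // exception class first, expression second, Boolean result
    val caught: Boolean = Prop.throws(classOf[ArithmeticException]) { 1 / denom }
    // lift back into a Prop through the implicit Boolean => Prop conversion
    val asProp: Prop = Prop.propBoolean(caught)
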
diff --git a/src/scalacheck/org/scalacheck/Properties.scala b/src/scalacheck/org/scalacheck/Properties.scala
index 26059231d6..d4836d7420 100644
--- a/src/scalacheck/org/scalacheck/Properties.scala
+++ b/src/scalacheck/org/scalacheck/Properties.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
@@ -14,15 +14,15 @@ package org.scalacheck
* holds if and only if all of the contained properties hold.
* <p>Properties are added in the following way:</p>
*
- * <p>
- * <code>
+ * {{{
* object MyProps extends Properties("MyProps") {
- * property("myProp1") = forAll { (n:Int, m:Int) =&gt;
+ * property("myProp1") = forAll { (n:Int, m:Int) =>
* n+m == m+n
* }
*
* property("myProp2") = ((0/1) throws classOf[ArithmeticException])
* }
+ * }}}
*/
class Properties(val name: String) extends Prop {
@@ -42,16 +42,27 @@ class Properties(val name: String) extends Prop {
/** Convenience method that checks the properties with the given parameters
* and reports the result on the console. If you need to get the results
- * from the test use the <code>check</code> methods in <code>Test</code>
+ * from the test use the `check` methods in [[org.scalacheck.Test]]
* instead. */
+ override def check(prms: Test.Parameters): Unit = Test.checkProperties(
+ prms copy (_testCallback = ConsoleReporter(1) chain prms.testCallback), this
+ )
+
+ /** Convenience method that checks the properties with the given parameters
+ * and reports the result on the console. If you need to get the results
+ * from the test use the `check` methods in [[org.scalacheck.Test]]
+ * instead.
+ * @deprecated (in 1.10.0) Use `check(Test.Parameters)` instead.
+ */
+ @deprecated("Use 'check(Test.Parameters)' instead", "1.10.0")
override def check(prms: Test.Params): Unit = Test.checkProperties(
prms copy (testCallback = ConsoleReporter(1) chain prms.testCallback), this
)
/** Convenience method that checks the properties and reports the
* result on the console. If you need to get the results from the test use
- * the <code>check</code> methods in <code>Test</code> instead. */
- override def check: Unit = check(Test.Params())
+ * the `check` methods in [[org.scalacheck.Test]] instead. */
+ override def check: Unit = check(Test.Parameters.default)
/** The logic for main, separated out to make it easier to
* avoid System.exit calls. Returns exit code.
@@ -73,7 +84,10 @@ class Properties(val name: String) extends Prop {
def include(ps: Properties) = for((n,p) <- ps.properties) property(n) = p
/** Used for specifying properties. Usage:
- * <code>property("myProp") = ...</code> */
+ * {{{
+ * property("myProp") = ...
+ * }}}
+ */
class PropertySpecifier() {
def update(propName: String, p: Prop) = props += ((name+"."+propName, p))
}
diff --git a/src/scalacheck/org/scalacheck/ScalaCheckFramework.scala b/src/scalacheck/org/scalacheck/ScalaCheckFramework.scala
new file mode 100644
index 0000000000..7764101844
--- /dev/null
+++ b/src/scalacheck/org/scalacheck/ScalaCheckFramework.scala
@@ -0,0 +1,92 @@
+/*-------------------------------------------------------------------------*\
+** ScalaCheck **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
+** http://www.scalacheck.org **
+** **
+** This software is released under the terms of the Revised BSD License. **
+** There is NO WARRANTY. See the file LICENSE for the full text. **
+\*------------------------------------------------------------------------ */
+
+// vim: set ts=2 sw=2 et:
+
+package org.scalacheck
+
+import org.scalatools.testing._
+
+class ScalaCheckFramework extends Framework {
+
+ private case object PropFingerprint extends TestFingerprint {
+ val superClassName = "org.scalacheck.Prop"
+ val isModule = false
+ }
+
+ private case object PropsFingerprint extends TestFingerprint {
+ val superClassName = "org.scalacheck.Properties"
+ val isModule = true
+ }
+
+ val name = "ScalaCheck"
+
+ val tests = Array[Fingerprint](PropFingerprint, PropsFingerprint)
+
+ def testRunner(loader: ClassLoader, loggers: Array[Logger]) = new Runner2 {
+
+ private def asEvent(nr: (String, Test.Result)) = nr match {
+ case (n: String, r: Test.Result) => new Event {
+ val testName = n
+ val description = n
+ val result = r.status match {
+ case Test.Passed => Result.Success
+ case _:Test.Proved => Result.Success
+ case _:Test.Failed => Result.Failure
+ case Test.Exhausted => Result.Skipped
+ case _:Test.PropException | _:Test.GenException => Result.Error
+ }
+ val error = r.status match {
+ case Test.PropException(_, e, _) => e
+ case _:Test.Failed => new Exception(Pretty.pretty(r,Pretty.Params(0)))
+ case _ => null
+ }
+ }
+ }
+
+ def run(testClassName: String, fingerprint: Fingerprint, handler: EventHandler, args: Array[String]) {
+
+ val testCallback = new Test.TestCallback {
+ override def onPropEval(n: String, w: Int, s: Int, d: Int) = {}
+
+ override def onTestResult(n: String, r: Test.Result) = {
+ for (l <- loggers) {
+ import Pretty._
+ l.info(
+ (if (r.passed) "+ " else "! ") + n + ": " + pretty(r, Params(0))
+ )
+ }
+ handler.handle(asEvent((n,r)))
+ }
+ }
+
+ import Test.cmdLineParser.{Success, NoSuccess}
+ val prms = Test.cmdLineParser.parseParams(args) match {
+ case Success(params, _) =>
+ params.copy(_testCallback = testCallback, _customClassLoader = Some(loader))
+ // TODO: Maybe handle this a bit better than throwing exception?
+ case e: NoSuccess => throw new Exception(e.toString)
+ }
+
+ fingerprint match {
+ case fp: SubclassFingerprint =>
+ if(fp.isModule) {
+ val obj = Class.forName(testClassName + "$", true, loader)
+ val ps = obj.getField("MODULE$").get(null).asInstanceOf[Properties]
+ Test.checkProperties(prms, ps)
+ } else {
+ val p = Class.forName(testClassName, true, loader).newInstance.asInstanceOf[Prop]
+ handler.handle(asEvent((testClassName, Test.check(prms, p))))
+ }
+ }
+ }
+
+ }
+
+}
diff --git a/src/scalacheck/org/scalacheck/Shrink.scala b/src/scalacheck/org/scalacheck/Shrink.scala
index ae15bd9616..4895171a35 100644
--- a/src/scalacheck/org/scalacheck/Shrink.scala
+++ b/src/scalacheck/org/scalacheck/Shrink.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
diff --git a/src/scalacheck/org/scalacheck/Test.scala b/src/scalacheck/org/scalacheck/Test.scala
index 4368184823..6e9b6b88fd 100644
--- a/src/scalacheck/org/scalacheck/Test.scala
+++ b/src/scalacheck/org/scalacheck/Test.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
@@ -16,23 +16,120 @@ object Test {
import Prop.FM
import util.CmdLineParser
- /** Test parameters */
+ /** Test parameters used by the `Test.check` method.
+ */
+ trait Parameters {
+ /** The minimum number of tests that must succeed for ScalaCheck to
+ * consider a property passed. */
+ def minSuccessfulTests: Int
+
+ /** The starting size given as parameter to the generators. */
+ def minSize: Int
+
+ /** The maximum size given as parameter to the generators. */
+ def maxSize: Int
+
+ /** The random number generator used. */
+ def rng: java.util.Random
+
+ /** The number of tests run in parallel. */
+ def workers: Int
+
+ /** A callback that ScalaCheck calls each time a test is executed. */
+ def testCallback: TestCallback
+
+ /** The maximum ratio between discarded and passed tests allowed before
+ * ScalaCheck gives up and discards the property. At least
+ * `minSuccessfulTests` will always be run, though. */
+ def maxDiscardRatio: Float
+
+ /** A custom class loader that should be used during test execution. */
+ def customClassLoader: Option[ClassLoader]
+
+ // private since we can't guarantee binary compatibility for this one
+ private[scalacheck] def copy(
+ _minSuccessfulTests: Int = Parameters.this.minSuccessfulTests,
+ _minSize: Int = Parameters.this.minSize,
+ _maxSize: Int = Parameters.this.maxSize,
+ _rng: java.util.Random = Parameters.this.rng,
+ _workers: Int = Parameters.this.workers,
+ _testCallback: TestCallback = Parameters.this.testCallback,
+ _maxDiscardRatio: Float = Parameters.this.maxDiscardRatio,
+ _customClassLoader: Option[ClassLoader] = Parameters.this.customClassLoader
+ ): Parameters = new Parameters {
+ val minSuccessfulTests: Int = _minSuccessfulTests
+ val minSize: Int = _minSize
+ val maxSize: Int = _maxSize
+ val rng: java.util.Random = _rng
+ val workers: Int = _workers
+ val testCallback: TestCallback = _testCallback
+ val maxDiscardRatio: Float = _maxDiscardRatio
+ val customClassLoader: Option[ClassLoader] = _customClassLoader
+ }
+ }
+
+ /** Test parameters used by the `Test.check` method.
+ *
+ * To override default values, extend the
+ * [[org.scalacheck.Test.Parameters.Default]] trait:
+ *
+ * {{{
+ * val myParams = new Parameters.Default {
+ * override val minSuccessfulTests = 600
+ * override val maxDiscardRatio = 8
+ * }
+ * }}}
+ */
+ object Parameters {
+ /** Default test parameters trait. This can be overridden if you need to
+ * tweak the parameters. */
+ trait Default extends Parameters {
+ val minSuccessfulTests: Int = 100
+ val minSize: Int = 0
+ val maxSize: Int = Gen.Params().size
+ val rng: java.util.Random = Gen.Params().rng
+ val workers: Int = 1
+ val testCallback: TestCallback = new TestCallback {}
+ val maxDiscardRatio: Float = 5
+ val customClassLoader: Option[ClassLoader] = None
+ }
+
+ /** Default test parameters instance. */
+ val default: Parameters = new Default {}
+ }
+
+ /** Test parameters
+ * @deprecated (in 1.10.0) Use [[org.scalacheck.Test.Parameters]] instead.
+ */
+ @deprecated("Use [[org.scalacheck.Test.Parameters]] instead", "1.10.0")
case class Params(
minSuccessfulTests: Int = 100,
-
- /** @deprecated Use maxDiscardRatio instead. */
- @deprecated("Use maxDiscardRatio instead.", "1.10")
maxDiscardedTests: Int = -1,
-
minSize: Int = 0,
maxSize: Int = Gen.Params().size,
rng: java.util.Random = Gen.Params().rng,
workers: Int = 1,
- testCallback: TestCallback = new TestCallback {},
- maxDiscardRatio: Float = 5,
- customClassLoader: Option[ClassLoader] = None
+ testCallback: TestCallback = new TestCallback {}
)
+ @deprecated("Use [[org.scalacheck.Test.Parameters]] instead", "1.10.0")
+ private def paramsToParameters(params: Params) = new Parameters {
+ val minSuccessfulTests = params.minSuccessfulTests
+ val minSize = params.minSize
+ val maxSize = params.maxSize
+ val rng = params.rng
+ val workers = params.workers
+ val testCallback = params.testCallback
+
+ // maxDiscardedTests is deprecated, but if someone
+ // uses it let it override maxDiscardRatio
+ val maxDiscardRatio =
+ if(params.maxDiscardedTests < 0) Parameters.default.maxDiscardRatio
+ else (params.maxDiscardedTests: Float)/(params.minSuccessfulTests: Float)
+
+ val customClassLoader = Parameters.default.customClassLoader
+ }
+
/** Test statistics */
case class Result(status: Status, succeeded: Int, discarded: Int, freqMap: FM, time: Long = 0) {
def passed = status match {
@@ -92,7 +189,7 @@ object Test {
}
}
- private def assertParams(prms: Params) = {
+ private def assertParams(prms: Parameters) = {
import prms._
if(
minSuccessfulTests <= 0 ||
@@ -104,16 +201,24 @@ object Test {
}
private def secure[T](x: => T): Either[T,Throwable] =
- try { Left(x) } catch { case e => Right(e) }
+ try { Left(x) } catch { case e: Throwable => Right(e) }
private[scalacheck] lazy val cmdLineParser = new CmdLineParser {
object OptMinSuccess extends IntOpt {
- val default = Test.Params().minSuccessfulTests
+ val default = Parameters.default.minSuccessfulTests
val names = Set("minSuccessfulTests", "s")
val help = "Number of tests that must succeed in order to pass a property"
}
+ object OptMaxDiscarded extends IntOpt {
+ val default = -1
+ val names = Set("maxDiscardedTests", "d")
+ val help =
+ "Number of tests that can be discarded before ScalaCheck stops " +
+ "testing a property. NOTE: this option is deprecated, please use " +
+ "the option maxDiscardRatio (-r) instead."
+ }
object OptMaxDiscardRatio extends FloatOpt {
- val default = Test.Params().maxDiscardRatio
+ val default = Parameters.default.maxDiscardRatio
val names = Set("maxDiscardRatio", "r")
val help =
"The maximum ratio between discarded and succeeded tests " +
@@ -121,17 +226,17 @@ object Test {
"least minSuccessfulTests will always be tested, though."
}
object OptMinSize extends IntOpt {
- val default = Test.Params().minSize
+ val default = Parameters.default.minSize
val names = Set("minSize", "n")
val help = "Minimum data generation size"
}
object OptMaxSize extends IntOpt {
- val default = Test.Params().maxSize
+ val default = Parameters.default.maxSize
val names = Set("maxSize", "x")
val help = "Maximum data generation size"
}
object OptWorkers extends IntOpt {
- val default = Test.Params().workers
+ val default = Parameters.default.workers
val names = Set("workers", "w")
val help = "Number of threads to execute in parallel for testing"
}
@@ -142,54 +247,63 @@ object Test {
}
val opts = Set[Opt[_]](
- OptMinSuccess, OptMaxDiscardRatio, OptMinSize,
+ OptMinSuccess, OptMaxDiscarded, OptMaxDiscardRatio, OptMinSize,
OptMaxSize, OptWorkers, OptVerbosity
)
def parseParams(args: Array[String]) = parseArgs(args) {
- optMap => Test.Params(
- minSuccessfulTests = optMap(OptMinSuccess),
- maxDiscardRatio = optMap(OptMaxDiscardRatio),
- minSize = optMap(OptMinSize),
- maxSize = optMap(OptMaxSize),
- rng = Test.Params().rng,
- workers = optMap(OptWorkers),
- testCallback = ConsoleReporter(optMap(OptVerbosity))
+ optMap => Parameters.default.copy(
+ _minSuccessfulTests = optMap(OptMinSuccess),
+ _maxDiscardRatio =
+ if (optMap(OptMaxDiscarded) < 0) optMap(OptMaxDiscardRatio)
+ else optMap(OptMaxDiscarded).toFloat / optMap(OptMinSuccess),
+ _minSize = optMap(OptMinSize),
+ _maxSize = optMap(OptMaxSize),
+ _workers = optMap(OptWorkers),
+ _testCallback = ConsoleReporter(optMap(OptVerbosity))
)
}
}
/** Tests a property with the given testing parameters, and returns
- * the test results. */
+ * the test results.
+ * @deprecated (in 1.10.0) Use
+ * `check(Parameters, Prop)` instead.
+ */
+ @deprecated("Use 'check(Parameters, Prop)' instead", "1.10.0")
def check(params: Params, p: Prop): Result = {
+ check(paramsToParameters(params), p)
+ }
- // maxDiscardedTests is deprecated, but if someone
- // uses it let it override maxDiscardRatio
- val mdr =
- if(params.maxDiscardedTests < 0) params.maxDiscardRatio
- else (params.maxDiscardedTests: Float)/(params.minSuccessfulTests: Float)
- val prms = params.copy( maxDiscardRatio = mdr)
-
- import prms._
- import scala.actors.Futures.future
+ /** Tests a property with the given testing parameters, and returns
+ * the test results. */
+ def check(params: Parameters, p: Prop): Result = {
+ import params._
- assertParams(prms)
- if(workers > 1)
+ assertParams(params)
+ if(workers > 1) {
assert(!p.isInstanceOf[Commands], "Commands cannot be checked multi-threaded")
+ }
val iterations = math.ceil(minSuccessfulTests / (workers: Double))
val sizeStep = (maxSize-minSize) / (iterations*workers)
var stop = false
- def worker(workerIdx: Int) = future {
- params.customClassLoader.map(Thread.currentThread.setContextClassLoader(_))
+ def worker(workerIdx: Int) =
+ if (workers < 2) () => workerFun(workerIdx)
+ else actors.Futures.future {
+ params.customClassLoader.map(Thread.currentThread.setContextClassLoader(_))
+ workerFun(workerIdx)
+ }
+
+ def workerFun(workerIdx: Int) = {
var n = 0 // passed tests
var d = 0 // discarded tests
var res: Result = null
var fm = FreqMap.empty[immutable.Set[Any]]
while(!stop && res == null && n < iterations) {
val size = (minSize: Double) + (sizeStep * (workerIdx + (workers*(n+d))))
- val propPrms = Prop.Params(Gen.Params(size.round.toInt, prms.rng), fm)
+ val propPrms = Prop.Params(Gen.Params(size.round.toInt, params.rng), fm)
secure(p(propPrms)) match {
case Right(e) => res =
Result(GenException(e), n, d, FreqMap.empty[immutable.Set[Any]])
@@ -250,11 +364,20 @@ object Test {
stop = true
results foreach (_.apply())
val timedRes = r.copy(time = System.currentTimeMillis-start)
- prms.testCallback.onTestResult("", timedRes)
+ params.testCallback.onTestResult("", timedRes)
timedRes
}
+ /** Check a set of properties.
+ * @deprecated (in 1.10.0) Use
+ * `checkProperties(Parameters, Properties)` instead.
+ */
+ @deprecated("Use 'checkProperties(Parameters, Properties)' instead", "1.10.0")
def checkProperties(prms: Params, ps: Properties): Seq[(String,Result)] =
+ checkProperties(paramsToParameters(prms), ps)
+
+ /** Check a set of properties. */
+ def checkProperties(prms: Parameters, ps: Properties): Seq[(String,Result)] =
ps.properties.map { case (name,p) =>
val testCallback = new TestCallback {
override def onPropEval(n: String, t: Int, s: Int, d: Int) =
@@ -262,7 +385,7 @@ object Test {
override def onTestResult(n: String, r: Result) =
prms.testCallback.onTestResult(name,r)
}
- val res = check(prms copy (testCallback = testCallback), p)
+ val res = check(prms copy (_testCallback = testCallback), p)
(name,res)
}
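
Taken together, the new `Parameters` API replaces the `Params` case class for callers: defaults are tweaked by overriding `Parameters.Default` and handed to the non-deprecated `check` overload. A minimal sketch (the property itself is just a placeholder):

    import org.scalacheck.{ Prop, Test }

    val myParams = new Test.Parameters.Default {
      override val minSuccessfulTests = 500
      override val maxDiscardRatio = 8f
    }

    val result = Test.check(myParams, Prop.forAll { (n: Int) => n + 0 == n })
    println(result.passed)
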
diff --git a/src/scalacheck/org/scalacheck/util/Buildable.scala b/src/scalacheck/org/scalacheck/util/Buildable.scala
index 221b8a61c3..140c541a95 100644
--- a/src/scalacheck/org/scalacheck/util/Buildable.scala
+++ b/src/scalacheck/org/scalacheck/util/Buildable.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
@@ -10,7 +10,6 @@
package org.scalacheck.util
import scala.collection._
-import scala.reflect.ClassTag
trait Buildable[T,C[_]] {
def builder: mutable.Builder[T,C[T]]
@@ -31,7 +30,7 @@ object Buildable {
def builder = (new mutable.ListBuffer[T]).mapResult(_.toStream)
}
- implicit def buildableArray[T](implicit cm: ClassTag[T]) =
+ implicit def buildableArray[T](implicit cm: ClassManifest[T]) =
new Buildable[T,Array] {
def builder = mutable.ArrayBuilder.make[T]
}
diff --git a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
index 4683c34a65..eb3a91fe59 100644
--- a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
+++ b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
@@ -30,7 +30,7 @@ trait CmdLineParser extends Parsers {
trait StrOpt extends Opt[String]
class OptMap {
- private val opts = new scala.collection.mutable.HashMap[Opt[_], Any]
+ private val opts = new collection.mutable.HashMap[Opt[_], Any]
def apply(flag: Flag): Boolean = opts.contains(flag)
def apply[T](opt: Opt[T]): T = opts.get(opt) match {
case None => opt.default
diff --git a/src/scalacheck/org/scalacheck/util/FreqMap.scala b/src/scalacheck/org/scalacheck/util/FreqMap.scala
index c7474d3b87..d0686aec72 100644
--- a/src/scalacheck/org/scalacheck/util/FreqMap.scala
+++ b/src/scalacheck/org/scalacheck/util/FreqMap.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
diff --git a/src/scalacheck/org/scalacheck/util/StdRand.scala b/src/scalacheck/org/scalacheck/util/StdRand.scala
index 317b0ccd10..7c1dc8dcc4 100644
--- a/src/scalacheck/org/scalacheck/util/StdRand.scala
+++ b/src/scalacheck/org/scalacheck/util/StdRand.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
index bf6d6ffed7..d407b93a4b 100644
--- a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
@@ -9,9 +9,10 @@ package doc
import scala.tools.nsc.ast.parser.{ SyntaxAnalyzer, BracePatch }
import scala.reflect.internal.Chars._
import symtab._
-import reporters.Reporter
import typechecker.Analyzer
import scala.reflect.internal.util.{ BatchSourceFile, RangePosition }
+import scala.tools.nsc.doc.base.{ CommentFactoryBase, MemberLookupBase, LinkTo, LinkToExternal }
+import scala.language.postfixOps
trait ScaladocAnalyzer extends Analyzer {
val global : Global // generally, a ScaladocGlobal
@@ -150,20 +151,52 @@ abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends Syntax
class ScaladocUnitScanner(unit0: CompilationUnit, patches0: List[BracePatch]) extends UnitScanner(unit0, patches0) {
- private var docBuffer: StringBuilder = null // buffer for comments
- private var docPos: Position = NoPosition // last doc comment position
- private var inDocComment = false
+ private var docBuffer: StringBuilder = null // buffer for comments (non-null while scanning)
+ private var inDocComment = false // if buffer contains double-star doc comment
+ private var lastDoc: DocComment = null // last comment if it was double-star doc
+ private object unmooredParser extends { // minimalist comment parser
+ val global: Global = ScaladocSyntaxAnalyzer.this.global
+ }
+ with CommentFactoryBase with MemberLookupBase {
+ import global.{ settings, Symbol }
+ def parseComment(comment: DocComment) = {
+ val nowarnings = settings.nowarn.value
+ settings.nowarn.value = true
+ try parseAtSymbol(comment.raw, comment.raw, comment.pos)
+ finally settings.nowarn.value = nowarnings
+ }
+
+ override def internalLink(sym: Symbol, site: Symbol): Option[LinkTo] = None
+ override def chooseLink(links: List[LinkTo]): LinkTo = links.headOption orNull
+ override def toString(link: LinkTo): String = "No link"
+ override def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] = None
+ override def warnNoLink: Boolean = false
+ }
+
+ /**
+ * Warn when discarding buffered doc at the end of a block.
+ * This mechanism doesn't warn about arbitrary unmoored doc.
+ * Also warn under -Xlint, but otherwise only warn in the presence of suspicious
+ * tags that appear to be documenting API. Warnings are suppressed while parsing
+ * the local comment so that comments of the form `[at] Martin` will not trigger a warning.
+ * By omission, tags for `see`, `todo`, `note` and `example` are ignored.
+ */
override def discardDocBuffer() = {
+ import scala.tools.nsc.doc.base.comment.Comment
val doc = flushDoc
- if (doc ne null)
- unit.warning(docPos, "discarding unmoored doc comment")
+ // tags that make a local double-star comment look unclean, as though it were API
+ def unclean(comment: Comment): Boolean = {
+ import comment._
+ authors.nonEmpty || result.nonEmpty || throws.nonEmpty || valueParams.nonEmpty ||
+ typeParams.nonEmpty || version.nonEmpty || since.nonEmpty
+ }
+ def isDirty = unclean(unmooredParser parseComment doc)
+ if ((doc ne null) && (settings.lint || isDirty))
+ unit.warning(doc.pos, "discarding unmoored doc comment")
}
- override def flushDoc(): DocComment = {
- if (docBuffer eq null) null
- else try DocComment(docBuffer.toString, docPos) finally docBuffer = null
- }
+ override def flushDoc(): DocComment = (try lastDoc finally lastDoc = null)
override protected def putCommentChar() {
if (inDocComment)
@@ -182,23 +215,19 @@ abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends Syntax
super.skipBlockComment()
}
override def skipComment(): Boolean = {
- super.skipComment() && {
- if (docBuffer ne null) {
- if (inDocComment)
- foundDocComment(docBuffer.toString, offset, charOffset - 2)
- else
- try foundComment(docBuffer.toString, offset, charOffset - 2) finally docBuffer = null
- }
+ // emit a block comment; if it's double-star, make Doc at this pos
+ def foundStarComment(start: Int, end: Int) = try {
+ val str = docBuffer.toString
+ val pos = new RangePosition(unit.source, start, start, end)
+ unit.comment(pos, str)
+ if (inDocComment)
+ lastDoc = DocComment(str, pos)
true
+ } finally {
+ docBuffer = null
+ inDocComment = false
}
- }
- def foundComment(value: String, start: Int, end: Int) {
- val pos = new RangePosition(unit.source, start, start, end)
- unit.comment(pos, value)
- }
- def foundDocComment(value: String, start: Int, end: Int) {
- docPos = new RangePosition(unit.source, start, start, end)
- unit.comment(docPos, value)
+ super.skipComment() && ((docBuffer eq null) || foundStarComment(offset, charOffset - 2))
}
}
class ScaladocUnitParser(unit: CompilationUnit, patches: List[BracePatch]) extends UnitParser(unit, patches) {
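
In effect, a stray doc comment inside a method body is now only reported when it looks like API documentation (or under `-Xlint`). A purely illustrative case that the new check would flag, since the `@return` tag makes the unmoored comment "unclean":

    def twice(x: Int) = {
      /** Doubles the input.
        * @return two times x
        */
      x * 2
    }
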
diff --git a/src/scaladoc/scala/tools/nsc/doc/Settings.scala b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
index 90b94e1336..e5dbaa3fd5 100644
--- a/src/scaladoc/scala/tools/nsc/doc/Settings.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
@@ -199,22 +199,11 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
"Expand all type aliases and abstract types into full template pages. (locally this can be done with the @template annotation)"
)
- val docExternalUrls = MultiStringSetting (
- "-external-urls",
- "externalUrl(s)",
- "(deprecated) comma-separated list of package_names=doc_URL for external dependencies, where package names are ':'-separated"
- )
-
val docGroups = BooleanSetting (
"-groups",
"Group similar functions together (based on the @group annotation)"
)
- // Somewhere slightly before r18708 scaladoc stopped building unless the
- // self-type check was suppressed. I hijacked the slotted-for-removal-anyway
- // suppress-vt-warnings option and renamed it for this purpose.
- noSelfCheck.value = true
-
// For improved help output.
def scaladocSpecific = Set[Settings#Setting](
docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator, docRootContent, useStupidTypes,
@@ -249,19 +238,7 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
}
}
- def appendIndex(url: String): String = {
- val index = "/index.html"
- if (url.endsWith(index)) url else url + index
- }
-
- // Deprecated together with 'docExternalUrls' option.
- lazy val extUrlPackageMapping: Map[String, String] = (Map.empty[String, String] /: docExternalUrls.value) {
- case (map, binding) =>
- val idx = binding indexOf "="
- val pkgs = binding substring (0, idx) split ":"
- val url = appendIndex(binding substring (idx + 1))
- map ++ (pkgs map (_ -> url))
- }
+ def appendIndex(url: String): String = url.stripSuffix("index.html").stripSuffix("/") + "/index.html"
lazy val extUrlMapping: Map[String, String] = docExternalDoc.value flatMap { s =>
val idx = s.indexOf("#")
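
The replacement `appendIndex` normalizes the URL rather than only appending when the suffix is missing, so it is idempotent and tolerant of trailing slashes; a quick check of the behaviour implied by the one-liner above (URLs are made up):

    def appendIndex(url: String): String =
      url.stripSuffix("index.html").stripSuffix("/") + "/index.html"

    assert(appendIndex("http://example.org/api")            == "http://example.org/api/index.html")
    assert(appendIndex("http://example.org/api/")           == "http://example.org/api/index.html")
    assert(appendIndex("http://example.org/api/index.html") == "http://example.org/api/index.html")
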
diff --git a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
index ef0a013ff2..cd1d604843 100755
--- a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
@@ -22,7 +22,7 @@ import scala.language.postfixOps
trait CommentFactoryBase { this: MemberLookupBase =>
val global: Global
- import global.{ reporter, Symbol }
+ import global.{ reporter, Symbol, NoSymbol }
/* Creates comments with necessary arguments */
def createComment (
@@ -186,7 +186,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
* @param comment The expanded comment string (including start and end markers) to be parsed.
* @param src The raw comment source string.
* @param pos The position of the comment in source. */
- protected def parseAtSymbol(comment: String, src: String, pos: Position, siteOpt: Option[Symbol] = None): Comment = {
+ protected def parseAtSymbol(comment: String, src: String, pos: Position, site: Symbol = NoSymbol): Comment = {
/** The cleaned raw comment as a list of lines. Cleaning removes comment
* start and end markers, line start markers and unnecessary whitespace. */
def clean(comment: String): List[String] = {
@@ -312,7 +312,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
val tagsWithoutDiagram = tags.filterNot(pair => stripTags.contains(pair._1))
val bodyTags: mutable.Map[TagKey, List[Body]] =
- mutable.Map(tagsWithoutDiagram mapValues {tag => tag map (parseWikiAtSymbol(_, pos, siteOpt))} toSeq: _*)
+ mutable.Map(tagsWithoutDiagram mapValues {tag => tag map (parseWikiAtSymbol(_, pos, site))} toSeq: _*)
def oneTag(key: SimpleTagKey): Option[Body] =
((bodyTags remove key): @unchecked) match {
@@ -345,7 +345,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
}
val com = createComment (
- body0 = Some(parseWikiAtSymbol(docBody.toString, pos, siteOpt)),
+ body0 = Some(parseWikiAtSymbol(docBody.toString, pos, site)),
authors0 = allTags(SimpleTagKey("author")),
see0 = allTags(SimpleTagKey("see")),
result0 = oneTag(SimpleTagKey("return")),
@@ -385,14 +385,14 @@ trait CommentFactoryBase { this: MemberLookupBase =>
* - Removed start-of-line star and one whitespace afterwards (if present).
* - Removed all end-of-line whitespace.
* - Only `endOfLine` is used to mark line endings. */
- def parseWikiAtSymbol(string: String, pos: Position, siteOpt: Option[Symbol]): Body = new WikiParser(string, pos, siteOpt).document()
+ def parseWikiAtSymbol(string: String, pos: Position, site: Symbol): Body = new WikiParser(string, pos, site).document()
/** TODO
*
* @author Ingo Maier
* @author Manohar Jonnalagedda
* @author Gilles Dubochet */
- protected final class WikiParser(val buffer: String, pos: Position, siteOpt: Option[Symbol]) extends CharReader(buffer) { wiki =>
+ protected final class WikiParser(val buffer: String, pos: Position, site: Symbol) extends CharReader(buffer) { wiki =>
var summaryParsed = false
def document(): Body = {
@@ -694,7 +694,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
case (SchemeUri(uri), optTitle) =>
Link(uri, optTitle getOrElse Text(uri))
case (qualName, optTitle) =>
- makeEntityLink(optTitle getOrElse Text(target), pos, target, siteOpt)
+ makeEntityLink(optTitle getOrElse Text(target), pos, target, site)
}
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala
index 671518fbc6..cc217d2f80 100755
--- a/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala
@@ -21,10 +21,10 @@ trait MemberLookupBase {
import global._
import rootMirror.{RootPackage, EmptyPackage}
- private def isRoot(s: Symbol) = s.isRootSymbol || s.isEmptyPackage || s.isEmptyPackageClass
+ private def isRoot(s: Symbol) = (s eq NoSymbol) || s.isRootSymbol || s.isEmptyPackage || s.isEmptyPackageClass
- def makeEntityLink(title: Inline, pos: Position, query: String, siteOpt: Option[Symbol]) =
- new EntityLink(title) { lazy val link = memberLookup(pos, query, siteOpt) }
+ def makeEntityLink(title: Inline, pos: Position, query: String, site: Symbol) =
+ new EntityLink(title) { lazy val link = memberLookup(pos, query, site) }
private var showExplanation = true
private def explanation: String =
@@ -45,18 +45,14 @@ trait MemberLookupBase {
| - you can use \\# to escape hashes, otherwise they will be considered as delimiters, like dots.""".stripMargin
} else ""
- def memberLookup(pos: Position, query: String, siteOpt: Option[Symbol]): LinkTo = {
+ def memberLookup(pos: Position, query: String, site: Symbol): LinkTo = {
val members = breakMembers(query)
// (1) First look in the root package, as most of the links are qualified
val fromRoot = lookupInRootPackage(pos, members)
// (2) Or recursively go into each containing template.
- val fromParents = siteOpt.fold(Stream.empty[Symbol]) { s =>
- Stream.iterate(s)(_.owner)
- }.takeWhile (!isRoot(_)).map {
- lookupInTemplate(pos, members, _)
- }
+ val fromParents = Stream.iterate(site)(_.owner) takeWhile (!isRoot(_)) map (lookupInTemplate(pos, members, _))
val syms = (fromRoot +: fromParents) find (!_.isEmpty) getOrElse Nil
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
index 9edd5afa13..f6373e9e97 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -3,7 +3,9 @@
* @author David Bernard, Manohar Jonnalagedda
*/
-package scala.tools.nsc
+package scala
+package tools
+package nsc
package doc
package html
@@ -40,8 +42,7 @@ abstract class HtmlPage extends Page { thisPage =>
def body: NodeSeq
def writeFor(site: HtmlFactory) {
- val doctype =
- DocType("html", PublicID("-//W3C//DTD XHTML 1.1//EN", "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd"), Nil)
+ val doctype = DocType("html")
val html =
<html>
<head>
@@ -55,7 +56,6 @@ abstract class HtmlPage extends Page { thisPage =>
</html>
writeFile(site) { (w: Writer) =>
- w.write("<?xml version='1.0' encoding='" + site.encoding + "'?>\n")
w.write(doctype.toString + "\n")
w.write(xml.Xhtml.toXhtml(html))
}
@@ -65,8 +65,6 @@ abstract class HtmlPage extends Page { thisPage =>
// we're only interested in the body, as this will go into the diff
_.write(body.text)
}
-
- //XML.save(pageFile.getPath, html, site.encoding, xmlDecl = false, doctype = doctype)
}
/** Transforms an optional comment into a styled HTML tree representing its body if it is defined, or into an empty
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/Page.scala b/src/scaladoc/scala/tools/nsc/doc/html/Page.scala
index 91939cf3de..93950fd0a7 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/Page.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/Page.scala
@@ -3,7 +3,8 @@
* @author David Bernard, Manohar Jonnalagedda
*/
-package scala.tools.nsc.doc.html
+package scala
+package tools.nsc.doc.html
import scala.tools.nsc.doc.model._
import java.io.{FileOutputStream, File}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala
index 348ea97c5b..910148532d 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala
@@ -3,7 +3,8 @@
* @author Stephane Micheloud
*/
-package scala.tools.nsc.doc.html
+package scala
+package tools.nsc.doc.html
import scala.xml.NodeSeq
import scala.annotation.tailrec
@@ -259,8 +260,8 @@ private[html] object SyntaxHigh {
parse(buf(i).toChar.toString, i+1)
case _ =>
if (i == 0 || (i >= 1 && !Character.isJavaIdentifierPart(buf(i-1).toChar))) {
- if (Character.isDigit(buf(i)) ||
- (buf(i) == '.' && i + 1 < buf.length && Character.isDigit(buf(i+1)))) {
+ if (Character.isDigit(buf(i).toInt) ||
+ (buf(i) == '.' && i + 1 < buf.length && Character.isDigit(buf(i+1).toInt))) {
val s = numlit(i)
parse("<span class=\"num\">"+s+"</span>", i+s.length)
} else {
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
index c034647320..ce3a5eb1fc 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
@@ -35,10 +35,10 @@ class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
val body =
<body>
<div id="library">
- <img class='class icon' src={ relativeLinkTo{List("class.png", "lib")} }/>
- <img class='trait icon' src={ relativeLinkTo{List("trait.png", "lib")} }/>
- <img class='object icon' src={ relativeLinkTo{List("object.png", "lib")} }/>
- <img class='package icon' src={ relativeLinkTo{List("package.png", "lib")} }/>
+ <img class='class icon' alt='class icon' src={ relativeLinkTo{List("class.png", "lib")} }/>
+ <img class='trait icon' alt='trait icon' src={ relativeLinkTo{List("trait.png", "lib")} }/>
+ <img class='object icon' alt='object icon' src={ relativeLinkTo{List("object.png", "lib")} }/>
+ <img class='package icon' alt='package icon' src={ relativeLinkTo{List("package.png", "lib")} }/>
</div>
{ browser }
<div id="content" class="ui-layout-center">
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
index a74c2eedbd..84ee82f994 100755
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
@@ -3,10 +3,13 @@
* @author Pedro Furlanetto
*/
-package scala.tools.nsc
+package scala
+package tools
+package nsc
package doc
package html
package page
+
import doc.model._
class ReferenceIndex(letter: Char, index: doc.Index, universe: Universe) extends HtmlPage {
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
index 63509de4b5..119d4e0143 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
@@ -3,7 +3,9 @@
* @author David Bernard, Manohar Jonnalagedda
*/
-package scala.tools.nsc
+package scala
+package tools
+package nsc
package doc
package html
package page
@@ -15,6 +17,7 @@ import model._
import model.diagram._
import scala.xml.{ NodeSeq, Text, UnprefixedAttribute }
import scala.language.postfixOps
+import scala.collection.mutable. { Set, HashSet }
import model._
import model.diagram._
@@ -636,13 +639,23 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
}
val subclasses = mbr match {
- case dtpl: DocTemplateEntity if isSelf && !isReduced && dtpl.allSubClasses.nonEmpty =>
- <div class="toggleContainer block">
- <span class="toggle">Known Subclasses</span>
- <div class="subClasses hiddenContent">{
- templatesToHtml(dtpl.allSubClasses.sortBy(_.name), scala.xml.Text(", "))
- }</div>
- </div>
+ case dtpl: DocTemplateEntity if isSelf && !isReduced =>
+ val subs: Set[DocTemplateEntity] = HashSet.empty
+ def transitive(dtpl: DocTemplateEntity) {
+ for (sub <- dtpl.directSubClasses if !(subs contains sub)) {
+ subs add sub
+ transitive(sub)
+ }
+ }
+ transitive(dtpl)
+ if (subs.nonEmpty)
+ <div class="toggleContainer block">
+ <span class="toggle">Known Subclasses</span>
+ <div class="subClasses hiddenContent">{
+ templatesToHtml(subs.toList.sortBy(_.name), scala.xml.Text(", "))
+ }</div>
+ </div>
+ else NodeSeq.Empty
case _ => NodeSeq.Empty
}
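The replacement above computes "Known Subclasses" on demand as the transitive closure of directSubClasses, using a mutable set to avoid duplicates and to terminate on cyclic links. A minimal sketch of the same walk over an illustrative Node type:

```scala
import scala.collection.mutable

final class Node(val name: String) {
  var directSubClasses: List[Node] = Nil
}

def allSubClasses(root: Node): List[Node] = {
  val seen = mutable.HashSet.empty[Node]
  def walk(n: Node): Unit =
    // HashSet.add returns true only on first insertion, so each node is visited once.
    for (sub <- n.directSubClasses if seen add sub) walk(sub)
  walk(root)
  seen.toList.sortBy(_.name)
}
```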
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
index 7d146b4a5f..4ff436bdc6 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
@@ -2,7 +2,9 @@
* @author Damien Obrist
* @author Vlad Ureche
*/
-package scala.tools.nsc
+package scala
+package tools
+package nsc
package doc
package html
package page
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
index 2fa1bf62f3..4bed106f43 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
@@ -98,7 +98,7 @@ class DotProcess(settings: doc.Settings) {
assert(!inputString.isSet)
assert(!outputString.isSet)
inputString.put(input)
- var result = outputString.take(settings.docDiagramsDotTimeout.value * 1000)
+ var result = outputString.take(settings.docDiagramsDotTimeout.value * 1000L)
if (error) result = null
result
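Widening the timeout multiplication to Long above is worth a note: multiplication happens in the type of its operands, so an Int * Int can silently overflow before the result is widened to Long. With illustrative values:

```scala
val seconds = 3000000               // deliberately large to show the effect
val wrong: Long = seconds * 1000    // Int * Int overflows first, then widens: -1294967296
val right: Long = seconds * 1000L   // promoted to Long before multiplying:    3000000000
```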
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js
index 70073b272a..96689ae701 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js
@@ -14,9 +14,9 @@ var title = $(document).attr('title');
var lastHash = "";
$(document).ready(function() {
- $('body').layout({
+ $('body').layout({
west__size: '20%',
- center__maskContents: true
+ center__maskContents: true
});
$('#browser').layout({
center__paneSelector: ".ui-west-center"
@@ -342,11 +342,6 @@ function configureTextFilter() {
if (event.keyCode == 27) { // escape
input.attr("value", "");
}
- if (event.keyCode == 9) { // tab
- $("#template").contents().find("#mbrsel-input").focus();
- input.attr("value", "");
- return false;
- }
if (event.keyCode == 40) { // down arrow
$(window).unbind("keydown");
keyboardScrolldownLeftPane();
@@ -354,6 +349,14 @@ function configureTextFilter() {
}
textFilter();
});
+ input.bind('keydown', function(event) {
+ if (event.keyCode == 9) { // tab
+ $("#template").contents().find("#mbrsel-input").focus();
+ input.attr("value", "");
+ return false;
+ }
+ textFilter();
+ });
input.focus(function(event) { input.select(); });
});
scheduler.add("init", function() {
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala
index 574d6b04f8..4e06e5bd16 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala
@@ -24,31 +24,20 @@ trait CommentFactory extends base.CommentFactoryBase {
thisFactory: ModelFactory with CommentFactory with MemberLookup =>
val global: Global
- import global.{ reporter, definitions, Symbol }
+ import global.{ reporter, definitions, Symbol, NoSymbol }
- protected val commentCache = mutable.HashMap.empty[(Symbol, TemplateImpl), Comment]
+ protected val commentCache = mutable.HashMap.empty[(Symbol, DocTemplateImpl), Option[Comment]]
- def addCommentBody(sym: Symbol, inTpl: TemplateImpl, docStr: String, docPos: global.Position): Symbol = {
- commentCache += (sym, inTpl) -> parse(docStr, docStr, docPos, None)
- sym
- }
-
- def comment(sym: Symbol, currentTpl: Option[DocTemplateImpl], inTpl: DocTemplateImpl): Option[Comment] = {
- val key = (sym, inTpl)
- if (commentCache isDefinedAt key)
- Some(commentCache(key))
- else {
- val c = defineComment(sym, currentTpl, inTpl)
- if (c isDefined) commentCache += (sym, inTpl) -> c.get
- c
- }
- }
+ def comment(sym: Symbol, linkTarget: DocTemplateImpl, inTpl: DocTemplateImpl): Option[Comment] =
+ commentCache.getOrElseUpdate((sym, inTpl), {
+ defineComment(sym, linkTarget, inTpl)
+ })
/** A comment is usualy created by the parser, however for some special
* cases we have to give some `inTpl` comments (parent class for example)
* to the comment of the symbol.
* This function manages some of those cases : Param accessor and Primary constructor */
- def defineComment(sym: Symbol, currentTpl: Option[DocTemplateImpl], inTpl: DocTemplateImpl):Option[Comment] = {
+ def defineComment(sym: Symbol, linkTarget: DocTemplateImpl, inTpl: DocTemplateImpl):Option[Comment] = {
//param accessor case
// We just need the @param argument, we put it into the body
@@ -85,8 +74,7 @@ trait CommentFactory extends base.CommentFactoryBase {
else {
val rawComment = global.expandedDocComment(sym, inTpl.sym).trim
if (rawComment != "") {
- val tplOpt = if (currentTpl.isDefined) currentTpl else Some(inTpl)
- val c = parse(rawComment, global.rawDocComment(sym), global.docCommentPos(sym), tplOpt)
+ val c = parse(rawComment, global.rawDocComment(sym), global.docCommentPos(sym), linkTarget)
Some(c)
}
else None
@@ -94,9 +82,9 @@ trait CommentFactory extends base.CommentFactoryBase {
}
- protected def parse(comment: String, src: String, pos: Position, inTplOpt: Option[DocTemplateImpl] = None): Comment = {
- assert(!inTplOpt.isDefined || inTplOpt.get != null)
- parseAtSymbol(comment, src, pos, inTplOpt map (_.sym))
+ protected def parse(comment: String, src: String, pos: Position, linkTarget: DocTemplateImpl): Comment = {
+ val sym = if (linkTarget eq null) NoSymbol else linkTarget.sym
+ parseAtSymbol(comment, src, pos, sym)
}
/** Parses a string containing wiki syntax into a `Comment` object.
@@ -105,8 +93,8 @@ trait CommentFactory extends base.CommentFactoryBase {
* - Removed start-of-line star and one whitespace afterwards (if present).
* - Removed all end-of-line whitespace.
* - Only `endOfLine` is used to mark line endings. */
- def parseWiki(string: String, pos: Position, inTplOpt: Option[DocTemplateImpl]): Body = {
- assert(!inTplOpt.isDefined || inTplOpt.get != null)
- parseWikiAtSymbol(string,pos, inTplOpt map (_.sym))
+ def parseWiki(string: String, pos: Position, inTpl: DocTemplateImpl): Body = {
+ val sym = if (inTpl eq null) NoSymbol else inTpl.sym
+ parseWikiAtSymbol(string,pos, sym)
}
}
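The comment cache above now stores Option[Comment] and is filled through getOrElseUpdate, so a failed lookup is remembered just like a successful one and defineComment runs at most once per (symbol, template) key. A minimal sketch of the pattern with illustrative types:

```scala
import scala.collection.mutable

val cache = mutable.HashMap.empty[(String, String), Option[String]]

// Stand-in for the real comment parser; may legitimately return None.
def defineDoc(sym: String, tpl: String): Option[String] =
  if (sym.nonEmpty) Some("doc for " + sym + " in " + tpl) else None

def comment(sym: String, tpl: String): Option[String] =
  // getOrElseUpdate evaluates its second argument only on a cache miss.
  cache.getOrElseUpdate((sym, tpl), defineDoc(sym, tpl))
```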
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
index 924f203a59..6932f01e9a 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
@@ -258,10 +258,6 @@ trait DocTemplateEntity extends MemberTemplateEntity {
* This template's linearization contains all of its direct and indirect super-types. */
def linearizationTypes: List[TypeEntity]
- /** All class, trait and object templates for which this template is a direct or indirect super-class or super-trait.
- * Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */
- def allSubClasses: List[DocTemplateEntity]
-
/** All class, trait and object templates for which this template is a *direct* super-class or super-trait.
* Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */
def directSubClasses: List[DocTemplateEntity]
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
index 1272906df5..53410fd4ad 100755
--- a/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
@@ -3,7 +3,8 @@
* @author Pedro Furlanetto
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package doc
package model
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
index 23259a4ae8..339129bdbc 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
@@ -49,13 +49,6 @@ trait MemberLookup extends base.MemberLookupBase {
settings.extUrlMapping get path map { url =>
LinkToExternal(name, url + "#" + name)
}
- } orElse {
- // Deprecated option.
- settings.extUrlPackageMapping find {
- case (pkg, _) => name startsWith pkg
- } map {
- case (_, url) => LinkToExternal(name, url + "#" + name)
- }
}
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
index cc228082c1..c4e3c115be 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -1,6 +1,7 @@
/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL */
-package scala.tools.nsc
+package scala
+package tools.nsc
package doc
package model
@@ -9,11 +10,9 @@ import diagram._
import scala.collection._
import scala.util.matching.Regex
-
import symtab.Flags
import io._
-
import model.{ RootPackage => RootPackageEntity }
/** This trait extracts all required information for documentation from compilation units */
@@ -99,22 +98,21 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
abstract class MemberImpl(sym: Symbol, inTpl: DocTemplateImpl) extends EntityImpl(sym, inTpl) with MemberEntity {
+ // If the current tpl is a DocTemplate, we consider it the root for resolving link targets (instead of the
+ // package the class is in) -- so people can refer to methods directly [[foo]], instead of using [[MyClass.foo]]
+ // in the doc comment of MyClass
+ def linkTarget: DocTemplateImpl = inTpl
+
lazy val comment = {
- // If the current tpl is a DocTemplate, we consider itself as the root for resolving link targets (instead of the
- // package the class is in) -- so people can refer to methods directly [[foo]], instead of using [[MyClass.foo]]
- // in the doc comment of MyClass
- val thisTpl = this match {
- case d: DocTemplateImpl => Some(d)
- case _ => None
- }
- if (inTpl != null) thisFactory.comment(sym, thisTpl, inTpl) else None
+ val documented = if (sym.hasAccessorFlag) sym.accessed else sym
+ thisFactory.comment(documented, linkTarget, inTpl)
}
def group = comment flatMap (_.group) getOrElse defaultGroup
override def inTemplate = inTpl
override def toRoot: List[MemberImpl] = this :: inTpl.toRoot
def inDefinitionTemplates =
if (inTpl == null)
- List(makeRootPackage)
+ docTemplatesCache(RootPackage) :: Nil
else
makeTemplate(sym.owner)::(sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) })
def visibility = {
@@ -152,9 +150,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def deprecation =
if (sym.isDeprecated)
Some((sym.deprecationMessage, sym.deprecationVersion) match {
- case (Some(msg), Some(ver)) => parseWiki("''(Since version " + ver + ")'' " + msg, NoPosition, Some(inTpl))
- case (Some(msg), None) => parseWiki(msg, NoPosition, Some(inTpl))
- case (None, Some(ver)) => parseWiki("''(Since version " + ver + ")''", NoPosition, Some(inTpl))
+ case (Some(msg), Some(ver)) => parseWiki("''(Since version " + ver + ")'' " + msg, NoPosition, inTpl)
+ case (Some(msg), None) => parseWiki(msg, NoPosition, inTpl)
+ case (None, Some(ver)) => parseWiki("''(Since version " + ver + ")''", NoPosition, inTpl)
case (None, None) => Body(Nil)
})
else
@@ -162,9 +160,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def migration =
if(sym.hasMigrationAnnotation)
Some((sym.migrationMessage, sym.migrationVersion) match {
- case (Some(msg), Some(ver)) => parseWiki("''(Changed in version " + ver + ")'' " + msg, NoPosition, Some(inTpl))
- case (Some(msg), None) => parseWiki(msg, NoPosition, Some(inTpl))
- case (None, Some(ver)) => parseWiki("''(Changed in version " + ver + ")''", NoPosition, Some(inTpl))
+ case (Some(msg), Some(ver)) => parseWiki("''(Changed in version " + ver + ")'' " + msg, NoPosition, inTpl)
+ case (Some(msg), None) => parseWiki(msg, NoPosition, inTpl)
+ case (None, Some(ver)) => parseWiki("''(Changed in version " + ver + ")''", NoPosition, inTpl)
case (None, None) => Body(Nil)
})
else
@@ -220,7 +218,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
(name + tParams(this) + defParams(this) +":"+ resultType.name).replaceAll("\\s","") // no spaces allowed, they break links
}
// these only apply for NonTemplateMemberEntities
- def useCaseOf: Option[MemberEntity] = None
+ def useCaseOf: Option[MemberImpl] = None
def byConversion: Option[ImplicitConversionImpl] = None
def isImplicitlyInherited = false
def isShadowedImplicit = false
@@ -275,16 +273,17 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
if (settings.verbose)
inform("Creating doc template for " + sym)
+ override def linkTarget: DocTemplateImpl = this
override def toRoot: List[DocTemplateImpl] = this :: inTpl.toRoot
- protected def inSourceFromSymbol(symbol: Symbol) =
- if (symbol.sourceFile != null && ! symbol.isSynthetic)
- Some((symbol.sourceFile, symbol.pos.line))
+ protected def reprSymbol: Symbol = sym
+
+ def inSource =
+ if (reprSymbol.sourceFile != null && ! reprSymbol.isSynthetic)
+ Some((reprSymbol.sourceFile, reprSymbol.pos.line))
else
None
- def inSource = inSourceFromSymbol(sym)
-
def sourceUrl = {
def fixPath(s: String) = s.replaceAll("\\" + java.io.File.separator, "/")
val assumedSourceRoot = fixPath(settings.sourcepath.value) stripSuffix "/"
@@ -306,20 +305,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
else None
}
- protected def linearizationFromSymbol(symbol: Symbol): List[(TemplateEntity, TypeEntity)] = {
- symbol.ancestors map { ancestor =>
- val typeEntity = makeType(symbol.info.baseType(ancestor), this)
- val tmplEntity = makeTemplate(ancestor) match {
- case tmpl: DocTemplateImpl => tmpl registerSubClass this ; tmpl
- case tmpl => tmpl
- }
- (tmplEntity, typeEntity)
- }
- }
-
- lazy val linearization = linearizationFromSymbol(sym)
- def linearizationTemplates = linearization map { _._1 }
- def linearizationTypes = linearization map { _._2 }
+ lazy val (linearizationTemplates, linearizationTypes) =
+ reprSymbol.ancestors map { ancestor =>
+ (makeTemplate(ancestor), makeType(reprSymbol.info.baseType(ancestor), this))
+ } unzip
/* Subclass cache */
private lazy val subClassesCache = (
@@ -330,8 +319,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
if (subClassesCache != null)
subClassesCache += sc
}
- def allSubClasses = if (subClassesCache == null) Nil else subClassesCache.toList
- def directSubClasses = allSubClasses.filter(_.parentTypes.map(_._1).contains(this))
+ def directSubClasses = if (subClassesCache == null) Nil else subClassesCache.toList
/* Implcitly convertible class cache */
private var implicitlyConvertibleClassesCache: mutable.ListBuffer[(DocTemplateImpl, ImplicitConversionImpl)] = null
@@ -387,12 +375,12 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
members :::= memberSymsLazy.map(modelCreation.createLazyTemplateMember(_, this))
- // compute linearization to register subclasses
- linearization
outgoingImplicitlyConvertedClasses
+ for (pt <- sym.info.parents; parentTemplate <- findTemplateMaybe(pt.typeSymbol)) parentTemplate registerSubClass this
+
// the members generated by the symbols in memberSymsEager PLUS the members from the usecases
- val allMembers = ownMembers ::: ownMembers.flatMap(_.useCaseOf.map(_.asInstanceOf[MemberImpl])).distinct
+ val allMembers = ownMembers ::: ownMembers.flatMap(_.useCaseOf).distinct
implicitsShadowing = makeShadowingTable(allMembers, conversions, this)
// finally, add the members generated by implicit conversions
members :::= conversions.flatMap(_.memberImpls)
@@ -472,33 +460,30 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
abstract class PackageImpl(sym: Symbol, inTpl: PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package {
override def inTemplate = inTpl
override def toRoot: List[PackageImpl] = this :: inTpl.toRoot
- override lazy val (inSource, linearization) = {
- val representive = sym.info.members.find {
- s => s.isPackageObject
- } getOrElse sym
- (inSourceFromSymbol(representive), linearizationFromSymbol(representive))
- }
+ override def reprSymbol = sym.info.members.find (_.isPackageObject) getOrElse sym
+
def packages = members collect { case p: PackageImpl if !(droppedPackages contains p) => p }
}
abstract class RootPackageImpl(sym: Symbol) extends PackageImpl(sym, null) with RootPackageEntity
abstract class NonTemplateMemberImpl(sym: Symbol, conversion: Option[ImplicitConversionImpl],
- override val useCaseOf: Option[MemberEntity], inTpl: DocTemplateImpl)
+ override val useCaseOf: Option[MemberImpl], inTpl: DocTemplateImpl)
extends MemberImpl(sym, inTpl) with NonTemplateMemberEntity {
override lazy val comment = {
- val inRealTpl =
- conversion.fold(Option(inTpl)) { conv =>
- /* Variable precendence order for implicitly added members: Take the variable defifinitions from ...
- * 1. the target of the implicit conversion
- * 2. the definition template (owner)
- * 3. the current template
- */
- findTemplateMaybe(conv.toType.typeSymbol) filterNot (_ == makeRootPackage) orElse (
- findTemplateMaybe(sym.owner) filterNot (_ == makeRootPackage) orElse Option(inTpl)
- )
- }
- inRealTpl flatMap (thisFactory.comment(sym, None, _))
+ def nonRootTemplate(sym: Symbol): Option[DocTemplateImpl] =
+ if (sym eq RootPackage) None else findTemplateMaybe(sym)
+ /* Variable precedence order for implicitly added members: Take the variable definitions from ...
+ * 1. the target of the implicit conversion
+ * 2. the definition template (owner)
+ * 3. the current template
+ */
+ val inRealTpl = conversion.flatMap { conv =>
+ nonRootTemplate(conv.toType.typeSymbol)
+ } orElse nonRootTemplate(sym.owner) orElse Option(inTpl)
+ inRealTpl flatMap { tpl =>
+ thisFactory.comment(sym, tpl, tpl)
+ }
}
override def inDefinitionTemplates = useCaseOf.fold(super.inDefinitionTemplates)(_.inDefinitionTemplates)
@@ -517,7 +502,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
abstract class NonTemplateParamMemberImpl(sym: Symbol, conversion: Option[ImplicitConversionImpl],
- useCaseOf: Option[MemberEntity], inTpl: DocTemplateImpl)
+ useCaseOf: Option[MemberImpl], inTpl: DocTemplateImpl)
extends NonTemplateMemberImpl(sym, conversion, useCaseOf, inTpl) {
def valueParams = {
val info = conversion.fold(sym.info)(_.toType memberInfo sym)
@@ -630,7 +615,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
import Streamable._
Path(settings.docRootContent.value) match {
case f : File => {
- val rootComment = closing(f.inputStream())(is => parse(slurp(is), "", NoPosition, Option(inTpl)))
+ val rootComment = closing(f.inputStream())(is => parse(slurp(is), "", NoPosition, inTpl))
Some(rootComment)
}
case _ => None
@@ -738,16 +723,12 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
}
- def makeRootPackage: PackageImpl = docTemplatesCache(RootPackage).asInstanceOf[PackageImpl]
-
// TODO: Should be able to override the type
def makeMember(aSym: Symbol, conversion: Option[ImplicitConversionImpl], inTpl: DocTemplateImpl): List[MemberImpl] = {
def makeMember0(bSym: Symbol, useCaseOf: Option[MemberImpl]): Option[MemberImpl] = {
if (bSym.isGetter && bSym.isLazy)
Some(new NonTemplateMemberImpl(bSym, conversion, useCaseOf, inTpl) with Val {
- override lazy val comment = // The analyser does not duplicate the lazy val's DocDef when it introduces its accessor.
- thisFactory.comment(bSym.accessed, None, inTpl.asInstanceOf[DocTemplateImpl]) // This hack should be removed after analyser is fixed.
override def isLazyVal = true
})
else if (bSym.isGetter && bSym.accessed.isMutable)
@@ -837,36 +818,36 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
}
- /** */
def makeAnnotation(annot: AnnotationInfo): scala.tools.nsc.doc.model.Annotation = {
val aSym = annot.symbol
new EntityImpl(aSym, makeTemplate(aSym.owner)) with scala.tools.nsc.doc.model.Annotation {
lazy val annotationClass =
makeTemplate(annot.symbol)
- val arguments = { // lazy
- def annotArgs = annot.args match {
- case Nil => annot.assocs collect { case (_, LiteralAnnotArg(const)) => Literal(const) }
- case xs => xs
- }
- def noParams = annotArgs map (_ => None)
-
- val params: List[Option[ValueParam]] = annotationClass match {
+ val arguments = {
+ val paramsOpt: Option[List[ValueParam]] = annotationClass match {
case aClass: DocTemplateEntity with Class =>
- (aClass.primaryConstructor map { _.valueParams.head }) match {
- case Some(vps) => vps map { Some(_) }
- case _ => noParams
+ val constr = aClass.constructors collectFirst {
+ case c: MemberImpl if c.sym == annot.original.symbol => c
}
- case _ => noParams
+ constr flatMap (_.valueParams.headOption)
+ case _ => None
}
- assert(params.length == annotArgs.length, (params, annotArgs))
-
- params zip annotArgs flatMap { case (param, arg) =>
- makeTree(arg) map { tree =>
- new ValueArgument {
- def parameter = param
- def value = tree
+ val argTrees = annot.args map makeTree
+ paramsOpt match {
+ case Some (params) =>
+ params zip argTrees map { case (param, tree) =>
+ new ValueArgument {
+ def parameter = Some(param)
+ def value = tree
+ }
+ }
+ case None =>
+ argTrees map { tree =>
+ new ValueArgument {
+ def parameter = None
+ def value = tree
+ }
}
- }
}
}
}
@@ -906,9 +887,8 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
sym.name == aSym.name &&
sym.isParamWithDefault
)
- (unit.body find (t => isCorrespondingParam(t.symbol))) match {
- case Some(ValDef(_,_,_,rhs)) => makeTree(rhs)
- case _ => None
+ unit.body find (t => isCorrespondingParam(t.symbol)) collect {
+ case ValDef(_,_,_,rhs) if rhs ne EmptyTree => makeTree(rhs)
}
case _ => None
}
@@ -1041,4 +1021,3 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
{ val rawComment = global.expandedDocComment(bSym, inTpl.sym)
rawComment.contains("@template") || rawComment.contains("@documentable") }
}
-
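Among the ModelFactory changes above, the linearization is now built once as a list of (template, type) pairs and split with unzip into two pattern-bound lazy vals. The idiom in isolation, with Ancestor as an illustrative stand-in:

```scala
final case class Ancestor(template: String, tpe: String)

class LinearizationExample(ancestors: List[Ancestor]) {
  // A pattern-bound lazy val: both lists are computed together on first access.
  lazy val (linearizationTemplates, linearizationTypes) =
    ancestors.map(a => (a.template, a.tpe)).unzip
}
```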
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
index 1f87f935f2..f984b4579f 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
@@ -170,7 +170,7 @@ trait ModelFactoryImplicitSupport {
val newContext = context.makeImplicit(context.ambiguousErrors)
newContext.macrosEnabled = false
val newTyper = global.analyzer.newTyper(newContext)
- newTyper.silent(_.typed(appliedTree, EXPRmode, WildcardType), reportAmbiguousErrors = false) match {
+ newTyper.silent(_.typed(appliedTree), reportAmbiguousErrors = false) match {
case global.analyzer.SilentResultValue(t: Tree) => t
case global.analyzer.SilentTypeError(err) =>
@@ -226,12 +226,9 @@ trait ModelFactoryImplicitSupport {
// look for type variables in the type. If there are none, we can decide if the implicit is there or not
if (implType.isTrivial) {
try {
- context.flushBuffer() /* any errors here should not prevent future findings */
- // TODO: Not sure this is the right thing to do -- seems similar to what scalac should be doing
- val context2 = context.make(context.unit, context.tree, sym.owner, context.scope, context.imports)
- val search = inferImplicit(EmptyTree, tpe, false, false, context2, false)
- context.flushBuffer() /* any errors here should not prevent future findings */
-
+ // TODO: Not sure if `owner = sym.owner` is the right thing to do -- seems similar to what scalac should be doing
+ val silentContext = context.make(owner = sym.owner).makeSilent(reportAmbiguousErrors = false)
+ val search = inferImplicit(EmptyTree, tpe, false, false, silentContext, false)
available = Some(search.tree != EmptyTree)
} catch {
case _: TypeError =>
@@ -288,7 +285,7 @@ trait ModelFactoryImplicitSupport {
(tparams zip constrs) flatMap {
case (tparam, constr) => {
uniteConstraints(constr) match {
- case (loBounds, upBounds) => (loBounds filter (_ != NothingClass.tpe), upBounds filter (_ != AnyClass.tpe)) match {
+ case (loBounds, upBounds) => (loBounds filter (_ != NothingTpe), upBounds filter (_ != AnyTpe)) match {
case (Nil, Nil) =>
Nil
case (List(lo), List(up)) if (lo == up) =>
@@ -335,13 +332,12 @@ trait ModelFactoryImplicitSupport {
def targetType: TypeEntity = makeType(toType, inTpl)
- def convertorOwner: TemplateEntity =
- if (convSym != NoSymbol)
- makeTemplate(convSym.owner)
- else {
+ def convertorOwner: TemplateEntity = {
+ if (convSym eq NoSymbol)
error("Scaladoc implicits: " + toString + " = NoSymbol!")
- makeRootPackage
- }
+
+ makeTemplate(convSym.owner)
+ }
def targetTypeComponents: List[(TemplateEntity, TypeEntity)] = makeParentTypes(toType, None, inTpl)
@@ -536,7 +532,7 @@ trait ModelFactoryImplicitSupport {
object wildcardToNothing extends TypeMap {
def apply(tp: Type): Type = mapOver(tp) match {
case WildcardType =>
- NothingClass.tpe
+ NothingTpe
case other =>
other
}
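The constraint rendering above drops vacuous bounds: a lower bound of Nothing or an upper bound of Any carries no information, so only the remaining bounds are shown. Sketched with plain strings standing in for compiler Types:

```scala
// Stand-in for the (lower, upper) bound lists produced by uniteConstraints.
def interestingBounds(lo: List[String], hi: List[String]): (List[String], List[String]) =
  (lo filterNot (_ == "Nothing"), hi filterNot (_ == "Any"))

// interestingBounds(List("Nothing"), List("AnyRef", "Any")) == (Nil, List("AnyRef"))
```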
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala
index b972649194..b381176b17 100755
--- a/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala
@@ -19,7 +19,7 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
val global: Global
import global._
- def makeTree(rhs: Tree): Option[TreeEntity] = {
+ def makeTree(rhs: Tree): TreeEntity = {
val expr = new StringBuilder
var refs = new immutable.TreeMap[Int, (Entity, Int)] // start, (Entity to be linked to , end)
@@ -80,17 +80,16 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
traverser.traverse(rhs)
- Some(new TreeEntity {
+ new TreeEntity {
val expression = expr.toString
val refEntity = refs
- })
+ }
}
- case pos: OffsetPosition =>
- Some(new TreeEntity {
+ case _ =>
+ new TreeEntity {
val expression = rhs.toString
val refEntity = new immutable.TreeMap[Int, (Entity, Int)]
- })
- case _ => None
+ }
}
}
}
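makeTree is now total: it always returns a TreeEntity and falls back to the tree's toString rendering when no usable position is available, which removes the Option handling at every call site. A sketch of the shape of that change (names and types are illustrative, not the scaladoc model):

```scala
final case class TreeEntity(expression: String, refEntity: Map[Int, (String, Int)])

// Before: Option[TreeEntity], with a None case leaking to every caller.
// After: total, with a toString-based fallback when no source text is available.
def makeTree(rhs: Any, sourceText: Option[String]): TreeEntity =
  sourceText match {
    case Some(src) => TreeEntity(src, Map.empty)          // the real code also collects entity links
    case None      => TreeEntity(rhs.toString, Map.empty)
  }
```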
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
index ebac25bbe4..87d7ece8f2 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
@@ -116,18 +116,27 @@ trait DiagramFactory extends DiagramDirectiveParser {
case d: TemplateEntity if ((!diagramFilter.hideInheritedNodes) || (d.inTemplate == pack)) => d
}
+ def listSuperClasses(member: MemberTemplateImpl) = {
+ // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to add nodes to diagrams.
+ (pack.sym, member.sym) match {
+ case (ScalaPackage, NullClass) =>
+ List(makeTemplate(AnyRefClass))
+ case (ScalaPackage, NothingClass) =>
+ (List(NullClass) ::: ScalaValueClasses) map { makeTemplate(_) }
+ case _ =>
+ member.parentTypes map {
+ case (template, tpe) => template
+ } filter {
+ nodesAll.contains(_)
+ }
+ }
+ }
+
// for each node, add its subclasses
for (node <- nodesAll if !classExcluded(node)) {
node match {
case dnode: MemberTemplateImpl =>
- var superClasses = dnode.parentTypes.map(_._1).filter(nodesAll.contains(_))
-
- // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to add nodes to diagrams.
- if (pack.sym == ScalaPackage)
- if (dnode.sym == NullClass)
- superClasses = List(makeTemplate(AnyRefClass))
- else if (dnode.sym == NothingClass)
- superClasses = (List(NullClass) ::: ScalaValueClasses).map(makeTemplate(_))
+ val superClasses = listSuperClasses(dnode)
if (!superClasses.isEmpty) {
nodesShown += dnode
@@ -149,7 +158,14 @@ trait DiagramFactory extends DiagramDirectiveParser {
None
else {
val nodes = nodesAll.filter(nodesShown.contains(_)).flatMap(mapNodes.get(_))
- val edges = edgesAll.map(pair => (mapNodes(pair._1), pair._2.map(mapNodes(_)))).filterNot(pair => pair._2.isEmpty)
+ val edges = edgesAll.map {
+ case (entity, superClasses) => {
+ (mapNodes(entity), superClasses flatMap { mapNodes.get(_) })
+ }
+ } filterNot {
+ case (node, superClassNodes) => superClassNodes.isEmpty
+ }
+
val diagram =
// TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to change the diagrams.
if (pack.sym == ScalaPackage) {
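The edge construction above resolves each superclass through the node table with flatMap over Map.get, so templates without a node simply drop out, and edges left with no targets are discarded. The same shape in isolation, with type parameters standing in for the diagram model:

```scala
// `all` pairs each shown entity with its superclasses; `mapNodes` holds only shown nodes.
def edges[A, N](all: List[(A, List[A])], mapNodes: Map[A, N]): List[(N, List[N])] =
  all.map { case (entity, supers) =>
    (mapNodes(entity), supers flatMap { mapNodes.get(_) })   // the entity itself is assumed present
  }.filterNot { case (_, superNodes) => superNodes.isEmpty }
```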
diff --git a/src/scalap/scala/tools/scalap/ByteArrayReader.scala b/src/scalap/scala/tools/scalap/ByteArrayReader.scala
index bb001623a8..9c72bdbf1e 100644
--- a/src/scalap/scala/tools/scalap/ByteArrayReader.scala
+++ b/src/scalap/scala/tools/scalap/ByteArrayReader.scala
@@ -6,7 +6,8 @@
*/
-package scala.tools.scalap
+package scala
+package tools.scalap
class ByteArrayReader(content: Array[Byte]) {
diff --git a/src/scalap/scala/tools/scalap/Classfile.scala b/src/scalap/scala/tools/scalap/Classfile.scala
index 8082b6befe..f62df285f9 100644
--- a/src/scalap/scala/tools/scalap/Classfile.scala
+++ b/src/scalap/scala/tools/scalap/Classfile.scala
@@ -32,7 +32,7 @@ class Classfile(in: ByteArrayReader) {
var attribs: List[Attribute] = Nil
var i = 0
while (i < n) {
- attribs = Attribute(in.nextChar, in.nextBytes(in.nextInt)) :: attribs
+ attribs = Attribute(in.nextChar.toInt, in.nextBytes(in.nextInt)) :: attribs
i = i + 1
}
attribs
@@ -43,7 +43,7 @@ class Classfile(in: ByteArrayReader) {
var members: List[Member] = Nil
var i = 0
while (i < n) {
- members = Member(field, in.nextChar, in.nextChar, in.nextChar, readAttribs) :: members
+ members = Member(field, in.nextChar.toInt, in.nextChar.toInt, in.nextChar.toInt, readAttribs) :: members
i = i + 1
}
members
@@ -54,7 +54,7 @@ class Classfile(in: ByteArrayReader) {
var intfs: List[Int] = Nil
var i = 0
while (i < n) {
- intfs = in.nextChar :: intfs
+ intfs = in.nextChar.toInt :: intfs
i = i + 1
}
intfs
@@ -81,7 +81,7 @@ class Classfile(in: ByteArrayReader) {
case object Empty extends PoolEntry(0) { }
val entries = {
- val pool = new Array[PoolEntry](in.nextChar)
+ val pool = new Array[PoolEntry](in.nextChar.toInt)
var i = 1
while (i < pool.length) {
val tag = in.nextByte
@@ -92,7 +92,7 @@ class Classfile(in: ByteArrayReader) {
pool(i) = Empty
}
else pool(i) = tag match {
- case CONSTANT_UTF8 => UTF8(in.nextUTF8(in.nextChar))
+ case CONSTANT_UTF8 => UTF8(in.nextUTF8(in.nextChar.toInt))
case CONSTANT_UNICODE => in.skip(in.nextChar) ; Empty
case CONSTANT_CLASS => ClassRef(in.nextChar)
case CONSTANT_STRING => StringConst(in.nextChar)
diff --git a/src/scalap/scala/tools/scalap/CodeWriter.scala b/src/scalap/scala/tools/scalap/CodeWriter.scala
index 8254c2dfce..168050096d 100644
--- a/src/scalap/scala/tools/scalap/CodeWriter.scala
+++ b/src/scalap/scala/tools/scalap/CodeWriter.scala
@@ -6,7 +6,8 @@
*/
-package scala.tools.scalap
+package scala
+package tools.scalap
import java.io._
@@ -97,9 +98,9 @@ class CodeWriter(writer: Writer) {
def print(value: Boolean): CodeWriter = print(String.valueOf(value))
- def print(value: Byte): CodeWriter = print(String.valueOf(value))
+ def print(value: Byte): CodeWriter = print(String.valueOf(value.toInt))
- def print(value: Short): CodeWriter = print(String.valueOf(value))
+ def print(value: Short): CodeWriter = print(String.valueOf(value.toInt))
def print(value: Char): CodeWriter = print(String.valueOf(value))
diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala
index 90f8cb8d71..5da4227e53 100644
--- a/src/scalap/scala/tools/scalap/Main.scala
+++ b/src/scalap/scala/tools/scalap/Main.scala
@@ -5,7 +5,8 @@
**
*/
-package scala.tools.scalap
+package scala
+package tools.scalap
import java.io.{ PrintStream, OutputStreamWriter, ByteArrayOutputStream }
import scala.reflect.NameTransformer
diff --git a/src/scalap/scala/tools/scalap/MetaParser.scala b/src/scalap/scala/tools/scalap/MetaParser.scala
index 00678ab504..8b4ffb3efd 100644
--- a/src/scalap/scala/tools/scalap/MetaParser.scala
+++ b/src/scalap/scala/tools/scalap/MetaParser.scala
@@ -6,7 +6,8 @@
*/
-package scala.tools.scalap
+package scala
+package tools.scalap
import java.io._
import java.util._
diff --git a/src/swing/scala/swing/Action.scala b/src/swing/scala/swing/Action.scala
index 8740f63e98..98b9fee2ee 100644
--- a/src/swing/scala/swing/Action.scala
+++ b/src/swing/scala/swing/Action.scala
@@ -35,7 +35,12 @@ object Action {
// TODO: we need an action cache
private var _action: Action = Action.NoAction
def action: Action = _action
- def action_=(a: Action) { _action = a; peer.setAction(a.peer) }
+ def action_=(a: Action) {
+ _action = a;
+
+ import scala.language.reflectiveCalls
+ peer.setAction(a.peer)
+ }
//1.6: def hideActionText: Boolean = peer.getHideActionText
//def hideActionText_=(b: Boolean) = peer.setHideActionText(b)
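The import scala.language.reflectiveCalls added inside action_= above follows the SIP-18 pattern of enabling a feature in the narrowest scope that needs it: a structural-type call triggers a feature warning at the call site, and a local import silences exactly that call. A minimal sketch:

```scala
object ScopedFeature {
  type Closeable = { def close(): Unit }   // a structural type

  def closeQuietly(resource: Closeable): Unit = {
    import scala.language.reflectiveCalls  // enables reflective calls only in this method
    resource.close()
  }
}
```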
diff --git a/src/swing/scala/swing/ComboBox.scala b/src/swing/scala/swing/ComboBox.scala
index ce2b3ba6fb..a075d22a7e 100644
--- a/src/swing/scala/swing/ComboBox.scala
+++ b/src/swing/scala/swing/ComboBox.scala
@@ -11,6 +11,7 @@ package scala.swing
import event._
import javax.swing.{JList, JComponent, JComboBox, JTextField, ComboBoxModel, AbstractListModel, ListCellRenderer}
import java.awt.event.ActionListener
+import scala.language.implicitConversions
object ComboBox {
/**
diff --git a/src/swing/scala/swing/Component.scala b/src/swing/scala/swing/Component.scala
index b7dd856d09..3a47784562 100644
--- a/src/swing/scala/swing/Component.scala
+++ b/src/swing/scala/swing/Component.scala
@@ -83,9 +83,9 @@ abstract class Component extends UIElement {
* Used by certain layout managers, e.g., BoxLayout or OverlayLayout to
* align components relative to each other.
*/
- def xLayoutAlignment: Double = peer.getAlignmentX
+ def xLayoutAlignment: Double = peer.getAlignmentX.toDouble
def xLayoutAlignment_=(x: Double) = peer.setAlignmentX(x.toFloat)
- def yLayoutAlignment: Double = peer.getAlignmentY
+ def yLayoutAlignment: Double = peer.getAlignmentY.toDouble
def yLayoutAlignment_=(y: Double) = peer.setAlignmentY(y.toFloat)
def border: Border = peer.getBorder
diff --git a/src/swing/scala/swing/GridBagPanel.scala b/src/swing/scala/swing/GridBagPanel.scala
index 7d181af4d8..c58d398737 100644
--- a/src/swing/scala/swing/GridBagPanel.scala
+++ b/src/swing/scala/swing/GridBagPanel.scala
@@ -9,6 +9,7 @@
package scala.swing
import java.awt.{GridBagConstraints, GridBagLayout}
+import scala.language.implicitConversions
object GridBagPanel {
object Fill extends Enumeration {
diff --git a/src/swing/scala/swing/Orientable.scala b/src/swing/scala/swing/Orientable.scala
index a73bafb9d3..db7cf09f27 100644
--- a/src/swing/scala/swing/Orientable.scala
+++ b/src/swing/scala/swing/Orientable.scala
@@ -12,7 +12,10 @@ package scala.swing
object Orientable {
trait Wrapper extends Oriented.Wrapper with Orientable {
- def orientation_=(o: Orientation.Value) { peer.setOrientation(o.id) }
+ def orientation_=(o: Orientation.Value) {
+ import scala.language.reflectiveCalls
+ peer.setOrientation(o.id)
+ }
}
}
diff --git a/src/swing/scala/swing/Oriented.scala b/src/swing/scala/swing/Oriented.scala
index 7996d21898..72d7d574e2 100644
--- a/src/swing/scala/swing/Oriented.scala
+++ b/src/swing/scala/swing/Oriented.scala
@@ -22,6 +22,7 @@ object Oriented {
def getOrientation(): Int
def setOrientation(n: Int)
}
+ import scala.language.reflectiveCalls
def orientation: Orientation.Value = Orientation(peer.getOrientation)
}
}
diff --git a/src/swing/scala/swing/Slider.scala b/src/swing/scala/swing/Slider.scala
index e329c31a01..241876587b 100644
--- a/src/swing/scala/swing/Slider.scala
+++ b/src/swing/scala/swing/Slider.scala
@@ -53,7 +53,7 @@ class Slider extends Component with Orientable.Wrapper with Publisher {
def labels: scala.collection.Map[Int, Label] = {
val labelTable = peer.getLabelTable.asInstanceOf[java.util.Hashtable[Int, JLabel]]
- new scala.collection.JavaConversions.JMapWrapper(labelTable)
+ new scala.collection.convert.Wrappers.JMapWrapper(labelTable)
.mapValues(v => UIElement.cachedWrapper[Label](v))
}
def labels_=(l: scala.collection.Map[Int, Label]) {
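The patch above re-points the wrapper to its actual home, scala.collection.convert.Wrappers.JMapWrapper, instead of reaching it through JavaConversions. In application code the same wrapping is usually obtained via JavaConverters rather than by naming the wrapper class (a sketch, not the swing code itself):

```scala
import scala.collection.JavaConverters._

val jmap = new java.util.HashMap[Int, String]()
jmap.put(0, "min")
jmap.put(100, "max")

// asScala yields a mutable.Map view backed by the Java map.
val smap: scala.collection.mutable.Map[Int, String] = jmap.asScala
```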
diff --git a/src/swing/scala/swing/Swing.scala b/src/swing/scala/swing/Swing.scala
index cd5bbf2c4f..e2c8479354 100644
--- a/src/swing/scala/swing/Swing.scala
+++ b/src/swing/scala/swing/Swing.scala
@@ -14,6 +14,7 @@ import java.awt.event._
import javax.swing.event._
import javax.swing.border._
import javax.swing.{JComponent, Icon, BorderFactory, SwingUtilities}
+import scala.language.implicitConversions
/**
diff --git a/src/swing/scala/swing/package.scala b/src/swing/scala/swing/package.scala
index 45497665d7..d5095f021b 100644
--- a/src/swing/scala/swing/package.scala
+++ b/src/swing/scala/swing/package.scala
@@ -14,9 +14,6 @@ package object swing {
type Image = java.awt.Image
type Font = java.awt.Font
- implicit lazy val reflectiveCalls = scala.language.reflectiveCalls
- implicit lazy val implicitConversions = scala.language.implicitConversions
-
private[swing] def ifNull[A](o: Object, a: A): A = if(o eq null) a else o.asInstanceOf[A]
private[swing] def toOption[A](o: Object): Option[A] = if(o eq null) None else Some(o.asInstanceOf[A])
private[swing] def toAnyRef(x: Any): AnyRef = x.asInstanceOf[AnyRef]
diff --git a/starr.number b/starr.number
index 7e6e869eaf..89659fcbf3 100644
--- a/starr.number
+++ b/starr.number
@@ -1 +1 @@
-starr.version=2.10.1 \ No newline at end of file
+starr.version=2.11.0-M2 \ No newline at end of file
diff --git a/test/build-partest.xml b/test/build-partest.xml
new file mode 100755
index 0000000000..44502ffa61
--- /dev/null
+++ b/test/build-partest.xml
@@ -0,0 +1,24 @@
+<project name="partest" basedir=".">
+ <dirname property="partest.basedir" file="${ant.file.partest}"/>
+ <property file="${partest.basedir}/included.properties"/>
+
+ <macrodef name="testSuite">
+ <attribute name="dir" default="${partest.basedir}/test"/>
+ <attribute name="srcdir" default="files"/> <!-- TODO: make targets for `pending` and other subdirs -->
+ <attribute name="colors" default="${partest.colors}"/>
+ <attribute name="scalacOpts" default="${scalac.args.optimise}"/>
+ <attribute name="kinds" default="pos neg run jvm res scalap scalacheck specialized instrumented presentation"/>
+ <sequential>
+ <property name="partest.dir" value="@{dir}" />
+ <partest srcdir="@{srcdir}"
+ kinds="@{kinds}"
+ colors="@{colors}"
+ scalacOpts="@{scalacOpts}"
+ compilationpathref="partest.classpath">
+ <compilationpath>
+ <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
+ </compilationpath>
+ </partest>
+ </sequential>
+ </macrodef>
+</project>
diff --git a/test/files/continuations-run/basics.scala b/test/files/continuations-run/basics.scala
index b63710bc64..ed1782d77a 100755
--- a/test/files/continuations-run/basics.scala
+++ b/test/files/continuations-run/basics.scala
@@ -13,7 +13,7 @@ object Test {
2 * shift((k:Int => Int) => k(k(7)))
}
- def main(args: Array[String]) = {
+ def main(args: Array[String]) {
println(reset(m0()))
println(reset(m1()))
diff --git a/test/files/continuations-run/function1.scala b/test/files/continuations-run/function1.scala
index fbd413ed9d..ec7ebb66a5 100644
--- a/test/files/continuations-run/function1.scala
+++ b/test/files/continuations-run/function1.scala
@@ -5,7 +5,7 @@ import scala.util.continuations._
object Test {
- def main(args: Array[String]): Any = {
+ def main(args: Array[String]): Unit = {
val f = () => shift { k: (Int=>Int) => k(7) }
val g: () => Int @cps[Int] = f
@@ -13,4 +13,4 @@ object Test {
println(reset(g()))
}
-} \ No newline at end of file
+}
diff --git a/test/files/continuations-run/function4.scala b/test/files/continuations-run/function4.scala
index 2ccd0b4ff2..232b1ec3a6 100644
--- a/test/files/continuations-run/function4.scala
+++ b/test/files/continuations-run/function4.scala
@@ -5,11 +5,11 @@ import scala.util.continuations._
object Test {
- def main(args: Array[String]): Any = {
+ def main(args: Array[String]): Unit = {
val g: () => Int @cps[Int] = () => shift { k: (Int=>Int) => k(7) }
println(reset(g()))
}
-} \ No newline at end of file
+}
diff --git a/test/files/continuations-run/function5.scala b/test/files/continuations-run/function5.scala
index fe528e14e7..17ae08c98e 100644
--- a/test/files/continuations-run/function5.scala
+++ b/test/files/continuations-run/function5.scala
@@ -5,11 +5,11 @@ import scala.util.continuations._
object Test {
- def main(args: Array[String]): Any = {
+ def main(args: Array[String]): Unit = {
val g: () => Int @cps[Int] = () => 7
println(reset(g()))
}
-} \ No newline at end of file
+}
diff --git a/test/files/continuations-run/function6.scala b/test/files/continuations-run/function6.scala
index 54a6ffcc93..6b4c5adc41 100644
--- a/test/files/continuations-run/function6.scala
+++ b/test/files/continuations-run/function6.scala
@@ -5,7 +5,7 @@ import scala.util.continuations._
object Test {
- def main(args: Array[String]): Any = {
+ def main(args: Array[String]): Unit = {
val g: PartialFunction[Int, Int @cps[Int]] = { case x => 7 }
@@ -13,4 +13,4 @@ object Test {
}
-} \ No newline at end of file
+}
diff --git a/test/files/continuations-run/ifelse0.scala b/test/files/continuations-run/ifelse0.scala
index 2facab4b98..2c40f64959 100644
--- a/test/files/continuations-run/ifelse0.scala
+++ b/test/files/continuations-run/ifelse0.scala
@@ -10,9 +10,9 @@ object Test {
else
shift { k: (Int=>Int) => k(x) }
- def main(args: Array[String]): Any = {
+ def main(args: Array[String]): Unit = {
println(reset(1 + test(7)))
println(reset(1 + test(8)))
}
-} \ No newline at end of file
+}
diff --git a/test/files/continuations-run/ifelse1.scala b/test/files/continuations-run/ifelse1.scala
index c624b84b75..6f52a8c1e3 100644
--- a/test/files/continuations-run/ifelse1.scala
+++ b/test/files/continuations-run/ifelse1.scala
@@ -15,11 +15,11 @@ object Test {
else
shift { k: (Int=>Int) => k(k(k(x))) }
- def main(args: Array[String]): Any = {
+ def main(args: Array[String]): Unit = {
println(reset(1 + test1(7)))
println(reset(1 + test1(8)))
println(reset(1 + test2(7)))
println(reset(1 + test2(8)))
}
-} \ No newline at end of file
+}
diff --git a/test/files/continuations-run/ifelse2.scala b/test/files/continuations-run/ifelse2.scala
index 506acc4d00..f7f8328703 100644
--- a/test/files/continuations-run/ifelse2.scala
+++ b/test/files/continuations-run/ifelse2.scala
@@ -8,9 +8,9 @@ object Test {
def test(x:Int) = if (x <= 7)
shift { k: (Unit=>Unit) => println("abort") }
- def main(args: Array[String]): Any = {
+ def main(args: Array[String]): Unit = {
println(reset{ test(7); println("alive") })
println(reset{ test(8); println("alive") })
}
-} \ No newline at end of file
+}
diff --git a/test/files/continuations-run/ifelse3.scala b/test/files/continuations-run/ifelse3.scala
index 54566a421c..7b85770be9 100644
--- a/test/files/continuations-run/ifelse3.scala
+++ b/test/files/continuations-run/ifelse3.scala
@@ -13,9 +13,9 @@ object Test {
x + 1
- def main(args: Array[String]): Any = {
+ def main(args: Array[String]): Unit = {
println(reset(test(7)))
println(reset(test(8)))
}
-} \ No newline at end of file
+}
diff --git a/test/files/continuations-run/ifelse4.scala b/test/files/continuations-run/ifelse4.scala
index 8360375283..a0dbcafc57 100644
--- a/test/files/continuations-run/ifelse4.scala
+++ b/test/files/continuations-run/ifelse4.scala
@@ -22,10 +22,10 @@ object Test {
if (sh(x1)==45) x1 else sh(x1)
}
- def main(args: Array[String]): Any = {
+ def main(args: Array[String]): Unit = {
println(reset(1 + testA(7)))
println(reset(1 + testB(9)))
println(reset(1 + testC(9)))
println(reset(1 + testD(7)))
}
-} \ No newline at end of file
+}
diff --git a/test/files/continuations-run/infer1.scala b/test/files/continuations-run/infer1.scala
index 10822508e7..1196bd40a8 100644
--- a/test/files/continuations-run/infer1.scala
+++ b/test/files/continuations-run/infer1.scala
@@ -17,7 +17,7 @@ object Test {
def util() = shift { k: (String => String) => "7" }
- def main(args: Array[String]): Any = {
+ def main(args: Array[String]): Unit = {
test { shift { k: (Int => String) => 9 } }
test { shift { k: (Int => String) => 9 }; 2 }
// test { shift { k: (Int => String) => 9 }; util() } <-- doesn't work
@@ -30,4 +30,4 @@ object Test {
}
-} \ No newline at end of file
+}
diff --git a/test/files/continuations-run/match0.scala b/test/files/continuations-run/match0.scala
index b65d343c07..63b73bf21e 100644
--- a/test/files/continuations-run/match0.scala
+++ b/test/files/continuations-run/match0.scala
@@ -10,9 +10,9 @@ object Test {
case 8 => shift { k: (Int=>Int) => k(x) }
}
- def main(args: Array[String]): Any = {
+ def main(args: Array[String]): Unit = {
println(reset(1 + test(7)))
println(reset(1 + test(8)))
}
-} \ No newline at end of file
+}
diff --git a/test/files/continuations-run/match1.scala b/test/files/continuations-run/match1.scala
index 20671f26ba..62d639ee72 100644
--- a/test/files/continuations-run/match1.scala
+++ b/test/files/continuations-run/match1.scala
@@ -10,9 +10,9 @@ object Test {
case _ => x
}
- def main(args: Array[String]): Any = {
+ def main(args: Array[String]): Unit = {
println(reset(1 + test(7)))
println(reset(1 + test(8)))
}
-} \ No newline at end of file
+}
diff --git a/test/files/continuations-run/patvirt.scala b/test/files/continuations-run/patvirt.scala
index 5b4d312f20..7d769d46fe 100644
--- a/test/files/continuations-run/patvirt.scala
+++ b/test/files/continuations-run/patvirt.scala
@@ -25,7 +25,7 @@ object Test {
o7.get
}
- def main(args: Array[String]): Any = {
+ def main(args: Array[String]): Unit = {
println(reset(1 + test(7)))
println(reset(1 + test(8)))
}
diff --git a/test/files/continuations-run/t1820.scala b/test/files/continuations-run/t1820.scala
index 893ddab6d1..7de6ccabef 100644
--- a/test/files/continuations-run/t1820.scala
+++ b/test/files/continuations-run/t1820.scala
@@ -10,5 +10,5 @@ object Test {
if (b) shifted
}
}
- def main(args: Array[String]) = test1(true)
-} \ No newline at end of file
+ def main(args: Array[String]): Unit = test1(true)
+}
diff --git a/test/files/continuations-run/t1821.scala b/test/files/continuations-run/t1821.scala
index 0d5fb553be..55bf7b6f12 100644
--- a/test/files/continuations-run/t1821.scala
+++ b/test/files/continuations-run/t1821.scala
@@ -11,10 +11,10 @@ object Test {
def test3[A](x: A): A @suspendable = x match { case x => x }
def test4[A](x: List[A]): A @suspendable = x match { case List(x) => x }
- def main(args: Array[String]) = {
+ def main(args: Array[String]) {
println(reset(test1()))
println(reset(test2(List(()))))
println(reset(test3()))
println(reset(test4(List(()))))
}
-} \ No newline at end of file
+}
diff --git a/test/files/continuations-run/t3199b.scala b/test/files/continuations-run/t3199b.scala
index 2122c963ac..fe1ecafe72 100644
--- a/test/files/continuations-run/t3199b.scala
+++ b/test/files/continuations-run/t3199b.scala
@@ -4,8 +4,8 @@ object Test {
java.util.Arrays.asList(Array(1,2,3):_*)
}
- def main(args: Array[String]) = {
+ def main(args: Array[String]) {
println(test())
}
-} \ No newline at end of file
+}
diff --git a/test/files/continuations-run/t3223.scala b/test/files/continuations-run/t3223.scala
index efed1ff581..fda61f41ea 100644
--- a/test/files/continuations-run/t3223.scala
+++ b/test/files/continuations-run/t3223.scala
@@ -6,7 +6,7 @@ object Test {
throw new Exception
shiftUnit0[Int,Int](7)
} catch {
- case ex =>
+ case ex: Throwable =>
val g = (a:Int)=>a
9
}
@@ -16,4 +16,4 @@ object Test {
println(reset(foo(0)))
}
-} \ No newline at end of file
+}
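These tests annotate the previously bare case ex => handlers with : Throwable, which keeps the original catch-everything behaviour while silencing the warning the compiler (since around 2.10) emits for untyped catch-all patterns. In non-test code the more selective idiom is usually NonFatal (a sketch, not part of this patch):

```scala
import scala.util.control.NonFatal

def attempt[A](body: => A): Option[A] =
  try Some(body)
  catch {
    case NonFatal(ex) => None   // lets fatal errors and control throwables propagate
  }
```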
diff --git a/test/files/continuations-run/t3225.scala b/test/files/continuations-run/t3225.scala
index 5b6259c43f..bc6d635c88 100644
--- a/test/files/continuations-run/t3225.scala
+++ b/test/files/continuations-run/t3225.scala
@@ -48,7 +48,7 @@ object Test {
// TODO: check whether this also applies to a::shift { k => ... }
- def main(args: Array[String]) = {
+ def main(args: Array[String]) {
testMono()
testPoly()
}
diff --git a/test/files/continuations-run/t5314.check b/test/files/continuations-run/t5314.check
index 4b35d8e6d0..61f5b25dc7 100644
--- a/test/files/continuations-run/t5314.check
+++ b/test/files/continuations-run/t5314.check
@@ -1,3 +1,9 @@
+t5314.scala:34: warning: expression 7 is cps-transformed unexpectedly
+ def bar3(x:Int): Int @cps[Int] = { foo2(x); if (x == 7) return 7 else return foo2(x) }
+ ^
+t5314.scala:35: warning: expression 7 is cps-transformed unexpectedly
+ def bar4(x:Int): Int @cps[Int] = { foo2(x); if (x == 7) return 7 else foo2(x) }
+ ^
7
7
7
diff --git a/test/files/continuations-run/t5472.check b/test/files/continuations-run/t5472.check
index d42e80c18e..3192c746eb 100644
--- a/test/files/continuations-run/t5472.check
+++ b/test/files/continuations-run/t5472.check
@@ -1 +1,15 @@
+t5472.scala:12: warning: expression case4(){
+ if (x1.ne(null))
+ matchEnd3(true)
+ else
+ case5()
+} is cps-transformed unexpectedly
+ (location, accessors) <- new ContinuationizedParallelIterable(map)
+ ^
+t5472.scala:12: warning: expression matchEnd3(x: Int){
+ x
+} is cps-transformed unexpectedly
+ (location, accessors) <- new ContinuationizedParallelIterable(map)
+ ^
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
List(23, 23)
diff --git a/test/files/continuations-run/t5472.scala b/test/files/continuations-run/t5472.scala
index 3e3c76b32a..060d267544 100644
--- a/test/files/continuations-run/t5472.scala
+++ b/test/files/continuations-run/t5472.scala
@@ -2,6 +2,7 @@ import scala.annotation._
import scala.util.continuations._
import java.util.concurrent.atomic._
+@deprecated("Suppress warnings", since="2.11")
object Test {
def main(args: Array[String]) {
val map = Map("foo" -> 1, "bar" -> 2)
@@ -15,7 +16,6 @@ object Test {
println(mapped.toList)
}
}
-}
final class ContinuationizedParallelIterable[+A](protected val underline: Iterable[A]) {
def toList = underline.toList.sortBy(_.toString)
@@ -76,7 +76,7 @@ final class ContinuationizedParallelIterable[+A](protected val underline: Iterab
new AtomicInteger(underline.size) with ((ContinuationizedParallelIterable[B] => Unit) => Unit) {
override final def apply(continue: ContinuationizedParallelIterable[B] => Unit) {
val results = new Array[B](super.get)
- for ((element, i) <- underline.view zipWithIndex) {
+ for ((element, i) <- underline.view.zipWithIndex) {
reset {
val result = f(element)
results(i) = result
@@ -88,3 +88,4 @@ final class ContinuationizedParallelIterable[+A](protected val underline: Iterab
}
})
}
+}
diff --git a/test/files/continuations-run/t5506.scala b/test/files/continuations-run/t5506.scala
index 2b5c1118f7..4b3703c725 100644
--- a/test/files/continuations-run/t5506.scala
+++ b/test/files/continuations-run/t5506.scala
@@ -42,7 +42,7 @@ z
}
-def main(args: Array[String]) = {
+def main(args: Array[String]) {
reset {
println(fp10)
println(fp11)
diff --git a/test/files/continuations-run/t5538.check b/test/files/continuations-run/t5538.check
index 457721d5e0..d9d873663d 100644
--- a/test/files/continuations-run/t5538.check
+++ b/test/files/continuations-run/t5538.check
@@ -1 +1,2 @@
+warning: there were 1 feature warning(s); re-run with -feature for details
Future(Future(Future(Future(Future(List(1, 2, 3, 4, 5))))))
diff --git a/test/files/continuations-run/t5538.scala b/test/files/continuations-run/t5538.scala
index 42f8163caf..6713cecf2f 100644
--- a/test/files/continuations-run/t5538.scala
+++ b/test/files/continuations-run/t5538.scala
@@ -1,6 +1,8 @@
import scala.util.continuations._
import scala.collection.generic.CanBuildFrom
+import scala.language.{ implicitConversions }
+
object Test {
class ExecutionContext
@@ -43,8 +45,8 @@ object Test {
}
- def main(args: Array[String]) = {
+ def main(args: Array[String]) {
val p = new PromiseStream[Int]
println(Future.flow(p << (Future(1), Future(2), Future(3), Future(4), Future(5))))
}
-} \ No newline at end of file
+}
diff --git a/test/files/continuations-run/trycatch0.scala b/test/files/continuations-run/trycatch0.scala
index ec39863f3f..feb7f7182b 100644
--- a/test/files/continuations-run/trycatch0.scala
+++ b/test/files/continuations-run/trycatch0.scala
@@ -7,14 +7,14 @@ object Test {
def foo = try {
shift((k: Int=>Int) => k(7))
} catch {
- case ex =>
+ case ex: Throwable =>
9
}
def bar = try {
7
} catch {
- case ex =>
+ case ex: Throwable =>
shiftUnit0[Int,Int](9)
}
@@ -22,4 +22,4 @@ object Test {
println(reset { foo + 3 })
println(reset { bar + 3 })
}
-} \ No newline at end of file
+}
diff --git a/test/files/continuations-run/trycatch1.check b/test/files/continuations-run/trycatch1.check
index a028d2b1e1..061ecdcd54 100644
--- a/test/files/continuations-run/trycatch1.check
+++ b/test/files/continuations-run/trycatch1.check
@@ -1,4 +1,10 @@
+trycatch1.scala:34: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 7
+ ^
+trycatch1.scala:18: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ shift((k: Int=>Int) => k(7))
+ ^
+12
12
12
12
-12 \ No newline at end of file
diff --git a/test/files/continuations-run/trycatch1.scala b/test/files/continuations-run/trycatch1.scala
index 10dfd30bb2..b00a87d059 100644
--- a/test/files/continuations-run/trycatch1.scala
+++ b/test/files/continuations-run/trycatch1.scala
@@ -10,7 +10,7 @@ object Test {
fatal
shift((k: Int=>Int) => k(7))
} catch {
- case ex =>
+ case ex: Throwable =>
9
}
@@ -18,7 +18,7 @@ object Test {
shift((k: Int=>Int) => k(7))
fatal
} catch {
- case ex =>
+ case ex: Throwable =>
9
}
@@ -26,7 +26,7 @@ object Test {
fatal
7
} catch {
- case ex =>
+ case ex: Throwable =>
shiftUnit0[Int,Int](9) // regular shift causes no-symbol doesn't have owner
}
@@ -34,7 +34,7 @@ object Test {
7
fatal
} catch {
- case ex =>
+ case ex: Throwable =>
shiftUnit0[Int,Int](9) // regular shift causes no-symbol doesn't have owner
}
@@ -45,4 +45,4 @@ object Test {
println(reset { bar2 + 3 })
}
-} \ No newline at end of file
+}
diff --git a/test/files/continuations-run/while0.scala b/test/files/continuations-run/while0.scala
index 46005a4a77..542408fb24 100644
--- a/test/files/continuations-run/while0.scala
+++ b/test/files/continuations-run/while0.scala
@@ -15,8 +15,8 @@ object Test {
println(x)
}
- def main(args: Array[String]): Any = {
+ def main(args: Array[String]): Unit = {
reset(test())
}
-}
\ No newline at end of file
+}
diff --git a/test/files/continuations-run/while1.scala b/test/files/continuations-run/while1.scala
index fd41ab36ee..351d52c27a 100644
--- a/test/files/continuations-run/while1.scala
+++ b/test/files/continuations-run/while1.scala
@@ -15,8 +15,8 @@ object Test {
println(x)
}
- def main(args: Array[String]): Any = {
+ def main(args: Array[String]): Unit = {
reset(test())
}
-}
\ No newline at end of file
+}
diff --git a/test/files/continuations-run/while2.scala b/test/files/continuations-run/while2.scala
index 63f9cb99fe..f4991dea61 100644
--- a/test/files/continuations-run/while2.scala
+++ b/test/files/continuations-run/while2.scala
@@ -16,8 +16,8 @@ object Test {
println(x)
}
- def main(args: Array[String]): Any = {
+ def main(args: Array[String]): Unit = {
reset(test())
}
-}
\ No newline at end of file
+}
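
Aside: the while0/while1/while2 hunks pin the entry point's result type. A minimal, purely illustrative sketch of the expected shape: main is declared to return Unit rather than Any or an inferred type.

object MainDemo {
  // Unit result type: the runner invokes main for its side effects only.
  def main(args: Array[String]): Unit = {
    println("done")
  }
}
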
diff --git a/test/files/filters b/test/files/filters
new file mode 100644
index 0000000000..9a9b439784
--- /dev/null
+++ b/test/files/filters
@@ -0,0 +1,3 @@
+#
+#Java HotSpot(TM) 64-Bit Server VM warning: Failed to reserve shared memory (errno = 28).
+Java HotSpot\(TM\) .* warning:
diff --git a/test/files/jvm/actor-exceptions.scala b/test/files/jvm/actor-exceptions.scala
index 3ee4db9ed2..bdd983a0e8 100644
--- a/test/files/jvm/actor-exceptions.scala
+++ b/test/files/jvm/actor-exceptions.scala
@@ -1,4 +1,7 @@
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
import scala.actors.{Actor, Exit}
import Actor._
@@ -58,7 +61,6 @@ object Slave extends Actor {
case object A
-object Test {
def main(args: Array[String]) {
Master.start()
}
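
Aside: most of the actor tests below are rewrapped in the same way. A minimal sketch of the suppression trick, with a stand-in Legacy object assumed in place of the deprecated scala.actors API: marking the enclosing object @deprecated silences deprecation warnings for everything referenced inside it, which is why the imports also move inside Test.

@deprecated("old API, kept only for this example", since = "2.11")
object Legacy {
  def ping(): String = "pong"
}

@deprecated("Suppress warnings", since = "2.11")
object DeprecationDemo {
  def main(args: Array[String]): Unit = {
    // No deprecation warning here: the enclosing object is itself deprecated.
    println(Legacy.ping())
  }
}
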
diff --git a/test/files/jvm/actor-executor.scala b/test/files/jvm/actor-executor.scala
index b1f9caebdd..0fc28b4d85 100644
--- a/test/files/jvm/actor-executor.scala
+++ b/test/files/jvm/actor-executor.scala
@@ -1,3 +1,7 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
import java.util.concurrent.Executors
import scala.actors.{Actor, SchedulerAdapter}
import Actor._
@@ -50,7 +54,6 @@ object Two extends AdaptedActor {
}
}
-object Test {
val executor =
Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors())
diff --git a/test/files/jvm/actor-executor2.scala b/test/files/jvm/actor-executor2.scala
index f8fcaef69f..5badf2ae7e 100644
--- a/test/files/jvm/actor-executor2.scala
+++ b/test/files/jvm/actor-executor2.scala
@@ -1,3 +1,8 @@
+
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
import scala.actors.{Actor, SchedulerAdapter, Exit}
import Actor._
import java.util.concurrent.{Executors, RejectedExecutionException}
@@ -48,7 +53,6 @@ trait AdaptedActor extends Actor {
Test.scheduler
}
-object Test {
val NUM_MSG = 100000
val executor =
diff --git a/test/files/jvm/actor-executor3.scala b/test/files/jvm/actor-executor3.scala
index 4fde2c6c5f..f8b57d84b3 100644
--- a/test/files/jvm/actor-executor3.scala
+++ b/test/files/jvm/actor-executor3.scala
@@ -1,3 +1,8 @@
+
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
import scala.actors.Actor
import scala.actors.scheduler.ExecutorScheduler
import java.util.concurrent.Executors
@@ -48,7 +53,6 @@ trait AdaptedActor extends Actor {
Test.scheduler
}
-object Test {
val NUM_MSG = 100000
val executor =
diff --git a/test/files/jvm/actor-getstate.scala b/test/files/jvm/actor-getstate.scala
index a6e15a8721..425efbe5e6 100644
--- a/test/files/jvm/actor-getstate.scala
+++ b/test/files/jvm/actor-getstate.scala
@@ -1,7 +1,9 @@
-import scala.actors.{Reactor, Actor, TIMEOUT}
-import Actor._
+
+@deprecated("Suppress warnings", since="2.11")
object Test {
+ import scala.actors.{Reactor, Actor, TIMEOUT}
+ import Actor._
def assert(cond: => Boolean, hint: String) {
if (!cond)
diff --git a/test/files/jvm/actor-link-getstate.scala b/test/files/jvm/actor-link-getstate.scala
index c24daf2eff..d8b8ada1e6 100644
--- a/test/files/jvm/actor-link-getstate.scala
+++ b/test/files/jvm/actor-link-getstate.scala
@@ -1,5 +1,9 @@
-import scala.actors.{Actor, Exit}
-import scala.actors.Actor._
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
+ import scala.actors.{Actor, Exit}
+ import scala.actors.Actor._
case class MyException(text: String) extends Exception(text) {
override def fillInStackTrace() = this
@@ -39,7 +43,6 @@ object Master extends Actor {
}
}
-object Test {
def main(args: Array[String]) {
actor {
diff --git a/test/files/jvm/actor-looping.scala b/test/files/jvm/actor-looping.scala
index 475d4754ce..7bc6f1e5c5 100644
--- a/test/files/jvm/actor-looping.scala
+++ b/test/files/jvm/actor-looping.scala
@@ -1,7 +1,8 @@
-import scala.actors.Actor._
+@deprecated("Suppress warnings", since="2.11")
object Test {
+ import scala.actors.Actor._
case object A
def main(args: Array[String]) {
diff --git a/test/files/jvm/actor-normal-exit.scala b/test/files/jvm/actor-normal-exit.scala
index 20863d5bb0..90495866e2 100644
--- a/test/files/jvm/actor-normal-exit.scala
+++ b/test/files/jvm/actor-normal-exit.scala
@@ -1,7 +1,9 @@
-import scala.actors.{Actor, Exit}
+
+@deprecated("Suppress warnings", since="2.11")
object Test {
+ import scala.actors.{Actor, Exit}
object Master extends Actor {
trapExit = true
def act() {
diff --git a/test/files/jvm/actor-receivewithin.scala b/test/files/jvm/actor-receivewithin.scala
index a5c87c2722..5982462502 100644
--- a/test/files/jvm/actor-receivewithin.scala
+++ b/test/files/jvm/actor-receivewithin.scala
@@ -1,3 +1,7 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
import scala.actors.{Actor, TIMEOUT}
object A extends Actor {
@@ -62,7 +66,6 @@ object B extends Actor {
}
}
-object Test {
def main(args:Array[String]) {
B.start()
}
diff --git a/test/files/jvm/actor-sync-send-timeout.scala b/test/files/jvm/actor-sync-send-timeout.scala
index 21e624bd0a..42e0be4a8c 100644
--- a/test/files/jvm/actor-sync-send-timeout.scala
+++ b/test/files/jvm/actor-sync-send-timeout.scala
@@ -1,8 +1,9 @@
+@deprecated("Suppress warnings", since="2.11")
+object Test {
import scala.actors.Actor
/* This test is a regression test for SI-4759.
*/
-object Test {
val Runs = 5
def main(args: Array[String]) = {
@@ -14,7 +15,6 @@ object Test {
}
//println("done sending to A1")
}
-}
object A2 extends Actor {
this.start()
@@ -45,3 +45,4 @@ object A1 extends Actor {
}
}
}
+}
diff --git a/test/files/jvm/actor-termination.scala b/test/files/jvm/actor-termination.scala
index d8e44a2797..4a6bf92d48 100644
--- a/test/files/jvm/actor-termination.scala
+++ b/test/files/jvm/actor-termination.scala
@@ -1,8 +1,9 @@
-import scala.actors.Actor
/* Test that an actor that hasn't finished prevents termination */
+@deprecated("Suppress warnings", since="2.11")
object Test {
+ import scala.actors.Actor
def main(args: Array[String]) {
Actor.actor {
try {
diff --git a/test/files/jvm/actor-uncaught-exception.scala b/test/files/jvm/actor-uncaught-exception.scala
index 5ae66de640..5285b7519f 100644
--- a/test/files/jvm/actor-uncaught-exception.scala
+++ b/test/files/jvm/actor-uncaught-exception.scala
@@ -1,10 +1,11 @@
+@deprecated("Suppress warnings", since="2.11")
+object Test {
import scala.actors.{Actor, Exit}
class MyException(msg: String) extends Exception(msg) {
override def fillInStackTrace() = this
}
-object Test {
case object StartError extends Actor {
def act() {
diff --git a/test/files/jvm/actor-uncaught-exception2.check b/test/files/jvm/actor-uncaught-exception2.check
index 870a5d32f9..a54f374aed 100644
--- a/test/files/jvm/actor-uncaught-exception2.check
+++ b/test/files/jvm/actor-uncaught-exception2.check
@@ -1,2 +1,2 @@
-UncaughtException(StartError,None,None,MyException: I don't want to run!)
-UncaughtException(MessageError,Some('ping),Some(Supervisor),MyException: No message for me!)
+UncaughtException(StartError,None,None,Test$MyException: I don't want to run!)
+UncaughtException(MessageError,Some('ping),Some(Supervisor),Test$MyException: No message for me!)
diff --git a/test/files/jvm/actor-uncaught-exception2.scala b/test/files/jvm/actor-uncaught-exception2.scala
index 0364cbeb03..f56217c187 100644
--- a/test/files/jvm/actor-uncaught-exception2.scala
+++ b/test/files/jvm/actor-uncaught-exception2.scala
@@ -1,11 +1,11 @@
+@deprecated("Suppress warnings", since="2.11")
+object Test {
import scala.actors.{Actor, Exit, Debug}
class MyException(msg: String) extends Exception(msg) {
override def fillInStackTrace() = this
}
-object Test {
-
case object StartError extends Actor {
def act() {
try {
diff --git a/test/files/jvm/annotations.scala b/test/files/jvm/annotations.scala
index 77a45fae89..c42eceef4c 100644
--- a/test/files/jvm/annotations.scala
+++ b/test/files/jvm/annotations.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ higherKinds, reflectiveCalls }
+
object Test1 {
class Foo {
@remote
diff --git a/test/files/jvm/daemon-actor-termination.scala b/test/files/jvm/daemon-actor-termination.scala
index 6ddfc3139d..40acd8583e 100644
--- a/test/files/jvm/daemon-actor-termination.scala
+++ b/test/files/jvm/daemon-actor-termination.scala
@@ -1,8 +1,10 @@
-import scala.actors.{Actor, DaemonActor}
/* Test that a daemon Actor that hasn't finished does not prevent termination */
+
+@deprecated("Suppress warnings", since="2.11")
object Test {
+ import scala.actors.{Actor, DaemonActor}
class MyDaemon extends DaemonActor {
def act() {
try {
diff --git a/test/files/jvm/deprecation.check b/test/files/jvm/deprecation.check
new file mode 100644
index 0000000000..e263e4909d
--- /dev/null
+++ b/test/files/jvm/deprecation.check
@@ -0,0 +1,3 @@
+warning: there were 5 deprecation warning(s); re-run with -deprecation for details
+Note: deprecation/Use_2.java uses or overrides a deprecated API.
+Note: Recompile with -Xlint:deprecation for details.
diff --git a/test/files/jvm/duration-tck.scala b/test/files/jvm/duration-tck.scala
index b2573448c7..3bc8a2c100 100644
--- a/test/files/jvm/duration-tck.scala
+++ b/test/files/jvm/duration-tck.scala
@@ -6,6 +6,8 @@ import scala.concurrent.duration._
import scala.reflect._
import scala.tools.partest.TestUtil.intercept
+import scala.language.{ postfixOps }
+
object Test extends App {
implicit class Assert(val left: Any) extends AnyVal {
diff --git a/test/files/jvm/future-alarm.scala b/test/files/jvm/future-alarm.scala
index 8ee902b5ea..3e71fa681c 100644
--- a/test/files/jvm/future-alarm.scala
+++ b/test/files/jvm/future-alarm.scala
@@ -1,6 +1,8 @@
-import scala.actors.Futures
+
+@deprecated("Suppress warnings", since="2.11")
object Test {
+ import scala.actors.Futures
def main(args: Array[String]) {
try {
for (i <- 1 to 100000) {
diff --git a/test/files/jvm/future-awaitall-zero.scala b/test/files/jvm/future-awaitall-zero.scala
index cd6ba172cc..56f4bab16f 100644
--- a/test/files/jvm/future-awaitall-zero.scala
+++ b/test/files/jvm/future-awaitall-zero.scala
@@ -1,7 +1,9 @@
-import scala.actors.Futures._
-import scala.actors.Actor._
+
+@deprecated("Suppress warnings", since="2.11")
object Test {
+ import scala.actors.Futures._
+ import scala.actors.Actor._
def main(args: Array[String]) {
try {
val ft1 = future { reactWithin(10000) {
diff --git a/test/files/jvm/future-spec/FutureTests.scala b/test/files/jvm/future-spec/FutureTests.scala
index 01c9cf82ba..7deb4929d4 100644
--- a/test/files/jvm/future-spec/FutureTests.scala
+++ b/test/files/jvm/future-spec/FutureTests.scala
@@ -71,6 +71,25 @@ object FutureTests extends MinimalScalaTest {
}
}
+ "The Future companion object" should {
+ "call ExecutionContext.prepare on apply" in {
+ val p = Promise[Boolean]()
+ val ec = new ExecutionContext {
+ val delegate = ExecutionContext.global
+ override def prepare(): ExecutionContext = {
+ p.success(true)
+ delegate.prepare
+ }
+ override def execute(r: Runnable) = delegate.execute(r)
+ override def reportFailure(t: Throwable): Unit = delegate.reportFailure(t)
+ }
+
+ val f = Future("foo")(ec)
+ Await.result(f, defaultTimeout) mustBe ("foo")
+ Await.result(p.future, defaultTimeout) mustBe (true)
+ }
+ }
+
"The default ExecutionContext" should {
"report uncaught exceptions" in {
val p = Promise[Throwable]()
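
Aside: a self-contained sketch of the contract the new test exercises, namely that Future.apply should call ExecutionContext.prepare on the context it is given. The names and the timeout below are illustrative.

import scala.concurrent.{ Await, ExecutionContext, Future, Promise }
import scala.concurrent.duration._

object PrepareDemo {
  def main(args: Array[String]): Unit = {
    val prepared = Promise[Boolean]()
    val ec: ExecutionContext = new ExecutionContext {
      private val delegate = ExecutionContext.global
      // Record that prepare() was invoked, then defer to the global context.
      override def prepare(): ExecutionContext = { prepared.trySuccess(true); delegate.prepare() }
      def execute(runnable: Runnable): Unit = delegate.execute(runnable)
      def reportFailure(t: Throwable): Unit = delegate.reportFailure(t)
    }
    val f = Future("foo")(ec)
    println(Await.result(f, 2.seconds))                // foo
    println(Await.result(prepared.future, 2.seconds))  // true
  }
}
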
diff --git a/test/files/jvm/future-spec/main.scala b/test/files/jvm/future-spec/main.scala
index 90048ccda0..e000431dda 100644
--- a/test/files/jvm/future-spec/main.scala
+++ b/test/files/jvm/future-spec/main.scala
@@ -17,6 +17,12 @@ object Test {
}
+trait Features {
+ implicit def implicitously = scala.language.implicitConversions
+ implicit def reflectively = scala.language.reflectiveCalls
+ implicit def postulously = scala.language.postfixOps
+}
+
trait Output {
val buffer = new StringBuilder
@@ -27,7 +33,7 @@ trait Output {
}
-trait MinimalScalaTest extends Output {
+trait MinimalScalaTest extends Output with Features {
val throwables = mutable.ArrayBuffer[Throwable]()
@@ -70,7 +76,7 @@ trait MinimalScalaTest extends Output {
throw new Exception("Exception of type %s was not thrown".format(manifest[T]))
} catch {
case t: Throwable =>
- if (manifest[T].erasure != t.getClass) throw t
+ if (manifest[T].runtimeClass != t.getClass) throw t
else t.asInstanceOf[T]
}
}
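
Aside: a standalone sketch of the intercept-style check touched above, written with ClassTag rather than Manifest; runtimeClass is the replacement for the deprecated erasure. Identifiers are illustrative.

import scala.reflect.ClassTag

object InterceptDemo {
  def intercept[T <: Throwable: ClassTag](body: => Any): T =
    try { body; sys.error("expected exception was not thrown") }
    catch {
      case t: Throwable =>
        // Same shape as the test helper: rethrow anything of the wrong class.
        if (implicitly[ClassTag[T]].runtimeClass != t.getClass) throw t
        else t.asInstanceOf[T]
    }

  def main(args: Array[String]): Unit = {
    val e = intercept[NumberFormatException]("foo".toInt)
    println(e.getClass.getName)   // java.lang.NumberFormatException
  }
}
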
diff --git a/test/files/jvm/future-termination.scala b/test/files/jvm/future-termination.scala
index f51642cb7a..668b3f74ba 100644
--- a/test/files/jvm/future-termination.scala
+++ b/test/files/jvm/future-termination.scala
@@ -1,8 +1,10 @@
-import scala.actors.Futures
/* Test that unevaluated futures do not prevent program termination */
+
+@deprecated("Suppress warnings", since="2.11")
object Test {
+ import scala.actors.Futures
def main(args: Array[String]) {
try {
val meaningOfLife = Futures.future {
diff --git a/test/files/jvm/reactor-exceptionOnSend.scala b/test/files/jvm/reactor-exceptionOnSend.scala
index 3d9a042131..6d79fc9d13 100644
--- a/test/files/jvm/reactor-exceptionOnSend.scala
+++ b/test/files/jvm/reactor-exceptionOnSend.scala
@@ -1,3 +1,7 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
import scala.actors.Reactor
import scala.actors.Actor._
@@ -48,7 +52,6 @@ object B extends Reactor[Any] {
}
}
-object Test {
def main(args: Array[String]) {
B.start()
}
diff --git a/test/files/jvm/reactor-producer-consumer.scala b/test/files/jvm/reactor-producer-consumer.scala
index 8a6b17c3ad..ec34febe01 100644
--- a/test/files/jvm/reactor-producer-consumer.scala
+++ b/test/files/jvm/reactor-producer-consumer.scala
@@ -1,6 +1,8 @@
-import scala.actors.Reactor
+
+@deprecated("Suppress warnings", since="2.11")
object Test {
+ import scala.actors.Reactor
case class Stop()
case class Get(from: Reactor[Any])
case class Put(x: Int)
diff --git a/test/files/jvm/reactor.scala b/test/files/jvm/reactor.scala
index dbc9a6bdda..91ded27f07 100644
--- a/test/files/jvm/reactor.scala
+++ b/test/files/jvm/reactor.scala
@@ -1,3 +1,11 @@
+/**
+ * Ping pong example for Reactor.
+ *
+ * @author Philipp Haller
+ */
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
import scala.actors.Reactor
@@ -5,19 +13,12 @@ case class Ping(from: Reactor[Any])
case object Pong
case object Stop
-/**
- * Ping pong example for Reactor.
- *
- * @author Philipp Haller
- */
-object Test {
def main(args: Array[String]) {
val pong = new PongActor
val ping = new PingActor(100000, pong)
ping.start
pong.start
}
-}
class PingActor(count: Int, pong: Reactor[Any]) extends Reactor[Any] {
def act() {
@@ -68,3 +69,4 @@ class PongActor extends Reactor[Any] {
}
}
}
+}
diff --git a/test/files/jvm/replyablereactor.scala b/test/files/jvm/replyablereactor.scala
index e1fabc98da..4c4e13d9ab 100644
--- a/test/files/jvm/replyablereactor.scala
+++ b/test/files/jvm/replyablereactor.scala
@@ -1,3 +1,7 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
import scala.actors.ReplyReactor
class MyActor extends ReplyReactor {
@@ -18,7 +22,6 @@ class MyActor extends ReplyReactor {
}
}
-object Test {
def main(args: Array[String]) {
val a = new MyActor
a.start()
diff --git a/test/files/jvm/replyablereactor2.scala b/test/files/jvm/replyablereactor2.scala
index da9e0e269e..21f33cce56 100644
--- a/test/files/jvm/replyablereactor2.scala
+++ b/test/files/jvm/replyablereactor2.scala
@@ -1,3 +1,7 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
import scala.actors._
import scala.actors.Actor._
@@ -19,7 +23,6 @@ class MyActor extends ReplyReactor {
}
}
-object Test {
def main(args: Array[String]) {
val a = new MyActor
a.start()
diff --git a/test/files/jvm/replyablereactor3.scala b/test/files/jvm/replyablereactor3.scala
index 2c26b8a176..5810ed053f 100644
--- a/test/files/jvm/replyablereactor3.scala
+++ b/test/files/jvm/replyablereactor3.scala
@@ -1,3 +1,7 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
import scala.actors._
import scala.actors.Actor._
@@ -19,7 +23,6 @@ class MyActor extends ReplyReactor {
}
}
-object Test {
def main(args: Array[String]) {
val a = new MyActor
a.start()
diff --git a/test/files/jvm/replyablereactor4.scala b/test/files/jvm/replyablereactor4.scala
index 8776cf63f0..95d63684dd 100644
--- a/test/files/jvm/replyablereactor4.scala
+++ b/test/files/jvm/replyablereactor4.scala
@@ -1,3 +1,7 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
import scala.actors._
import scala.actors.Actor._
@@ -19,7 +23,6 @@ class MyActor extends ReplyReactor {
}
}
-object Test {
def main(args: Array[String]) {
val a = new MyActor
a.start()
diff --git a/test/files/jvm/replyreactor-react-sender.scala b/test/files/jvm/replyreactor-react-sender.scala
index c9884295f6..fdcea09035 100644
--- a/test/files/jvm/replyreactor-react-sender.scala
+++ b/test/files/jvm/replyreactor-react-sender.scala
@@ -1,7 +1,9 @@
-import scala.actors.ReplyReactor
-import scala.actors.Actor._
+
+@deprecated("Suppress warnings", since="2.11")
object Test {
+ import scala.actors.ReplyReactor
+ import scala.actors.Actor._
val NUM = 2000
diff --git a/test/files/jvm/replyreactor.scala b/test/files/jvm/replyreactor.scala
index 0cecf29ec7..7512fb0eb2 100644
--- a/test/files/jvm/replyreactor.scala
+++ b/test/files/jvm/replyreactor.scala
@@ -1,6 +1,8 @@
-import scala.actors.ReplyReactor
+
+@deprecated("Suppress warnings", since="2.11")
object Test {
+ import scala.actors.ReplyReactor
def main(args: Array[String]) {
val a = new ReplyReactor {
def act() {
diff --git a/test/files/jvm/scala-concurrent-tck.check b/test/files/jvm/scala-concurrent-tck.check
new file mode 100644
index 0000000000..18fc456acb
--- /dev/null
+++ b/test/files/jvm/scala-concurrent-tck.check
@@ -0,0 +1,3 @@
+scala-concurrent-tck.scala:429: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ f
+ ^
diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala
index b2b4183564..438ee310e6 100644
--- a/test/files/jvm/scala-concurrent-tck.scala
+++ b/test/files/jvm/scala-concurrent-tck.scala
@@ -728,7 +728,7 @@ trait Blocking extends TestBase {
Await.result(f, Duration(500, "ms"))
assert(false)
} catch {
- case t =>
+ case t: Throwable =>
assert(t == cause)
done()
}
diff --git a/test/files/jvm/scheduler-adapter.scala b/test/files/jvm/scheduler-adapter.scala
index d6a8a446a0..1c9cfe7019 100644
--- a/test/files/jvm/scheduler-adapter.scala
+++ b/test/files/jvm/scheduler-adapter.scala
@@ -1,3 +1,7 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
import scala.actors.{Actor, SchedulerAdapter}
trait AdaptedActor extends Actor {
@@ -36,7 +40,6 @@ object Two extends AdaptedActor {
}
}
-object Test {
val adapted =
new SchedulerAdapter {
def execute(block: => Unit) {
diff --git a/test/files/jvm/serialization.scala b/test/files/jvm/serialization.scala
index f2c47aad77..a64b7115fa 100644
--- a/test/files/jvm/serialization.scala
+++ b/test/files/jvm/serialization.scala
@@ -281,6 +281,7 @@ object Test2_immutable {
//############################################################################
// Test classes in package "scala.collection.mutable"
+@deprecated("Suppress warnings", since="2.11")
object Test3_mutable {
import scala.reflect.ClassManifest
import scala.collection.mutable.{
@@ -593,6 +594,7 @@ object Test8 {
//############################################################################
// Test code
+@deprecated("Suppress warnings", since="2.11")
object Test {
def main(args: Array[String]) {
Test1_scala
@@ -678,7 +680,7 @@ object Test10_util {
import scala.util.Random
def rep[A](n: Int)(f: => A) { if (n > 0) { f; rep(n-1)(f) } }
- try {
+ {
val random = new Random(345)
val random2: Random = read(write(random))
rep(5) { assert(random.nextInt == random2.nextInt) }
diff --git a/test/files/jvm/stringbuilder.scala b/test/files/jvm/stringbuilder.scala
index 54951d657b..1fbf61aeab 100644
--- a/test/files/jvm/stringbuilder.scala
+++ b/test/files/jvm/stringbuilder.scala
@@ -2,6 +2,7 @@
*
* @author Stephane Micheloud
*/
+import scala.language.{ postfixOps }
object Test {
def main(args: Array[String]) {
Test1.run() //ctor, reverse
diff --git a/test/files/jvm/t1449.scala b/test/files/jvm/t1449.scala
index 3822cf7dd5..7917d6f6d5 100644
--- a/test/files/jvm/t1449.scala
+++ b/test/files/jvm/t1449.scala
@@ -1,7 +1,10 @@
-import scala.actors.Actor._
-import scala.actors.Future
-import scala.actors.Futures._
+
+
+@deprecated("Suppress warnings", since="2.11")
object Test {
+ import scala.actors.Actor._
+ import scala.actors.Future
+ import scala.actors.Futures._
def main(args: Array[String]) {
val a = actor {
try {
diff --git a/test/files/jvm/t1948.scala b/test/files/jvm/t1948.scala
index 084c956398..95777b8037 100644
--- a/test/files/jvm/t1948.scala
+++ b/test/files/jvm/t1948.scala
@@ -1,7 +1,9 @@
-import scala.actors._
-import scala.actors.Actor._
+
+@deprecated("Suppress warnings", since="2.11")
object Test {
+ import scala.actors._
+ import scala.actors.Actor._
def main (args: Array[String]) {
val actors = (1 to 1000).toList map { x => actor {
diff --git a/test/files/jvm/t2163/t2163.scala b/test/files/jvm/t2163/t2163.scala
index f73b520cbe..fdf19c4e25 100644
--- a/test/files/jvm/t2163/t2163.scala
+++ b/test/files/jvm/t2163/t2163.scala
@@ -1,5 +1,10 @@
+
+import scala.language.{ higherKinds }
+
class T2163Scala[CC[X]](x: CC[Int]) {
def bar[DD[X]](meh: DD[Int]): CC[Int] = x
}
-object Test extends App {}
+object Test extends App {
+ new t2163().test()
+}
diff --git a/test/files/jvm/t2359.scala b/test/files/jvm/t2359.scala
index 69c69d7b3b..76b78d44f7 100644
--- a/test/files/jvm/t2359.scala
+++ b/test/files/jvm/t2359.scala
@@ -1,6 +1,8 @@
-import scala.actors.Futures._
+
+@deprecated("Suppress warnings", since="2.11")
object Test {
+ import scala.actors.Futures._
def main(args: Array[String]) {
val x = future {
try {
diff --git a/test/files/jvm/t2530.scala b/test/files/jvm/t2530.scala
index c2925a92d1..b41661e623 100644
--- a/test/files/jvm/t2530.scala
+++ b/test/files/jvm/t2530.scala
@@ -1,6 +1,8 @@
-import scala.actors.{Future, Futures}
+
+@deprecated("Suppress warnings", since="2.11")
object Test {
+ import scala.actors.{Future, Futures}
def main(args:Array[String]) : Unit = {
//scala.actors.Debug.level = 3
@@ -29,7 +31,6 @@ object Test {
}
println("Test done with no deadlock. Try again, it will not occur...")
}
-}
case class Matrix(numRows: Int, numCols: Int, values: Array[Double]) {
@@ -94,3 +95,4 @@ case class Matrix(numRows: Int, numCols: Int, values: Array[Double]) {
}
}
+}
diff --git a/test/files/jvm/t3102.scala b/test/files/jvm/t3102.scala
index fbcf2e60e6..d0e0704859 100644
--- a/test/files/jvm/t3102.scala
+++ b/test/files/jvm/t3102.scala
@@ -1,7 +1,10 @@
-import scala.actors.{Actor, TIMEOUT}
-import Actor._
+
+@deprecated("Suppress warnings", since="2.11")
object Test {
+ import scala.actors.{Actor, TIMEOUT}
+ import Actor._
+
def main(args: Array[String]) {
val a = actor {
try {
diff --git a/test/files/jvm/t3356.scala b/test/files/jvm/t3356.scala
index a9e83fe503..53bfd737cd 100644
--- a/test/files/jvm/t3356.scala
+++ b/test/files/jvm/t3356.scala
@@ -1,3 +1,7 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
import scala.actors.{Actor, Exit, !, UncaughtException}
import Actor._
@@ -10,7 +14,6 @@ case class ImageInfo(text: String) {
case class ImageData(text: String)
case class Download(info: ImageInfo)
-object Test {
def scanForImageInfo(url: String): List[ImageInfo] =
List(ImageInfo("A"), ImageInfo("B"))
diff --git a/test/files/jvm/t3365.scala b/test/files/jvm/t3365.scala
index b94e804e63..8321428093 100644
--- a/test/files/jvm/t3365.scala
+++ b/test/files/jvm/t3365.scala
@@ -1,3 +1,7 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
+object Test {
import scala.actors.{ReplyReactor, Channel, Actor, Future}
case class ChannelMsg(chan: Channel[Any])
@@ -27,7 +31,6 @@ class MyActor extends Actor {
}
}
-object Test {
def main(args: Array[String]) {
val a = new MyActor
a.start()
diff --git a/test/files/jvm/t3407.scala b/test/files/jvm/t3407.scala
index 6c2ce85c71..757fa3a438 100644
--- a/test/files/jvm/t3407.scala
+++ b/test/files/jvm/t3407.scala
@@ -1,6 +1,8 @@
-import scala.actors._, scala.actors.Actor._
+
+@deprecated("Suppress warnings", since="2.11")
object Test {
+ import scala.actors._, scala.actors.Actor._
def main(args: Array[String]) {
for (i <- 1 to 10) {
diff --git a/test/files/jvm/t3412-channel.scala b/test/files/jvm/t3412-channel.scala
index fcc439b488..af319d2303 100644
--- a/test/files/jvm/t3412-channel.scala
+++ b/test/files/jvm/t3412-channel.scala
@@ -1,6 +1,8 @@
-import scala.actors._, scala.actors.Actor._, scala.actors.Futures._
+
+@deprecated("Suppress warnings", since="2.11")
object Test {
+ import scala.actors._, scala.actors.Actor._, scala.actors.Futures._
def main(args: Array[String]) {
diff --git a/test/files/jvm/t3412.scala b/test/files/jvm/t3412.scala
index ced15ab5dc..fde6c04cb7 100644
--- a/test/files/jvm/t3412.scala
+++ b/test/files/jvm/t3412.scala
@@ -1,6 +1,8 @@
-import scala.actors._, scala.actors.Actor._, scala.actors.Futures._
+
+@deprecated("Suppress warnings", since="2.11")
object Test {
+ import scala.actors._, scala.actors.Actor._, scala.actors.Futures._
def main(args: Array[String]) {
diff --git a/test/files/jvm/t3470.scala b/test/files/jvm/t3470.scala
index 5e4242cdd7..bcb1d4f8de 100644
--- a/test/files/jvm/t3470.scala
+++ b/test/files/jvm/t3470.scala
@@ -1,6 +1,8 @@
-import scala.actors._
+
+@deprecated("Suppress warnings", since="2.11")
object Test {
+ import scala.actors._
def expectActorState(a: Reactor[T] forSome { type T }, s: Actor.State.Value) {
var done = false
diff --git a/test/files/jvm/t3838.scala b/test/files/jvm/t3838.scala
index ba8f15fc31..a1a71d1049 100644
--- a/test/files/jvm/t3838.scala
+++ b/test/files/jvm/t3838.scala
@@ -1,6 +1,8 @@
-import scala.actors.Actor._
+
+@deprecated("Suppress warnings", since="2.11")
object Test {
+ import scala.actors.Actor._
def main(args: Array[String]) {
actor {
try {
diff --git a/test/files/jvm/t560bis.scala b/test/files/jvm/t560bis.scala
index b04303c8a0..21eb8dde28 100644
--- a/test/files/jvm/t560bis.scala
+++ b/test/files/jvm/t560bis.scala
@@ -6,7 +6,7 @@ import scala.xml._;
case _ => Console.println("bah")
}
def foo(args: List[String]) =
- Elem(null,"bla",Null, TopScope, (args map {x => Text(x)}):_*) match {
+ Elem(null,"bla",Null, TopScope, minimizeEmpty = true, (args map {x => Text(x)}):_*) match {
case Elem(_,_,_,_,Text("1"),_*) =>
Console.println("cool!")
case _ =>
diff --git a/test/files/jvm/t6941/Analyzed_1.flags b/test/files/jvm/t6941/Analyzed_1.flags
new file mode 100644
index 0000000000..ad51758c39
--- /dev/null
+++ b/test/files/jvm/t6941/Analyzed_1.flags
@@ -0,0 +1 @@
+-nowarn
diff --git a/test/files/jvm/t7006.check b/test/files/jvm/t7006.check
new file mode 100644
index 0000000000..7c99eba30c
--- /dev/null
+++ b/test/files/jvm/t7006.check
@@ -0,0 +1,28 @@
+[running phase parser on Foo_1.scala]
+[running phase namer on Foo_1.scala]
+[running phase packageobjects on Foo_1.scala]
+[running phase typer on Foo_1.scala]
+[running phase patmat on Foo_1.scala]
+[running phase superaccessors on Foo_1.scala]
+[running phase extmethods on Foo_1.scala]
+[running phase pickler on Foo_1.scala]
+[running phase refchecks on Foo_1.scala]
+[running phase uncurry on Foo_1.scala]
+[running phase tailcalls on Foo_1.scala]
+[running phase specialize on Foo_1.scala]
+[running phase explicitouter on Foo_1.scala]
+[running phase erasure on Foo_1.scala]
+[running phase posterasure on Foo_1.scala]
+[running phase lazyvals on Foo_1.scala]
+[running phase lambdalift on Foo_1.scala]
+[running phase constructors on Foo_1.scala]
+[running phase flatten on Foo_1.scala]
+[running phase mixin on Foo_1.scala]
+[running phase cleanup on Foo_1.scala]
+[running phase icode on Foo_1.scala]
+[running phase inliner on Foo_1.scala]
+[running phase inlinehandlers on Foo_1.scala]
+[running phase closelim on Foo_1.scala]
+[running phase constopt on Foo_1.scala]
+[running phase dce on Foo_1.scala]
+[running phase jvm on icode]
diff --git a/test/files/jvm/t7146.scala b/test/files/jvm/t7146.scala
index 2bd03d6d02..aaa3dc7ca4 100644
--- a/test/files/jvm/t7146.scala
+++ b/test/files/jvm/t7146.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ reflectiveCalls }
import java.util.concurrent.Executor
import scala.concurrent._
import scala.util.control.NoStackTrace
diff --git a/test/files/jvm/try-type-tests.scala b/test/files/jvm/try-type-tests.scala
index 17811f64c5..e5e53ee737 100644
--- a/test/files/jvm/try-type-tests.scala
+++ b/test/files/jvm/try-type-tests.scala
@@ -49,6 +49,7 @@ trait TryStandard {
n match {
case Success(v) => assert(false)
case Failure(e: NoSuchElementException) => assert(true)
+ case _ => assert(false)
}
}
@@ -141,4 +142,4 @@ object Test
extends App
with TryStandard {
System.exit(0)
-}
\ No newline at end of file
+}
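
Aside: a minimal sketch of why the extra catch-all case is needed; a Failure pattern constrained to a single exception type is not exhaustive on its own. Names are illustrative.

import java.util.NoSuchElementException
import scala.util.{ Failure, Success, Try }

object TryMatchDemo {
  def classify(t: Try[Int]): String = t match {
    case Success(v)                         => s"ok: $v"
    case Failure(e: NoSuchElementException) => "missing element"
    case _                                  => "other failure"   // without this, a MatchError is possible
  }

  def main(args: Array[String]): Unit = {
    println(classify(Try(Seq.empty[Int].head)))   // missing element
    println(classify(Try("x".toInt)))             // other failure
  }
}
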
diff --git a/test/files/jvm/unreachable/Foo_1.scala b/test/files/jvm/unreachable/Foo_1.scala
index d17421c516..c223718097 100644
--- a/test/files/jvm/unreachable/Foo_1.scala
+++ b/test/files/jvm/unreachable/Foo_1.scala
@@ -1,3 +1,5 @@
+import scala.sys.error
+
class Foo_1 {
def unreachableNormalExit: Int = {
return 42
@@ -107,4 +109,4 @@ class Foo_1 {
}
2
}
-}
\ No newline at end of file
+}
diff --git a/test/files/jvm/xml01.scala b/test/files/jvm/xml01.scala
index 2fab650637..2b456f5ff5 100644
--- a/test/files/jvm/xml01.scala
+++ b/test/files/jvm/xml01.scala
@@ -1,10 +1,11 @@
import java.io.StringReader
import org.xml.sax.InputSource
-import scala.util.logging._
import scala.xml._
object Test extends App {
+ def Elem(prefix: String, label: String, attributes: MetaData, scope: NamespaceBinding, child: Node*): Elem =
+ scala.xml.Elem.apply(prefix, label, attributes, scope, minimizeEmpty = true, child: _*)
val e: scala.xml.MetaData = Null //Node.NoAttributes
val sc: scala.xml.NamespaceBinding = TopScope
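
Aside: a minimal sketch of the widened Elem.apply signature used by the shim above; the minimizeEmpty flag decides whether childless elements print as self-closing tags.

import scala.xml.{ Elem, Null, Text, TopScope }

object ElemDemo {
  def main(args: Array[String]): Unit = {
    val full  = Elem(null, "bla", Null, TopScope, minimizeEmpty = true, Text("1"))
    val empty = Elem(null, "bla", Null, TopScope, minimizeEmpty = true)
    println(full)    // <bla>1</bla>
    println(empty)   // <bla/>
  }
}
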
diff --git a/test/files/jvm/xml03syntax.check b/test/files/jvm/xml03syntax.check
index edcdbdd2ba..599cbad686 100755
--- a/test/files/jvm/xml03syntax.check
+++ b/test/files/jvm/xml03syntax.check
@@ -24,3 +24,4 @@ true
node=<elem key="<b>hello</b>"/>, key=Some(<b>hello</b>)
node=<elem/>, key=None
+<a>Š</a>
diff --git a/test/files/jvm/xml03syntax.scala b/test/files/jvm/xml03syntax.scala
index 2c93f7c176..41663681c7 100644
--- a/test/files/jvm/xml03syntax.scala
+++ b/test/files/jvm/xml03syntax.scala
@@ -91,7 +91,7 @@ object Test {
}
val parsed = parser.element(TopScope) // parse the source as element
// alternatively, we could call document()
- parsed
+ println(parsed)
}
}
diff --git a/test/files/neg/abstract-explaintypes.check b/test/files/neg/abstract-explaintypes.check
index 59c1ad2378..e303b45a32 100644
--- a/test/files/neg/abstract-explaintypes.check
+++ b/test/files/neg/abstract-explaintypes.check
@@ -3,9 +3,13 @@ abstract-explaintypes.scala:6: error: type mismatch;
required: A.this.T
def foo2: T = bar().baz();
^
+A <: A.this.T?
+false
abstract-explaintypes.scala:9: error: type mismatch;
found : A
required: A.this.T
def foo5: T = baz().baz();
^
+A <: A.this.T?
+false
two errors found
diff --git a/test/files/neg/choices.check b/test/files/neg/choices.check
index 3e63f9999d..b114394e96 100644
--- a/test/files/neg/choices.check
+++ b/test/files/neg/choices.check
@@ -1,2 +1,2 @@
-partest error: bad flags: -Ylinearizer
+error: bad options: -Yresolve-term-conflict
one error found
diff --git a/test/files/neg/choices.flags b/test/files/neg/choices.flags
index 5464a18c5d..9718467d4c 100644
--- a/test/files/neg/choices.flags
+++ b/test/files/neg/choices.flags
@@ -1 +1 @@
--Ylinearizer
\ No newline at end of file
+-Yresolve-term-conflict
diff --git a/test/files/neg/delayed-init-ref.check b/test/files/neg/delayed-init-ref.check
new file mode 100644
index 0000000000..ce5b205832
--- /dev/null
+++ b/test/files/neg/delayed-init-ref.check
@@ -0,0 +1,12 @@
+delayed-init-ref.scala:17: warning: Selecting value vall from object O, which extends scala.DelayedInit, is likely to yield an uninitialized value
+ println(O.vall) // warn
+ ^
+delayed-init-ref.scala:19: warning: Selecting value vall from object O, which extends scala.DelayedInit, is likely to yield an uninitialized value
+ println(vall) // warn
+ ^
+delayed-init-ref.scala:40: warning: Selecting value foo from trait UserContext, which extends scala.DelayedInit, is likely to yield an uninitialized value
+ println({locally(()); this}.foo) // warn (spurious, but we can't discriminate)
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/delayed-init-ref.flags b/test/files/neg/delayed-init-ref.flags
new file mode 100644
index 0000000000..7949c2afa2
--- /dev/null
+++ b/test/files/neg/delayed-init-ref.flags
@@ -0,0 +1 @@
+-Xlint -Xfatal-warnings
diff --git a/test/files/neg/delayed-init-ref.scala b/test/files/neg/delayed-init-ref.scala
new file mode 100644
index 0000000000..f2aa804e28
--- /dev/null
+++ b/test/files/neg/delayed-init-ref.scala
@@ -0,0 +1,42 @@
+trait T {
+ val traitVal = ""
+}
+
+object O extends App with T {
+ val vall = ""
+ lazy val lazyy = ""
+ def deff = ""
+
+ println(vall) // no warn
+ new {
+ println(vall) // no warn
+ }
+}
+
+object Client {
+ println(O.vall) // warn
+ import O.vall
+ println(vall) // warn
+
+ println(O.lazyy) // no warn
+ println(O.deff) // no warn
+ println(O.traitVal) // no warn
+}
+
+// Delayed init usage pattern from Specs2
+// See: https://groups.google.com/d/msg/scala-sips/wP6dL8nIAQs/ogjoPE-MSVAJ
+trait Before extends DelayedInit {
+ def before()
+ override def delayedInit(x: => Unit): Unit = { before; x }
+}
+object Spec {
+ trait UserContext extends Before {
+ def before() = ()
+ val foo = "foo"
+ }
+ new UserContext {
+ println(foo) // no warn
+ println(this.foo) // no warn
+ println({locally(()); this}.foo) // warn (spurious, but we can't discriminate)
+ }
+}
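
Aside: a minimal sketch of the hazard the new -Xlint check warns about, assuming the App object's delayed body has not yet been run when the field is read.

object Holder extends App {
  val greeting = "hello"   // moved into delayedInit by the App/DelayedInit machinery
}

object DelayedInitDemo {
  def main(args: Array[String]): Unit = {
    // Holder.main was never called, so its constructor statements have not
    // executed and the field still holds its default value.
    println(Holder.greeting)   // prints null
  }
}
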
diff --git a/test/files/neg/javac-error.check b/test/files/neg/javac-error.check
new file mode 100644
index 0000000000..e7d1ccc1a1
--- /dev/null
+++ b/test/files/neg/javac-error.check
@@ -0,0 +1,10 @@
+#partest java6
+javac-error/J.java:2: method does not override or implement a method from a supertype
+ @Override public void foo() { }
+ ^
+1 error
+#partest java7
+javac-error/J.java:2: error: method does not override or implement a method from a supertype
+ @Override public void foo() { }
+ ^
+1 error
diff --git a/test/files/neg/javac-error.flags b/test/files/neg/javac-error.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/javac-error.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/javac-error/J.java b/test/files/neg/javac-error/J.java
new file mode 100644
index 0000000000..83f50c9ae2
--- /dev/null
+++ b/test/files/neg/javac-error/J.java
@@ -0,0 +1,5 @@
+public class J {
+ @Override public void foo() { }
+
+ public void bar() { foo(); }
+}
diff --git a/test/files/neg/javac-error/SUT_5.scala b/test/files/neg/javac-error/SUT_5.scala
new file mode 100644
index 0000000000..0a996352c0
--- /dev/null
+++ b/test/files/neg/javac-error/SUT_5.scala
@@ -0,0 +1,5 @@
+
+/** The System Under Test.
+ * We bail on the earlier round that generates the first error.
+ */
+class SUT extends J
diff --git a/test/files/neg/macro-divergence-controlled.check b/test/files/neg/macro-divergence-controlled.check
new file mode 100644
index 0000000000..4876f7cf96
--- /dev/null
+++ b/test/files/neg/macro-divergence-controlled.check
@@ -0,0 +1,4 @@
+Test_2.scala:2: error: could not find implicit value for parameter e: Complex[Foo]
+ println(implicitly[Complex[Foo]])
+ ^
+one error found
diff --git a/test/files/neg/macro-divergence-controlled/Impls_Macros_1.scala b/test/files/neg/macro-divergence-controlled/Impls_Macros_1.scala
new file mode 100644
index 0000000000..59acaede65
--- /dev/null
+++ b/test/files/neg/macro-divergence-controlled/Impls_Macros_1.scala
@@ -0,0 +1,23 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+trait Complex[T]
+
+class Foo(val foo: Foo)
+
+object Complex {
+ def impl[T: c.WeakTypeTag](c: Context): c.Expr[Complex[T]] = {
+ import c.universe._
+ val tpe = weakTypeOf[T]
+ for (f <- tpe.declarations.collect{case f: TermSymbol if f.isParamAccessor && !f.isMethod => f}) {
+ val trecur = appliedType(typeOf[Complex[_]], List(f.typeSignature))
+ if (c.openImplicits.tail.exists(ic => ic.pt =:= trecur)) c.abort(c.enclosingPosition, "diverging implicit expansion. reported by a macro!")
+ val recur = c.inferImplicitValue(trecur, silent = true)
+ if (recur == EmptyTree) c.abort(c.enclosingPosition, s"couldn't synthesize $trecur")
+ }
+ c.literalNull
+ }
+
+ implicit object ComplexString extends Complex[String]
+ implicit def genComplex[T]: Complex[T] = macro impl[T]
+}
diff --git a/test/files/neg/macro-divergence-controlled/Test_2.scala b/test/files/neg/macro-divergence-controlled/Test_2.scala
new file mode 100644
index 0000000000..dcc4593335
--- /dev/null
+++ b/test/files/neg/macro-divergence-controlled/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ println(implicitly[Complex[Foo]])
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-a.check b/test/files/neg/macro-invalidimpl-a.check
deleted file mode 100644
index 7f11f3b865..0000000000
--- a/test/files/neg/macro-invalidimpl-a.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Macros_Test_2.scala:3: error: macro implementation must be in statically accessible object
- def foo(x: Any) = macro impls.foo
- ^
-one error found
diff --git a/test/files/neg/macro-invalidimpl-a/Impls_1.scala b/test/files/neg/macro-invalidimpl-a/Impls_1.scala
deleted file mode 100644
index cfa1218038..0000000000
--- a/test/files/neg/macro-invalidimpl-a/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-class Impls {
- def foo(c: Ctx)(x: c.Expr[Any]) = ???
-}
diff --git a/test/files/neg/macro-invalidimpl-a/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-a/Macros_Test_2.scala
deleted file mode 100644
index 2220ddae0c..0000000000
--- a/test/files/neg/macro-invalidimpl-a/Macros_Test_2.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-object Macros {
- val impls = new Impls
- def foo(x: Any) = macro impls.foo
-}
-
-object Test extends App {
- import Macros._
- foo(42)
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-b.check b/test/files/neg/macro-invalidimpl-b.check
deleted file mode 100644
index 7f11f3b865..0000000000
--- a/test/files/neg/macro-invalidimpl-b.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Macros_Test_2.scala:3: error: macro implementation must be in statically accessible object
- def foo(x: Any) = macro impls.foo
- ^
-one error found
diff --git a/test/files/neg/macro-invalidimpl-b/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-b/Macros_Test_2.scala
deleted file mode 100644
index 81e40837d2..0000000000
--- a/test/files/neg/macro-invalidimpl-b/Macros_Test_2.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-object Macros {
- val impls = Impls
- def foo(x: Any) = macro impls.foo
-}
-
-object Test extends App {
- import Macros._
- foo(42)
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-c.check b/test/files/neg/macro-invalidimpl-c.check
deleted file mode 100644
index 9e0181c0a3..0000000000
--- a/test/files/neg/macro-invalidimpl-c.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Impls_Macros_1.scala:8: error: macro implementation must be in statically accessible object
- def foo(x: Any) = macro Impls.foo
- ^
-one error found
diff --git a/test/files/neg/macro-invalidimpl-c/Impls_Macros_1.scala b/test/files/neg/macro-invalidimpl-c/Impls_Macros_1.scala
deleted file mode 100644
index 67a0eb348b..0000000000
--- a/test/files/neg/macro-invalidimpl-c/Impls_Macros_1.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-class Macros {
- object Impls {
- def foo(c: Ctx)(x: c.Expr[Any]) = ???
- }
-
- def foo(x: Any) = macro Impls.foo
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-d.check b/test/files/neg/macro-invalidimpl-d.check
deleted file mode 100644
index 76a5ba9c8c..0000000000
--- a/test/files/neg/macro-invalidimpl-d.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Macros_Test_2.scala:2: error: macro implementation must be in statically accessible object
- def foo(x: Any) = macro Impls.foo
- ^
-one error found
diff --git a/test/files/neg/macro-invalidimpl-d/Impls_1.scala b/test/files/neg/macro-invalidimpl-d/Impls_1.scala
deleted file mode 100644
index e0819c938c..0000000000
--- a/test/files/neg/macro-invalidimpl-d/Impls_1.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-trait MacroHelpers {
- object Impls {
- def foo(c: Ctx)(x: c.Expr[Any]) = x
- }
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-d/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-d/Macros_Test_2.scala
deleted file mode 100644
index 067ab1ddec..0000000000
--- a/test/files/neg/macro-invalidimpl-d/Macros_Test_2.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-class Macros extends MacroHelpers {
- def foo(x: Any) = macro Impls.foo
-}
-
-object Test extends App {
- println(new Macros().foo(42))
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-e.check b/test/files/neg/macro-invalidimpl-e.check
deleted file mode 100644
index e0910b2899..0000000000
--- a/test/files/neg/macro-invalidimpl-e.check
+++ /dev/null
@@ -1,13 +0,0 @@
-Macros_Test_2.scala:2: error: ambiguous reference to overloaded definition,
-both method foo in object Impls of type (c: scala.reflect.macros.Context)(x: c.Expr[Any], y: c.Expr[Any])Nothing
-and method foo in object Impls of type (c: scala.reflect.macros.Context)(x: c.Expr[Any])Nothing
-match expected type ?
- def foo(x: Any) = macro Impls.foo
- ^
-Macros_Test_2.scala:3: error: ambiguous reference to overloaded definition,
-both method foo in object Impls of type (c: scala.reflect.macros.Context)(x: c.Expr[Any], y: c.Expr[Any])Nothing
-and method foo in object Impls of type (c: scala.reflect.macros.Context)(x: c.Expr[Any])Nothing
-match expected type ?
- def foo(x: Any, y: Any) = macro Impls.foo
- ^
-two errors found
diff --git a/test/files/neg/macro-invalidimpl-e/Impls_1.scala b/test/files/neg/macro-invalidimpl-e/Impls_1.scala
deleted file mode 100644
index fd40119c31..0000000000
--- a/test/files/neg/macro-invalidimpl-e/Impls_1.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo(c: Ctx)(x: c.Expr[Any]) = ???
- def foo(c: Ctx)(x: c.Expr[Any], y: c.Expr[Any]) = ???
-}
diff --git a/test/files/neg/macro-invalidimpl-e/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-e/Macros_Test_2.scala
deleted file mode 100644
index 6edde08167..0000000000
--- a/test/files/neg/macro-invalidimpl-e/Macros_Test_2.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-object Macros {
- def foo(x: Any) = macro Impls.foo
- def foo(x: Any, y: Any) = macro Impls.foo
-}
-
-object Test extends App {
- import Macros._
- foo(42)
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-f.check b/test/files/neg/macro-invalidimpl-f.check
deleted file mode 100644
index 4e5851f566..0000000000
--- a/test/files/neg/macro-invalidimpl-f.check
+++ /dev/null
@@ -1,7 +0,0 @@
-Macros_Test_2.scala:2: error: macro implementation has wrong shape:
- required: (c: scala.reflect.macros.Context)(): c.Expr[Unit]
- found : (c: scala.reflect.macros.Context): c.Expr[Unit]
-number of parameter sections differ
- def bar1() = macro Impls.fooNullary
- ^
-one error found
diff --git a/test/files/neg/macro-invalidimpl-f/Impls_1.scala b/test/files/neg/macro-invalidimpl-f/Impls_1.scala
deleted file mode 100644
index 0e4da86d22..0000000000
--- a/test/files/neg/macro-invalidimpl-f/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def fooNullary(c: Ctx) = {
- import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works"))))
- c.Expr[Unit](body)
- }
-
- def fooEmpty(c: Ctx)() = fooNullary(c)
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-f/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-f/Macros_Test_2.scala
deleted file mode 100644
index 493edf1df8..0000000000
--- a/test/files/neg/macro-invalidimpl-f/Macros_Test_2.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-object Macros {
- def bar1() = macro Impls.fooNullary
-}
-
-object Test extends App {
- Macros.bar1
- Macros.bar1()
- println("kkthxbai")
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-g.check b/test/files/neg/macro-invalidimpl-g.check
deleted file mode 100644
index 7342f7336f..0000000000
--- a/test/files/neg/macro-invalidimpl-g.check
+++ /dev/null
@@ -1,7 +0,0 @@
-Macros_Test_2.scala:2: error: macro implementation has wrong shape:
- required: (c: scala.reflect.macros.Context): c.Expr[Unit]
- found : (c: scala.reflect.macros.Context)(): c.Expr[Unit]
-number of parameter sections differ
- def foo1 = macro Impls.fooEmpty
- ^
-one error found
diff --git a/test/files/neg/macro-invalidimpl-g/Impls_1.scala b/test/files/neg/macro-invalidimpl-g/Impls_1.scala
deleted file mode 100644
index 0e4da86d22..0000000000
--- a/test/files/neg/macro-invalidimpl-g/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def fooNullary(c: Ctx) = {
- import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works"))))
- c.Expr[Unit](body)
- }
-
- def fooEmpty(c: Ctx)() = fooNullary(c)
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-g/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-g/Macros_Test_2.scala
deleted file mode 100644
index 5561db9f9a..0000000000
--- a/test/files/neg/macro-invalidimpl-g/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
- def foo1 = macro Impls.fooEmpty
-}
-
-object Test extends App {
- Macros.foo1
- println("kkthxbai")
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-h.check b/test/files/neg/macro-invalidimpl-h.check
deleted file mode 100644
index ea76e1aeac..0000000000
--- a/test/files/neg/macro-invalidimpl-h.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Macros_Test_2.scala:2: error: type arguments [String] do not conform to method foo's type parameter bounds [U <: Int]
- def foo = macro Impls.foo[String]
- ^
-one error found
diff --git a/test/files/neg/macro-invalidimpl-h/Impls_1.scala b/test/files/neg/macro-invalidimpl-h/Impls_1.scala
deleted file mode 100644
index 427fd3d5c0..0000000000
--- a/test/files/neg/macro-invalidimpl-h/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo[U <: Int](c: Ctx) = ???
-}
diff --git a/test/files/neg/macro-invalidimpl-i.check b/test/files/neg/macro-invalidimpl-i.check
deleted file mode 100644
index 846ed8d134..0000000000
--- a/test/files/neg/macro-invalidimpl-i.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Macros_Test_2.scala:4: error: macro implementation must be public
- def foo = macro Impls.impl
- ^
-one error found
diff --git a/test/files/neg/macro-invalidimpl-i/Impls_1.scala b/test/files/neg/macro-invalidimpl-i/Impls_1.scala
deleted file mode 100644
index c35d8ab3c1..0000000000
--- a/test/files/neg/macro-invalidimpl-i/Impls_1.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package foo
-
-import scala.reflect.macros.Context
-
-object Impls {
- private[foo] def impl(c: Context) = ???
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-i/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-i/Macros_Test_2.scala
deleted file mode 100644
index fb129c70be..0000000000
--- a/test/files/neg/macro-invalidimpl-i/Macros_Test_2.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-package foo
-
-object Test extends App {
- def foo = macro Impls.impl
-}
diff --git a/test/files/neg/macro-invalidimpl.check b/test/files/neg/macro-invalidimpl.check
new file mode 100644
index 0000000000..aaf4f88fc2
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl.check
@@ -0,0 +1,51 @@
+Macros_Test_2.scala:5: error: macro implementation reference has wrong shape. required:
+macro [<static object>].<method name>[[<type args>]] or
+macro [<macro bundle>].<method name>[[<type args>]]
+ def foo(x: Any) = macro impls.foo
+ ^
+Macros_Test_2.scala:10: error: macro implementation reference has wrong shape. required:
+macro [<static object>].<method name>[[<type args>]] or
+macro [<macro bundle>].<method name>[[<type args>]]
+ def foo(x: Any) = macro impls.foo
+ ^
+Macros_Test_2.scala:18: error: macro implementation reference has wrong shape. required:
+macro [<static object>].<method name>[[<type args>]] or
+macro [<macro bundle>].<method name>[[<type args>]]
+ def foo(x: Any) = macro Impls3.foo
+ ^
+Macros_Test_2.scala:22: error: macro implementation reference has wrong shape. required:
+macro [<static object>].<method name>[[<type args>]] or
+macro [<macro bundle>].<method name>[[<type args>]]
+ def foo(x: Any) = macro Impls4.foo
+ ^
+Macros_Test_2.scala:26: error: ambiguous reference to overloaded definition,
+both method foo in object Impls5 of type (c: scala.reflect.macros.Context)(x: c.Expr[Any], y: c.Expr[Any])Nothing
+and method foo in object Impls5 of type (c: scala.reflect.macros.Context)(x: c.Expr[Any])Nothing
+match expected type ?
+ def foo(x: Any) = macro Impls5.foo
+ ^
+Macros_Test_2.scala:27: error: ambiguous reference to overloaded definition,
+both method foo in object Impls5 of type (c: scala.reflect.macros.Context)(x: c.Expr[Any], y: c.Expr[Any])Nothing
+and method foo in object Impls5 of type (c: scala.reflect.macros.Context)(x: c.Expr[Any])Nothing
+match expected type ?
+ def foo(x: Any, y: Any) = macro Impls5.foo
+ ^
+Macros_Test_2.scala:31: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Unit]
+ found : (c: scala.reflect.macros.Context)(): c.Expr[Unit]
+number of parameter sections differ
+ def foo1 = macro Impls6.fooEmpty
+ ^
+Macros_Test_2.scala:32: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context)(): c.Expr[Unit]
+ found : (c: scala.reflect.macros.Context): c.Expr[Unit]
+number of parameter sections differ
+ def bar1() = macro Impls6.fooNullary
+ ^
+Macros_Test_2.scala:36: error: type arguments [String] do not conform to method foo's type parameter bounds [U <: Int]
+ def foo = macro Impls7.foo[String]
+ ^
+Macros_Test_2.scala:53: error: macro implementation must be public
+ def foo = macro Impls8.impl
+ ^
+10 errors found
diff --git a/test/files/neg/macro-invalidimpl-a.flags b/test/files/neg/macro-invalidimpl.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/neg/macro-invalidimpl-a.flags
+++ b/test/files/neg/macro-invalidimpl.flags
diff --git a/test/files/neg/macro-invalidimpl/Impls_1.scala b/test/files/neg/macro-invalidimpl/Impls_1.scala
new file mode 100644
index 0000000000..cf78ecc65a
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl/Impls_1.scala
@@ -0,0 +1,40 @@
+import scala.reflect.macros.Context
+
+class Impls1 {
+ def foo(c: Context)(x: c.Expr[Any]) = ???
+}
+
+object Impls2 {
+ def foo(c: Context)(x: c.Expr[Any]) = ???
+}
+
+trait MacroHelpers {
+ object Impls4 {
+ def foo(c: Context)(x: c.Expr[Any]) = x
+ }
+}
+
+object Impls5 {
+ def foo(c: Context)(x: c.Expr[Any]) = ???
+ def foo(c: Context)(x: c.Expr[Any], y: c.Expr[Any]) = ???
+}
+
+object Impls6 {
+ def fooNullary(c: Context) = {
+ import c.universe._
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works"))))
+ c.Expr[Unit](body)
+ }
+
+ def fooEmpty(c: Context)() = fooNullary(c)
+}
+
+object Impls7 {
+ def foo[U <: Int](c: Context) = ???
+}
+
+package foo {
+ object Impls8 {
+ private[foo] def impl(c: Context) = ???
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl/Macros_Test_2.scala
new file mode 100644
index 0000000000..8aae9553f5
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl/Macros_Test_2.scala
@@ -0,0 +1,55 @@
+import scala.reflect.macros.Context
+
+object Macros1 {
+ val impls = new Impls1
+ def foo(x: Any) = macro impls.foo
+}
+
+object Macros2 {
+ val impls = Impls2
+ def foo(x: Any) = macro impls.foo
+}
+
+class Macros3 {
+ object Impls3 {
+ def foo(c: Context)(x: c.Expr[Any]) = ???
+ }
+
+ def foo(x: Any) = macro Impls3.foo
+}
+
+class Macros4 extends MacroHelpers {
+ def foo(x: Any) = macro Impls4.foo
+}
+
+object Macros5 {
+ def foo(x: Any) = macro Impls5.foo
+ def foo(x: Any, y: Any) = macro Impls5.foo
+}
+
+object Macros6 {
+ def foo1 = macro Impls6.fooEmpty
+ def bar1() = macro Impls6.fooNullary
+}
+
+object Macros7 {
+ def foo = macro Impls7.foo[String]
+}
+
+object Test extends App {
+ println(Macros1.foo(42))
+ println(Macros2.foo(42))
+ println(new Macros3().foo(42))
+ println(new Macros4().foo(42))
+ println(Macros5.foo(42))
+ println(Macros6.foo1)
+ println(Macros6.bar1)
+ println(Macros6.bar1())
+ println(Macros7.foo)
+}
+
+package foo {
+ object Test extends App {
+ def foo = macro Impls8.impl
+ }
+}
\ No newline at end of file
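
Aside: for contrast with the rejected shapes above, a minimal sketch of a reference the checks accept, written against the pre-2.11 scala.reflect.macros.Context API that these tests target. As with the Impls_1/Macros_Test_2 split, the implementation object has to be compiled before its users.

import scala.language.experimental.macros
import scala.reflect.macros.Context

object GoodImpls {
  // Public, top-level, statically accessible, with parameter sections
  // mirroring the macro def below.
  def helloImpl(c: Context)(): c.Expr[Unit] = {
    import c.universe._
    val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")),
                     List(Literal(Constant("it works"))))
    c.Expr[Unit](body)
  }
}

object GoodMacros {
  def hello(): Unit = macro GoodImpls.helloImpl
}
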
diff --git a/test/files/neg/macro-invalidret-nontree/Impls_1.scala b/test/files/neg/macro-invalidret-nontree/Impls_1.scala
deleted file mode 100644
index ef19b1b405..0000000000
--- a/test/files/neg/macro-invalidret-nontree/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo(c: Ctx) = 2
-}
diff --git a/test/files/neg/macro-invalidret-nontree/Macros_Test_2.scala b/test/files/neg/macro-invalidret-nontree/Macros_Test_2.scala
deleted file mode 100644
index 96a8de2832..0000000000
--- a/test/files/neg/macro-invalidret-nontree/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
- def foo = macro Impls.foo
-}
-
-object Test extends App {
- import Macros._
- foo
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala b/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala
deleted file mode 100644
index f98376a2ba..0000000000
--- a/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-import scala.reflect.runtime.{universe => ru}
-
-object Impls {
- def foo(c: Ctx) = ru.Literal(ru.Constant(42))
-}
diff --git a/test/files/neg/macro-invalidret-nonuniversetree/Macros_Test_2.scala b/test/files/neg/macro-invalidret-nonuniversetree/Macros_Test_2.scala
deleted file mode 100644
index 96a8de2832..0000000000
--- a/test/files/neg/macro-invalidret-nonuniversetree/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
- def foo = macro Impls.foo
-}
-
-object Test extends App {
- import Macros._
- foo
-}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidret.check b/test/files/neg/macro-invalidret.check
new file mode 100644
index 0000000000..8c6ed4eb45
--- /dev/null
+++ b/test/files/neg/macro-invalidret.check
@@ -0,0 +1,13 @@
+Macros_Test_2.scala:2: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context): Int
+type mismatch for return type: Int does not conform to c.Expr[Any]
+ def foo1 = macro Impls.foo1
+ ^
+Macros_Test_2.scala:3: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context): reflect.runtime.universe.Literal
+type mismatch for return type: reflect.runtime.universe.Literal does not conform to c.Expr[Any]
+ def foo2 = macro Impls.foo2
+ ^
+two errors found
diff --git a/test/files/neg/macro-invalidimpl-b.flags b/test/files/neg/macro-invalidret.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/neg/macro-invalidimpl-b.flags
+++ b/test/files/neg/macro-invalidret.flags
diff --git a/test/files/neg/macro-invalidret/Impls_1.scala b/test/files/neg/macro-invalidret/Impls_1.scala
new file mode 100644
index 0000000000..a58af1a23c
--- /dev/null
+++ b/test/files/neg/macro-invalidret/Impls_1.scala
@@ -0,0 +1,7 @@
+import scala.reflect.macros.Context
+import scala.reflect.runtime.{universe => ru}
+
+object Impls {
+ def foo1(c: Context) = 2
+ def foo2(c: Context) = ru.Literal(ru.Constant(42))
+}
diff --git a/test/files/neg/macro-invalidret/Macros_Test_2.scala b/test/files/neg/macro-invalidret/Macros_Test_2.scala
new file mode 100644
index 0000000000..f8880fa023
--- /dev/null
+++ b/test/files/neg/macro-invalidret/Macros_Test_2.scala
@@ -0,0 +1,10 @@
+object Macros {
+ def foo1 = macro Impls.foo1
+ def foo2 = macro Impls.foo2
+}
+
+object Test extends App {
+ import Macros._
+ foo1
+ foo2
+} \ No newline at end of file
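Note: the macro-invalidret fixture above rejects implementations whose result is a bare Int or a runtime-universe Literal. As a point of contrast, here is a minimal conforming sketch under the same scala.reflect.macros.Context API shown in these tests; the names OkImpls and OkMacros are hypothetical and not part of this commit.

import scala.language.experimental.macros
import scala.reflect.macros.Context

object OkImpls {
  // Returning c.Expr[Int], built from the context's own universe, satisfies
  // the required shape (c: scala.reflect.macros.Context): c.Expr[Any].
  def foo(c: Context): c.Expr[Int] = {
    import c.universe._
    c.Expr[Int](Literal(Constant(42)))
  }
}

object OkMacros {
  def foo: Int = macro OkImpls.foo
}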
diff --git a/test/files/neg/macro-invalidshape-a.check b/test/files/neg/macro-invalidshape-a.check
deleted file mode 100644
index f38a90819e..0000000000
--- a/test/files/neg/macro-invalidshape-a.check
+++ /dev/null
@@ -1,5 +0,0 @@
-Macros_Test_2.scala:2: error: macro body has wrong shape:
- required: macro [<implementation object>].<method name>[[<type args>]]
- def foo(x: Any) = macro 2
- ^
-one error found
diff --git a/test/files/neg/macro-invalidshape-a/Impls_1.scala b/test/files/neg/macro-invalidshape-a/Impls_1.scala
deleted file mode 100644
index 4467021545..0000000000
--- a/test/files/neg/macro-invalidshape-a/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo(c: Ctx)(x: c.Expr[Any]) = ???
-}
diff --git a/test/files/neg/macro-invalidshape-a/Macros_Test_2.scala b/test/files/neg/macro-invalidshape-a/Macros_Test_2.scala
deleted file mode 100644
index ffff17d1e7..0000000000
--- a/test/files/neg/macro-invalidshape-a/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
- def foo(x: Any) = macro 2
-}
-
-object Test extends App {
- import Macros._
- foo(42)
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidshape-b.check b/test/files/neg/macro-invalidshape-b.check
deleted file mode 100644
index 976685c6bd..0000000000
--- a/test/files/neg/macro-invalidshape-b.check
+++ /dev/null
@@ -1,5 +0,0 @@
-Macros_Test_2.scala:2: error: macro body has wrong shape:
- required: macro [<implementation object>].<method name>[[<type args>]]
- def foo(x: Any) = macro Impls.foo(null)(null)
- ^
-one error found
diff --git a/test/files/neg/macro-invalidshape-b/Impls_1.scala b/test/files/neg/macro-invalidshape-b/Impls_1.scala
deleted file mode 100644
index 4467021545..0000000000
--- a/test/files/neg/macro-invalidshape-b/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo(c: Ctx)(x: c.Expr[Any]) = ???
-}
diff --git a/test/files/neg/macro-invalidshape-b/Macros_Test_2.scala b/test/files/neg/macro-invalidshape-b/Macros_Test_2.scala
deleted file mode 100644
index b67cd32a6e..0000000000
--- a/test/files/neg/macro-invalidshape-b/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
- def foo(x: Any) = macro Impls.foo(null)(null)
-}
-
-object Test extends App {
- import Macros._
- foo(42)
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidshape-c.check b/test/files/neg/macro-invalidshape-c.check
deleted file mode 100644
index 0b2e9cfe4f..0000000000
--- a/test/files/neg/macro-invalidshape-c.check
+++ /dev/null
@@ -1,9 +0,0 @@
-Macros_Test_2.scala:2: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
- def foo(x: Any) = macro {2; Impls.foo}
- ^
-Macros_Test_2.scala:2: error: missing arguments for method foo in object Impls;
-follow this method with `_' if you want to treat it as a partially applied function
- def foo(x: Any) = macro {2; Impls.foo}
- ^
-one warning found
-one error found
diff --git a/test/files/neg/macro-invalidshape-c/Impls_1.scala b/test/files/neg/macro-invalidshape-c/Impls_1.scala
deleted file mode 100644
index 4467021545..0000000000
--- a/test/files/neg/macro-invalidshape-c/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo(c: Ctx)(x: c.Expr[Any]) = ???
-}
diff --git a/test/files/neg/macro-invalidshape-c/Macros_Test_2.scala b/test/files/neg/macro-invalidshape-c/Macros_Test_2.scala
deleted file mode 100644
index 552c3710c7..0000000000
--- a/test/files/neg/macro-invalidshape-c/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
- def foo(x: Any) = macro {2; Impls.foo}
-}
-
-object Test extends App {
- import Macros._
- foo(42)
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidshape-d.check b/test/files/neg/macro-invalidshape-d.check
deleted file mode 100644
index e43a2ca0ab..0000000000
--- a/test/files/neg/macro-invalidshape-d.check
+++ /dev/null
@@ -1,8 +0,0 @@
-Macros_Test_2.scala:2: warning: macro is now a reserved word; usage as an identifier is deprecated
- def foo(x: Any) = {2; macro Impls.foo}
- ^
-Macros_Test_2.scala:2: error: ';' expected but '.' found.
- def foo(x: Any) = {2; macro Impls.foo}
- ^
-one warning found
-one error found
diff --git a/test/files/neg/macro-invalidshape-d.flags b/test/files/neg/macro-invalidshape-d.flags
deleted file mode 100644
index 83b7265eb9..0000000000
--- a/test/files/neg/macro-invalidshape-d.flags
+++ /dev/null
@@ -1 +0,0 @@
--deprecation -language:experimental.macros
diff --git a/test/files/neg/macro-invalidshape-d/Impls_1.scala b/test/files/neg/macro-invalidshape-d/Impls_1.scala
deleted file mode 100644
index 4467021545..0000000000
--- a/test/files/neg/macro-invalidshape-d/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo(c: Ctx)(x: c.Expr[Any]) = ???
-}
diff --git a/test/files/neg/macro-invalidshape-d/Macros_Test_2.scala b/test/files/neg/macro-invalidshape-d/Macros_Test_2.scala
deleted file mode 100644
index bacd9a6e7c..0000000000
--- a/test/files/neg/macro-invalidshape-d/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
- def foo(x: Any) = {2; macro Impls.foo}
-}
-
-object Test extends App {
- import Macros._
- foo(42)
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidshape.check b/test/files/neg/macro-invalidshape.check
new file mode 100644
index 0000000000..1938f5ae47
--- /dev/null
+++ b/test/files/neg/macro-invalidshape.check
@@ -0,0 +1,20 @@
+Macros_Test_2.scala:2: error: macro implementation reference has wrong shape. required:
+macro [<static object>].<method name>[[<type args>]] or
+macro [<macro bundle>].<method name>[[<type args>]]
+ def foo1(x: Any) = macro 2
+ ^
+Macros_Test_2.scala:3: error: macro implementation reference has wrong shape. required:
+macro [<static object>].<method name>[[<type args>]] or
+macro [<macro bundle>].<method name>[[<type args>]]
+ def foo2(x: Any) = macro Impls.foo(null)(null)
+ ^
+Macros_Test_2.scala:4: error: missing arguments for method foo in object Impls;
+follow this method with `_' if you want to treat it as a partially applied function
+ def foo3(x: Any) = macro {2; Impls.foo}
+ ^
+Macros_Test_2.scala:7: error: macro implementation reference has wrong shape. required:
+macro [<static object>].<method name>[[<type args>]] or
+macro [<macro bundle>].<method name>[[<type args>]]
+ def foo = macro impl
+ ^
+four errors found
diff --git a/test/files/neg/macro-invalidimpl-c.flags b/test/files/neg/macro-invalidshape.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/neg/macro-invalidimpl-c.flags
+++ b/test/files/neg/macro-invalidshape.flags
diff --git a/test/files/neg/macro-invalidimpl-b/Impls_1.scala b/test/files/neg/macro-invalidshape/Impls_1.scala
index 4467021545..4467021545 100644
--- a/test/files/neg/macro-invalidimpl-b/Impls_1.scala
+++ b/test/files/neg/macro-invalidshape/Impls_1.scala
diff --git a/test/files/neg/macro-invalidshape/Macros_Test_2.scala b/test/files/neg/macro-invalidshape/Macros_Test_2.scala
new file mode 100644
index 0000000000..cf37e14d8e
--- /dev/null
+++ b/test/files/neg/macro-invalidshape/Macros_Test_2.scala
@@ -0,0 +1,17 @@
+object Macros {
+ def foo1(x: Any) = macro 2
+ def foo2(x: Any) = macro Impls.foo(null)(null)
+ def foo3(x: Any) = macro {2; Impls.foo}
+ {
+ def impl(c: scala.reflect.macros.Context) = c.literalUnit
+ def foo = macro impl
+ foo
+ }
+}
+
+object Test extends App {
+ import Macros._
+ foo1(42)
+ foo2(42)
+ foo3(42)
+} \ No newline at end of file
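Note: the consolidated macro-invalidshape check above requires the reference after the macro keyword to have the shape macro [<static object>].<method name>[[<type args>]]; literals, applications, and blocks are rejected. A minimal sketch of the accepted shape, with the hypothetical names OkImpls and OkMacros (not part of this commit), looks like this.

import scala.language.experimental.macros
import scala.reflect.macros.Context

object OkImpls {
  // Identity-style implementation with the shape matching def foo(x: Any).
  def impl(c: Context)(x: c.Expr[Any]): c.Expr[Any] = x
}

object OkMacros {
  // A plain selection of a method on a static object is the accepted form.
  def foo(x: Any): Any = macro OkImpls.impl
}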
diff --git a/test/files/neg/macro-invalidsig-context-bounds/Impls_1.scala b/test/files/neg/macro-invalidsig-context-bounds/Impls_1.scala
deleted file mode 100644
index c066c485b1..0000000000
--- a/test/files/neg/macro-invalidsig-context-bounds/Impls_1.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo[U: c.WeakTypeTag: Numeric](c: Ctx) = {
- import c.universe._
- Literal(Constant(42))
- }
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-context-bounds/Macros_Test_1.scala b/test/files/neg/macro-invalidsig-context-bounds/Macros_Test_1.scala
deleted file mode 100644
index 5b4602f328..0000000000
--- a/test/files/neg/macro-invalidsig-context-bounds/Macros_Test_1.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
- def foo[U] = macro Impls.foo[U]
-}
-
-object Test extends App {
- import Macros._
- println(foo[String])
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-ctx-badargc/Impls_1.scala b/test/files/neg/macro-invalidsig-ctx-badargc/Impls_1.scala
deleted file mode 100644
index 4d5d29158f..0000000000
--- a/test/files/neg/macro-invalidsig-ctx-badargc/Impls_1.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Impls {
- def foo = ???
-}
diff --git a/test/files/neg/macro-invalidsig-ctx-badargc/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-ctx-badargc/Macros_Test_2.scala
deleted file mode 100644
index 96a8de2832..0000000000
--- a/test/files/neg/macro-invalidsig-ctx-badargc/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
- def foo = macro Impls.foo
-}
-
-object Test extends App {
- import Macros._
- foo
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-ctx-badtype/Impls_1.scala b/test/files/neg/macro-invalidsig-ctx-badtype/Impls_1.scala
deleted file mode 100644
index cf1a4cf85c..0000000000
--- a/test/files/neg/macro-invalidsig-ctx-badtype/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.api.{Universe => Ctx}
-
-object Impls {
- def foo(c: Ctx) = ???
-}
diff --git a/test/files/neg/macro-invalidsig-ctx-badtype/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-ctx-badtype/Macros_Test_2.scala
deleted file mode 100644
index 96a8de2832..0000000000
--- a/test/files/neg/macro-invalidsig-ctx-badtype/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
- def foo = macro Impls.foo
-}
-
-object Test extends App {
- import Macros._
- foo
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-ctx-badvarargs/Impls_1.scala b/test/files/neg/macro-invalidsig-ctx-badvarargs/Impls_1.scala
deleted file mode 100644
index c4ed8be91e..0000000000
--- a/test/files/neg/macro-invalidsig-ctx-badvarargs/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo(cs: Ctx*) = ???
-}
diff --git a/test/files/neg/macro-invalidsig-ctx-badvarargs/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-ctx-badvarargs/Macros_Test_2.scala
deleted file mode 100644
index 96a8de2832..0000000000
--- a/test/files/neg/macro-invalidsig-ctx-badvarargs/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
- def foo = macro Impls.foo
-}
-
-object Test extends App {
- import Macros._
- foo
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-ctx-noctx/Impls_1.scala b/test/files/neg/macro-invalidsig-ctx-noctx/Impls_1.scala
deleted file mode 100644
index 6904cfb1dc..0000000000
--- a/test/files/neg/macro-invalidsig-ctx-noctx/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo(c: Ctx) = ???
-}
diff --git a/test/files/neg/macro-invalidsig-ctx-noctx/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-ctx-noctx/Macros_Test_2.scala
deleted file mode 100644
index e053cf99df..0000000000
--- a/test/files/neg/macro-invalidsig-ctx-noctx/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
- def foo(x: Any) = macro Impls.foo
-}
-
-object Test extends App {
- import Macros._
- foo(42)
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-implicit-params/Impls_Macros_1.scala b/test/files/neg/macro-invalidsig-implicit-params/Impls_Macros_1.scala
deleted file mode 100644
index 8205694768..0000000000
--- a/test/files/neg/macro-invalidsig-implicit-params/Impls_Macros_1.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo_targs[T, U: c.WeakTypeTag](c: Ctx)(implicit x: c.Expr[Int]) = {
- import c.{prefix => prefix}
- import c.universe._
- val body = Block(List(
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("invoking foo_targs...")))),
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("type of prefix is: " + prefix.staticType)))),
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("U is: " + implicitly[c.WeakTypeTag[U]].tpe))))),
- Literal(Constant(())))
- c.Expr[Unit](body)
- }
-}
-
-class Macros[T] {
- def foo_targs[U](x: Int) = macro Impls.foo_targs[T, U]
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-implicit-params/Test_2.scala b/test/files/neg/macro-invalidsig-implicit-params/Test_2.scala
deleted file mode 100644
index 90e850df21..0000000000
--- a/test/files/neg/macro-invalidsig-implicit-params/Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
- println("foo_targs:")
- new Macros[Int]().foo_targs[String](42)
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-params-badargc/Impls_Macros_1.scala b/test/files/neg/macro-invalidsig-params-badargc/Impls_Macros_1.scala
deleted file mode 100644
index ae16612b93..0000000000
--- a/test/files/neg/macro-invalidsig-params-badargc/Impls_Macros_1.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo(c: Ctx)(x: c.Expr[Int], y: c.Expr[Int]) = ???
-}
-
-object Macros {
- def foo(x: Int) = macro Impls.foo
-}
diff --git a/test/files/neg/macro-invalidsig-params-badargc/Test_2.scala b/test/files/neg/macro-invalidsig-params-badargc/Test_2.scala
deleted file mode 100644
index cbd6232073..0000000000
--- a/test/files/neg/macro-invalidsig-params-badargc/Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
- import Macros._
- foo(42)
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-params-badtype.check b/test/files/neg/macro-invalidsig-params-badtype.check
index 3ec40d7e5b..3cc1c9abf1 100644
--- a/test/files/neg/macro-invalidsig-params-badtype.check
+++ b/test/files/neg/macro-invalidsig-params-badtype.check
@@ -1,7 +1,7 @@
Impls_Macros_1.scala:8: error: macro implementation has wrong shape:
required: (c: scala.reflect.macros.Context)(x: c.Expr[Int]): c.Expr[Any]
- found : (c: scala.reflect.macros.Context)(x: c.universe.Tree): Nothing
-type mismatch for parameter x: c.Expr[Int] does not conform to c.universe.Tree
+ found : (c: scala.reflect.macros.Context)(x: Int): Nothing
+type mismatch for parameter x: c.Expr[Int] does not conform to Int
def foo(x: Int) = macro Impls.foo
- ^
+ ^
one error found
diff --git a/test/files/neg/macro-invalidsig-params-badtype/Impls_Macros_1.scala b/test/files/neg/macro-invalidsig-params-badtype/Impls_Macros_1.scala
index ab90b85881..175683d6d3 100644
--- a/test/files/neg/macro-invalidsig-params-badtype/Impls_Macros_1.scala
+++ b/test/files/neg/macro-invalidsig-params-badtype/Impls_Macros_1.scala
@@ -1,7 +1,7 @@
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def foo(c: Ctx)(x: c.universe.Tree) = ???
+ def foo(c: Ctx)(x: Int) = ???
}
object Macros {
diff --git a/test/files/neg/macro-invalidsig-params-badtype/Test_2.scala b/test/files/neg/macro-invalidsig-params-badtype/Test_2.scala
deleted file mode 100644
index cbd6232073..0000000000
--- a/test/files/neg/macro-invalidsig-params-badtype/Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
- import Macros._
- foo(42)
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-params-badvarargs/Impls_Macros_1.scala b/test/files/neg/macro-invalidsig-params-badvarargs/Impls_Macros_1.scala
deleted file mode 100644
index b4c75ad0ba..0000000000
--- a/test/files/neg/macro-invalidsig-params-badvarargs/Impls_Macros_1.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo(c: Ctx)(xs: c.Expr[Int]*) = ???
-}
-
-object Macros {
- def foo(x: Int, y: Int) = macro Impls.foo
-}
diff --git a/test/files/neg/macro-invalidsig-params-badvarargs/Test_2.scala b/test/files/neg/macro-invalidsig-params-badvarargs/Test_2.scala
deleted file mode 100644
index fa50ac4f73..0000000000
--- a/test/files/neg/macro-invalidsig-params-badvarargs/Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
- import Macros._
- foo(42, 100)
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-params-namemismatch/Impls_Macros_1.scala b/test/files/neg/macro-invalidsig-params-namemismatch/Impls_Macros_1.scala
deleted file mode 100644
index c7cf0b06c4..0000000000
--- a/test/files/neg/macro-invalidsig-params-namemismatch/Impls_Macros_1.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo(c: Ctx)(y: c.Expr[Int], x: c.Expr[Int]) = ???
-}
-
-object Macros {
- def foo(x: Int, y: Int) = macro Impls.foo
-}
diff --git a/test/files/neg/macro-invalidsig-params-namemismatch/Test_2.scala b/test/files/neg/macro-invalidsig-params-namemismatch/Test_2.scala
deleted file mode 100644
index fa50ac4f73..0000000000
--- a/test/files/neg/macro-invalidsig-params-namemismatch/Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
- import Macros._
- foo(42, 100)
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-badtype/Impls_1.scala b/test/files/neg/macro-invalidsig-tparams-badtype/Impls_1.scala
deleted file mode 100644
index dbeca178a7..0000000000
--- a/test/files/neg/macro-invalidsig-tparams-badtype/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo[U](c: Ctx)(U: c.universe.Type) = ???
-}
diff --git a/test/files/neg/macro-invalidsig-tparams-badtype/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-tparams-badtype/Macros_Test_2.scala
deleted file mode 100644
index a82e813221..0000000000
--- a/test/files/neg/macro-invalidsig-tparams-badtype/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
- def foo[U] = macro Impls.foo[U]
-}
-
-object Test extends App {
- import Macros._
- foo[Int]
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-a/Impls_1.scala b/test/files/neg/macro-invalidsig-tparams-bounds-a/Impls_1.scala
deleted file mode 100644
index 89020de7dd..0000000000
--- a/test/files/neg/macro-invalidsig-tparams-bounds-a/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo[U <: String](c: Ctx) = ???
-}
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-a/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-tparams-bounds-a/Macros_Test_2.scala
deleted file mode 100644
index a82e813221..0000000000
--- a/test/files/neg/macro-invalidsig-tparams-bounds-a/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
- def foo[U] = macro Impls.foo[U]
-}
-
-object Test extends App {
- import Macros._
- foo[Int]
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-b/Impls_1.scala b/test/files/neg/macro-invalidsig-tparams-bounds-b/Impls_1.scala
deleted file mode 100644
index 89020de7dd..0000000000
--- a/test/files/neg/macro-invalidsig-tparams-bounds-b/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo[U <: String](c: Ctx) = ???
-}
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-b/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-tparams-bounds-b/Macros_Test_2.scala
deleted file mode 100644
index eed6369a16..0000000000
--- a/test/files/neg/macro-invalidsig-tparams-bounds-b/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
- def foo[U <: Int] = macro Impls.foo[U]
-}
-
-object Test extends App {
- import Macros._
- foo[Int]
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-a/Impls_1.scala b/test/files/neg/macro-invalidsig-tparams-notparams-a/Impls_1.scala
deleted file mode 100644
index f8b3c92869..0000000000
--- a/test/files/neg/macro-invalidsig-tparams-notparams-a/Impls_1.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo[U: c.WeakTypeTag](c: Ctx) = ???
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-a/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-tparams-notparams-a/Macros_Test_2.scala
deleted file mode 100644
index 96a8de2832..0000000000
--- a/test/files/neg/macro-invalidsig-tparams-notparams-a/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
- def foo = macro Impls.foo
-}
-
-object Test extends App {
- import Macros._
- foo
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-b/Impls_1.scala b/test/files/neg/macro-invalidsig-tparams-notparams-b/Impls_1.scala
deleted file mode 100644
index baf3aab9e3..0000000000
--- a/test/files/neg/macro-invalidsig-tparams-notparams-b/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo[T: c.WeakTypeTag, U: c.WeakTypeTag, V](c: Ctx)(implicit V: c.WeakTypeTag[V]): c.Expr[Unit] = {
- println(implicitly[c.WeakTypeTag[T]])
- println(implicitly[c.WeakTypeTag[U]])
- println(V)
- c.literalUnit
- }
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-b/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-tparams-notparams-b/Macros_Test_2.scala
deleted file mode 100644
index 7d02bf613a..0000000000
--- a/test/files/neg/macro-invalidsig-tparams-notparams-b/Macros_Test_2.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-class D[T] {
- class C[U] {
- def foo[V] = macro Impls.foo
- }
-}
-
-object Test extends App {
- val outer1 = new D[Int]
- val outer2 = new outer1.C[String]
- outer2.foo[Boolean]
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-c/Impls_1.scala b/test/files/neg/macro-invalidsig-tparams-notparams-c/Impls_1.scala
deleted file mode 100644
index 44b4ed6ab3..0000000000
--- a/test/files/neg/macro-invalidsig-tparams-notparams-c/Impls_1.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo[T: c.WeakTypeTag, U: c.WeakTypeTag, V](c: Ctx)(implicit V: c.WeakTypeTag[V]): c.Expr[Unit] = {
- import c.universe._
- println(implicitly[c.WeakTypeTag[T]])
- println(implicitly[c.WeakTypeTag[U]])
- println(V)
- c.literalUnit
- }
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-c/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-tparams-notparams-c/Macros_Test_2.scala
deleted file mode 100644
index 109e142e52..0000000000
--- a/test/files/neg/macro-invalidsig-tparams-notparams-c/Macros_Test_2.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-class D[T] {
- class C[U] {
- def foo[V] = macro Impls.foo[V]
- }
-}
-
-object Test extends App {
- val outer1 = new D[Int]
- val outer2 = new outer1.C[String]
- outer2.foo[Boolean]
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig.check b/test/files/neg/macro-invalidsig.check
new file mode 100644
index 0000000000..cbdaf51081
--- /dev/null
+++ b/test/files/neg/macro-invalidsig.check
@@ -0,0 +1,82 @@
+Macros_Test_2.scala:2: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context)(implicit evidence$2: Numeric[U]): c.universe.Literal
+macro implementations cannot have implicit parameters other than WeakTypeTag evidences
+ def foo[U] = macro Impls1.foo[U]
+ ^
+Macros_Test_2.scala:6: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ found : : Nothing
+number of parameter sections differ
+ def foo = macro Impls2.foo
+ ^
+Macros_Test_2.scala:10: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ found : (c: scala.reflect.api.Universe): Nothing
+type mismatch for parameter c: scala.reflect.macros.Context does not conform to scala.reflect.api.Universe
+ def foo = macro Impls3.foo
+ ^
+Macros_Test_2.scala:14: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ found : (cs: scala.reflect.macros.Context*): Nothing
+types incompatible for parameter cs: corresponding is not a vararg parameter
+ def foo = macro Impls4.foo
+ ^
+Macros_Test_2.scala:18: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context)(x: c.Expr[Any]): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context): Nothing
+number of parameter sections differ
+ def foo(x: Any) = macro Impls5.foo
+ ^
+Macros_Test_2.scala:22: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context)(x: c.Expr[Int]): c.Expr[Unit]
+ found : (c: scala.reflect.macros.Context)(implicit x: c.Expr[Int]): c.Expr[Unit]
+macro implementations cannot have implicit parameters other than WeakTypeTag evidences
+ def foo[U](x: Int) = macro Impls6.foo[T, U]
+ ^
+Macros_Test_2.scala:26: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context)(x: c.Expr[Int]): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context)(x: c.Expr[Int], y: c.Expr[Int]): Nothing
+parameter lists have different length, found extra parameter y: c.Expr[Int]
+ def foo(x: Int) = macro Impls7.foo
+ ^
+Macros_Test_2.scala:30: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context)(x: c.Expr[Int]): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context)(x: c.universe.Symbol): Nothing
+type mismatch for parameter x: c.Expr[Int] does not conform to c.universe.Symbol
+ def foo(x: Int) = macro Impls8.foo
+ ^
+Macros_Test_2.scala:34: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context)(x: c.Expr[Int], y: c.Expr[Int]): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context)(xs: c.Expr[Int]*): Nothing
+parameter lists have different length, required extra parameter y: c.Expr[Int]
+ def foo(x: Int, y: Int) = macro Impls9.foo
+ ^
+Macros_Test_2.scala:38: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context)(x: c.Expr[Int], y: c.Expr[Int]): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context)(y: c.Expr[Int], x: c.Expr[Int]): Nothing
+parameter names differ: x != y
+ def foo(x: Int, y: Int) = macro Impls10.foo
+ ^
+Macros_Test_2.scala:42: error: macro implementation has wrong shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context)(U: c.universe.Type): Nothing
+number of parameter sections differ
+ def foo[U] = macro Impls11.foo[U]
+ ^
+Macros_Test_2.scala:46: error: type arguments [U] do not conform to method foo's type parameter bounds [U <: String]
+ def foo[U] = macro Impls12.foo[U]
+ ^
+Macros_Test_2.scala:50: error: type arguments [U] do not conform to method foo's type parameter bounds [U <: String]
+ def foo[U <: Int] = macro Impls13.foo[U]
+ ^
+Macros_Test_2.scala:54: error: wrong number of type parameters for method foo: [U](c: scala.reflect.macros.Context)(implicit evidence$4: c.WeakTypeTag[U])Nothing
+ def foo = macro Impls14.foo
+ ^
+Macros_Test_2.scala:59: error: wrong number of type parameters for method foo: [T, U, V](c: scala.reflect.macros.Context)(implicit evidence$5: c.WeakTypeTag[T], implicit evidence$6: c.WeakTypeTag[U], implicit V: c.WeakTypeTag[V])c.Expr[Unit]
+ def foo15[V] = macro Impls15.foo
+ ^
+Macros_Test_2.scala:60: error: wrong number of type parameters for method foo: [T, U, V](c: scala.reflect.macros.Context)(implicit evidence$7: c.WeakTypeTag[T], implicit evidence$8: c.WeakTypeTag[U], implicit V: c.WeakTypeTag[V])c.Expr[Unit]
+ def foo16[V] = macro Impls16.foo[V]
+ ^
+16 errors found
diff --git a/test/files/neg/macro-invalidimpl-d.flags b/test/files/neg/macro-invalidsig.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/neg/macro-invalidimpl-d.flags
+++ b/test/files/neg/macro-invalidsig.flags
diff --git a/test/files/neg/macro-invalidsig/Impls_1.scala b/test/files/neg/macro-invalidsig/Impls_1.scala
new file mode 100644
index 0000000000..e7d6c18f8d
--- /dev/null
+++ b/test/files/neg/macro-invalidsig/Impls_1.scala
@@ -0,0 +1,88 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.Context
+
+object Impls1 {
+ def foo[U: c.WeakTypeTag: Numeric](c: Context) = {
+ import c.universe._
+ Literal(Constant(42))
+ }
+}
+
+object Impls2 {
+ def foo = ???
+}
+
+object Impls3 {
+ def foo(c: scala.reflect.api.Universe) = ???
+}
+
+object Impls4 {
+ def foo(cs: Context*) = ???
+}
+
+object Impls5 {
+ def foo(c: Context) = ???
+}
+
+object Impls6 {
+ def foo[T, U: c.WeakTypeTag](c: Context)(implicit x: c.Expr[Int]) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val body = Block(List(
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("invoking foo_targs...")))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("type of prefix is: " + prefix.staticType)))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("U is: " + implicitly[c.WeakTypeTag[U]].tpe))))),
+ Literal(Constant(())))
+ c.Expr[Unit](body)
+ }
+}
+
+object Impls7 {
+ def foo(c: Context)(x: c.Expr[Int], y: c.Expr[Int]) = ???
+}
+
+object Impls8 {
+ def foo(c: Context)(x: c.universe.Symbol) = ???
+}
+
+object Impls9 {
+ def foo(c: Context)(xs: c.Expr[Int]*) = ???
+}
+
+object Impls10 {
+ def foo(c: Context)(y: c.Expr[Int], x: c.Expr[Int]) = ???
+}
+
+object Impls11 {
+ def foo[U](c: Context)(U: c.universe.Type) = ???
+}
+
+object Impls12 {
+ def foo[U <: String](c: Context) = ???
+}
+
+object Impls13 {
+ def foo[U <: String](c: Context) = ???
+}
+
+object Impls14 {
+ def foo[U: c.WeakTypeTag](c: Context) = ???
+}
+
+object Impls15 {
+ def foo[T: c.WeakTypeTag, U: c.WeakTypeTag, V](c: Context)(implicit V: c.WeakTypeTag[V]): c.Expr[Unit] = {
+ println(implicitly[c.WeakTypeTag[T]])
+ println(implicitly[c.WeakTypeTag[U]])
+ println(V)
+ c.literalUnit
+ }
+}
+
+object Impls16 {
+ def foo[T: c.WeakTypeTag, U: c.WeakTypeTag, V](c: Context)(implicit V: c.WeakTypeTag[V]): c.Expr[Unit] = {
+ println(implicitly[c.WeakTypeTag[T]])
+ println(implicitly[c.WeakTypeTag[U]])
+ println(V)
+ c.literalUnit
+ }
+} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig/Macros_Test_2.scala b/test/files/neg/macro-invalidsig/Macros_Test_2.scala
new file mode 100644
index 0000000000..0a6a321431
--- /dev/null
+++ b/test/files/neg/macro-invalidsig/Macros_Test_2.scala
@@ -0,0 +1,83 @@
+object Macros1 {
+ def foo[U] = macro Impls1.foo[U]
+}
+
+object Macros2 {
+ def foo = macro Impls2.foo
+}
+
+object Macros3 {
+ def foo = macro Impls3.foo
+}
+
+object Macros4 {
+ def foo = macro Impls4.foo
+}
+
+object Macros5 {
+ def foo(x: Any) = macro Impls5.foo
+}
+
+class Macros6[T] {
+ def foo[U](x: Int) = macro Impls6.foo[T, U]
+}
+
+object Macros7 {
+ def foo(x: Int) = macro Impls7.foo
+}
+
+object Macros8 {
+ def foo(x: Int) = macro Impls8.foo
+}
+
+object Macros9 {
+ def foo(x: Int, y: Int) = macro Impls9.foo
+}
+
+object Macros10 {
+ def foo(x: Int, y: Int) = macro Impls10.foo
+}
+
+object Macros11 {
+ def foo[U] = macro Impls11.foo[U]
+}
+
+object Macros12 {
+ def foo[U] = macro Impls12.foo[U]
+}
+
+object Macros13 {
+ def foo[U <: Int] = macro Impls13.foo[U]
+}
+
+object Macros14 {
+ def foo = macro Impls14.foo
+}
+
+class D[T] {
+ class C[U] {
+ def foo15[V] = macro Impls15.foo
+ def foo16[V] = macro Impls16.foo[V]
+ }
+}
+
+object Test extends App {
+ println(Macros1.foo[String])
+ println(Macros2.foo)
+ println(Macros3.foo)
+ println(Macros4.foo)
+ println(Macros5.foo(42))
+ println(new Macros6[Int]().foo[String](42))
+ println(Macros7.foo(42))
+ println(Macros8.foo)
+ println(Macros9.foo(4, 2))
+ println(Macros10.foo(4, 2))
+ println(Macros11.foo[Int])
+ println(Macros12.foo[Int])
+ println(Macros13.foo[Int])
+ println(Macros14.foo)
+ val outer1 = new D[Int]
+ val outer2 = new outer1.C[String]
+ outer2.foo15[Boolean]
+ outer2.foo16[Boolean]
+} \ No newline at end of file
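Note: the macro-invalidsig check above states that WeakTypeTag evidences are the only implicit parameters a macro implementation may take, and that the macro def must pass each implementation type parameter explicitly. A minimal conforming sketch, using the hypothetical names OkImpls and OkMacros rather than anything from this commit:

import scala.language.experimental.macros
import scala.reflect.macros.Context

object OkImpls {
  // The context bound desugars to an implicit c.WeakTypeTag[U] evidence,
  // which is the one kind of implicit parameter permitted here.
  def foo[U: c.WeakTypeTag](c: Context): c.Expr[String] = {
    import c.universe._
    c.Expr[String](Literal(Constant(weakTypeOf[U].toString)))
  }
}

object OkMacros {
  // The macro def forwards its own type parameter as Impls-style foo[U].
  def foo[U]: String = macro OkImpls.foo[U]
}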
diff --git a/test/files/neg/macro-invalidusage-badargs.check b/test/files/neg/macro-invalidusage-badargs.check
index 294cfd0cf5..4c1115418b 100644
--- a/test/files/neg/macro-invalidusage-badargs.check
+++ b/test/files/neg/macro-invalidusage-badargs.check
@@ -1,6 +1,18 @@
-Macros_Test_2.scala:7: error: type mismatch;
+Macros_Test_2.scala:5: error: type mismatch;
found : String("42")
required: Int
- val s: String = foo("42")
- ^
-one error found
+ foo("42")
+ ^
+Macros_Test_2.scala:6: error: too few argument lists for macro invocation
+ foo
+ ^
+Macros_Test_2.scala:7: error: Int does not take parameters
+ foo(4)(2)
+ ^
+Macros_Test_2.scala:8: error: macro applications do not support named and/or default arguments
+ foo()
+ ^
+Macros_Test_2.scala:9: error: too many arguments for macro method foo: (x: Int)Int
+ foo(4, 2)
+ ^
+5 errors found
diff --git a/test/files/neg/macro-invalidusage-badargs/Macros_Test_2.scala b/test/files/neg/macro-invalidusage-badargs/Macros_Test_2.scala
index a6af1bb277..0b3ca0590b 100644
--- a/test/files/neg/macro-invalidusage-badargs/Macros_Test_2.scala
+++ b/test/files/neg/macro-invalidusage-badargs/Macros_Test_2.scala
@@ -1,8 +1,10 @@
-object Macros {
- def foo(x: Int) = macro Impls.foo
-}
+object Macros { def foo(x: Int) = macro Impls.foo }
+import Macros._
object Test extends App {
- import Macros._
- val s: String = foo("42")
+ foo("42")
+ foo
+ foo(4)(2)
+ foo()
+ foo(4, 2)
} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-badbounds-a.check b/test/files/neg/macro-invalidusage-badbounds.check
index 277f407d38..277f407d38 100644
--- a/test/files/neg/macro-invalidusage-badbounds-a.check
+++ b/test/files/neg/macro-invalidusage-badbounds.check
diff --git a/test/files/neg/macro-invalidimpl-e.flags b/test/files/neg/macro-invalidusage-badbounds.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/neg/macro-invalidimpl-e.flags
+++ b/test/files/neg/macro-invalidusage-badbounds.flags
diff --git a/test/files/neg/macro-invalidusage-badbounds-a/Impls_1.scala b/test/files/neg/macro-invalidusage-badbounds/Impls_1.scala
index 6ee71a3628..6ee71a3628 100644
--- a/test/files/neg/macro-invalidusage-badbounds-a/Impls_1.scala
+++ b/test/files/neg/macro-invalidusage-badbounds/Impls_1.scala
diff --git a/test/files/neg/macro-invalidusage-badbounds-a/Macros_Test_2.scala b/test/files/neg/macro-invalidusage-badbounds/Macros_Test_2.scala
index 3139599108..3139599108 100644
--- a/test/files/neg/macro-invalidusage-badbounds-a/Macros_Test_2.scala
+++ b/test/files/neg/macro-invalidusage-badbounds/Macros_Test_2.scala
diff --git a/test/files/neg/macro-invalidusage-badtargs-untyped.check b/test/files/neg/macro-invalidusage-badtargs-untyped.check
new file mode 100644
index 0000000000..1678180281
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-badtargs-untyped.check
@@ -0,0 +1,18 @@
+Macros_Test_2.scala:11: error: macro method foo1: (x: _)Int does not take type parameters.
+ foo1[String](42)
+ ^
+Macros_Test_2.scala:12: error: wrong number of type parameters for macro method foo2: [T](x: _)Int
+ foo2[String, String](42)
+ ^
+Macros_Test_2.scala:13: error: wrong number of type parameters for macro method foo3: [T, U](x: _)Int
+ foo3[String](42)
+ ^
+Macros_Test_2.scala:14: error: String takes no type parameters, expected: one
+ foo4[String](42)
+ ^
+Macros_Test_2.scala:15: error: kinds of the type arguments (List) do not conform to the expected kinds of the type parameters (type T).
+List's type parameters do not match type T's expected parameters:
+type A has no type parameters, but type U has one
+ foo5[List](42)
+ ^
+5 errors found
diff --git a/test/files/neg/macro-invalidimpl-f.flags b/test/files/neg/macro-invalidusage-badtargs-untyped.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/neg/macro-invalidimpl-f.flags
+++ b/test/files/neg/macro-invalidusage-badtargs-untyped.flags
diff --git a/test/files/neg/macro-invalidusage-badtargs.check b/test/files/neg/macro-invalidusage-badtargs.check
index 73801ab43e..6a9e1d6e6b 100644
--- a/test/files/neg/macro-invalidusage-badtargs.check
+++ b/test/files/neg/macro-invalidusage-badtargs.check
@@ -1,4 +1,18 @@
-Macros_Test_2.scala:7: error: macro method foo: (x: Int)Int does not take type parameters.
- val s: String = foo[String](42)
- ^
-one error found
+Macros_Test_2.scala:11: error: macro method foo1: (x: Int)Int does not take type parameters.
+ foo1[String](42)
+ ^
+Macros_Test_2.scala:12: error: wrong number of type parameters for macro method foo2: [T](x: Int)Int
+ foo2[String, String](42)
+ ^
+Macros_Test_2.scala:13: error: wrong number of type parameters for macro method foo3: [T, U](x: Int)Int
+ foo3[String](42)
+ ^
+Macros_Test_2.scala:14: error: String takes no type parameters, expected: one
+ foo4[String](42)
+ ^
+Macros_Test_2.scala:15: error: kinds of the type arguments (List) do not conform to the expected kinds of the type parameters (type T).
+List's type parameters do not match type T's expected parameters:
+type A has no type parameters, but type U has one
+ foo5[List](42)
+ ^
+5 errors found
diff --git a/test/files/neg/macro-invalidusage-badtargs/Macros_Test_2.scala b/test/files/neg/macro-invalidusage-badtargs/Macros_Test_2.scala
index c54093b637..fd16d163c3 100644
--- a/test/files/neg/macro-invalidusage-badtargs/Macros_Test_2.scala
+++ b/test/files/neg/macro-invalidusage-badtargs/Macros_Test_2.scala
@@ -1,8 +1,16 @@
object Macros {
- def foo(x: Int) = macro Impls.foo
+ def foo1(x: Int) = macro Impls.foo
+ def foo2[T](x: Int) = macro Impls.foo
+ def foo3[T, U](x: Int) = macro Impls.foo
+ def foo4[T[_]](x: Int) = macro Impls.foo
+ def foo5[T[U[_]]](x: Int) = macro Impls.foo
}
object Test extends App {
import Macros._
- val s: String = foo[String](42)
+ foo1[String](42)
+ foo2[String, String](42)
+ foo3[String](42)
+ foo4[String](42)
+ foo5[List](42)
} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-nontypeable.check b/test/files/neg/macro-invalidusage-nontypeable.check
new file mode 100644
index 0000000000..88e6057e5e
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-nontypeable.check
@@ -0,0 +1,4 @@
+Test_2.scala:2: error: not found: value IDoNotExist
+ Macros.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidimpl-g.flags b/test/files/neg/macro-invalidusage-nontypeable.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/neg/macro-invalidimpl-g.flags
+++ b/test/files/neg/macro-invalidusage-nontypeable.flags
diff --git a/test/files/neg/macro-invalidusage-nontypeable/Impls_Macros_1.scala b/test/files/neg/macro-invalidusage-nontypeable/Impls_Macros_1.scala
new file mode 100644
index 0000000000..869a5a41fa
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-nontypeable/Impls_Macros_1.scala
@@ -0,0 +1,13 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = {
+ import c.universe._
+ val body = Ident(TermName("IDoNotExist"))
+ c.Expr[Int](body)
+ }
+}
+
+object Macros {
+ def foo = macro Impls.foo
+} \ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-b/Test_2.scala b/test/files/neg/macro-invalidusage-nontypeable/Test_2.scala
index 7dffc5107d..acfddae942 100644
--- a/test/files/run/macro-def-path-dependent-b/Test_2.scala
+++ b/test/files/neg/macro-invalidusage-nontypeable/Test_2.scala
@@ -1,3 +1,3 @@
object Test extends App {
- println("it works")
+ Macros.foo
} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-presuper.check b/test/files/neg/macro-invalidusage-presuper.check
new file mode 100644
index 0000000000..f63a0eef80
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-presuper.check
@@ -0,0 +1,4 @@
+Macros_Test_2.scala:3: error: only type definitions and concrete field definitions allowed in early object initialization section
+class D extends { def x = macro impl } with AnyRef
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidimpl-h.flags b/test/files/neg/macro-invalidusage-presuper.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/neg/macro-invalidimpl-h.flags
+++ b/test/files/neg/macro-invalidusage-presuper.flags
diff --git a/test/files/neg/macro-invalidusage-presuper/Impls_1.scala b/test/files/neg/macro-invalidusage-presuper/Impls_1.scala
new file mode 100644
index 0000000000..b39a037c47
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-presuper/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.Context
+
+object Impls {
+ def impl(c: Context) = c.literalUnit
+} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-presuper/Macros_Test_2.scala b/test/files/neg/macro-invalidusage-presuper/Macros_Test_2.scala
new file mode 100644
index 0000000000..ff46a5915f
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-presuper/Macros_Test_2.scala
@@ -0,0 +1,3 @@
+import Impls._
+
+class D extends { def x = macro impl } with AnyRef \ No newline at end of file
diff --git a/test/files/neg/macro-qmarkqmarkqmark.check b/test/files/neg/macro-qmarkqmarkqmark.check
new file mode 100644
index 0000000000..bc3e25edaf
--- /dev/null
+++ b/test/files/neg/macro-qmarkqmarkqmark.check
@@ -0,0 +1,13 @@
+macro-qmarkqmarkqmark.scala:5: error: macro implementation is missing
+ foo1
+ ^
+macro-qmarkqmarkqmark.scala:8: error: too few argument lists for macro invocation
+ foo2
+ ^
+macro-qmarkqmarkqmark.scala:9: error: macro implementation is missing
+ foo2(1)
+ ^
+macro-qmarkqmarkqmark.scala:12: error: macro implementation is missing
+ foo3[Int]
+ ^
+four errors found
diff --git a/test/files/neg/macro-qmarkqmarkqmark.scala b/test/files/neg/macro-qmarkqmarkqmark.scala
new file mode 100644
index 0000000000..c8d8550fd8
--- /dev/null
+++ b/test/files/neg/macro-qmarkqmarkqmark.scala
@@ -0,0 +1,13 @@
+import language.experimental.macros
+
+object Macros {
+ def foo1 = macro ???
+ foo1
+
+ def foo2(x: Int) = macro ???
+ foo2
+ foo2(1)
+
+ def foo3[T] = macro ???
+ foo3[Int]
+} \ No newline at end of file
diff --git a/test/files/neg/macro-without-xmacros-a.check b/test/files/neg/macro-without-xmacros-a.check
index ae6c6c695a..ec194be3a9 100644
--- a/test/files/neg/macro-without-xmacros-a.check
+++ b/test/files/neg/macro-without-xmacros-a.check
@@ -1,5 +1,5 @@
Macros_2.scala:5: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
+by making the implicit value scala.language.experimental.macros visible.
This can be achieved by adding the import clause 'import scala.language.experimental.macros'
or by setting the compiler option -language:experimental.macros.
See the Scala docs for value scala.language.experimental.macros for a discussion
@@ -7,11 +7,11 @@ why the feature needs to be explicitly enabled.
def foo(x: Int): Int = macro foo_impl
^
Macros_2.scala:7: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
+by making the implicit value scala.language.experimental.macros visible.
def bar(x: Int): Int = macro bar_impl
^
Macros_2.scala:11: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
+by making the implicit value scala.language.experimental.macros visible.
def quux(x: Int): Int = macro quux_impl
^
three errors found
diff --git a/test/files/neg/macro-without-xmacros-b.check b/test/files/neg/macro-without-xmacros-b.check
index c3cadcf36a..c97850f0a9 100644
--- a/test/files/neg/macro-without-xmacros-b.check
+++ b/test/files/neg/macro-without-xmacros-b.check
@@ -1,5 +1,5 @@
Macros_2.scala:3: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
+by making the implicit value scala.language.experimental.macros visible.
This can be achieved by adding the import clause 'import scala.language.experimental.macros'
or by setting the compiler option -language:experimental.macros.
See the Scala docs for value scala.language.experimental.macros for a discussion
@@ -7,11 +7,11 @@ why the feature needs to be explicitly enabled.
def foo(x: Int): Int = macro Impls.foo_impl
^
Macros_2.scala:5: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
+by making the implicit value scala.language.experimental.macros visible.
def bar(x: Int): Int = macro Impls.bar_impl
^
Macros_2.scala:9: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
+by making the implicit value scala.language.experimental.macros visible.
def quux(x: Int): Int = macro Impls.quux_impl
^
three errors found
diff --git a/test/files/neg/no-implicit-to-anyref.check b/test/files/neg/no-implicit-to-anyref.check
index d94b57a30a..fe417ad8b0 100644
--- a/test/files/neg/no-implicit-to-anyref.check
+++ b/test/files/neg/no-implicit-to-anyref.check
@@ -1,10 +1,4 @@
-no-implicit-to-anyref.scala:11: error: type mismatch;
- found : Int(1)
- required: AnyRef
-Note: an implicit exists from scala.Int => java.lang.Integer, but
-methods inherited from Object are rendered ambiguous. This is to avoid
-a blanket implicit which would convert any scala.Int to any AnyRef.
-You may wish to use a type ascription: `x: java.lang.Integer`.
+no-implicit-to-anyref.scala:11: error: the result type of an implicit conversion must be more specific than AnyRef
1: AnyRef
^
no-implicit-to-anyref.scala:17: error: type mismatch;
diff --git a/test/files/neg/raw-types-stubs.check b/test/files/neg/raw-types-stubs.check
new file mode 100644
index 0000000000..f1b26a23b7
--- /dev/null
+++ b/test/files/neg/raw-types-stubs.check
@@ -0,0 +1,11 @@
+S_3.scala:1: error: class Sub needs to be abstract, since:
+it has 2 unimplemented members.
+/** As seen from class Sub, the missing signatures are as follows.
+ * For convenience, these are usable as stub implementations.
+ */
+ def raw(x$1: M_1[_ <: String]): Unit = ???
+ def raw(x$1: Any): Unit = ???
+
+class Sub extends Raw_2 { }
+ ^
+one error found
diff --git a/test/files/neg/raw-types-stubs/M_1.java b/test/files/neg/raw-types-stubs/M_1.java
new file mode 100644
index 0000000000..6ea0d2e593
--- /dev/null
+++ b/test/files/neg/raw-types-stubs/M_1.java
@@ -0,0 +1,3 @@
+public class M_1<K extends String> { }
+
+
diff --git a/test/files/neg/raw-types-stubs/Raw_2.java b/test/files/neg/raw-types-stubs/Raw_2.java
new file mode 100644
index 0000000000..eff7df790e
--- /dev/null
+++ b/test/files/neg/raw-types-stubs/Raw_2.java
@@ -0,0 +1,4 @@
+public abstract class Raw_2 {
+ public abstract void raw(Object list);
+ public abstract void raw(M_1 list);
+}
diff --git a/test/files/neg/raw-types-stubs/S_3.scala b/test/files/neg/raw-types-stubs/S_3.scala
new file mode 100644
index 0000000000..618eedc888
--- /dev/null
+++ b/test/files/neg/raw-types-stubs/S_3.scala
@@ -0,0 +1 @@
+class Sub extends Raw_2 { }
diff --git a/test/files/neg/stringinterpolation_macro-neg.check b/test/files/neg/stringinterpolation_macro-neg.check
index 8986b899a3..457f497f2f 100644
--- a/test/files/neg/stringinterpolation_macro-neg.check
+++ b/test/files/neg/stringinterpolation_macro-neg.check
@@ -66,5 +66,5 @@ Note that implicit conversions are not applicable because they are ambiguous:
^
stringinterpolation_macro-neg.scala:30: error: illegal conversion character
f"$s%i"
- ^
+ ^
15 errors found
diff --git a/test/files/neg/t414.check b/test/files/neg/t414.check
index e15dbaea71..30211eef8e 100644
--- a/test/files/neg/t414.check
+++ b/test/files/neg/t414.check
@@ -1,7 +1,7 @@
t414.scala:5: error: pattern type is incompatible with expected type;
found : Empty.type
required: IntMap[a]
-Note: if you intended to match against the class, try `case _: Empty[_]` or `case Empty()`
+Note: if you intended to match against the class, try `case Empty()`
case Empty =>
^
t414.scala:7: error: type mismatch;
diff --git a/test/files/neg/t4158.check b/test/files/neg/t4158.check
index 3ee2627c5b..af281c52cd 100644
--- a/test/files/neg/t4158.check
+++ b/test/files/neg/t4158.check
@@ -1,19 +1,7 @@
-t4158.scala:3: error: type mismatch;
- found : Null(null)
- required: Int
-Note that implicit conversions are not applicable because they are ambiguous:
- both method Integer2intNullConflict in class LowPriorityImplicits of type (x: Null)Int
- and method Integer2int in object Predef of type (x: Integer)Int
- are possible conversion functions from Null(null) to Int
+t4158.scala:3: error: an expression of type Null is ineligible for implicit conversion
var y = null: Int
^
-t4158.scala:2: error: type mismatch;
- found : Null(null)
- required: Int
-Note that implicit conversions are not applicable because they are ambiguous:
- both method Integer2intNullConflict in class LowPriorityImplicits of type (x: Null)Int
- and method Integer2int in object Predef of type (x: Integer)Int
- are possible conversion functions from Null(null) to Int
+t4158.scala:2: error: an expression of type Null is ineligible for implicit conversion
var x: Int = null
^
two errors found
diff --git a/test/files/neg/t4515.check b/test/files/neg/t4515.check
index a60d16295f..64e7cc1ca7 100644
--- a/test/files/neg/t4515.check
+++ b/test/files/neg/t4515.check
@@ -1,6 +1,6 @@
t4515.scala:37: error: type mismatch;
- found : _0(in value $anonfun) where type _0(in value $anonfun)
- required: (some other)_0(in value $anonfun)
+ found : _$1 where type _$1
+ required: _$2
handler.onEvent(target, ctx.getEvent, node, ctx)
^
one error found
diff --git a/test/files/neg/t4584.check b/test/files/neg/t4584.check
index 419f5704b1..97d07afa0e 100644
--- a/test/files/neg/t4584.check
+++ b/test/files/neg/t4584.check
@@ -1,7 +1,7 @@
t4584.scala:1: error: error in unicode escape
-class A { val /u2
+class A { val \u2
^
-t4584.scala:1: error: illegal character '/uffff'
-class A { val /u2
+t4584.scala:1: error: illegal character '\uffff'
+class A { val \u2
^
two errors found
diff --git a/test/files/neg/t4727.check b/test/files/neg/t4727.check
index 8a4536fec3..a17cdde044 100644
--- a/test/files/neg/t4727.check
+++ b/test/files/neg/t4727.check
@@ -1,10 +1,4 @@
-t4727.scala:5: error: type mismatch;
- found : Null
- required: Int
-Note that implicit conversions are not applicable because they are ambiguous:
- both method Integer2intNullConflict in class LowPriorityImplicits of type (x: Null)Int
- and method Integer2int in object Predef of type (x: Integer)Int
- are possible conversion functions from Null to Int
+t4727.scala:5: error: an expression of type Null is ineligible for implicit conversion
Error occurred in an application involving default arguments.
new C[Int]
^
diff --git a/test/files/neg/t4879.check b/test/files/neg/t4879.check
index 21cd329640..c7edd583c8 100644
--- a/test/files/neg/t4879.check
+++ b/test/files/neg/t4879.check
@@ -1,13 +1,13 @@
t4879.scala:6: error: pattern type is incompatible with expected type;
found : C.type
required: C
-Note: if you intended to match against the class, try `case _: C` or `case C(_)`
+Note: if you intended to match against the class, try `case C(_)`
case C => true
^
t4879.scala:10: error: pattern type is incompatible with expected type;
found : D.type
required: D[T,U,V]
-Note: if you intended to match against the class, try `case _: D[_,_,_]` or `case D(_,_,_)`
+Note: if you intended to match against the class, try `case D(_,_,_)`
case D => true
^
two errors found
diff --git a/test/files/neg/t5510.check b/test/files/neg/t5510.check
index 04220e79bb..322a2f5e25 100644
--- a/test/files/neg/t5510.check
+++ b/test/files/neg/t5510.check
@@ -1,15 +1,15 @@
t5510.scala:2: error: unclosed string literal
val s1 = s"xxx
- ^
+ ^
t5510.scala:3: error: unclosed string literal
val s2 = s"xxx $x
^
t5510.scala:4: error: unclosed string literal
val s3 = s"xxx $$
- ^
+ ^
t5510.scala:5: error: unclosed string literal
val s4 = ""s"
- ^
+ ^
t5510.scala:6: error: unclosed multi-line string literal
val s5 = ""s""" $s1 $s2 s"
^
diff --git a/test/files/neg/t5580b.check b/test/files/neg/t5580b.check
new file mode 100644
index 0000000000..45fde46ff9
--- /dev/null
+++ b/test/files/neg/t5580b.check
@@ -0,0 +1,6 @@
+t5580b.scala:11: error: polymorphic expression cannot be instantiated to expected type;
+ found : [A]scala.collection.mutable.Set[A]
+ required: scala.collection.mutable.Map[bar,scala.collection.mutable.Set[bar]]
+ if (map.get(tmp).isEmpty) map.put(tmp,collection.mutable.Set())
+ ^
+one error found
diff --git a/test/files/pos/t5580b.scala b/test/files/neg/t5580b.scala
index d5a4a0a2b2..2161da4584 100644
--- a/test/files/pos/t5580b.scala
+++ b/test/files/neg/t5580b.scala
@@ -1,9 +1,3 @@
-/** It's a pos test because it does indeed compile,
- * not so much because I'm glad it does. Testing
- * that error messages created and discarded during
- * implicit search don't blow it up.
- */
-
import scala.collection.mutable.WeakHashMap
import scala.collection.JavaConversions._
diff --git a/test/files/neg/t5663-badwarneq.check b/test/files/neg/t5663-badwarneq.check
index 12e93ff373..732e4f44d0 100644
--- a/test/files/neg/t5663-badwarneq.check
+++ b/test/files/neg/t5663-badwarneq.check
@@ -1,24 +1,42 @@
-t5663-badwarneq.scala:42: warning: comparing case class values of types Some[Int] and None.type using `==' will always yield false
+t5663-badwarneq.scala:47: warning: comparing case class values of types Some[Int] and None.type using `==' will always yield false
println(new Some(1) == None) // Should complain on type, was: spuriously complains on fresh object
^
-t5663-badwarneq.scala:43: warning: comparing case class values of types Some[Int] and Thing using `==' will always yield false
+t5663-badwarneq.scala:48: warning: comparing case class values of types Some[Int] and Thing using `==' will always yield false
println(Some(1) == new Thing(1)) // Should complain on type, was: spuriously complains on fresh object
^
-t5663-badwarneq.scala:51: warning: ThingOne and Thingy are unrelated: they will most likely never compare equal
+t5663-badwarneq.scala:56: warning: ThingOne and Thingy are unrelated: they will most likely never compare equal
println(t1 == t2) // true, but apparently unrelated, a compromise warning
^
-t5663-badwarneq.scala:52: warning: ThingThree and Thingy are unrelated: they will most likely never compare equal
+t5663-badwarneq.scala:57: warning: ThingThree and Thingy are unrelated: they will most likely never compare equal
println(t4 == t2) // true, complains because ThingThree is final and Thingy not a subclass, stronger claim than unrelated
^
-t5663-badwarneq.scala:55: warning: comparing case class values of types ThingTwo and Some[Int] using `==' will always yield false
+t5663-badwarneq.scala:60: warning: comparing case class values of types ThingTwo and Some[Int] using `==' will always yield false
println(t3 == Some(1)) // false, warn on different cases
^
-t5663-badwarneq.scala:56: warning: comparing values of types ThingOne and Cousin using `==' will always yield false
+t5663-badwarneq.scala:61: warning: comparing values of types ThingOne and Cousin using `==' will always yield false
println(t1 == c) // should warn
^
-t5663-badwarneq.scala:64: warning: comparing case class values of types Simple and SimpleSibling.type using `==' will always yield false
+t5663-badwarneq.scala:69: warning: comparing case class values of types Simple and SimpleSibling.type using `==' will always yield false
println(new Simple() == SimpleSibling) // like Some(1) == None, but needn't be final case
^
+t5663-badwarneq.scala:72: warning: ValueClass1 and Int are unrelated: they will never compare equal
+ println(new ValueClass1(5) == 5) // bad
+ ^
+t5663-badwarneq.scala:74: warning: comparing values of types Int and ValueClass1 using `==' will always yield false
+ println(5 == new ValueClass1(5)) // bad
+ ^
+t5663-badwarneq.scala:78: warning: ValueClass2[String] and String are unrelated: they will never compare equal
+ println(new ValueClass2("abc") == "abc") // bad
+ ^
+t5663-badwarneq.scala:79: warning: ValueClass2[Int] and ValueClass1 are unrelated: they will never compare equal
+ println(new ValueClass2(5) == new ValueClass1(5)) // bad - different value classes
+ ^
+t5663-badwarneq.scala:81: warning: comparing values of types ValueClass3 and ValueClass2[Int] using `==' will always yield false
+ println(ValueClass3(5) == new ValueClass2(5)) // bad
+ ^
+t5663-badwarneq.scala:82: warning: comparing values of types ValueClass3 and Int using `==' will always yield false
+ println(ValueClass3(5) == 5) // bad
+ ^
error: No warnings can be incurred under -Xfatal-warnings.
-7 warnings found
+13 warnings found
one error found
diff --git a/test/files/neg/t5663-badwarneq.scala b/test/files/neg/t5663-badwarneq.scala
index 56ec389c03..3c0376914e 100644
--- a/test/files/neg/t5663-badwarneq.scala
+++ b/test/files/neg/t5663-badwarneq.scala
@@ -17,6 +17,11 @@ class SimpleParent
case class Simple() extends SimpleParent
case object SimpleSibling extends SimpleParent
+// value classes
+final class ValueClass1(val value: Int) extends AnyVal
+final class ValueClass2[T](val value: T) extends AnyVal
+final case class ValueClass3(val value: Int) extends AnyVal
+
/* It's not possible to run partest without -deprecation.
* Since detecting the warnings requires a neg test with
* -Xfatal-warnings, and deprecation terminates the compile,
@@ -63,6 +68,19 @@ object Test {
println(new Simple() == SimpleSibling) // like Some(1) == None, but needn't be final case
+ println(new ValueClass1(5) == new ValueClass1(5)) // ok
+ println(new ValueClass1(5) == 5) // bad
+ println(new ValueClass1(5) == (5: Any)) // ok, have to let it through
+ println(5 == new ValueClass1(5)) // bad
+ println((5: Any) == new ValueClass1(5) == (5: Any)) // ok
+
+ println(new ValueClass2("abc") == new ValueClass2("abc")) // ok
+ println(new ValueClass2("abc") == "abc") // bad
+ println(new ValueClass2(5) == new ValueClass1(5)) // bad - different value classes
+ println(ValueClass3(5) == new ValueClass3(5)) // ok
+ println(ValueClass3(5) == new ValueClass2(5)) // bad
+ println(ValueClass3(5) == 5) // bad
+
/*
val mine = new MyThing
val some = new SomeThing
diff --git a/test/files/neg/t5689.check b/test/files/neg/t5689.check
index 50aaa7dbfe..ad9b79cdcb 100644
--- a/test/files/neg/t5689.check
+++ b/test/files/neg/t5689.check
@@ -3,5 +3,5 @@ t5689.scala:4: error: macro implementation has wrong shape:
found : (c: scala.reflect.macros.Context)(i: c.Expr[Double]): c.Expr[Int]
type mismatch for return type: c.Expr[Int] does not conform to c.Expr[String]
def returnsString(i: Double): String = macro returnsIntImpl
- ^
+ ^
one error found
diff --git a/test/files/neg/t5856.check b/test/files/neg/t5856.check
index ac49d4b9ac..08a61bdc07 100644
--- a/test/files/neg/t5856.check
+++ b/test/files/neg/t5856.check
@@ -1,6 +1,6 @@
t5856.scala:10: error: invalid string interpolation: `$$', `$'ident or `$'BlockExpr expected
val s9 = s"$"
- ^
+ ^
t5856.scala:10: error: unclosed string literal
val s9 = s"$"
^
diff --git a/test/files/neg/t6040.check b/test/files/neg/t6040.check
index f91df0c46d..16c90ede7e 100644
--- a/test/files/neg/t6040.check
+++ b/test/files/neg/t6040.check
@@ -1,5 +1,5 @@
t6040.scala:1: error: extension of type scala.Dynamic needs to be enabled
-by making the implicit value language.dynamics visible.
+by making the implicit value scala.language.dynamics visible.
This can be achieved by adding the import clause 'import scala.language.dynamics'
or by setting the compiler option -language:dynamics.
See the Scala docs for value scala.language.dynamics for a discussion
diff --git a/test/files/neg/t6138.check b/test/files/neg/t6138.check
new file mode 100644
index 0000000000..8fd9978248
--- /dev/null
+++ b/test/files/neg/t6138.check
@@ -0,0 +1,7 @@
+t6138.scala:4: error: ambiguous reference to overloaded definition,
+both method getClass in object definitions of type (s: Int)Any
+and method getClass in object definitions of type (s: String)Any
+match argument types (Nothing)
+ getClass(???): String
+ ^
+one error found
diff --git a/test/files/neg/t6138.scala b/test/files/neg/t6138.scala
new file mode 100644
index 0000000000..2f45a46b1c
--- /dev/null
+++ b/test/files/neg/t6138.scala
@@ -0,0 +1,5 @@
+object definitions {
+ def getClass(s: String): Any = ???
+ def getClass(s: Int): Any = ???
+ getClass(???): String
+}
diff --git a/test/files/neg/t6162-inheritance.check b/test/files/neg/t6162-inheritance.check
index e98fa79eb7..13c78030d9 100644
--- a/test/files/neg/t6162-inheritance.check
+++ b/test/files/neg/t6162-inheritance.check
@@ -1,16 +1,16 @@
-t6162-inheritance.scala:6: warning: inheritance from class Foo in package t6126 is deprecated: `Foo` will be made final in a future version.
+usage.scala:3: warning: inheritance from class Foo in package t6126 is deprecated: `Foo` will be made final in a future version.
class SubFoo extends Foo
^
-t6162-inheritance.scala:11: warning: inheritance from trait T in package t6126 is deprecated
+usage.scala:5: warning: inheritance from trait T in package t6126 is deprecated
object SubT extends T
^
-t6162-inheritance.scala:17: warning: inheritance from trait S in package t6126 is deprecated
+usage.scala:8: warning: inheritance from trait S in package t6126 is deprecated
new S {
^
-t6162-inheritance.scala:6: warning: inheritance from class Foo in package t6126 is deprecated: `Foo` will be made final in a future version.
+usage.scala:3: warning: inheritance from class Foo in package t6126 is deprecated: `Foo` will be made final in a future version.
class SubFoo extends Foo
^
-t6162-inheritance.scala:11: warning: inheritance from trait T in package t6126 is deprecated
+usage.scala:5: warning: inheritance from trait T in package t6126 is deprecated
object SubT extends T
^
error: No warnings can be incurred under -Xfatal-warnings.
diff --git a/test/files/neg/t6162-inheritance/defn.scala b/test/files/neg/t6162-inheritance/defn.scala
new file mode 100644
index 0000000000..bb582d27b0
--- /dev/null
+++ b/test/files/neg/t6162-inheritance/defn.scala
@@ -0,0 +1,10 @@
+package scala.t6126
+
+@deprecatedInheritance("`Foo` will be made final in a future version.", "2.10.0")
+class Foo
+
+@deprecatedInheritance()
+trait T
+
+@deprecatedInheritance()
+trait S
diff --git a/test/files/neg/t6162-inheritance/usage.scala b/test/files/neg/t6162-inheritance/usage.scala
new file mode 100644
index 0000000000..097e4f5903
--- /dev/null
+++ b/test/files/neg/t6162-inheritance/usage.scala
@@ -0,0 +1,10 @@
+package scala.t6126
+
+class SubFoo extends Foo
+
+object SubT extends T
+
+object O {
+ new S {
+ }
+}
diff --git a/test/files/neg/t6406-regextract.check b/test/files/neg/t6406-regextract.check
index 4fea66f760..19425a68b0 100644
--- a/test/files/neg/t6406-regextract.check
+++ b/test/files/neg/t6406-regextract.check
@@ -1,7 +1,6 @@
-t6406-regextract.scala:4: error: cannot resolve overloaded unapply
+t6406-regextract.scala:4: warning: method unapplySeq in class Regex is deprecated: Extracting a match result from anything but a CharSequence or Match is deprecated
List(1) collect { case r(i) => i }
^
-t6406-regextract.scala:4: error: not found: value i
- List(1) collect { case r(i) => i }
- ^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t6534.check b/test/files/neg/t6534.check
index 52e70cfa8a..c2e80b377a 100644
--- a/test/files/neg/t6534.check
+++ b/test/files/neg/t6534.check
@@ -1,9 +1,3 @@
-t6534.scala:4: warning: Implementation of equals inherited from trait Foo overridden in class Bippy1 to enforce value class semantics
-class Bippy1(val x: Int) extends AnyVal with Foo { } // warn
- ^
-t6534.scala:5: warning: Implementation of hashCode inherited from trait Ding overridden in class Bippy2 to enforce value class semantics
-class Bippy2(val x: Int) extends AnyVal with Ding { } // warn
- ^
t6534.scala:6: error: redefinition of equals method. See SIP-15, criterion 4. is not allowed in value class
class Bippy3(val x: Int) extends AnyVal { override def equals(x: Any) = false } // error
^
@@ -13,5 +7,4 @@ class Bippy4(val x: Int) extends AnyVal { override def hashCode = -1 }
t6534.scala:9: error: redefinition of equals method. See SIP-15, criterion 4. is not allowed in value class
case class Bippy6(val x: Int) extends AnyVal { override def productPrefix = "Dingo" ; override def equals(x: Any) = false } // error
^
-two warnings found
three errors found
diff --git a/test/files/neg/t6771b.check b/test/files/neg/t6771b.check
new file mode 100644
index 0000000000..ba99e9178d
--- /dev/null
+++ b/test/files/neg/t6771b.check
@@ -0,0 +1,6 @@
+t6771b.scala:14: error: type mismatch;
+ found : x.type (with underlying type String)
+ required: Test.a.type
+ b = b match { case x => x }
+ ^
+one error found
diff --git a/test/files/neg/t6771b.scala b/test/files/neg/t6771b.scala
new file mode 100644
index 0000000000..78f11f7750
--- /dev/null
+++ b/test/files/neg/t6771b.scala
@@ -0,0 +1,16 @@
+// Currently, the pattern matcher widens the type of the
+// scrutinee, so this doesn't typecheck. This test just
+// confirms this behaviour, although it would be an improvement
+// to change this and make this a `pos` test.
+//
+// But, to the intrepid hacker who works on this, a few notes:
+// You'll have to look into places in the pattern matcher that
+// call `dealias`, and see if they need to be `dealiasWiden`.
+// For example, if `checkableType` used only `dealias`, `pos/t6671.scala`
+// would fail.
+object Test {
+ val a = ""; var b: a.type = a
+
+ b = b match { case x => x }
+}
+
diff --git a/test/files/neg/t6815.check b/test/files/neg/t6815.check
new file mode 100644
index 0000000000..fae3819be1
--- /dev/null
+++ b/test/files/neg/t6815.check
@@ -0,0 +1,5 @@
+t6815.scala:15: error: stable identifier required, but Test.this.u.emptyValDef found.
+ Note that value emptyValDef is not stable because its type, Test.u.ValDef, is volatile.
+ case _: u.emptyValDef.T => // and, unlike in pos/t6185.scala, we shouldn't allow this.
+ ^
+one error found
diff --git a/test/files/neg/t6815.scala b/test/files/neg/t6815.scala
new file mode 100644
index 0000000000..ff973a7437
--- /dev/null
+++ b/test/files/neg/t6815.scala
@@ -0,0 +1,17 @@
+trait U {
+ trait ValOrDefDefApi {
+ def name: Any
+ }
+ type ValOrDefDef <: ValOrDefDefApi
+ type ValDef <: ValOrDefDef with ValDefApi { type T }
+ trait ValDefApi extends ValOrDefDefApi { this: ValDef => }
+ val emptyValDef: ValDef // the result type is volatile
+}
+
+object Test {
+ val u: U = ???
+
+ (null: Any) match {
+ case _: u.emptyValDef.T => // and, unlike in pos/t6185.scala, we shouldn't allow this.
+ }
+}
diff --git a/test/files/neg/t6889.check b/test/files/neg/t6889.check
new file mode 100644
index 0000000000..a77e8a010c
--- /dev/null
+++ b/test/files/neg/t6889.check
@@ -0,0 +1,7 @@
+t6889.scala:16: error: the result type of an implicit conversion must be more specific than AnyRef
+ def f(x: Dingo): AnyRef = x // fail - no conversion to AnyRef
+ ^
+t6889.scala:17: error: an expression of type Null is ineligible for implicit conversion
+ var x: Int = null // fail - no conversion from Null
+ ^
+two errors found
diff --git a/test/files/neg/t6889.scala b/test/files/neg/t6889.scala
new file mode 100644
index 0000000000..ef1963669c
--- /dev/null
+++ b/test/files/neg/t6889.scala
@@ -0,0 +1,18 @@
+package bippy {
+ trait Bippy[A] extends Any
+}
+package foo {
+ package object unrelated {
+ implicit def bippyDingo[A](x: bippy.Bippy[A]): AnyRef = Nil
+ }
+ package unrelated {
+ trait Unrelated
+ }
+}
+
+object Test {
+ trait Dingo extends Any with bippy.Bippy[foo.unrelated.Unrelated]
+
+ def f(x: Dingo): AnyRef = x // fail - no conversion to AnyRef
+ var x: Int = null // fail - no conversion from Null
+}
diff --git a/test/files/neg/t6952.check b/test/files/neg/t6952.check
index f1e1881404..1a591d02c6 100644
--- a/test/files/neg/t6952.check
+++ b/test/files/neg/t6952.check
@@ -1,5 +1,5 @@
t6952.scala:2: error: extension of type scala.Dynamic needs to be enabled
-by making the implicit value language.dynamics visible.
+by making the implicit value scala.language.dynamics visible.
This can be achieved by adding the import clause 'import scala.language.dynamics'
or by setting the compiler option -language:dynamics.
See the Scala docs for value scala.language.dynamics for a discussion
@@ -7,7 +7,7 @@ why the feature needs to be explicitly enabled.
trait B extends Dynamic
^
t6952.scala:3: error: extension of type scala.Dynamic needs to be enabled
-by making the implicit value language.dynamics visible.
+by making the implicit value scala.language.dynamics visible.
trait C extends A with Dynamic
^
two errors found
diff --git a/test/files/neg/t696.check b/test/files/neg/t696.check
index ac26a864a5..b7bc5cdf98 100644
--- a/test/files/neg/t696.check
+++ b/test/files/neg/t696.check
@@ -1,5 +1,9 @@
-t696.scala:4: error: diverging implicit expansion for type TypeUtil0.Type[Any]
+t696.scala:5: error: diverging implicit expansion for type TypeUtil0.Type[Any]
starting with method WithType in object TypeUtil0
- as[Any](null);
+ as[Any](null)
^
-one error found
+t696.scala:6: error: diverging implicit expansion for type TypeUtil0.Type[X]
+starting with method WithType in object TypeUtil0
+ def foo[X]() = as[X](null)
+ ^
+two errors found
diff --git a/test/files/neg/t696.scala b/test/files/neg/t696.scala
index a06a32141a..ca76f7ef6c 100644
--- a/test/files/neg/t696.scala
+++ b/test/files/neg/t696.scala
@@ -1,6 +1,7 @@
object TypeUtil0 {
- trait Type[+T];
+ trait Type[+T]
implicit def WithType[S,T](implicit tpeS : Type[S], tpeT : Type[T]) : Type[S with T] = null
- as[Any](null);
- def as[T](x : Any)(implicit tpe : Type[T]) = null;
+ def as[T](x : Any)(implicit tpe : Type[T]) = null
+ as[Any](null)
+ def foo[X]() = as[X](null)
}
diff --git a/test/files/neg/t7110.check b/test/files/neg/t7110.check
new file mode 100644
index 0000000000..e484dc4325
--- /dev/null
+++ b/test/files/neg/t7110.check
@@ -0,0 +1,6 @@
+t7110.scala:2: warning: A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.
+ try { ??? } // warn
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t7110.flags b/test/files/neg/t7110.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/neg/t7110.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t7110.scala b/test/files/neg/t7110.scala
new file mode 100644
index 0000000000..79ac325216
--- /dev/null
+++ b/test/files/neg/t7110.scala
@@ -0,0 +1,6 @@
+object Test {
+ try { ??? } // warn
+
+ try { ??? } finally ??? // no warn
+ try { ??? } catch { case _: Throwable => } // no warn
+}
diff --git a/test/files/neg/t7157.check b/test/files/neg/t7157.check
new file mode 100644
index 0000000000..c6a7af9a23
--- /dev/null
+++ b/test/files/neg/t7157.check
@@ -0,0 +1,73 @@
+Test_2.scala:5: error: too many arguments for macro method m1_0_0: ()Unit
+ m1_0_0(1)
+ ^
+Test_2.scala:6: error: too many arguments for macro method m1_0_0: ()Unit
+ m1_0_0(1, 2)
+ ^
+Test_2.scala:7: error: too many arguments for macro method m1_0_0: ()Unit
+ m1_0_0(1, 2, 3)
+ ^
+Test_2.scala:9: error: macro applications do not support named and/or default arguments
+ m1_1_1()
+ ^
+Test_2.scala:11: error: too many arguments for macro method m1_1_1: (x: Int)Unit
+ m1_1_1(1, 2)
+ ^
+Test_2.scala:12: error: too many arguments for macro method m1_1_1: (x: Int)Unit
+ m1_1_1(1, 2, 3)
+ ^
+Test_2.scala:14: error: macro applications do not support named and/or default arguments
+ m1_2_2()
+ ^
+Test_2.scala:15: error: macro applications do not support named and/or default arguments
+ m1_2_2(1)
+ ^
+Test_2.scala:17: error: too many arguments for macro method m1_2_2: (x: Int, y: Int)Unit
+ m1_2_2(1, 2, 3)
+ ^
+Test_2.scala:24: error: macro applications do not support named and/or default arguments
+ m1_1_inf()
+ ^
+Test_2.scala:29: error: macro applications do not support named and/or default arguments
+ m1_2_inf()
+ ^
+Test_2.scala:30: error: macro applications do not support named and/or default arguments
+ m1_2_inf(1)
+ ^
+Test_2.scala:35: error: too many arguments for macro method m2_0_0: ()Unit
+ m2_0_0()(1)
+ ^
+Test_2.scala:36: error: too many arguments for macro method m2_0_0: ()Unit
+ m2_0_0()(1, 2)
+ ^
+Test_2.scala:37: error: too many arguments for macro method m2_0_0: ()Unit
+ m2_0_0()(1, 2, 3)
+ ^
+Test_2.scala:39: error: macro applications do not support named and/or default arguments
+ m2_1_1()()
+ ^
+Test_2.scala:41: error: too many arguments for macro method m2_1_1: (x: Int)Unit
+ m2_1_1()(1, 2)
+ ^
+Test_2.scala:42: error: too many arguments for macro method m2_1_1: (x: Int)Unit
+ m2_1_1()(1, 2, 3)
+ ^
+Test_2.scala:44: error: macro applications do not support named and/or default arguments
+ m2_2_2()()
+ ^
+Test_2.scala:45: error: macro applications do not support named and/or default arguments
+ m2_2_2()(1)
+ ^
+Test_2.scala:47: error: too many arguments for macro method m2_2_2: (x: Int, y: Int)Unit
+ m2_2_2()(1, 2, 3)
+ ^
+Test_2.scala:54: error: macro applications do not support named and/or default arguments
+ m2_1_inf()()
+ ^
+Test_2.scala:59: error: macro applications do not support named and/or default arguments
+ m2_2_inf()()
+ ^
+Test_2.scala:60: error: macro applications do not support named and/or default arguments
+ m2_2_inf()(1)
+ ^
+24 errors found
diff --git a/test/files/neg/t7157/Impls_Macros_1.scala b/test/files/neg/t7157/Impls_Macros_1.scala
new file mode 100644
index 0000000000..09f423fbab
--- /dev/null
+++ b/test/files/neg/t7157/Impls_Macros_1.scala
@@ -0,0 +1,32 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Macros {
+ def impl1_0_0(c: Context)() = c.literalUnit
+ def impl1_1_1(c: Context)(x: c.Expr[Int]) = c.literalUnit
+ def impl1_2_2(c: Context)(x: c.Expr[Int], y: c.Expr[Int]) = c.literalUnit
+ def m1_0_0() = macro impl1_0_0
+ def m1_1_1(x: Int) = macro impl1_1_1
+ def m1_2_2(x: Int, y: Int) = macro impl1_2_2
+
+ def impl1_0_inf(c: Context)(x: c.Expr[Int]*) = c.literalUnit
+ def impl1_1_inf(c: Context)(x: c.Expr[Int], y: c.Expr[Int]*) = c.literalUnit
+ def impl1_2_inf(c: Context)(x: c.Expr[Int], y: c.Expr[Int], z: c.Expr[Int]*) = c.literalUnit
+ def m1_0_inf(x: Int*) = macro impl1_0_inf
+ def m1_1_inf(x: Int, y: Int*) = macro impl1_1_inf
+ def m1_2_inf(x: Int, y: Int, z: Int*) = macro impl1_2_inf
+
+ def impl2_0_0(c: Context)()() = c.literalUnit
+ def impl2_1_1(c: Context)()(x: c.Expr[Int]) = c.literalUnit
+ def impl2_2_2(c: Context)()(x: c.Expr[Int], y: c.Expr[Int]) = c.literalUnit
+ def m2_0_0()() = macro impl2_0_0
+ def m2_1_1()(x: Int) = macro impl2_1_1
+ def m2_2_2()(x: Int, y: Int) = macro impl2_2_2
+
+ def impl2_0_inf(c: Context)()(x: c.Expr[Int]*) = c.literalUnit
+ def impl2_1_inf(c: Context)()(x: c.Expr[Int], y: c.Expr[Int]*) = c.literalUnit
+ def impl2_2_inf(c: Context)()(x: c.Expr[Int], y: c.Expr[Int], z: c.Expr[Int]*) = c.literalUnit
+ def m2_0_inf()(x: Int*) = macro impl2_0_inf
+ def m2_1_inf()(x: Int, y: Int*) = macro impl2_1_inf
+ def m2_2_inf()(x: Int, y: Int, z: Int*) = macro impl2_2_inf
+}
\ No newline at end of file
diff --git a/test/files/neg/t7157/Test_2.scala b/test/files/neg/t7157/Test_2.scala
new file mode 100644
index 0000000000..45a6026399
--- /dev/null
+++ b/test/files/neg/t7157/Test_2.scala
@@ -0,0 +1,63 @@
+import Macros._
+
+object Test extends App {
+ m1_0_0()
+ m1_0_0(1)
+ m1_0_0(1, 2)
+ m1_0_0(1, 2, 3)
+
+ m1_1_1()
+ m1_1_1(1)
+ m1_1_1(1, 2)
+ m1_1_1(1, 2, 3)
+
+ m1_2_2()
+ m1_2_2(1)
+ m1_2_2(1, 2)
+ m1_2_2(1, 2, 3)
+
+ m1_0_inf()
+ m1_0_inf(1)
+ m1_0_inf(1, 2)
+ m1_0_inf(1, 2, 3)
+
+ m1_1_inf()
+ m1_1_inf(1)
+ m1_1_inf(1, 2)
+ m1_1_inf(1, 2, 3)
+
+ m1_2_inf()
+ m1_2_inf(1)
+ m1_2_inf(1, 2)
+ m1_2_inf(1, 2, 3)
+
+ m2_0_0()()
+ m2_0_0()(1)
+ m2_0_0()(1, 2)
+ m2_0_0()(1, 2, 3)
+
+ m2_1_1()()
+ m2_1_1()(1)
+ m2_1_1()(1, 2)
+ m2_1_1()(1, 2, 3)
+
+ m2_2_2()()
+ m2_2_2()(1)
+ m2_2_2()(1, 2)
+ m2_2_2()(1, 2, 3)
+
+ m2_0_inf()()
+ m2_0_inf()(1)
+ m2_0_inf()(1, 2)
+ m2_0_inf()(1, 2, 3)
+
+ m2_1_inf()()
+ m2_1_inf()(1)
+ m2_1_inf()(1, 2)
+ m2_1_inf()(1, 2, 3)
+
+ m2_2_inf()()
+ m2_2_inf()(1)
+ m2_2_inf()(1, 2)
+ m2_2_inf()(1, 2, 3)
+}
\ No newline at end of file
diff --git a/test/files/neg/t7289.check b/test/files/neg/t7289.check
new file mode 100644
index 0000000000..e4aeebbc6c
--- /dev/null
+++ b/test/files/neg/t7289.check
@@ -0,0 +1,4 @@
+t7289.scala:8: error: could not find implicit value for parameter e: Test.Schtroumpf[scala.collection.immutable.Nil.type]
+ implicitly[Schtroumpf[Nil.type]]
+ ^
+one error found
diff --git a/test/files/neg/t7289.scala b/test/files/neg/t7289.scala
new file mode 100644
index 0000000000..f4ed3daf76
--- /dev/null
+++ b/test/files/neg/t7289.scala
@@ -0,0 +1,39 @@
+object Test extends App {
+ trait Schtroumpf[T]
+
+ implicit def schtroumpf[T, U <: Coll[T], Coll[X] <: Traversable[X]]
+ (implicit minorSchtroumpf: Schtroumpf[T]): Schtroumpf[U] = ???
+
+ implicit val qoo: Schtroumpf[Int] = new Schtroumpf[Int]{}
+ implicitly[Schtroumpf[Nil.type]]
+}
+
+/*
+info1 = {scala.tools.nsc.typechecker.Implicits$ImplicitInfo@3468}"qoo: => Test.Schtroumpf[Int]"
+info2 = {scala.tools.nsc.typechecker.Implicits$ImplicitInfo@3469}"schtroumpf: [T, U <: Coll[T], Coll[_] <: Traversable[_]](implicit minorSchtroumpf: Test.Schtroumpf[T])Test.Schtroumpf[U]"
+isStrictlyMoreSpecific(info1, info2)
+ isSubType(Test.Schtroumpf[Int], Test.Schtroumpf[U] forSome { T; U <: Coll[T]; Coll[_] <: Traversable[_] })
+ isAsSpecificValueType(Test.Schtroumpf[Int], Test.Schtroumpf[U], undef2 = List(type T, type U, type Coll))
+
+ val et: ExistentialType = Test.Schtroumpf[U] forSome { T; U <: Coll[T]; Coll[_] <: Traversable[_] }
+ val tp1 = Test.Schtroumpf[Int]
+ et.withTypeVars(isSubType(tp1, _, depth))
+ solve()
+ tvars = tList(=?Nothing, =?Int, =?=?Int)
+
+
+[ create] ?T ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]] )
+[ create] ?U ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]] )
+[ create] ?Coll ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]] )
+[ setInst] Nothing ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], T=Nothing )
+[ setInst] scala.collection.immutable.Nil.type( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], U=scala.collection.immutable.Nil.type )
+[ setInst] =?scala.collection.immutable.Nil.type( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], Coll==?scala.collection.immutable.Nil.type )
+[ create] ?T ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]] )
+[ setInst] Int ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], T=Int )
+[ create] ?T ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]] )
+[ create] ?U ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]] )
+[ create] ?Coll ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]] )
+[ setInst] Nothing ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], T=Nothing )
+[ setInst] Int ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], U=Int )
+[ setInst] =?Int ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], Coll==?Int )
+*/
\ No newline at end of file
diff --git a/test/files/neg/t7289_status_quo.check b/test/files/neg/t7289_status_quo.check
new file mode 100644
index 0000000000..31c072e969
--- /dev/null
+++ b/test/files/neg/t7289_status_quo.check
@@ -0,0 +1,22 @@
+t7289_status_quo.scala:9: error: could not find implicit value for parameter e: Test1.Ext[List[Int]]
+ implicitly[Ext[List[Int]]] // fails - not found
+ ^
+t7289_status_quo.scala:11: error: could not find implicit value for parameter e: Test1.Ext[List[List[List[Int]]]]
+ implicitly[Ext[List[List[List[Int]]]]] // fails - not found
+ ^
+t7289_status_quo.scala:15: error: ambiguous implicit values:
+ both method f in object Test1 of type [A, Coll <: CC[A], CC[X] <: Traversable[X]](implicit xi: Test1.Ext[A])Test1.Ext[Coll]
+ and value m in object Test1 of type => Test1.Ext[List[List[Int]]]
+ match expected type Test1.Ext[_ <: List[List[Int]]]
+ implicitly[Ext[_ <: List[List[Int]]]] // fails - ambiguous
+ ^
+t7289_status_quo.scala:20: error: could not find implicit value for parameter e: Test1.ExtCov[List[Int]]
+ implicitly[ExtCov[List[Int]]] // fails - not found
+ ^
+t7289_status_quo.scala:21: error: could not find implicit value for parameter e: Test1.ExtCov[List[List[Int]]]
+ implicitly[ExtCov[List[List[Int]]]] // fails - not found
+ ^
+t7289_status_quo.scala:22: error: could not find implicit value for parameter e: Test1.ExtCov[List[List[List[Int]]]]
+ implicitly[ExtCov[List[List[List[Int]]]]] // fails - not found
+ ^
+6 errors found
diff --git a/test/files/neg/t7289_status_quo.scala b/test/files/neg/t7289_status_quo.scala
new file mode 100644
index 0000000000..39621429a1
--- /dev/null
+++ b/test/files/neg/t7289_status_quo.scala
@@ -0,0 +1,23 @@
+// record the status quo after this fix
+// not clear to @adriaanm why an upper-bounded existential in an invariant position
+// is different from putting that upper bound in a covariant position
+object Test1 {
+ trait Ext[T]
+ implicit def f[A, Coll <: CC[A], CC[X] <: Traversable[X]](implicit xi: Ext[A]): Ext[Coll] = ???
+ implicit val m: Ext[List[List[Int]]] = new Ext[List[List[Int]]]{}
+
+ implicitly[Ext[List[Int]]] // fails - not found
+ implicitly[Ext[List[List[Int]]]] // compiles
+ implicitly[Ext[List[List[List[Int]]]]] // fails - not found
+
+ // Making Ext[+T] should incur the same behavior as these. (so says @paulp)
+ implicitly[Ext[_ <: List[Int]]] // compiles
+ implicitly[Ext[_ <: List[List[Int]]]] // fails - ambiguous
+ implicitly[Ext[_ <: List[List[List[Int]]]]] // compiles
+
+ // But, we currently get:
+ trait ExtCov[+T]
+ implicitly[ExtCov[List[Int]]] // fails - not found
+ implicitly[ExtCov[List[List[Int]]]] // fails - not found
+ implicitly[ExtCov[List[List[List[Int]]]]] // fails - not found
+}
\ No newline at end of file
diff --git a/test/files/neg/t7324.check b/test/files/neg/t7324.check
new file mode 100644
index 0000000000..586947d5e7
--- /dev/null
+++ b/test/files/neg/t7324.check
@@ -0,0 +1,4 @@
+t7324.scala:2: error: Platform restriction: a parameter list's length cannot exceed 254.
+class Bar(
+ ^
+one error found
diff --git a/test/files/neg/t7324.scala b/test/files/neg/t7324.scala
new file mode 100644
index 0000000000..81d7674d68
--- /dev/null
+++ b/test/files/neg/t7324.scala
@@ -0,0 +1,57 @@
+object Bar extends App
+class Bar(
+_1: Int, _2: Int, _3: Int, _4: Int, _5: Int, _6: Int, _7: Int, _8: Int, _9: Int, _10: Int,
+_11: Int, _12: Int, _13: Int, _14: Int, _15: Int, _16: Int, _17: Int, _18: Int, _19: Int, _20: Int,
+_21: Int, _22: Int, _23: Int, _24: Int, _25: Int, _26: Int, _27: Int, _28: Int, _29: Int, _30: Int,
+_31: Int, _32: Int, _33: Int, _34: Int, _35: Int, _36: Int, _37: Int, _38: Int, _39: Int, _40: Int,
+_41: Int, _42: Int, _43: Int, _44: Int, _45: Int, _46: Int, _47: Int, _48: Int, _49: Int, _50: Int,
+_51: Int, _52: Int, _53: Int, _54: Int, _55: Int, _56: Int, _57: Int, _58: Int, _59: Int, _60: Int,
+_61: Int, _62: Int, _63: Int, _64: Int, _65: Int, _66: Int, _67: Int, _68: Int, _69: Int, _70: Int,
+_71: Int, _72: Int, _73: Int, _74: Int, _75: Int, _76: Int, _77: Int, _78: Int, _79: Int, _80: Int,
+_81: Int, _82: Int, _83: Int, _84: Int, _85: Int, _86: Int, _87: Int, _88: Int, _89: Int, _90: Int,
+_91: Int, _92: Int, _93: Int, _94: Int, _95: Int, _96: Int, _97: Int, _98: Int, _99: Int, _100: Int,
+_101: Int, _102: Int, _103: Int, _104: Int, _105: Int, _106: Int, _107: Int, _108: Int, _109: Int, _110: Int,
+_111: Int, _112: Int, _113: Int, _114: Int, _115: Int, _116: Int, _117: Int, _118: Int, _119: Int, _120: Int,
+_121: Int, _122: Int, _123: Int, _124: Int, _125: Int, _126: Int, _127: Int, _128: Int, _129: Int, _130: Int,
+_131: Int, _132: Int, _133: Int, _134: Int, _135: Int, _136: Int, _137: Int, _138: Int, _139: Int, _140: Int,
+_141: Int, _142: Int, _143: Int, _144: Int, _145: Int, _146: Int, _147: Int, _148: Int, _149: Int, _150: Int,
+_151: Int, _152: Int, _153: Int, _154: Int, _155: Int, _156: Int, _157: Int, _158: Int, _159: Int, _160: Int,
+_161: Int, _162: Int, _163: Int, _164: Int, _165: Int, _166: Int, _167: Int, _168: Int, _169: Int, _170: Int,
+_171: Int, _172: Int, _173: Int, _174: Int, _175: Int, _176: Int, _177: Int, _178: Int, _179: Int, _180: Int,
+_181: Int, _182: Int, _183: Int, _184: Int, _185: Int, _186: Int, _187: Int, _188: Int, _189: Int, _190: Int,
+_191: Int, _192: Int, _193: Int, _194: Int, _195: Int, _196: Int, _197: Int, _198: Int, _199: Int, _200: Int,
+_201: Int, _202: Int, _203: Int, _204: Int, _205: Int, _206: Int, _207: Int, _208: Int, _209: Int, _210: Int,
+_211: Int, _212: Int, _213: Int, _214: Int, _215: Int, _216: Int, _217: Int, _218: Int, _219: Int, _220: Int,
+_221: Int, _222: Int, _223: Int, _224: Int, _225: Int, _226: Int, _227: Int, _228: Int, _229: Int, _230: Int,
+_231: Int, _232: Int, _233: Int, _234: Int, _235: Int, _236: Int, _237: Int, _238: Int, _239: Int, _240: Int,
+_241: Int, _242: Int, _243: Int, _244: Int, _245: Int, _246: Int, _247: Int, _248: Int, _249: Int, _250: Int,
+_251: Int, _252: Int, _253: Int, _254: Int, _255: Int
+)
+
+class BarOK(
+_1: Int, _2: Int, _3: Int, _4: Int, _5: Int, _6: Int, _7: Int, _8: Int, _9: Int, _10: Int,
+_11: Int, _12: Int, _13: Int, _14: Int, _15: Int, _16: Int, _17: Int, _18: Int, _19: Int, _20: Int,
+_21: Int, _22: Int, _23: Int, _24: Int, _25: Int, _26: Int, _27: Int, _28: Int, _29: Int, _30: Int,
+_31: Int, _32: Int, _33: Int, _34: Int, _35: Int, _36: Int, _37: Int, _38: Int, _39: Int, _40: Int,
+_41: Int, _42: Int, _43: Int, _44: Int, _45: Int, _46: Int, _47: Int, _48: Int, _49: Int, _50: Int,
+_51: Int, _52: Int, _53: Int, _54: Int, _55: Int, _56: Int, _57: Int, _58: Int, _59: Int, _60: Int,
+_61: Int, _62: Int, _63: Int, _64: Int, _65: Int, _66: Int, _67: Int, _68: Int, _69: Int, _70: Int,
+_71: Int, _72: Int, _73: Int, _74: Int, _75: Int, _76: Int, _77: Int, _78: Int, _79: Int, _80: Int,
+_81: Int, _82: Int, _83: Int, _84: Int, _85: Int, _86: Int, _87: Int, _88: Int, _89: Int, _90: Int,
+_91: Int, _92: Int, _93: Int, _94: Int, _95: Int, _96: Int, _97: Int, _98: Int, _99: Int, _100: Int,
+_101: Int, _102: Int, _103: Int, _104: Int, _105: Int, _106: Int, _107: Int, _108: Int, _109: Int, _110: Int,
+_111: Int, _112: Int, _113: Int, _114: Int, _115: Int, _116: Int, _117: Int, _118: Int, _119: Int, _120: Int,
+_121: Int, _122: Int, _123: Int, _124: Int, _125: Int, _126: Int, _127: Int, _128: Int, _129: Int, _130: Int,
+_131: Int, _132: Int, _133: Int, _134: Int, _135: Int, _136: Int, _137: Int, _138: Int, _139: Int, _140: Int,
+_141: Int, _142: Int, _143: Int, _144: Int, _145: Int, _146: Int, _147: Int, _148: Int, _149: Int, _150: Int,
+_151: Int, _152: Int, _153: Int, _154: Int, _155: Int, _156: Int, _157: Int, _158: Int, _159: Int, _160: Int,
+_161: Int, _162: Int, _163: Int, _164: Int, _165: Int, _166: Int, _167: Int, _168: Int, _169: Int, _170: Int,
+_171: Int, _172: Int, _173: Int, _174: Int, _175: Int, _176: Int, _177: Int, _178: Int, _179: Int, _180: Int,
+_181: Int, _182: Int, _183: Int, _184: Int, _185: Int, _186: Int, _187: Int, _188: Int, _189: Int, _190: Int,
+_191: Int, _192: Int, _193: Int, _194: Int, _195: Int, _196: Int, _197: Int, _198: Int, _199: Int, _200: Int,
+_201: Int, _202: Int, _203: Int, _204: Int, _205: Int, _206: Int, _207: Int, _208: Int, _209: Int, _210: Int,
+_211: Int, _212: Int, _213: Int, _214: Int, _215: Int, _216: Int, _217: Int, _218: Int, _219: Int, _220: Int,
+_221: Int, _222: Int, _223: Int, _224: Int, _225: Int, _226: Int, _227: Int, _228: Int, _229: Int, _230: Int,
+_231: Int, _232: Int, _233: Int, _234: Int, _235: Int, _236: Int, _237: Int, _238: Int, _239: Int, _240: Int,
+_241: Int, _242: Int, _243: Int, _244: Int, _245: Int, _246: Int, _247: Int, _248: Int, _249: Int, _250: Int,
+_251: Int, _252: Int, _253: Int, _254: Int)
diff --git a/test/files/neg/t7325.check b/test/files/neg/t7325.check
new file mode 100644
index 0000000000..709ab6db3e
--- /dev/null
+++ b/test/files/neg/t7325.check
@@ -0,0 +1,19 @@
+t7325.scala:2: error: percent signs not directly following splicees must be escaped
+ println(f"%")
+ ^
+t7325.scala:4: error: percent signs not directly following splicees must be escaped
+ println(f"%%%")
+ ^
+t7325.scala:6: error: percent signs not directly following splicees must be escaped
+ println(f"%%%%%")
+ ^
+t7325.scala:16: error: wrong conversion string
+ println(f"${0}%")
+ ^
+t7325.scala:19: error: percent signs not directly following splicees must be escaped
+ println(f"${0}%%%d")
+ ^
+t7325.scala:21: error: percent signs not directly following splicees must be escaped
+ println(f"${0}%%%%%d")
+ ^
+6 errors found
diff --git a/test/files/neg/t7325.scala b/test/files/neg/t7325.scala
new file mode 100644
index 0000000000..adfd8dd47a
--- /dev/null
+++ b/test/files/neg/t7325.scala
@@ -0,0 +1,25 @@
+object Test extends App {
+ println(f"%")
+ println(f"%%")
+ println(f"%%%")
+ println(f"%%%%")
+ println(f"%%%%%")
+ println(f"%%%%%%")
+
+ println(f"%%n")
+ println(f"%%%n")
+ println(f"%%%%n")
+ println(f"%%%%%n")
+ println(f"%%%%%%n")
+ println(f"%%%%%%%n")
+
+ println(f"${0}%")
+ println(f"${0}%d")
+ println(f"${0}%%d")
+ println(f"${0}%%%d")
+ println(f"${0}%%%%d")
+ println(f"${0}%%%%%d")
+
+ println(f"${0}%n")
+ println(f"${0}%d%n")
+}
\ No newline at end of file
diff --git a/test/files/neg/t7330.check b/test/files/neg/t7330.check
new file mode 100644
index 0000000000..b96d656d2b
--- /dev/null
+++ b/test/files/neg/t7330.check
@@ -0,0 +1,5 @@
+t7330.scala:4: error: pattern must be a value: Y[_]
+Note: if you intended to match against the class, try `case _: Y[_]`
+ 0 match { case Y[_] => }
+ ^
+one error found
diff --git a/test/files/neg/t7330.scala b/test/files/neg/t7330.scala
new file mode 100644
index 0000000000..13a943a02b
--- /dev/null
+++ b/test/files/neg/t7330.scala
@@ -0,0 +1,5 @@
+class Y[T]
+class Test {
+ // TypeTree is not a valid tree for a pattern
+ 0 match { case Y[_] => }
+}
\ No newline at end of file
diff --git a/test/files/neg/t7369.check b/test/files/neg/t7369.check
new file mode 100644
index 0000000000..a4e99f496e
--- /dev/null
+++ b/test/files/neg/t7369.check
@@ -0,0 +1,15 @@
+t7369.scala:6: warning: unreachable code
+ case Tuple1(X) => // unreachable
+ ^
+t7369.scala:13: warning: unreachable code
+ case Tuple1(true) => // unreachable
+ ^
+t7369.scala:31: warning: unreachable code
+ case Tuple1(X) => // unreachable
+ ^
+t7369.scala:40: warning: unreachable code
+ case Tuple1(null) => // unreachable
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/t7369.flags b/test/files/neg/t7369.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/neg/t7369.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t7369.scala b/test/files/neg/t7369.scala
new file mode 100644
index 0000000000..87ddfe98b7
--- /dev/null
+++ b/test/files/neg/t7369.scala
@@ -0,0 +1,43 @@
+object Test {
+ val X, Y = true
+ (null: Tuple1[Boolean]) match {
+ case Tuple1(X) =>
+ case Tuple1(Y) =>
+ case Tuple1(X) => // unreachable
+ case _ =>
+ }
+
+ (null: Tuple1[Boolean]) match {
+ case Tuple1(true) =>
+ case Tuple1(false) =>
+ case Tuple1(true) => // unreachable
+ case _ =>
+ }
+}
+
+
+sealed abstract class B;
+case object True extends B;
+case object False extends B;
+
+object Test2 {
+
+ val X: B = True
+ val Y: B = False
+
+ (null: Tuple1[B]) match {
+ case Tuple1(X) =>
+ case Tuple1(Y) =>
+ case Tuple1(X) => // unreachable
+ case _ =>
+ }
+}
+
+object Test3 {
+ (null: Tuple1[B]) match {
+ case Tuple1(null) =>
+ case Tuple1(True) =>
+ case Tuple1(null) => // unreachable
+ case _ =>
+ }
+}
diff --git a/test/files/neg/t7385.check b/test/files/neg/t7385.check
new file mode 100644
index 0000000000..70d3c3fb61
--- /dev/null
+++ b/test/files/neg/t7385.check
@@ -0,0 +1,10 @@
+t7385.scala:2: error: '(' expected but identifier found.
+ do { println("bippy") } while i<10
+ ^
+t7385.scala:6: error: '(' expected but identifier found.
+ while i<10 { () }
+ ^
+t7385.scala:7: error: illegal start of simple expression
+}
+^
+three errors found
diff --git a/test/files/neg/t7385.scala b/test/files/neg/t7385.scala
new file mode 100644
index 0000000000..a7f801098b
--- /dev/null
+++ b/test/files/neg/t7385.scala
@@ -0,0 +1,7 @@
+object Bar {
+ do { println("bippy") } while i<10
+}
+
+object Foo {
+ while i<10 { () }
+}
diff --git a/test/files/neg/t7388.check b/test/files/neg/t7388.check
new file mode 100644
index 0000000000..0a29e04896
--- /dev/null
+++ b/test/files/neg/t7388.check
@@ -0,0 +1,4 @@
+t7388.scala:1: error: doesnotexist is not an enclosing class
+class Test private[doesnotexist]()
+ ^
+one error found
diff --git a/test/files/neg/t7388.scala b/test/files/neg/t7388.scala
new file mode 100644
index 0000000000..9ce9ea11b3
--- /dev/null
+++ b/test/files/neg/t7388.scala
@@ -0,0 +1 @@
+class Test private[doesnotexist]()
diff --git a/test/files/neg/t7473.check b/test/files/neg/t7473.check
new file mode 100644
index 0000000000..bc8c29d463
--- /dev/null
+++ b/test/files/neg/t7473.check
@@ -0,0 +1,7 @@
+t7473.scala:6: error: '<-' expected but '=' found.
+ (for (x = Option(i); if x == j) yield 42) toList
+ ^
+t7473.scala:6: error: illegal start of simple expression
+ (for (x = Option(i); if x == j) yield 42) toList
+ ^
+two errors found
diff --git a/test/files/neg/t7473.scala b/test/files/neg/t7473.scala
new file mode 100644
index 0000000000..593231d5f2
--- /dev/null
+++ b/test/files/neg/t7473.scala
@@ -0,0 +1,7 @@
+
+object Foo {
+ val i,j = 3
+ //for (x = Option(i); if x == j) yield 42 //t7473.scala:4: error: '<-' expected but '=' found.
+ // evil postfix!
+ (for (x = Option(i); if x == j) yield 42) toList
+}
diff --git a/test/pending/neg/plugin-after-terminal.check b/test/files/neg/t7494-after-terminal.check
index 096efe09cd..096efe09cd 100644
--- a/test/pending/neg/plugin-after-terminal.check
+++ b/test/files/neg/t7494-after-terminal.check
diff --git a/test/pending/neg/plugin-after-terminal/src/ThePlugin.scala b/test/files/neg/t7494-after-terminal/ThePlugin.scala
index 2a4607392f..2a4607392f 100644
--- a/test/pending/neg/plugin-after-terminal/src/ThePlugin.scala
+++ b/test/files/neg/t7494-after-terminal/ThePlugin.scala
diff --git a/test/files/neg/t7494-after-terminal/sample_2.flags b/test/files/neg/t7494-after-terminal/sample_2.flags
new file mode 100644
index 0000000000..b8a476e361
--- /dev/null
+++ b/test/files/neg/t7494-after-terminal/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xplugin-require:afterterminal
diff --git a/test/files/neg/t7494-after-terminal/sample_2.scala b/test/files/neg/t7494-after-terminal/sample_2.scala
new file mode 100644
index 0000000000..73cdc64e40
--- /dev/null
+++ b/test/files/neg/t7494-after-terminal/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/pending/neg/plugin-before-parser/misc/scalac-plugin.xml b/test/files/neg/t7494-after-terminal/scalac-plugin.xml
index 90ff27dc2a..2558d6fd03 100644
--- a/test/pending/neg/plugin-before-parser/misc/scalac-plugin.xml
+++ b/test/files/neg/t7494-after-terminal/scalac-plugin.xml
@@ -1,5 +1,5 @@
<plugin>
- <name>beforeparser</name>
+ <name>ignored</name>
<classname>scala.test.plugins.ThePlugin</classname>
</plugin>
diff --git a/test/pending/neg/plugin-before-parser.check b/test/files/neg/t7494-before-parser.check
index 9a407923b1..9a407923b1 100644
--- a/test/pending/neg/plugin-before-parser.check
+++ b/test/files/neg/t7494-before-parser.check
diff --git a/test/pending/neg/plugin-before-parser/src/ThePlugin.scala b/test/files/neg/t7494-before-parser/ThePlugin.scala
index 7ca896650d..7ca896650d 100644
--- a/test/pending/neg/plugin-before-parser/src/ThePlugin.scala
+++ b/test/files/neg/t7494-before-parser/ThePlugin.scala
diff --git a/test/files/neg/t7494-before-parser/sample_2.flags b/test/files/neg/t7494-before-parser/sample_2.flags
new file mode 100644
index 0000000000..0c92fc8a06
--- /dev/null
+++ b/test/files/neg/t7494-before-parser/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xplugin-require:beforeparser
diff --git a/test/files/neg/t7494-before-parser/sample_2.scala b/test/files/neg/t7494-before-parser/sample_2.scala
new file mode 100644
index 0000000000..73cdc64e40
--- /dev/null
+++ b/test/files/neg/t7494-before-parser/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/pending/neg/plugin-after-terminal/misc/scalac-plugin.xml b/test/files/neg/t7494-before-parser/scalac-plugin.xml
index 90ff27dc2a..90ff27dc2a 100644
--- a/test/pending/neg/plugin-after-terminal/misc/scalac-plugin.xml
+++ b/test/files/neg/t7494-before-parser/scalac-plugin.xml
diff --git a/test/files/neg/t7494-cyclic-dependency.check b/test/files/neg/t7494-cyclic-dependency.check
new file mode 100644
index 0000000000..205387c3dd
--- /dev/null
+++ b/test/files/neg/t7494-cyclic-dependency.check
@@ -0,0 +1 @@
+error: Cycle in compiler phase dependencies detected, phase cyclicdependency2 reacted twice!
diff --git a/test/pending/neg/plugin-multiple-rafter.check b/test/files/neg/t7494-multi-right-after.check
index c54f884feb..9c6fdbe91f 100644
--- a/test/pending/neg/plugin-multiple-rafter.check
+++ b/test/files/neg/t7494-multi-right-after.check
@@ -1,4 +1,3 @@
-error: fatal error: Multiple phases want to run right after the phase explicitouter
+error: Multiple phases want to run right after the phase explicitouter
Phases: erasure, multi-rafter,
Re-run with -Xgenerate-phase-graph <filename> to better see the problem.
-one error found
diff --git a/test/pending/neg/plugin-multiple-rafter/src/ThePlugin.scala b/test/files/neg/t7494-multi-right-after/ThePlugin.scala
index 819176fa88..819176fa88 100644
--- a/test/pending/neg/plugin-multiple-rafter/src/ThePlugin.scala
+++ b/test/files/neg/t7494-multi-right-after/ThePlugin.scala
diff --git a/test/files/neg/t7494-multi-right-after/sample_2.flags b/test/files/neg/t7494-multi-right-after/sample_2.flags
new file mode 100644
index 0000000000..9273fb98d7
--- /dev/null
+++ b/test/files/neg/t7494-multi-right-after/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xplugin-require:multi-rafter
diff --git a/test/files/neg/t7494-multi-right-after/sample_2.scala b/test/files/neg/t7494-multi-right-after/sample_2.scala
new file mode 100644
index 0000000000..73cdc64e40
--- /dev/null
+++ b/test/files/neg/t7494-multi-right-after/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/pending/neg/plugin-cyclic-dependency/misc/scalac-plugin.xml b/test/files/neg/t7494-multi-right-after/scalac-plugin.xml
index 90ff27dc2a..2558d6fd03 100644
--- a/test/pending/neg/plugin-cyclic-dependency/misc/scalac-plugin.xml
+++ b/test/files/neg/t7494-multi-right-after/scalac-plugin.xml
@@ -1,5 +1,5 @@
<plugin>
- <name>beforeparser</name>
+ <name>ignored</name>
<classname>scala.test.plugins.ThePlugin</classname>
</plugin>
diff --git a/test/files/neg/t7494-no-options.check b/test/files/neg/t7494-no-options.check
new file mode 100644
index 0000000000..c197d2a671
--- /dev/null
+++ b/test/files/neg/t7494-no-options.check
@@ -0,0 +1,33 @@
+error: Error: ploogin has no options
+ phase name id description
+ ---------- -- -----------
+ parser 1 parse source into ASTs, perform simple desugaring
+ namer 2 resolve names, attach symbols to named trees
+packageobjects 3 load package objects
+ typer 4 the meat and potatoes: type the trees
+ patmat 5 translate match expressions
+superaccessors 6 add super accessors in traits and nested classes
+ extmethods 7 add extension methods for inline classes
+ pickler 8 serialize symbol tables
+ refchecks 9 reference/override checking, translate nested objects
+ uncurry 10 uncurry, translate function values to anonymous classes
+ tailcalls 11 replace tail calls by jumps
+ specialize 12 @specialized-driven class and method specialization
+ explicitouter 13 this refs to outer pointers, translate patterns
+ erasure 14 erase types, add interfaces for traits
+ posterasure 15 clean up erased inline classes
+ lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs
+ lambdalift 17 move nested functions to top level
+ constructors 18 move field definitions into constructors
+ flatten 19 eliminate inner classes
+ mixin 20 mixin composition
+ cleanup 21 platform-specific cleanups, generate reflective calls
+ icode 22 generate portable intermediate code
+ inliner 23 optimization: do inlining
+inlinehandlers 24 optimization: inline exception handlers
+ closelim 25 optimization: eliminate uncalled closures
+ constopt 26 optimization: optimize null and other constants
+ dce 27 optimization: eliminate dead code
+ jvm 28 generate JVM bytecode
+ ploogin 29 A sample phase that does so many things it's kind of hard...
+ terminal 30 The last phase in the compiler chain
diff --git a/test/files/neg/t7494-no-options/ploogin_1.scala b/test/files/neg/t7494-no-options/ploogin_1.scala
new file mode 100644
index 0000000000..ed6adfc1cf
--- /dev/null
+++ b/test/files/neg/t7494-no-options/ploogin_1.scala
@@ -0,0 +1,31 @@
+
+package t6446
+
+import scala.tools.nsc.{ Global, Phase }
+import scala.tools.nsc.plugins.{ Plugin, PluginComponent }
+import scala.reflect.io.Path
+import scala.reflect.io.File
+
+/** A test plugin. */
+class Ploogin(val global: Global) extends Plugin {
+ import global._
+
+ val name = "ploogin"
+ val description = "A sample plugin for testing."
+ val components = List[PluginComponent](TestComponent)
+
+ private object TestComponent extends PluginComponent {
+ val global: Ploogin.this.global.type = Ploogin.this.global
+ //override val runsBefore = List("refchecks")
+ val runsAfter = List("jvm")
+ val phaseName = Ploogin.this.name
+ override def description = "A sample phase that does so many things it's kind of hard to describe briefly."
+ def newPhase(prev: Phase) = new TestPhase(prev)
+ class TestPhase(prev: Phase) extends StdPhase(prev) {
+ override def description = TestComponent.this.description
+ def apply(unit: CompilationUnit) {
+ // kewl kode
+ }
+ }
+ }
+}
diff --git a/test/files/neg/t7494-no-options/sample_2.flags b/test/files/neg/t7494-no-options/sample_2.flags
new file mode 100644
index 0000000000..7f0f7afe48
--- /dev/null
+++ b/test/files/neg/t7494-no-options/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xshow-phases -P:ploogin:inploog
diff --git a/test/files/neg/t7494-no-options/sample_2.scala b/test/files/neg/t7494-no-options/sample_2.scala
new file mode 100644
index 0000000000..73cdc64e40
--- /dev/null
+++ b/test/files/neg/t7494-no-options/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/files/neg/t7494-no-options/scalac-plugin.xml b/test/files/neg/t7494-no-options/scalac-plugin.xml
new file mode 100644
index 0000000000..e849bb5919
--- /dev/null
+++ b/test/files/neg/t7494-no-options/scalac-plugin.xml
@@ -0,0 +1,4 @@
+<plugin>
+<name>sample-plugin</name>
+<classname>t6446.Ploogin</classname>
+</plugin>
diff --git a/test/files/neg/t7494-right-after-before.check b/test/files/neg/t7494-right-after-before.check
new file mode 100644
index 0000000000..9b57a7711b
--- /dev/null
+++ b/test/files/neg/t7494-right-after-before.check
@@ -0,0 +1 @@
+error: phase erasure want to run right after explicitouter, but some phase has declared to run before erasure. Re-run with -Xgenerate-phase-graph <filename> to better see the problem.
diff --git a/test/pending/neg/plugin-rafter-before-1/src/ThePlugin.scala b/test/files/neg/t7494-right-after-before/ThePlugin.scala
index 81ba85ae80..81ba85ae80 100644
--- a/test/pending/neg/plugin-rafter-before-1/src/ThePlugin.scala
+++ b/test/files/neg/t7494-right-after-before/ThePlugin.scala
diff --git a/test/files/neg/t7494-right-after-before/sample_2.flags b/test/files/neg/t7494-right-after-before/sample_2.flags
new file mode 100644
index 0000000000..97d0f5b5f6
--- /dev/null
+++ b/test/files/neg/t7494-right-after-before/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xplugin-require:rafter-before-1
diff --git a/test/files/neg/t7494-right-after-before/sample_2.scala b/test/files/neg/t7494-right-after-before/sample_2.scala
new file mode 100644
index 0000000000..73cdc64e40
--- /dev/null
+++ b/test/files/neg/t7494-right-after-before/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/pending/neg/plugin-multiple-rafter/misc/scalac-plugin.xml b/test/files/neg/t7494-right-after-before/scalac-plugin.xml
index 90ff27dc2a..2558d6fd03 100644
--- a/test/pending/neg/plugin-multiple-rafter/misc/scalac-plugin.xml
+++ b/test/files/neg/t7494-right-after-before/scalac-plugin.xml
@@ -1,5 +1,5 @@
<plugin>
- <name>beforeparser</name>
+ <name>ignored</name>
<classname>scala.test.plugins.ThePlugin</classname>
</plugin>
diff --git a/test/pending/neg/plugin-rightafter-terminal.check b/test/files/neg/t7494-right-after-terminal.check
index 6fe4f63c82..6fe4f63c82 100644
--- a/test/pending/neg/plugin-rightafter-terminal.check
+++ b/test/files/neg/t7494-right-after-terminal.check
diff --git a/test/pending/neg/plugin-rightafter-terminal/src/ThePlugin.scala b/test/files/neg/t7494-right-after-terminal/ThePlugin.scala
index 9d6d30b327..9d6d30b327 100644
--- a/test/pending/neg/plugin-rightafter-terminal/src/ThePlugin.scala
+++ b/test/files/neg/t7494-right-after-terminal/ThePlugin.scala
diff --git a/test/files/neg/t7494-right-after-terminal/sample_2.flags b/test/files/neg/t7494-right-after-terminal/sample_2.flags
new file mode 100644
index 0000000000..da046ba5f1
--- /dev/null
+++ b/test/files/neg/t7494-right-after-terminal/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xplugin-require:rightafterterminal
diff --git a/test/files/neg/t7494-right-after-terminal/sample_2.scala b/test/files/neg/t7494-right-after-terminal/sample_2.scala
new file mode 100644
index 0000000000..73cdc64e40
--- /dev/null
+++ b/test/files/neg/t7494-right-after-terminal/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/pending/neg/plugin-rafter-before-1/misc/scalac-plugin.xml b/test/files/neg/t7494-right-after-terminal/scalac-plugin.xml
index 90ff27dc2a..2558d6fd03 100644
--- a/test/pending/neg/plugin-rafter-before-1/misc/scalac-plugin.xml
+++ b/test/files/neg/t7494-right-after-terminal/scalac-plugin.xml
@@ -1,5 +1,5 @@
<plugin>
- <name>beforeparser</name>
+ <name>ignored</name>
<classname>scala.test.plugins.ThePlugin</classname>
</plugin>
diff --git a/test/files/neg/t7507.check b/test/files/neg/t7507.check
new file mode 100644
index 0000000000..d402869fd4
--- /dev/null
+++ b/test/files/neg/t7507.check
@@ -0,0 +1,4 @@
+t7507.scala:6: error: value bippy in trait Cake cannot be accessed in Cake
+ locally(bippy)
+ ^
+one error found
diff --git a/test/files/neg/t7507.scala b/test/files/neg/t7507.scala
new file mode 100644
index 0000000000..1b4756d955
--- /dev/null
+++ b/test/files/neg/t7507.scala
@@ -0,0 +1,7 @@
+trait Cake extends Slice {
+ private[this] val bippy = ()
+}
+
+trait Slice { self: Cake =>
+ locally(bippy)
+}
diff --git a/test/files/neg/t7509.check b/test/files/neg/t7509.check
new file mode 100644
index 0000000000..eaa6303cf5
--- /dev/null
+++ b/test/files/neg/t7509.check
@@ -0,0 +1,12 @@
+t7509.scala:3: error: inferred type arguments [Int] do not conform to method crash's type parameter bounds [R <: AnyRef]
+ crash(42)
+ ^
+t7509.scala:3: error: type mismatch;
+ found : Int(42)
+ required: R
+ crash(42)
+ ^
+t7509.scala:3: error: could not find implicit value for parameter ev: R
+ crash(42)
+ ^
+three errors found
diff --git a/test/files/neg/t7509.scala b/test/files/neg/t7509.scala
new file mode 100644
index 0000000000..3cba801ea7
--- /dev/null
+++ b/test/files/neg/t7509.scala
@@ -0,0 +1,4 @@
+object NMWE {
+ def crash[R <: AnyRef](f: R)(implicit ev: R): Any = ???
+ crash(42)
+}
diff --git a/test/files/neg/t7519.check b/test/files/neg/t7519.check
new file mode 100644
index 0000000000..164d67f595
--- /dev/null
+++ b/test/files/neg/t7519.check
@@ -0,0 +1,7 @@
+t7519.scala:5: error: could not find implicit value for parameter nada: Nothing
+ locally(0 : String) // was: "value conversion is not a member of C.this.C"
+ ^
+t7519.scala:15: error: could not find implicit value for parameter nada: Nothing
+ locally(0 : String) // was: "value conversion is not a member of U"
+ ^
+two errors found
diff --git a/test/files/neg/t7519.scala b/test/files/neg/t7519.scala
new file mode 100644
index 0000000000..aea0f35d8e
--- /dev/null
+++ b/test/files/neg/t7519.scala
@@ -0,0 +1,18 @@
+class C {
+ implicit def conversion(m: Int)(implicit nada: Nothing): String = ???
+
+ class C { // rename class to get correct error, can't find implicit: Nothing.
+ locally(0 : String) // was: "value conversion is not a member of C.this.C"
+ }
+}
+
+object Test2 {
+ trait T; trait U
+ new T {
+ implicit def conversion(m: Int)(implicit nada: Nothing): String = ???
+
+ new U { // nested anonymous classes also share a name.
+ locally(0 : String) // was: "value conversion is not a member of U"
+ }
+ }
+}
diff --git a/test/files/neg/tailrec-2.check b/test/files/neg/tailrec-2.check
index d3432a7e76..1daad6922e 100644
--- a/test/files/neg/tailrec-2.check
+++ b/test/files/neg/tailrec-2.check
@@ -1,4 +1,4 @@
-tailrec-2.scala:8: error: could not optimize @tailrec annotated method f: it contains a recursive call targeting supertype Super[A]
+tailrec-2.scala:8: error: could not optimize @tailrec annotated method f: it contains a recursive call targeting a supertype
@annotation.tailrec final def f[B >: A](mem: List[B]): List[B] = (null: Super[A]).f(mem)
^
tailrec-2.scala:9: error: @tailrec annotated method contains no recursive calls
diff --git a/test/files/neg/unicode-unterminated-quote.check b/test/files/neg/unicode-unterminated-quote.check
index 5085505fb4..166488710b 100644
--- a/test/files/neg/unicode-unterminated-quote.check
+++ b/test/files/neg/unicode-unterminated-quote.check
@@ -1,7 +1,7 @@
unicode-unterminated-quote.scala:2: error: unclosed string literal
- val x = /u0022
+ val x = \u0022
^
unicode-unterminated-quote.scala:2: error: '}' expected but eof found.
- val x = /u0022
+ val x = \u0022
^
two errors found
diff --git a/test/files/neg/volatile_no_override.check b/test/files/neg/volatile_no_override.check
new file mode 100644
index 0000000000..a9a60ab697
--- /dev/null
+++ b/test/files/neg/volatile_no_override.check
@@ -0,0 +1,5 @@
+volatile_no_override.scala:13: error: overriding value x in class A of type Volatile.this.D;
+ value x has a volatile type; cannot override a member with non-volatile type
+ val x: A with D = null
+ ^
+one error found
diff --git a/test/files/neg/volatile_no_override.scala b/test/files/neg/volatile_no_override.scala
new file mode 100644
index 0000000000..9fad082a90
--- /dev/null
+++ b/test/files/neg/volatile_no_override.scala
@@ -0,0 +1,14 @@
+class B
+class C(x: String) extends B
+
+abstract class A {
+ class D { type T >: C <: B }
+ val x: D
+ var y: x.T = new C("abc")
+}
+
+class Volatile extends A {
+ type A >: Null
+ // test (1.4), pt 2 in RefChecks
+ val x: A with D = null
+}
diff --git a/test/files/pos/lub-dealias-widen.scala b/test/files/pos/lub-dealias-widen.scala
new file mode 100644
index 0000000000..38854fbc5c
--- /dev/null
+++ b/test/files/pos/lub-dealias-widen.scala
@@ -0,0 +1,34 @@
+import scala.language.higherKinds
+
+sealed trait Path {
+ type EncodeFunc
+ type Route[R] = List[String] => R
+
+ def >>(f: Route[Int]): Sitelet[EncodeFunc] = ???
+}
+
+case object PAny extends Path {
+ type EncodeFunc = List[String] => String
+}
+
+case class PLit[Next <: Path]() extends Path {
+ type EncodeFunc = Next#EncodeFunc
+}
+
+trait Sitelet[EncodeFunc] { self =>
+ def &[G <: H, H >: EncodeFunc](that: Sitelet[G]): Sitelet[H] = ???
+}
+
+object Test {
+ val r: Sitelet[Int => (Int => String)] = ???
+
+ val p2: PLit[PAny.type] = ???
+ val r2 /*: Sitelet[List[String] => String] */ // annotate type and it compiles with 2.10.0
+ = p2 >> { (xs: List[String]) => 0 }
+
+ // This works after https://github.com/scala/scala/commit/a06d31f6a
+ // Before: error: inferred type arguments [List[String] => String,List[String] => String]
+ // do not conform to method &'s type parameter bounds
+ // [G <: H,H >: Int => (Int => String)]
+ val s = r & r2
+} \ No newline at end of file
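As the comment above notes, annotating r2's type makes this compile even on 2.10.0. A minimal sketch of that workaround, reusing the Path/Sitelet definitions from this file (the object name Workaround is illustrative only):

object Workaround {
  val r: Sitelet[Int => (Int => String)] = ???
  val p2: PLit[PAny.type] = ???

  // Explicit annotation: r2 gets the intended type up front instead of an
  // inferred one, so the subsequent `&` call type-checks on 2.10.0 as well.
  val r2: Sitelet[List[String] => String] = p2 >> { (xs: List[String]) => 0 }

  val s = r & r2
}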
diff --git a/test/files/run/macro-expand-tparams-bounds-a.check b/test/files/pos/macro-qmarkqmarkqmark.check
index e69de29bb2..e69de29bb2 100644
--- a/test/files/run/macro-expand-tparams-bounds-a.check
+++ b/test/files/pos/macro-qmarkqmarkqmark.check
diff --git a/test/files/pos/macro-qmarkqmarkqmark.scala b/test/files/pos/macro-qmarkqmarkqmark.scala
new file mode 100644
index 0000000000..a91e4320b6
--- /dev/null
+++ b/test/files/pos/macro-qmarkqmarkqmark.scala
@@ -0,0 +1,7 @@
+import language.experimental.macros
+
+object Macros {
+ def foo1 = macro ???
+ def foo2(x: Int) = macro ???
+ def foo3[T] = macro ???
+} \ No newline at end of file
diff --git a/test/files/pos/spec-t6286.scala b/test/files/pos/spec-t6286.scala
new file mode 100755
index 0000000000..4d87998ec6
--- /dev/null
+++ b/test/files/pos/spec-t6286.scala
@@ -0,0 +1,10 @@
+trait Foo[@specialized(Int) A] {
+ def fun[@specialized(Int) B](init: B)(f: (B, A) => B): B
+}
+
+class Bar(values: Array[Int]) extends Foo[Int] {
+ def fun[@specialized(Int) C](init: C)(f: (C, Int) => C): C = {
+ val arr = values
+ f(init, arr(0))
+ }
+}
diff --git a/test/files/pos/t1648.scala b/test/files/pos/t1648.scala
deleted file mode 100644
index 6d53ce11ee..0000000000
--- a/test/files/pos/t1648.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test {
- class MyClass extends scala.util.logging.Logged { }
- val x = new MyClass with scala.util.logging.ConsoleLogger
-}
diff --git a/test/files/pos/t1786.scala b/test/files/pos/t1786.scala
new file mode 100644
index 0000000000..32d6c06f6e
--- /dev/null
+++ b/test/files/pos/t1786.scala
@@ -0,0 +1,19 @@
+class SomeClass(val intValue:Int)
+class MyClass[T <: SomeClass](val myValue:T)
+class Flooz[A >: Null <: SomeClass, T >: Null <: A](var value: T)
+
+class A {
+ def f1(i:MyClass[_]) = i.myValue.intValue
+ def f2(i:MyClass[_ <: SomeClass]) = i.myValue.intValue
+ // def f3[T](i: MyClass[T]) = i.myValue.intValue
+ def f4[T <: SomeClass](i: MyClass[T]) = i.myValue.intValue
+ // def f5[T >: Null](i: MyClass[T]) = i.myValue.intValue
+ // def f6[T >: Null <: String](i: MyClass[T]) = i.myValue.intValue + i.myValue.charAt(0)
+
+ // def g1[A, T](x: Flooz[A, T]) = { x.value = null ; x.value.intValue }
+ def g2(x: Flooz[_, _]) = { x.value = null ; x.value.intValue }
+
+ class MyClass2(x: MyClass[_]) { val p = x.myValue.intValue }
+ // class MyClass3[T <: String](x: MyClass[T]) { val p = x.myValue.intValue + x.myValue.length }
+ // class MyClass4[T >: Null](x: MyClass[T]) { val p = x.myValue.intValue }
+}
diff --git a/test/files/pos/t2613.scala b/test/files/pos/t2613.scala
new file mode 100644
index 0000000000..3a64dbc282
--- /dev/null
+++ b/test/files/pos/t2613.scala
@@ -0,0 +1,11 @@
+import language.existentials
+
+object Test {
+ class Row
+
+ abstract class MyRelation [R <: Row, +Relation <: MyRelation[R, Relation]]
+
+ type M = MyRelation[R, Relation] forSome {type R <: Row; type Relation <: MyRelation[R, Relation]}
+
+ var (x,y): (String, M) = null
+}
diff --git a/test/files/pos/t3688-redux.scala b/test/files/pos/t3688-redux.scala
deleted file mode 100644
index e601cf240c..0000000000
--- a/test/files/pos/t3688-redux.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-import collection.JavaConverters._
-import java.{ util => ju }
-import scala.collection.{ mutable, immutable }
-
-object Test {
- def m[P <% AsJava[ju.List[Int]]](l: P) = 1
- m(List(1))
-} \ No newline at end of file
diff --git a/test/files/pos/t3943/Client_2.scala b/test/files/pos/t3943/Client_2.scala
new file mode 100644
index 0000000000..650ac9b7a9
--- /dev/null
+++ b/test/files/pos/t3943/Client_2.scala
@@ -0,0 +1,7 @@
+object Test {
+ val x: Child = new Child
+ x.getInner.foo("meh")
+// error: type mismatch;
+// found : java.lang.String("meh")
+// required: E
+}
diff --git a/test/files/pos/t3943/Outer_1.java b/test/files/pos/t3943/Outer_1.java
new file mode 100644
index 0000000000..1d38c5e76b
--- /dev/null
+++ b/test/files/pos/t3943/Outer_1.java
@@ -0,0 +1,14 @@
+class Outer<E> {
+ abstract class Inner {
+ abstract public void foo(E e);
+ }
+}
+
+class Child extends Outer<String> {
+ // the implicit prefix for Inner is Outer<E> instead of Outer<String>
+ public Inner getInner() {
+ return new Inner() {
+ public void foo(String e) { System.out.println("meh "+e); }
+ };
+ }
+}
diff --git a/test/files/pos/t4365/a_1.scala b/test/files/pos/t4365/a_1.scala
new file mode 100644
index 0000000000..6f3405b1ff
--- /dev/null
+++ b/test/files/pos/t4365/a_1.scala
@@ -0,0 +1,18 @@
+import scala.collection._
+
+trait SeqViewLike[+A,
+ +Coll,
+ +This <: SeqView[A, Coll] with SeqViewLike[A, Coll, This]]
+ extends Seq[A] with GenSeqViewLike[A, Coll, This]
+{
+
+ trait Transformed[+B] extends super[GenSeqViewLike].Transformed[B]
+
+ abstract class AbstractTransformed[+B] extends Seq[B] with Transformed[B] {
+ def underlying: Coll = error("")
+ }
+
+ trait Reversed extends Transformed[A] with super[GenSeqViewLike].Reversed
+
+ protected def newReversed: Transformed[A] = new AbstractTransformed[A] with Reversed
+}
diff --git a/test/files/pos/t4365/b_1.scala b/test/files/pos/t4365/b_1.scala
new file mode 100644
index 0000000000..e5b5687185
--- /dev/null
+++ b/test/files/pos/t4365/b_1.scala
@@ -0,0 +1,22 @@
+import scala.collection._
+
+trait GenSeqViewLike[+A,
+ +Coll,
+ +This <: GenSeqView[A, Coll] with GenSeqViewLike[A, Coll, This]]
+extends GenSeq[A] {
+self =>
+
+ trait Transformed[+B] {
+ def length: Int = 0
+ def apply(idx: Int): B = error("")
+ }
+
+ trait Reversed extends Transformed[A] {
+ def iterator: Iterator[A] = createReversedIterator
+
+ private def createReversedIterator: Iterator[A] = {
+ self.foreach(_ => ())
+ null
+ }
+ }
+}
diff --git a/test/files/pos/t5022.scala b/test/files/pos/t5022.scala
new file mode 100644
index 0000000000..b9a085fb35
--- /dev/null
+++ b/test/files/pos/t5022.scala
@@ -0,0 +1,22 @@
+class ForSomeVsUnapply {
+ def test {
+ def makeWrap: Wrap = ???
+ def useRep[e](rep: (e, X[e])) = ()
+
+ val repUnapply = Wrap.unapply(makeWrap).get
+ useRep(repUnapply) // okay
+
+ val Wrap(rep0) = makeWrap
+ useRep(rep0) // error
+
+ val rep = makeWrap match {
+ case Wrap(r) => r
+ };
+
+ useRep(rep) // error
+ }
+}
+
+class X[e]
+
+case class Wrap(rep: (e, X[e]) forSome { type e })
diff --git a/test/pending/pos/t5459.scala b/test/files/pos/t5459.scala
index 971e6f896d..971e6f896d 100644
--- a/test/pending/pos/t5459.scala
+++ b/test/files/pos/t5459.scala
diff --git a/test/files/run/macro-expand-tparams-bounds-b.check b/test/files/pos/t5692c.check
index e69de29bb2..e69de29bb2 100644
--- a/test/files/run/macro-expand-tparams-bounds-b.check
+++ b/test/files/pos/t5692c.check
diff --git a/test/files/pos/t5692c.scala b/test/files/pos/t5692c.scala
new file mode 100644
index 0000000000..fa5f0b2dcd
--- /dev/null
+++ b/test/files/pos/t5692c.scala
@@ -0,0 +1,4 @@
+class C {
+ def foo[T: scala.reflect.ClassTag](xs: T*): Array[T] = ???
+ foo()
+} \ No newline at end of file
diff --git a/test/files/pos/t5886.scala b/test/files/pos/t5886.scala
new file mode 100644
index 0000000000..066187322d
--- /dev/null
+++ b/test/files/pos/t5886.scala
@@ -0,0 +1,18 @@
+object A {
+ def f0[T](x: T): T = x
+ def f1[T](x: => T): T = x
+ def f2[T](x: () => T): T = x()
+
+ f0(this.getClass) // ok
+ f1(this.getClass)
+ f2(this.getClass) // ok
+
+ // a.scala:7: error: type mismatch;
+ // found : Class[_ <: A.type]
+ // required: Class[?0(in value x1)] where type ?0(in value x1) <: A.type
+ // Note: A.type >: ?0, but Java-defined class Class is invariant in type T.
+ // You may wish to investigate a wildcard type such as `_ >: ?0`. (SLS 3.2.10)
+ // val x1 = f1(this.getClass)
+ // ^
+ // one error found
+}
diff --git a/test/files/pos/t6091.flags b/test/files/pos/t6091.flags
new file mode 100644
index 0000000000..954eaba352
--- /dev/null
+++ b/test/files/pos/t6091.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Xlint
diff --git a/test/files/pos/t6091.scala b/test/files/pos/t6091.scala
new file mode 100644
index 0000000000..72e663ec3b
--- /dev/null
+++ b/test/files/pos/t6091.scala
@@ -0,0 +1,10 @@
+object Foo { def eq(x:Int) = x }
+
+class X { def ==(other: String) = true }
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ Foo eq 1
+ new X == null
+ }
+}
diff --git a/test/files/pos/t6162-inheritance.flags b/test/files/pos/t6162-inheritance.flags
new file mode 100644
index 0000000000..c6bfaf1f64
--- /dev/null
+++ b/test/files/pos/t6162-inheritance.flags
@@ -0,0 +1 @@
+-deprecation -Xfatal-warnings
diff --git a/test/files/neg/t6162-inheritance.scala b/test/files/pos/t6162-inheritance.scala
index 7b47b9285a..fca751edab 100644
--- a/test/files/neg/t6162-inheritance.scala
+++ b/test/files/pos/t6162-inheritance.scala
@@ -1,5 +1,8 @@
package scala.t6126
+// Don't warn about inheritance in the same file.
+// We might use that as a prelude to sealing a class.
+
@deprecatedInheritance("`Foo` will be made final in a future version.", "2.10.0")
class Foo
diff --git a/test/files/pos/t6386.scala b/test/files/pos/t6386.scala
new file mode 100644
index 0000000000..85098a78f0
--- /dev/null
+++ b/test/files/pos/t6386.scala
@@ -0,0 +1,5 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ reify(manifest[Some[_]])
+} \ No newline at end of file
diff --git a/test/files/pos/t6675.flags b/test/files/pos/t6675.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t6675.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t6675.scala b/test/files/pos/t6675.scala
new file mode 100644
index 0000000000..f3bebea5be
--- /dev/null
+++ b/test/files/pos/t6675.scala
@@ -0,0 +1,20 @@
+object LeftOrRight {
+ def unapply[A](value: Either[A, A]): Option[A] = value match {
+ case scala.Left(x) => Some(x)
+ case scala.Right(x) => Some(x)
+ }
+}
+
+object Test {
+ (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match {
+ case LeftOrRight(pair @ (a, b)) => a // false -Xlint warning: "extractor pattern binds a single value to a Product2 of type (Int, Int)"
+ }
+
+ (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match {
+ case LeftOrRight((a, b)) => a // false -Xlint warning: "extractor pattern binds a single value to a Product2 of type (Int, Int)"
+ }
+
+ (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match {
+ case LeftOrRight(a, b) => a // false -Xlint warning: "extractor pattern binds a single value to a Product2 of type (Int, Int)"
+ }
+}
diff --git a/test/files/pos/t6771.flags b/test/files/pos/t6771.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t6771.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t6771.scala b/test/files/pos/t6771.scala
new file mode 100644
index 0000000000..0f0bd4e4a0
--- /dev/null
+++ b/test/files/pos/t6771.scala
@@ -0,0 +1,9 @@
+object Test {
+ type Id[X] = X
+ val a: Id[Option[Int]] = None
+
+ a match {
+ case Some(x) => println(x)
+ case None =>
+ }
+}
diff --git a/test/files/pos/t6815.scala b/test/files/pos/t6815.scala
new file mode 100644
index 0000000000..9244b3d353
--- /dev/null
+++ b/test/files/pos/t6815.scala
@@ -0,0 +1,17 @@
+trait U {
+ trait ValOrDefDefApi {
+ def name: Any
+ }
+ type ValOrDefDef <: ValOrDefDefApi
+ type ValDef <: ValOrDefDef with ValDefApi
+ trait ValDefApi extends ValOrDefDefApi { this: ValDef => }
+ val emptyValDef: ValDef // the result type is volatile
+}
+
+object Test {
+ val u: U = ???
+
+ u.emptyValDef match {
+ case u.emptyValDef => // but we shouldn't let that stop us from treating it as a stable identifier pattern.
+ }
+}
diff --git a/test/files/pos/t6815_import.scala b/test/files/pos/t6815_import.scala
new file mode 100644
index 0000000000..56f4358d59
--- /dev/null
+++ b/test/files/pos/t6815_import.scala
@@ -0,0 +1,16 @@
+trait U {
+ trait ValOrDefDefApi {
+ def name: Any
+ }
+ type ValOrDefDef <: ValOrDefDefApi
+ type ValDef <: ValOrDefDef with ValDefApi
+ trait ValDefApi extends ValOrDefDefApi { this: ValDef => }
+ val emptyValDef: ValDef // the result type is volatile
+}
+
+object Test {
+ val u: U = ???
+
+ // but we shouldn't let that stop us from treating it as a stable identifier for import
+ import u.emptyValDef.name
+}
diff --git a/test/files/pos/t7200b.scala b/test/files/pos/t7200b.scala
new file mode 100644
index 0000000000..9d579c6ef9
--- /dev/null
+++ b/test/files/pos/t7200b.scala
@@ -0,0 +1,50 @@
+import language.higherKinds
+
+trait T {
+ def t = 0
+}
+trait Foo {
+ def coflatMap[A <: T](f: A): A
+}
+
+object O extends Foo {
+ def coflatMap[A <: T](f: A) = {
+ val f2 = coflatMap(f) // inferred in 2.9.2 / 2.10.0 as [Nothing]
+ f2.t // so this doesn't type check.
+ f2
+ }
+}
+
+// Why? When a return type is inherited, the derived method
+// symbol first gets a preliminary type assigned, based on the
+// 1) method type of a unique matching super member
+// 2) viewed as a member type of the inheritor (to substitute,
+// e.g. class type parameters)
+// 3) substituted to replace the super-method's type parameters
+// with those of the inheritor
+// 4) dissected to take just the return type wrapped in thisMethodType().
+//
+// In Scala 2.10.0 and earlier, this preliminary method type
+//
+// 1) [A#11329 <: <empty>#3.this.T#7068](<param> f#11333: A#11329)A#11329
+// 2) [A#11329 <: <empty>#3.this.T#7068](<param> f#11333: A#11329)A#11329
+// 3) (<param> f#12556: A#11336)A#11336
+// 4) [A#11336 <: <empty>#3.this.T#7068](<param> f#12552: A#11337&0)A#11336
+//
+// The type #4 from the old version is problematic: the parameter is typed with
+// a skolem for the type parameter `A`. It won't be considered to match the
+// method it overrides; instead, they are seen as being overloaded, and type inference
+// goes awry (Nothing is inferred as the type argument for the recursive call
+// to coflatMap).
+//
+// The Namers patch adds one step here: it substitutes the type parameter symbols
+// for the skolems:
+//
+// https://github.com/scala/scala/commit/b74c33eb#L2R1014
+//
+// So we end up with a method symbol info:
+//
+// 5) [A#11336 <: <empty>#3.this.T#7068](<param> f#12505: A#11336)A#11336
+//
+// This *does* match the method in the super class, and type inference
+// chooses the correct type argument. \ No newline at end of file
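A companion sketch, not part of the test (T2, Foo2 and O2 are illustrative names): the problem only arises when the result type is inherited, so spelling it out on the overriding method sidesteps the machinery described above, and the recursive call infers A rather than Nothing even on 2.10.0.

trait T2 { def t = 0 }

trait Foo2 {
  def coflatMap[A <: T2](f: A): A
}

object O2 extends Foo2 {
  // Explicit result type: the method type is complete before the body is
  // typed, so the skolem/overload mismatch sketched above never occurs.
  def coflatMap[A <: T2](f: A): A = {
    val f2 = coflatMap(f) // inferred as coflatMap[A]
    f2.t
    f2
  }
}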
diff --git a/test/files/pos/t7264/A_1.scala b/test/files/pos/t7264/A_1.scala
new file mode 100644
index 0000000000..044d0110a2
--- /dev/null
+++ b/test/files/pos/t7264/A_1.scala
@@ -0,0 +1,11 @@
+object Foo {
+ object Values {
+ implicit def fromInt(x: Int): Values = ???
+ }
+ trait Values
+}
+final class Foo(name: String) {
+ def bar(values: Foo.Values): Bar = ???
+}
+
+trait Bar
diff --git a/test/files/pos/t7264/B_2.scala b/test/files/pos/t7264/B_2.scala
new file mode 100644
index 0000000000..869c51481d
--- /dev/null
+++ b/test/files/pos/t7264/B_2.scala
@@ -0,0 +1,7 @@
+object Test {
+ // if the following line is uncommented, things compile
+ // type X = Foo.Values
+
+
+ def foo(f: Foo) = f.bar(0 /* : Foo.Values */)
+}
diff --git a/test/files/pos/t7315.flags b/test/files/pos/t7315.flags
new file mode 100644
index 0000000000..d1b831ea87
--- /dev/null
+++ b/test/files/pos/t7315.flags
@@ -0,0 +1 @@
+-deprecation -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t7315.scala b/test/files/pos/t7315.scala
new file mode 100644
index 0000000000..0abcea2451
--- /dev/null
+++ b/test/files/pos/t7315.scala
@@ -0,0 +1,4 @@
+package scala.pack
+
+@deprecatedInheritance
+class C[@specialized A] \ No newline at end of file
diff --git a/test/files/pos/t7329.scala b/test/files/pos/t7329.scala
new file mode 100644
index 0000000000..76bf1fb9f5
--- /dev/null
+++ b/test/files/pos/t7329.scala
@@ -0,0 +1 @@
+class TwoParamSpecializedWithDefault[@specialized A, @specialized B](a: A, b: B = (??? : B)) \ No newline at end of file
diff --git a/test/files/pos/t7364/BadList.java b/test/files/pos/t7364/BadList.java
new file mode 100644
index 0000000000..2692fa085f
--- /dev/null
+++ b/test/files/pos/t7364/BadList.java
@@ -0,0 +1,3 @@
+public class BadList extends java.util.ArrayList {
+ public java.util.ArrayList foo() { return null; }
+}
diff --git a/test/files/pos/t7364/UseIt.scala b/test/files/pos/t7364/UseIt.scala
new file mode 100644
index 0000000000..3847165323
--- /dev/null
+++ b/test/files/pos/t7364/UseIt.scala
@@ -0,0 +1,4 @@
+class UseIt {
+ val list = new BadList
+ list.foo()
+}
diff --git a/test/files/pos/t7364b/BadList_1.java b/test/files/pos/t7364b/BadList_1.java
new file mode 100644
index 0000000000..fbb428adba
--- /dev/null
+++ b/test/files/pos/t7364b/BadList_1.java
@@ -0,0 +1,3 @@
+public class BadList_1 extends java.util.ArrayList {
+ public java.util.ArrayList foo() { return null; }
+}
diff --git a/test/files/pos/t7364b/UseIt_2.scala b/test/files/pos/t7364b/UseIt_2.scala
new file mode 100644
index 0000000000..06b50f6766
--- /dev/null
+++ b/test/files/pos/t7364b/UseIt_2.scala
@@ -0,0 +1,5 @@
+class UseIt {
+ val list = new BadList_1
+ list.foo()
+ list.set(0, list.get(0))
+}
diff --git a/test/files/pos/t7369.flags b/test/files/pos/t7369.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/pos/t7369.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/pos/t7369.scala b/test/files/pos/t7369.scala
new file mode 100644
index 0000000000..2f31c93d29
--- /dev/null
+++ b/test/files/pos/t7369.scala
@@ -0,0 +1,37 @@
+object Test {
+ val X, Y = true
+ (null: Tuple1[Boolean]) match {
+ case Tuple1(X) =>
+ case Tuple1(Y) => // unreachable
+ case _ =>
+ }
+}
+
+
+sealed abstract class B;
+case object True extends B;
+case object False extends B;
+
+object Test2 {
+
+ val X: B = True
+ val Y: B = False
+
+ (null: Tuple1[B]) match {
+ case Tuple1(X) =>
+ case Tuple1(Y) => // no warning
+ case _ =>
+ }
+}
+
+object Test3 {
+ val X, O = true
+ def classify(neighbourhood: (Boolean, Boolean, Boolean)): String = {
+ neighbourhood match {
+ case (X, X, X) => "middle"
+ case (X, X, O) => "right"
+ case (O, X, X) => "left"
+ case _ => throw new IllegalArgumentException("Invalid")
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t7377/Client_2.scala b/test/files/pos/t7377/Client_2.scala
new file mode 100644
index 0000000000..5728956cca
--- /dev/null
+++ b/test/files/pos/t7377/Client_2.scala
@@ -0,0 +1,11 @@
+object Test {
+ M.noop(List(1) match { case Nil => 0; case (x::xs) => x })
+
+ case class Foo(a: Int)
+ val FooAlias: Foo.type = Foo
+ M.noop(Foo(0) match { case FooAlias(_) => 0 })
+
+ case class Bar()
+ val BarAlias: Bar.type = Bar
+ M.noop(Bar() match { case BarAlias() => 0 })
+}
diff --git a/test/files/pos/t7377/Macro_1.scala b/test/files/pos/t7377/Macro_1.scala
new file mode 100644
index 0000000000..a0ec1d84af
--- /dev/null
+++ b/test/files/pos/t7377/Macro_1.scala
@@ -0,0 +1,7 @@
+import language.experimental._
+import reflect.macros.Context
+
+object M {
+ def noopImpl[A](c: Context)(expr: c.Expr[A]): c.Expr[A] = c.Expr(c.typeCheck(c.resetLocalAttrs(expr.tree)))
+ def noop[A](expr: A): A = macro noopImpl[A]
+}
diff --git a/test/files/pos/t7377b.scala b/test/files/pos/t7377b.scala
new file mode 100644
index 0000000000..aeee800d57
--- /dev/null
+++ b/test/files/pos/t7377b.scala
@@ -0,0 +1,13 @@
+object Test {
+ List(1) match { case Nil => 0; case (x::xs) => x }
+
+ case class Foo(a: Int)
+ val FooAlias: Foo.type = Foo
+ Foo(0) match { case FooAlias(_) => 0 }
+ Foo(0) match { case Foo(_) => 0 }
+
+ case class Bar()
+ val BarAlias: Bar.type = Bar
+ Bar() match { case BarAlias() => 0 }
+ Bar() match { case Bar() => 0 }
+}
diff --git a/test/files/pos/t7426.scala b/test/files/pos/t7426.scala
new file mode 100644
index 0000000000..8e42ad1812
--- /dev/null
+++ b/test/files/pos/t7426.scala
@@ -0,0 +1,3 @@
+class foo(x: Any) extends annotation.StaticAnnotation
+
+@foo(new AnyRef { }) trait A
diff --git a/test/files/pos/t7427.flags b/test/files/pos/t7427.flags
new file mode 100644
index 0000000000..9c7d6400fc
--- /dev/null
+++ b/test/files/pos/t7427.flags
@@ -0,0 +1 @@
+-Ydebug
diff --git a/test/files/pos/t7427.scala b/test/files/pos/t7427.scala
new file mode 100644
index 0000000000..cca52950d1
--- /dev/null
+++ b/test/files/pos/t7427.scala
@@ -0,0 +1,4 @@
+// Compiles with no options
+// Compiles with -Ydebug -Ydisable-unreachable-prevention
+// Crashes with -Ydebug
+trait Bippy { 3 match { case 3 => } }
diff --git a/test/files/pos/t7461.check b/test/files/pos/t7461.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/pos/t7461.check
diff --git a/test/files/pos/t7461/Macros_1.scala b/test/files/pos/t7461/Macros_1.scala
new file mode 100644
index 0000000000..353dec66d7
--- /dev/null
+++ b/test/files/pos/t7461/Macros_1.scala
@@ -0,0 +1,13 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+ val wut = c.typeCheck(Select(Literal(Constant(10)), newTermName("$minus")), silent = true)
+ // println(showRaw(wut, printIds = true, printTypes = true))
+ c.literalUnit
+ }
+
+ def foo = macro impl
+} \ No newline at end of file
diff --git a/test/files/pos/t7461/Test_2.scala b/test/files/pos/t7461/Test_2.scala
new file mode 100644
index 0000000000..3839659c9a
--- /dev/null
+++ b/test/files/pos/t7461/Test_2.scala
@@ -0,0 +1,3 @@
+class C {
+ def foo = Macros.foo
+} \ No newline at end of file
diff --git a/test/files/pos/t7505.scala b/test/files/pos/t7505.scala
new file mode 100644
index 0000000000..3e1e6ab8b4
--- /dev/null
+++ b/test/files/pos/t7505.scala
@@ -0,0 +1,16 @@
+import scala.language.reflectiveCalls
+
+case class ContextProperty(value: Any) {
+ type HasToInt = { def toInt:Int }
+
+ def toInt: Int = value match {
+ case n: HasToInt => n.toInt
+ }
+}
+
+// was:
+// error:7: error during expansion of this match (this is a scalac bug).
+// The underlying error was: type mismatch;
+// found : Boolean(true)
+// required: AnyRef
+// def toInt: Int = value match { \ No newline at end of file
diff --git a/test/files/pos/t7516/A_1.scala b/test/files/pos/t7516/A_1.scala
new file mode 100644
index 0000000000..3bba19966d
--- /dev/null
+++ b/test/files/pos/t7516/A_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect._,macros._, scala.language.experimental.macros
+
+object A {
+ def impl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[List[T]] = {
+ val r = c.universe.reify { List(t.splice) }
+ c.Expr[List[T]]( c.resetLocalAttrs(r.tree) )
+ }
+ def demo[T](t: T): List[T] = macro impl[T]
+}
diff --git a/test/files/pos/t7516/B_2.scala b/test/files/pos/t7516/B_2.scala
new file mode 100644
index 0000000000..1b8531bc85
--- /dev/null
+++ b/test/files/pos/t7516/B_2.scala
@@ -0,0 +1,4 @@
+object B {
+ final case class CV(p: Int = 3, g: Int = 2)
+ A.demo { val d = 4; CV(g = d); "a" }
+}
diff --git a/test/files/pos/t7517.scala b/test/files/pos/t7517.scala
new file mode 100644
index 0000000000..7ce4c6b13e
--- /dev/null
+++ b/test/files/pos/t7517.scala
@@ -0,0 +1,22 @@
+trait Box[ K[A[x]] ]
+
+object Box {
+ // type constructor composition
+ sealed trait ∙[A[_], B[_]] { type l[T] = A[B[T]] }
+
+ // composes type constructors inside K
+ type SplitBox[K[A[x]], B[x]] = Box[ ({ type l[A[x]] = K[ (A ∙ B)#l] })#l ]
+
+ def split[ K[A[x]], B[x] ](base: Box[K]): SplitBox[K,B] = ???
+
+ class Composed[B[_], L[A[x]] ] {
+ val box: Box[L] = ???
+
+ type Split[ A[x] ] = L[ (A ∙ B)#l ]
+ val a: Box[Split] = Box.split(box)
+
+ //Either of these work:
+ val a1: Box[Split] = Box.split[L,B](box)
+ val a2: Box[ ({ type l[A[x]] = L[ (A ∙ B)#l ] })#l ] = Box.split(box)
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t7520.scala b/test/files/pos/t7520.scala
new file mode 100644
index 0000000000..747f5278e5
--- /dev/null
+++ b/test/files/pos/t7520.scala
@@ -0,0 +1,10 @@
+class A {
+ val x: Singleton with this.type = this
+ val y: this.type = x
+}
+
+class B {
+ val x = ""
+ val xs: x.type with Singleton = x
+ val y: x.type = xs
+}
diff --git a/test/files/pos/t7532/A_1.java b/test/files/pos/t7532/A_1.java
new file mode 100644
index 0000000000..1ade76cc70
--- /dev/null
+++ b/test/files/pos/t7532/A_1.java
@@ -0,0 +1,6 @@
+class R {
+ public class attr { // Will have the bytecode name `R$attr`, not to be confused with `R@tr`!
+ }
+ public static class attr1 {
+ }
+}
diff --git a/test/files/pos/t7532/B_2.scala b/test/files/pos/t7532/B_2.scala
new file mode 100644
index 0000000000..ee7ce7751f
--- /dev/null
+++ b/test/files/pos/t7532/B_2.scala
@@ -0,0 +1,5 @@
+object Test {
+ val r = new R
+ new r.attr() // Was: error while loading attr, class file '.../t7532-pos.obj/R$attr.class' has location not matching its contents: contains class
+ new R.attr1
+} \ No newline at end of file
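The `R$attr` / `R@tr` confusion mentioned in the comments comes from Scala's name mangling: `@` encodes to `$at`, so the flat classfile name of the inner class `attr` decodes as if it named a class containing an `@`. A small self-contained illustration (the object name NameClashDemo is illustrative only):

object NameClashDemo extends App {
  import scala.reflect.NameTransformer

  println(NameTransformer.encode("@"))      // prints: $at
  // The inner class R.attr has the flat bytecode name R$attr, which
  // decodes as though it were a class literally named "R@tr".
  println(NameTransformer.decode("R$attr")) // prints: R@tr
}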
diff --git a/test/files/pos/t7532b/A_1.scala b/test/files/pos/t7532b/A_1.scala
new file mode 100644
index 0000000000..e8f9540609
--- /dev/null
+++ b/test/files/pos/t7532b/A_1.scala
@@ -0,0 +1,7 @@
+package pack
+class R {
+ class attr // Will have the bytecode name `R$attr`, not to be confused with `R@tr`!
+ class `@`
+}
+
+class `@` \ No newline at end of file
diff --git a/test/files/pos/t7532b/B_2.scala b/test/files/pos/t7532b/B_2.scala
new file mode 100644
index 0000000000..1555a5daa7
--- /dev/null
+++ b/test/files/pos/t7532b/B_2.scala
@@ -0,0 +1,8 @@
+import pack._
+
+object Test {
+ val r = new R
+ new r.attr()
+ new r.`@`
+ new `@`
+} \ No newline at end of file
diff --git a/test/files/pos/xlint1.flags b/test/files/pos/xlint1.flags
new file mode 100644
index 0000000000..7949c2afa2
--- /dev/null
+++ b/test/files/pos/xlint1.flags
@@ -0,0 +1 @@
+-Xlint -Xfatal-warnings
diff --git a/test/files/pos/xlint1.scala b/test/files/pos/xlint1.scala
new file mode 100644
index 0000000000..27936d8b14
--- /dev/null
+++ b/test/files/pos/xlint1.scala
@@ -0,0 +1,13 @@
+package object foo {
+ implicit class Bar[T](val x: T) extends AnyVal {
+ def bippy = 1
+ }
+}
+
+package foo {
+ object Baz {
+ def main(args: Array[String]): Unit = {
+ "abc".bippy
+ }
+ }
+}
diff --git a/test/files/presentation/callcc-interpreter.check b/test/files/presentation/callcc-interpreter.check
index af0154fe60..59c841a255 100644
--- a/test/files/presentation/callcc-interpreter.check
+++ b/test/files/presentation/callcc-interpreter.check
@@ -2,91 +2,91 @@ reload: CallccInterpreter.scala
askTypeCompletion at CallccInterpreter.scala(51,38)
================================================================================
-[response] aksTypeCompletion at (51,38)
+[response] askCompletionAt (51,38)
retrieved 63 members
-[accessible: true] `class AddcallccInterpreter.Add`
-[accessible: true] `class AppcallccInterpreter.App`
-[accessible: true] `class CcccallccInterpreter.Ccc`
-[accessible: true] `class ConcallccInterpreter.Con`
-[accessible: true] `class FuncallccInterpreter.Fun`
-[accessible: true] `class LamcallccInterpreter.Lam`
-[accessible: true] `class McallccInterpreter.M`
-[accessible: true] `class NumcallccInterpreter.Num`
-[accessible: true] `class VarcallccInterpreter.Var`
-[accessible: true] `method !=(x$1: Any)Boolean`
-[accessible: true] `method !=(x$1: AnyRef)Boolean`
-[accessible: true] `method ##()Int`
-[accessible: true] `method +(other: String)String`
-[accessible: true] `method ->[B](y: B)(callccInterpreter.type, B)`
-[accessible: true] `method ==(x$1: Any)Boolean`
-[accessible: true] `method ==(x$1: AnyRef)Boolean`
-[accessible: true] `method add(a: callccInterpreter.Value, b: callccInterpreter.Value)callccInterpreter.M[_ >: callccInterpreter.Num with callccInterpreter.Wrong.type <: Product with Serializable with callccInterpreter.Value]`
-[accessible: true] `method apply(a: callccInterpreter.Value, b: callccInterpreter.Value)callccInterpreter.M[callccInterpreter.Value]`
-[accessible: true] `method asInstanceOf[T0]=> T0`
-[accessible: true] `method callCC[A](h: (A => callccInterpreter.M[A]) => callccInterpreter.M[A])callccInterpreter.M[A]`
-[accessible: true] `method clone()Object`
-[accessible: true] `method ensuring(cond: Boolean)callccInterpreter.type`
-[accessible: true] `method ensuring(cond: Boolean, msg: => Any)callccInterpreter.type`
-[accessible: true] `method ensuring(cond: callccInterpreter.type => Boolean)callccInterpreter.type`
-[accessible: true] `method ensuring(cond: callccInterpreter.type => Boolean, msg: => Any)callccInterpreter.type`
-[accessible: true] `method eq(x$1: AnyRef)Boolean`
-[accessible: true] `method equals(x$1: Any)Boolean`
-[accessible: true] `method finalize()Unit`
-[accessible: true] `method formatted(fmtstr: String)String`
-[accessible: true] `method hashCode()Int`
-[accessible: true] `method id[A]=> A => A`
-[accessible: true] `method interp(t: callccInterpreter.Term, e: callccInterpreter.Environment)callccInterpreter.M[callccInterpreter.Value]`
-[accessible: true] `method isInstanceOf[T0]=> Boolean`
-[accessible: true] `method lookup(x: callccInterpreter.Name, e: callccInterpreter.Environment)callccInterpreter.M[callccInterpreter.Value]`
-[accessible: true] `method main(args: Array[String])Unit`
-[accessible: true] `method ne(x$1: AnyRef)Boolean`
-[accessible: true] `method notify()Unit`
-[accessible: true] `method notifyAll()Unit`
-[accessible: true] `method showM(m: callccInterpreter.M[callccInterpreter.Value])String`
-[accessible: true] `method synchronized[T0](x$1: T0)T0`
-[accessible: true] `method test(t: callccInterpreter.Term)String`
-[accessible: true] `method toString()String`
-[accessible: true] `method unitM[A](a: A)callccInterpreter.M[A]`
-[accessible: true] `method wait()Unit`
-[accessible: true] `method wait(x$1: Long)Unit`
-[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method →[B](y: B)(callccInterpreter.type, B)`
-[accessible: true] `object WrongcallccInterpreter.Wrong.type`
-[accessible: true] `trait TermcallccInterpreter.Term`
-[accessible: true] `trait ValuecallccInterpreter.Value`
-[accessible: true] `type AnswercallccInterpreter.Answer`
-[accessible: true] `type EnvironmentcallccInterpreter.Environment`
-[accessible: true] `type NamecallccInterpreter.Name`
-[accessible: true] `value __leftOfArrowcallccInterpreter.type`
-[accessible: true] `value __resultOfEnsuringcallccInterpreter.type`
-[accessible: true] `value __stringToFormatcallccInterpreter.type`
-[accessible: true] `value __thingToAddcallccInterpreter.type`
-[accessible: true] `value term0callccInterpreter.App`
-[accessible: true] `value term1callccInterpreter.App`
-[accessible: true] `value term2callccInterpreter.Add`
+abstract trait Term extends AnyRef
+abstract trait Value extends AnyRef
+case class Add extends callccInterpreter.Term with Product with Serializable
+case class App extends callccInterpreter.Term with Product with Serializable
+case class Ccc extends callccInterpreter.Term with Product with Serializable
+case class Con extends callccInterpreter.Term with Product with Serializable
+case class Fun extends callccInterpreter.Value with Product with Serializable
+case class Lam extends callccInterpreter.Term with Product with Serializable
+case class M[A] extends Product with Serializable
+case class Num extends callccInterpreter.Value with Product with Serializable
+case class Var extends callccInterpreter.Term with Product with Serializable
+case object Wrong
+def +(other: String): String
+def ->[B](y: B): (callccInterpreter.type, B)
+def add(a: callccInterpreter.Value,b: callccInterpreter.Value): callccInterpreter.M[_ >: callccInterpreter.Num with callccInterpreter.Wrong.type <: Product with Serializable with callccInterpreter.Value]
+def apply(a: callccInterpreter.Value,b: callccInterpreter.Value): callccInterpreter.M[callccInterpreter.Value]
+def callCC[A](h: (A => callccInterpreter.M[A]) => callccInterpreter.M[A]): callccInterpreter.M[A]
+def ensuring(cond: Boolean): callccInterpreter.type
+def ensuring(cond: Boolean,msg: => Any): callccInterpreter.type
+def ensuring(cond: callccInterpreter.type => Boolean): callccInterpreter.type
+def ensuring(cond: callccInterpreter.type => Boolean,msg: => Any): callccInterpreter.type
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def id[A]: A => A
+def interp(t: callccInterpreter.Term,e: callccInterpreter.Environment): callccInterpreter.M[callccInterpreter.Value]
+def lookup(x: callccInterpreter.Name,e: callccInterpreter.Environment): callccInterpreter.M[callccInterpreter.Value]
+def main(args: Array[String]): Unit
+def showM(m: callccInterpreter.M[callccInterpreter.Value]): String
+def test(t: callccInterpreter.Term): String
+def toString(): String
+def unitM[A](a: A): callccInterpreter.M[A]
+def →[B](y: B): (callccInterpreter.type, B)
+final def !=(x$1: Any): Boolean
+final def !=(x$1: AnyRef): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def ==(x$1: AnyRef): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+private[this] val __leftOfArrow: callccInterpreter.type
+private[this] val __resultOfEnsuring: callccInterpreter.type
+private[this] val __stringToFormat: callccInterpreter.type
+private[this] val __thingToAdd: callccInterpreter.type
+private[this] val term0: callccInterpreter.App
+private[this] val term1: callccInterpreter.App
+private[this] val term2: callccInterpreter.Add
+protected[package lang] def clone(): Object
+protected[package lang] def finalize(): Unit
+type Answer = callccInterpreter.Answer
+type Environment = callccInterpreter.Environment
+type Name = callccInterpreter.Name
================================================================================
askType at CallccInterpreter.scala(14,21)
================================================================================
-[response] askTypeAt at (14,21)
-def unitM[A >: Nothing <: Any](a: A): callccInterpreter.M[A] = callccInterpreter.this.M.apply[A](((c: A => callccInterpreter.Answer) => c.apply(a)))
+[response] askTypeAt (14,21)
+def unitM[A](a: A): callccInterpreter.M[A] = callccInterpreter.this.M.apply[A](((c: A => callccInterpreter.Answer) => c.apply(a)))
================================================================================
askType at CallccInterpreter.scala(16,12)
================================================================================
-[response] askTypeAt at (16,12)
-def id[A >: Nothing <: Any]: A => A = ((x: A) => x)
+[response] askTypeAt (16,12)
+def id[A]: A => A = ((x: A) => x)
================================================================================
askType at CallccInterpreter.scala(17,25)
================================================================================
-[response] askTypeAt at (17,25)
+[response] askTypeAt (17,25)
def showM(m: callccInterpreter.M[callccInterpreter.Value]): String = m.in.apply(callccInterpreter.this.id[callccInterpreter.Value]).toString()
================================================================================
askType at CallccInterpreter.scala(50,30)
================================================================================
-[response] askTypeAt at (50,30)
+[response] askTypeAt (50,30)
def add(a: callccInterpreter.Value, b: callccInterpreter.Value): callccInterpreter.M[_ >: callccInterpreter.Num with callccInterpreter.Wrong.type <: Product with Serializable with callccInterpreter.Value] = scala.this.Predef.Pair.apply[callccInterpreter.Value, callccInterpreter.Value](a, b) match {
case scala.this.Predef.Pair.unapply[callccInterpreter.Value, callccInterpreter.Value](<unapply-selector>) <unapply> ((n: Int)callccInterpreter.Num((m @ _)), (n: Int)callccInterpreter.Num((n @ _))) => this.unitM[callccInterpreter.Num](callccInterpreter.this.Num.apply(m.+(n)))
case _ => callccInterpreter.this.unitM[callccInterpreter.Wrong.type](callccInterpreter.this.Wrong)
diff --git a/test/files/presentation/doc/doc.scala b/test/files/presentation/doc/doc.scala
index 916b7832f4..c884b6425b 100755
--- a/test/files/presentation/doc/doc.scala
+++ b/test/files/presentation/doc/doc.scala
@@ -1,9 +1,9 @@
+import scala.reflect.internal.util.{ BatchSourceFile, SourceFile }
import scala.tools.nsc.doc
import scala.tools.nsc.doc.base._
import scala.tools.nsc.doc.base.comment._
import scala.tools.nsc.interactive._
import scala.tools.nsc.interactive.tests._
-import scala.tools.nsc.util._
object Test extends InteractiveTest {
val tags = Seq(
@@ -71,7 +71,7 @@ object Test extends InteractiveTest {
if (expanded.isEmpty)
None
else
- Some(ask { () => parseAtSymbol(expanded, raw, pos, Some(sym.owner)) })
+ Some(ask { () => parseAtSymbol(expanded, raw, pos, sym.owner) })
}
}
}
@@ -100,12 +100,11 @@ object Test extends InteractiveTest {
println("Couldn't parse")
case Some(_) =>
val sym = compiler.ask { () =>
- val toplevel = definitions.EmptyPackage.info.decl(newTypeName(name))
+ val toplevel = compiler.rootMirror.EmptyPackage.info.decl(TypeName(name))
if (toplevel eq NoSymbol) {
- val clazz = definitions.EmptyPackage.info.decl(newTypeName(className))
-
- val term = clazz.info.decl(newTermName(name))
- if (term eq NoSymbol) clazz.info.decl(newTypeName(name)) else
+ val clazz = compiler.rootMirror.EmptyPackage.info.decl(TypeName(className))
+ val term = clazz.info.decl(TermName(name))
+ if (term eq NoSymbol) clazz.info.decl(TypeName(name)) else
if (term.isAccessor) term.accessed else term
} else toplevel
}
@@ -133,7 +132,7 @@ object Test extends InteractiveTest {
case c: Comment => existsText(c.body, text)
}
val (derived, base) = compiler.ask { () =>
- val derived = definitions.RootPackage.info.decl(newTermName("p")).info.decl(newTypeName("Derived"))
+ val derived = compiler.rootMirror.RootPackage.info.decl(newTermName("p")).info.decl(newTypeName("Derived"))
(derived, derived.ancestors(0))
}
val cmt1 = getComment(derived, derivedSource, (base, baseSource)::(derived, derivedSource)::Nil)
diff --git a/test/files/presentation/ide-bug-1000349.check b/test/files/presentation/ide-bug-1000349.check
index 0040300083..b15486f4ac 100644
--- a/test/files/presentation/ide-bug-1000349.check
+++ b/test/files/presentation/ide-bug-1000349.check
@@ -2,39 +2,39 @@ reload: CompletionOnEmptyArgMethod.scala
askTypeCompletion at CompletionOnEmptyArgMethod.scala(2,17)
================================================================================
-[response] aksTypeCompletion at (2,17)
+[response] askCompletionAt (2,17)
retrieved 36 members
-[accessible: true] `method !=(x$1: Any)Boolean`
-[accessible: true] `method !=(x$1: AnyRef)Boolean`
-[accessible: true] `method ##()Int`
-[accessible: true] `method +(other: String)String`
-[accessible: true] `method ->[B](y: B)(Foo, B)`
-[accessible: true] `method ==(x$1: Any)Boolean`
-[accessible: true] `method ==(x$1: AnyRef)Boolean`
-[accessible: true] `method asInstanceOf[T0]=> T0`
-[accessible: true] `method clone()Object`
-[accessible: true] `method ensuring(cond: Boolean)Foo`
-[accessible: true] `method ensuring(cond: Boolean, msg: => Any)Foo`
-[accessible: true] `method ensuring(cond: Foo => Boolean)Foo`
-[accessible: true] `method ensuring(cond: Foo => Boolean, msg: => Any)Foo`
-[accessible: true] `method eq(x$1: AnyRef)Boolean`
-[accessible: true] `method equals(x$1: Any)Boolean`
-[accessible: true] `method finalize()Unit`
-[accessible: true] `method foo=> Foo`
-[accessible: true] `method formatted(fmtstr: String)String`
-[accessible: true] `method hashCode()Int`
-[accessible: true] `method isInstanceOf[T0]=> Boolean`
-[accessible: true] `method ne(x$1: AnyRef)Boolean`
-[accessible: true] `method notify()Unit`
-[accessible: true] `method notifyAll()Unit`
-[accessible: true] `method synchronized[T0](x$1: T0)T0`
-[accessible: true] `method toString()String`
-[accessible: true] `method wait()Unit`
-[accessible: true] `method wait(x$1: Long)Unit`
-[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method →[B](y: B)(Foo, B)`
-[accessible: true] `value __leftOfArrowFoo`
-[accessible: true] `value __resultOfEnsuringFoo`
-[accessible: true] `value __stringToFormatFoo`
-[accessible: true] `value __thingToAddFoo`
+def +(other: String): String
+def ->[B](y: B): (Foo, B)
+def ensuring(cond: Boolean): Foo
+def ensuring(cond: Boolean,msg: => Any): Foo
+def ensuring(cond: Foo => Boolean): Foo
+def ensuring(cond: Foo => Boolean,msg: => Any): Foo
+def equals(x$1: Any): Boolean
+def foo: Foo
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def toString(): String
+def →[B](y: B): (Foo, B)
+final def !=(x$1: Any): Boolean
+final def !=(x$1: AnyRef): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def ==(x$1: AnyRef): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+private[this] val __leftOfArrow: Foo
+private[this] val __resultOfEnsuring: Foo
+private[this] val __stringToFormat: Foo
+private[this] val __thingToAdd: Foo
+protected[package lang] def clone(): Object
+protected[package lang] def finalize(): Unit
================================================================================
diff --git a/test/files/presentation/ide-bug-1000475.check b/test/files/presentation/ide-bug-1000475.check
index 7866e4af15..e4b8508846 100644
--- a/test/files/presentation/ide-bug-1000475.check
+++ b/test/files/presentation/ide-bug-1000475.check
@@ -2,114 +2,114 @@ reload: Foo.scala
askTypeCompletion at Foo.scala(3,7)
================================================================================
-[response] aksTypeCompletion at (3,7)
+[response] askCompletionAt (3,7)
retrieved 35 members
-[accessible: true] `method !=(x$1: Any)Boolean`
-[accessible: true] `method !=(x$1: AnyRef)Boolean`
-[accessible: true] `method ##()Int`
-[accessible: true] `method +(other: String)String`
-[accessible: true] `method ->[B](y: B)(Object, B)`
-[accessible: true] `method ==(x$1: Any)Boolean`
-[accessible: true] `method ==(x$1: AnyRef)Boolean`
-[accessible: true] `method asInstanceOf[T0]=> T0`
-[accessible: true] `method ensuring(cond: Boolean)Object`
-[accessible: true] `method ensuring(cond: Boolean, msg: => Any)Object`
-[accessible: true] `method ensuring(cond: Object => Boolean)Object`
-[accessible: true] `method ensuring(cond: Object => Boolean, msg: => Any)Object`
-[accessible: true] `method eq(x$1: AnyRef)Boolean`
-[accessible: true] `method equals(x$1: Any)Boolean`
-[accessible: true] `method formatted(fmtstr: String)String`
-[accessible: true] `method hashCode()Int`
-[accessible: true] `method isInstanceOf[T0]=> Boolean`
-[accessible: true] `method ne(x$1: AnyRef)Boolean`
-[accessible: true] `method notify()Unit`
-[accessible: true] `method notifyAll()Unit`
-[accessible: true] `method synchronized[T0](x$1: T0)T0`
-[accessible: true] `method toString()String`
-[accessible: true] `method wait()Unit`
-[accessible: true] `method wait(x$1: Long)Unit`
-[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method →[B](y: B)(Object, B)`
-[accessible: true] `value __leftOfArrowObject`
-[accessible: true] `value __resultOfEnsuringObject`
-[accessible: true] `value __stringToFormatObject`
-[accessible: true] `value __thingToAddObject`
-[accessible: false] `method clone()Object`
-[accessible: false] `method finalize()Unit`
+[inaccessible] protected[package lang] def clone(): Object
+[inaccessible] protected[package lang] def finalize(): Unit
+def +(other: String): String
+def ->[B](y: B): (Object, B)
+def ensuring(cond: Boolean): Object
+def ensuring(cond: Boolean,msg: => Any): Object
+def ensuring(cond: Object => Boolean): Object
+def ensuring(cond: Object => Boolean,msg: => Any): Object
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def toString(): String
+def →[B](y: B): (Object, B)
+final def !=(x$1: Any): Boolean
+final def !=(x$1: AnyRef): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def ==(x$1: AnyRef): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+private[this] val __leftOfArrow: Object
+private[this] val __resultOfEnsuring: Object
+private[this] val __stringToFormat: Object
+private[this] val __thingToAdd: Object
================================================================================
askTypeCompletion at Foo.scala(6,10)
================================================================================
-[response] aksTypeCompletion at (6,10)
+[response] askCompletionAt (6,10)
retrieved 35 members
-[accessible: true] `method !=(x$1: Any)Boolean`
-[accessible: true] `method !=(x$1: AnyRef)Boolean`
-[accessible: true] `method ##()Int`
-[accessible: true] `method +(other: String)String`
-[accessible: true] `method ->[B](y: B)(Object, B)`
-[accessible: true] `method ==(x$1: Any)Boolean`
-[accessible: true] `method ==(x$1: AnyRef)Boolean`
-[accessible: true] `method asInstanceOf[T0]=> T0`
-[accessible: true] `method ensuring(cond: Boolean)Object`
-[accessible: true] `method ensuring(cond: Boolean, msg: => Any)Object`
-[accessible: true] `method ensuring(cond: Object => Boolean)Object`
-[accessible: true] `method ensuring(cond: Object => Boolean, msg: => Any)Object`
-[accessible: true] `method eq(x$1: AnyRef)Boolean`
-[accessible: true] `method equals(x$1: Any)Boolean`
-[accessible: true] `method formatted(fmtstr: String)String`
-[accessible: true] `method hashCode()Int`
-[accessible: true] `method isInstanceOf[T0]=> Boolean`
-[accessible: true] `method ne(x$1: AnyRef)Boolean`
-[accessible: true] `method notify()Unit`
-[accessible: true] `method notifyAll()Unit`
-[accessible: true] `method synchronized[T0](x$1: T0)T0`
-[accessible: true] `method toString()String`
-[accessible: true] `method wait()Unit`
-[accessible: true] `method wait(x$1: Long)Unit`
-[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method →[B](y: B)(Object, B)`
-[accessible: true] `value __leftOfArrowObject`
-[accessible: true] `value __resultOfEnsuringObject`
-[accessible: true] `value __stringToFormatObject`
-[accessible: true] `value __thingToAddObject`
-[accessible: false] `method clone()Object`
-[accessible: false] `method finalize()Unit`
+[inaccessible] protected[package lang] def clone(): Object
+[inaccessible] protected[package lang] def finalize(): Unit
+def +(other: String): String
+def ->[B](y: B): (Object, B)
+def ensuring(cond: Boolean): Object
+def ensuring(cond: Boolean,msg: => Any): Object
+def ensuring(cond: Object => Boolean): Object
+def ensuring(cond: Object => Boolean,msg: => Any): Object
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def toString(): String
+def →[B](y: B): (Object, B)
+final def !=(x$1: Any): Boolean
+final def !=(x$1: AnyRef): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def ==(x$1: AnyRef): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+private[this] val __leftOfArrow: Object
+private[this] val __resultOfEnsuring: Object
+private[this] val __stringToFormat: Object
+private[this] val __thingToAdd: Object
================================================================================
askTypeCompletion at Foo.scala(7,7)
================================================================================
-[response] aksTypeCompletion at (7,7)
+[response] askCompletionAt (7,7)
retrieved 35 members
-[accessible: true] `method !=(x$1: Any)Boolean`
-[accessible: true] `method !=(x$1: AnyRef)Boolean`
-[accessible: true] `method ##()Int`
-[accessible: true] `method +(other: String)String`
-[accessible: true] `method ->[B](y: B)(Object, B)`
-[accessible: true] `method ==(x$1: Any)Boolean`
-[accessible: true] `method ==(x$1: AnyRef)Boolean`
-[accessible: true] `method asInstanceOf[T0]=> T0`
-[accessible: true] `method ensuring(cond: Boolean)Object`
-[accessible: true] `method ensuring(cond: Boolean, msg: => Any)Object`
-[accessible: true] `method ensuring(cond: Object => Boolean)Object`
-[accessible: true] `method ensuring(cond: Object => Boolean, msg: => Any)Object`
-[accessible: true] `method eq(x$1: AnyRef)Boolean`
-[accessible: true] `method equals(x$1: Any)Boolean`
-[accessible: true] `method formatted(fmtstr: String)String`
-[accessible: true] `method hashCode()Int`
-[accessible: true] `method isInstanceOf[T0]=> Boolean`
-[accessible: true] `method ne(x$1: AnyRef)Boolean`
-[accessible: true] `method notify()Unit`
-[accessible: true] `method notifyAll()Unit`
-[accessible: true] `method synchronized[T0](x$1: T0)T0`
-[accessible: true] `method toString()String`
-[accessible: true] `method wait()Unit`
-[accessible: true] `method wait(x$1: Long)Unit`
-[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method →[B](y: B)(Object, B)`
-[accessible: true] `value __leftOfArrowObject`
-[accessible: true] `value __resultOfEnsuringObject`
-[accessible: true] `value __stringToFormatObject`
-[accessible: true] `value __thingToAddObject`
-[accessible: false] `method clone()Object`
-[accessible: false] `method finalize()Unit`
+[inaccessible] protected[package lang] def clone(): Object
+[inaccessible] protected[package lang] def finalize(): Unit
+def +(other: String): String
+def ->[B](y: B): (Object, B)
+def ensuring(cond: Boolean): Object
+def ensuring(cond: Boolean,msg: => Any): Object
+def ensuring(cond: Object => Boolean): Object
+def ensuring(cond: Object => Boolean,msg: => Any): Object
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def toString(): String
+def →[B](y: B): (Object, B)
+final def !=(x$1: Any): Boolean
+final def !=(x$1: AnyRef): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def ==(x$1: AnyRef): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+private[this] val __leftOfArrow: Object
+private[this] val __resultOfEnsuring: Object
+private[this] val __stringToFormat: Object
+private[this] val __thingToAdd: Object
================================================================================
diff --git a/test/files/presentation/ide-bug-1000531.check b/test/files/presentation/ide-bug-1000531.check
index 18ecd4b536..be8805330a 100644
--- a/test/files/presentation/ide-bug-1000531.check
+++ b/test/files/presentation/ide-bug-1000531.check
@@ -2,127 +2,127 @@ reload: CrashOnLoad.scala
askTypeCompletion at CrashOnLoad.scala(6,12)
================================================================================
-[response] aksTypeCompletion at (6,12)
+[response] askCompletionAt (6,12)
retrieved 124 members
-[accessible: true] `class GroupedIteratorIterator[B]#GroupedIterator`
-[accessible: true] `method !=(x$1: Any)Boolean`
-[accessible: true] `method !=(x$1: AnyRef)Boolean`
-[accessible: true] `method ##()Int`
-[accessible: true] `method +(other: String)String`
-[accessible: true] `method ++[B >: B](that: => scala.collection.GenTraversableOnce[B])Iterator[B]`
-[accessible: true] `method ->[B](y: B)(java.util.Iterator[B], B)`
-[accessible: true] `method /:[B](z: B)(op: (B, B) => B)B`
-[accessible: true] `method :\[B](z: B)(op: (B, B) => B)B`
-[accessible: true] `method ==(x$1: Any)Boolean`
-[accessible: true] `method ==(x$1: AnyRef)Boolean`
-[accessible: true] `method addString(b: StringBuilder)StringBuilder`
-[accessible: true] `method addString(b: StringBuilder, sep: String)StringBuilder`
-[accessible: true] `method addString(b: StringBuilder, start: String, sep: String, end: String)StringBuilder`
-[accessible: true] `method aggregate[B](z: => B)(seqop: (B, B) => B, combop: (B, B) => B)B`
-[accessible: true] `method asInstanceOf[T0]=> T0`
-[accessible: true] `method buffered=> scala.collection.BufferedIterator[B]`
-[accessible: true] `method collectFirst[B](pf: PartialFunction[B,B])Option[B]`
-[accessible: true] `method collect[B](pf: PartialFunction[B,B])Iterator[B]`
-[accessible: true] `method contains(elem: Any)Boolean`
-[accessible: true] `method copyToArray[B >: B](xs: Array[B])Unit`
-[accessible: true] `method copyToArray[B >: B](xs: Array[B], start: Int)Unit`
-[accessible: true] `method copyToArray[B >: B](xs: Array[B], start: Int, len: Int)Unit`
-[accessible: true] `method copyToBuffer[B >: B](dest: scala.collection.mutable.Buffer[B])Unit`
-[accessible: true] `method corresponds[B](that: scala.collection.GenTraversableOnce[B])(p: (B, B) => Boolean)Boolean`
-[accessible: true] `method count(p: B => Boolean)Int`
-[accessible: true] `method drop(n: Int)Iterator[B]`
-[accessible: true] `method dropWhile(p: B => Boolean)Iterator[B]`
-[accessible: true] `method duplicate=> (Iterator[B], Iterator[B])`
-[accessible: true] `method ensuring(cond: Boolean)java.util.Iterator[B]`
-[accessible: true] `method ensuring(cond: Boolean, msg: => Any)java.util.Iterator[B]`
-[accessible: true] `method ensuring(cond: java.util.Iterator[B] => Boolean)java.util.Iterator[B]`
-[accessible: true] `method ensuring(cond: java.util.Iterator[B] => Boolean, msg: => Any)java.util.Iterator[B]`
-[accessible: true] `method eq(x$1: AnyRef)Boolean`
-[accessible: true] `method equals(x$1: Any)Boolean`
-[accessible: true] `method exists(p: B => Boolean)Boolean`
-[accessible: true] `method filter(p: B => Boolean)Iterator[B]`
-[accessible: true] `method filterNot(p: B => Boolean)Iterator[B]`
-[accessible: true] `method find(p: B => Boolean)Option[B]`
-[accessible: true] `method flatMap[B](f: B => scala.collection.GenTraversableOnce[B])Iterator[B]`
-[accessible: true] `method foldLeft[B](z: B)(op: (B, B) => B)B`
-[accessible: true] `method foldRight[B](z: B)(op: (B, B) => B)B`
-[accessible: true] `method fold[A1 >: B](z: A1)(op: (A1, A1) => A1)A1`
-[accessible: true] `method forall(p: B => Boolean)Boolean`
-[accessible: true] `method foreach[U](f: B => U)Unit`
-[accessible: true] `method formatted(fmtstr: String)String`
-[accessible: true] `method grouped[B >: B](size: Int)Iterator[B]#GroupedIterator[B]`
-[accessible: true] `method hasDefiniteSize=> Boolean`
-[accessible: true] `method hasNext()Boolean`
-[accessible: true] `method hashCode()Int`
-[accessible: true] `method indexOf[B >: B](elem: B)Int`
-[accessible: true] `method indexWhere(p: B => Boolean)Int`
-[accessible: true] `method isEmpty=> Boolean`
-[accessible: true] `method isInstanceOf[T0]=> Boolean`
-[accessible: true] `method isTraversableAgain=> Boolean`
-[accessible: true] `method length=> Int`
-[accessible: true] `method map[B](f: B => B)Iterator[B]`
-[accessible: true] `method maxBy[B](f: B => B)(implicit cmp: Ordering[B])B`
-[accessible: true] `method max[B >: B](implicit cmp: Ordering[B])B`
-[accessible: true] `method minBy[B](f: B => B)(implicit cmp: Ordering[B])B`
-[accessible: true] `method min[B >: B](implicit cmp: Ordering[B])B`
-[accessible: true] `method mkString(sep: String)String`
-[accessible: true] `method mkString(start: String, sep: String, end: String)String`
-[accessible: true] `method mkString=> String`
-[accessible: true] `method ne(x$1: AnyRef)Boolean`
-[accessible: true] `method next()B`
-[accessible: true] `method nonEmpty=> Boolean`
-[accessible: true] `method notify()Unit`
-[accessible: true] `method notifyAll()Unit`
-[accessible: true] `method padTo[A1 >: B](len: Int, elem: A1)Iterator[A1]`
-[accessible: true] `method partition(p: B => Boolean)(Iterator[B], Iterator[B])`
-[accessible: true] `method patch[B >: B](from: Int, patchElems: Iterator[B], replaced: Int)Iterator[B]`
-[accessible: true] `method product[B >: B](implicit num: Numeric[B])B`
-[accessible: true] `method reduceLeftOption[B >: B](op: (B, B) => B)Option[B]`
-[accessible: true] `method reduceLeft[B >: B](op: (B, B) => B)B`
-[accessible: true] `method reduceOption[A1 >: B](op: (A1, A1) => A1)Option[A1]`
-[accessible: true] `method reduceRightOption[B >: B](op: (B, B) => B)Option[B]`
-[accessible: true] `method reduceRight[B >: B](op: (B, B) => B)B`
-[accessible: true] `method reduce[A1 >: B](op: (A1, A1) => A1)A1`
-[accessible: true] `method remove()Unit`
-[accessible: true] `method sameElements(that: Iterator[_])Boolean`
-[accessible: true] `method scanLeft[B](z: B)(op: (B, B) => B)Iterator[B]`
-[accessible: true] `method scanRight[B](z: B)(op: (B, B) => B)Iterator[B]`
-[accessible: true] `method seq=> Iterator[B]`
-[accessible: true] `method size=> Int`
-[accessible: true] `method slice(from: Int, until: Int)Iterator[B]`
-[accessible: true] `method sliding[B >: B](size: Int, step: Int)Iterator[B]#GroupedIterator[B]`
-[accessible: true] `method span(p: B => Boolean)(Iterator[B], Iterator[B])`
-[accessible: true] `method sum[B >: B](implicit num: Numeric[B])B`
-[accessible: true] `method synchronized[T0](x$1: T0)T0`
-[accessible: true] `method take(n: Int)Iterator[B]`
-[accessible: true] `method takeWhile(p: B => Boolean)Iterator[B]`
-[accessible: true] `method toArray[B >: B](implicit evidence$1: scala.reflect.ClassTag[B])Array[B]`
-[accessible: true] `method toBuffer[B >: B]=> scala.collection.mutable.Buffer[B]`
-[accessible: true] `method toIndexedSeq=> scala.collection.immutable.IndexedSeq[B]`
-[accessible: true] `method toIterable=> Iterable[B]`
-[accessible: true] `method toIterator=> Iterator[B]`
-[accessible: true] `method toList=> List[B]`
-[accessible: true] `method toMap[T, U](implicit ev: <:<[B,(T, U)])scala.collection.immutable.Map[T,U]`
-[accessible: true] `method toSeq=> Seq[B]`
-[accessible: true] `method toSet[B >: B]=> scala.collection.immutable.Set[B]`
-[accessible: true] `method toStream=> scala.collection.immutable.Stream[B]`
-[accessible: true] `method toString()String`
-[accessible: true] `method toTraversable=> Traversable[B]`
-[accessible: true] `method toVector=> Vector[B]`
-[accessible: true] `method to[Col[_]](implicit cbf: scala.collection.generic.CanBuildFrom[Nothing,B,Col[B]])Col[B]`
-[accessible: true] `method wait()Unit`
-[accessible: true] `method wait(x$1: Long)Unit`
-[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method withFilter(p: B => Boolean)Iterator[B]`
-[accessible: true] `method zipAll[B, A1 >: B, B1 >: B](that: Iterator[B], thisElem: A1, thatElem: B1)Iterator[(A1, B1)]`
-[accessible: true] `method zipWithIndex=> Iterator[(B, Int)]`
-[accessible: true] `method zip[B](that: Iterator[B])Iterator[(B, B)]`
-[accessible: true] `method →[B](y: B)(java.util.Iterator[B], B)`
-[accessible: true] `value __leftOfArrowjava.util.Iterator[B]`
-[accessible: true] `value __resultOfEnsuringjava.util.Iterator[B]`
-[accessible: true] `value __stringToFormatjava.util.Iterator[B]`
-[accessible: true] `value __thingToAddjava.util.Iterator[B]`
-[accessible: false] `method clone()Object`
-[accessible: false] `method finalize()Unit`
-[accessible: false] `method reversed=> List[B]`
+[inaccessible] protected[package lang] def clone(): Object
+[inaccessible] protected[package lang] def finalize(): Unit
+[inaccessible] protected[this] def reversed: List[B]
+class GroupedIterator[B >: A] extends AbstractIterator[Seq[B]] with Iterator[Seq[B]]
+def +(other: String): String
+def ++[B >: B](that: => scala.collection.GenTraversableOnce[B]): Iterator[B]
+def ->[B](y: B): (java.util.Iterator[B], B)
+def /:[B](z: B)(op: (B, B) => B): B
+def :\[B](z: B)(op: (B, B) => B): B
+def addString(b: StringBuilder): StringBuilder
+def addString(b: StringBuilder,sep: String): StringBuilder
+def addString(b: StringBuilder,start: String,sep: String,end: String): StringBuilder
+def aggregate[B](z: => B)(seqop: (B, B) => B,combop: (B, B) => B): B
+def buffered: scala.collection.BufferedIterator[B]
+def collectFirst[B](pf: PartialFunction[B,B]): Option[B]
+def collect[B](pf: PartialFunction[B,B]): Iterator[B]
+def contains(elem: Any): Boolean
+def copyToArray[B >: B](xs: Array[B]): Unit
+def copyToArray[B >: B](xs: Array[B],start: Int): Unit
+def copyToArray[B >: B](xs: Array[B],start: Int,len: Int): Unit
+def copyToBuffer[B >: B](dest: scala.collection.mutable.Buffer[B]): Unit
+def corresponds[B](that: scala.collection.GenTraversableOnce[B])(p: (B, B) => Boolean): Boolean
+def count(p: B => Boolean): Int
+def drop(n: Int): Iterator[B]
+def dropWhile(p: B => Boolean): Iterator[B]
+def duplicate: (Iterator[B], Iterator[B])
+def ensuring(cond: Boolean): java.util.Iterator[B]
+def ensuring(cond: Boolean,msg: => Any): java.util.Iterator[B]
+def ensuring(cond: java.util.Iterator[B] => Boolean): java.util.Iterator[B]
+def ensuring(cond: java.util.Iterator[B] => Boolean,msg: => Any): java.util.Iterator[B]
+def equals(x$1: Any): Boolean
+def exists(p: B => Boolean): Boolean
+def filter(p: B => Boolean): Iterator[B]
+def filterNot(p: B => Boolean): Iterator[B]
+def find(p: B => Boolean): Option[B]
+def flatMap[B](f: B => scala.collection.GenTraversableOnce[B]): Iterator[B]
+def foldLeft[B](z: B)(op: (B, B) => B): B
+def foldRight[B](z: B)(op: (B, B) => B): B
+def fold[A1 >: B](z: A1)(op: (A1, A1) => A1): A1
+def forall(p: B => Boolean): Boolean
+def foreach[U](f: B => U): Unit
+def formatted(fmtstr: String): String
+def grouped[B >: B](size: Int): Iterator[B]#GroupedIterator[B]
+def hasDefiniteSize: Boolean
+def hasNext(): Boolean
+def hashCode(): Int
+def indexOf[B >: B](elem: B): Int
+def indexWhere(p: B => Boolean): Int
+def isEmpty: Boolean
+def isTraversableAgain: Boolean
+def length: Int
+def map[B](f: B => B): Iterator[B]
+def maxBy[B](f: B => B)(implicit cmp: Ordering[B]): B
+def max[B >: B](implicit cmp: Ordering[B]): B
+def minBy[B](f: B => B)(implicit cmp: Ordering[B]): B
+def min[B >: B](implicit cmp: Ordering[B]): B
+def mkString(sep: String): String
+def mkString(start: String,sep: String,end: String): String
+def mkString: String
+def next(): B
+def nonEmpty: Boolean
+def padTo[A1 >: B](len: Int,elem: A1): Iterator[A1]
+def partition(p: B => Boolean): (Iterator[B], Iterator[B])
+def patch[B >: B](from: Int,patchElems: Iterator[B],replaced: Int): Iterator[B]
+def product[B >: B](implicit num: Numeric[B]): B
+def reduceLeftOption[B >: B](op: (B, B) => B): Option[B]
+def reduceLeft[B >: B](op: (B, B) => B): B
+def reduceOption[A1 >: B](op: (A1, A1) => A1): Option[A1]
+def reduceRightOption[B >: B](op: (B, B) => B): Option[B]
+def reduceRight[B >: B](op: (B, B) => B): B
+def reduce[A1 >: B](op: (A1, A1) => A1): A1
+def remove(): Unit
+def sameElements(that: Iterator[_]): Boolean
+def scanLeft[B](z: B)(op: (B, B) => B): Iterator[B]
+def scanRight[B](z: B)(op: (B, B) => B): Iterator[B]
+def seq: Iterator[B]
+def size: Int
+def slice(from: Int,until: Int): Iterator[B]
+def sliding[B >: B](size: Int,step: Int): Iterator[B]#GroupedIterator[B]
+def span(p: B => Boolean): (Iterator[B], Iterator[B])
+def sum[B >: B](implicit num: Numeric[B]): B
+def take(n: Int): Iterator[B]
+def takeWhile(p: B => Boolean): Iterator[B]
+def toArray[B >: B](implicit evidence$1: scala.reflect.ClassTag[B]): Array[B]
+def toBuffer[B >: B]: scala.collection.mutable.Buffer[B]
+def toIndexedSeq: scala.collection.immutable.IndexedSeq[B]
+def toIterable: Iterable[B]
+def toIterator: Iterator[B]
+def toList: List[B]
+def toMap[T, U](implicit ev: <:<[B,(T, U)]): scala.collection.immutable.Map[T,U]
+def toSeq: Seq[B]
+def toSet[B >: B]: scala.collection.immutable.Set[B]
+def toStream: scala.collection.immutable.Stream[B]
+def toString(): String
+def toTraversable: Traversable[B]
+def toVector: Vector[B]
+def to[Col[_]](implicit cbf: scala.collection.generic.CanBuildFrom[Nothing,B,Col[B]]): Col[B]
+def withFilter(p: B => Boolean): Iterator[B]
+def zipAll[B, A1 >: B, B1 >: B](that: Iterator[B],thisElem: A1,thatElem: B1): Iterator[(A1, B1)]
+def zipWithIndex: Iterator[(B, Int)]
+def zip[B](that: Iterator[B]): Iterator[(B, B)]
+def →[B](y: B): (java.util.Iterator[B], B)
+final def !=(x$1: Any): Boolean
+final def !=(x$1: AnyRef): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def ==(x$1: AnyRef): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+private[this] val __leftOfArrow: java.util.Iterator[B]
+private[this] val __resultOfEnsuring: java.util.Iterator[B]
+private[this] val __stringToFormat: java.util.Iterator[B]
+private[this] val __thingToAdd: java.util.Iterator[B]
================================================================================
diff --git a/test/files/presentation/implicit-member.check b/test/files/presentation/implicit-member.check
index 6a23facc78..acf7e1a0fd 100644
--- a/test/files/presentation/implicit-member.check
+++ b/test/files/presentation/implicit-member.check
@@ -2,41 +2,41 @@ reload: ImplicitMember.scala
askTypeCompletion at ImplicitMember.scala(7,7)
================================================================================
-[response] aksTypeCompletion at (7,7)
+[response] askCompletionAt (7,7)
retrieved 38 members
-[accessible: true] `class AppliedImplicitImplicit.AppliedImplicit`
-[accessible: true] `method !=(x$1: Any)Boolean`
-[accessible: true] `method !=(x$1: AnyRef)Boolean`
-[accessible: true] `method ##()Int`
-[accessible: true] `method +(other: String)String`
-[accessible: true] `method ->[B](y: B)(Implicit.type, B)`
-[accessible: true] `method ==(x$1: Any)Boolean`
-[accessible: true] `method ==(x$1: AnyRef)Boolean`
-[accessible: true] `method AppliedImplicit[A](x: A)Implicit.AppliedImplicit[A]`
-[accessible: true] `method asInstanceOf[T0]=> T0`
-[accessible: true] `method clone()Object`
-[accessible: true] `method ensuring(cond: Boolean)Implicit.type`
-[accessible: true] `method ensuring(cond: Boolean, msg: => Any)Implicit.type`
-[accessible: true] `method ensuring(cond: Implicit.type => Boolean)Implicit.type`
-[accessible: true] `method ensuring(cond: Implicit.type => Boolean, msg: => Any)Implicit.type`
-[accessible: true] `method eq(x$1: AnyRef)Boolean`
-[accessible: true] `method equals(x$1: Any)Boolean`
-[accessible: true] `method finalize()Unit`
-[accessible: true] `method formatted(fmtstr: String)String`
-[accessible: true] `method hashCode()Int`
-[accessible: true] `method isInstanceOf[T0]=> Boolean`
-[accessible: true] `method ne(x$1: AnyRef)Boolean`
-[accessible: true] `method notify()Unit`
-[accessible: true] `method notifyAll()Unit`
-[accessible: true] `method synchronized[T0](x$1: T0)T0`
-[accessible: true] `method toString()String`
-[accessible: true] `method wait()Unit`
-[accessible: true] `method wait(x$1: Long)Unit`
-[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method →[B](y: B)(Implicit.type, B)`
-[accessible: true] `value __leftOfArrowImplicit.type`
-[accessible: true] `value __resultOfEnsuringImplicit.type`
-[accessible: true] `value __stringToFormatImplicit.type`
-[accessible: true] `value __thingToAddImplicit.type`
-[accessible: true] `value xImplicit.type`
+def +(other: String): String
+def ->[B](y: B): (Implicit.type, B)
+def ensuring(cond: Boolean): Implicit.type
+def ensuring(cond: Boolean,msg: => Any): Implicit.type
+def ensuring(cond: Implicit.type => Boolean): Implicit.type
+def ensuring(cond: Implicit.type => Boolean,msg: => Any): Implicit.type
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def toString(): String
+def →[B](y: B): (Implicit.type, B)
+final class AppliedImplicit[A] extends AnyRef
+final def !=(x$1: Any): Boolean
+final def !=(x$1: AnyRef): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def ==(x$1: AnyRef): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+implicit def AppliedImplicit[A](x: A): Implicit.AppliedImplicit[A]
+private[this] val __leftOfArrow: Implicit.type
+private[this] val __resultOfEnsuring: Implicit.type
+private[this] val __stringToFormat: Implicit.type
+private[this] val __thingToAdd: Implicit.type
+private[this] val x: Implicit.type
+protected[package lang] def clone(): Object
+protected[package lang] def finalize(): Unit
================================================================================
diff --git a/test/files/presentation/memory-leaks/MemoryLeaksTest.scala b/test/files/presentation/memory-leaks/MemoryLeaksTest.scala
index 950569c880..1ddeb6ac4a 100644
--- a/test/files/presentation/memory-leaks/MemoryLeaksTest.scala
+++ b/test/files/presentation/memory-leaks/MemoryLeaksTest.scala
@@ -2,8 +2,8 @@ import java.io.PrintWriter
import java.io.FileOutputStream
import java.util.Calendar
+import scala.reflect.internal.util.BatchSourceFile
import scala.tools.nsc.interactive.tests._
-import scala.tools.nsc.util._
import scala.tools.nsc.io._
import scala.tools.nsc.doc
diff --git a/test/files/presentation/ping-pong.check b/test/files/presentation/ping-pong.check
index c7a5d0b5d1..be80601e11 100644
--- a/test/files/presentation/ping-pong.check
+++ b/test/files/presentation/ping-pong.check
@@ -2,101 +2,101 @@ reload: PingPong.scala
askTypeCompletion at PingPong.scala(10,23)
================================================================================
-[response] aksTypeCompletion at (10,23)
+[response] askCompletionAt (10,23)
retrieved 39 members
-[accessible: true] `method !=(x$1: Any)Boolean`
-[accessible: true] `method !=(x$1: AnyRef)Boolean`
-[accessible: true] `method ##()Int`
-[accessible: true] `method +(other: String)String`
-[accessible: true] `method ->[B](y: B)(Pong, B)`
-[accessible: true] `method ==(x$1: Any)Boolean`
-[accessible: true] `method ==(x$1: AnyRef)Boolean`
-[accessible: true] `method asInstanceOf[T0]=> T0`
-[accessible: true] `method ensuring(cond: Boolean)Pong`
-[accessible: true] `method ensuring(cond: Boolean, msg: => Any)Pong`
-[accessible: true] `method ensuring(cond: Pong => Boolean)Pong`
-[accessible: true] `method ensuring(cond: Pong => Boolean, msg: => Any)Pong`
-[accessible: true] `method eq(x$1: AnyRef)Boolean`
-[accessible: true] `method equals(x$1: Any)Boolean`
-[accessible: true] `method formatted(fmtstr: String)String`
-[accessible: true] `method hashCode()Int`
-[accessible: true] `method isInstanceOf[T0]=> Boolean`
-[accessible: true] `method ne(x$1: AnyRef)Boolean`
-[accessible: true] `method notify()Unit`
-[accessible: true] `method notifyAll()Unit`
-[accessible: true] `method poke()Unit`
-[accessible: true] `method synchronized[T0](x$1: T0)T0`
-[accessible: true] `method toString()String`
-[accessible: true] `method wait()Unit`
-[accessible: true] `method wait(x$1: Long)Unit`
-[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method →[B](y: B)(Pong, B)`
-[accessible: true] `value __leftOfArrowPong`
-[accessible: true] `value __resultOfEnsuringPong`
-[accessible: true] `value __stringToFormatPong`
-[accessible: true] `value __thingToAddPong`
-[accessible: true] `value nameString`
-[accessible: false] `method clone()Object`
-[accessible: false] `method finalize()Unit`
-[accessible: false] `value pingPing`
+[inaccessible] private[this] val ping: Ping
+[inaccessible] protected[package lang] def clone(): Object
+[inaccessible] protected[package lang] def finalize(): Unit
+def +(other: String): String
+def ->[B](y: B): (Pong, B)
+def ensuring(cond: Boolean): Pong
+def ensuring(cond: Boolean,msg: => Any): Pong
+def ensuring(cond: Pong => Boolean): Pong
+def ensuring(cond: Pong => Boolean,msg: => Any): Pong
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def poke(): Unit
+def →[B](y: B): (Pong, B)
+final def !=(x$1: Any): Boolean
+final def !=(x$1: AnyRef): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def ==(x$1: AnyRef): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+override def toString(): String
+private[this] val __leftOfArrow: Pong
+private[this] val __resultOfEnsuring: Pong
+private[this] val __stringToFormat: Pong
+private[this] val __thingToAdd: Pong
+private[this] val name: String
================================================================================
askTypeCompletion at PingPong.scala(19,20)
================================================================================
-[response] aksTypeCompletion at (19,20)
+[response] askCompletionAt (19,20)
retrieved 39 members
-[accessible: true] `method !=(x$1: Any)Boolean`
-[accessible: true] `method !=(x$1: AnyRef)Boolean`
-[accessible: true] `method ##()Int`
-[accessible: true] `method +(other: String)String`
-[accessible: true] `method ->[B](y: B)(Ping, B)`
-[accessible: true] `method ==(x$1: Any)Boolean`
-[accessible: true] `method ==(x$1: AnyRef)Boolean`
-[accessible: true] `method asInstanceOf[T0]=> T0`
-[accessible: true] `method ensuring(cond: Boolean)Ping`
-[accessible: true] `method ensuring(cond: Boolean, msg: => Any)Ping`
-[accessible: true] `method ensuring(cond: Ping => Boolean)Ping`
-[accessible: true] `method ensuring(cond: Ping => Boolean, msg: => Any)Ping`
-[accessible: true] `method eq(x$1: AnyRef)Boolean`
-[accessible: true] `method equals(x$1: Any)Boolean`
-[accessible: true] `method formatted(fmtstr: String)String`
-[accessible: true] `method hashCode()Int`
-[accessible: true] `method isInstanceOf[T0]=> Boolean`
-[accessible: true] `method loop=> Unit`
-[accessible: true] `method name=> String`
-[accessible: true] `method ne(x$1: AnyRef)Boolean`
-[accessible: true] `method notify()Unit`
-[accessible: true] `method notifyAll()Unit`
-[accessible: true] `method poke=> Unit`
-[accessible: true] `method synchronized[T0](x$1: T0)T0`
-[accessible: true] `method toString()String`
-[accessible: true] `method wait()Unit`
-[accessible: true] `method wait(x$1: Long)Unit`
-[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method →[B](y: B)(Ping, B)`
-[accessible: true] `value __leftOfArrowPing`
-[accessible: true] `value __resultOfEnsuringPing`
-[accessible: true] `value __stringToFormatPing`
-[accessible: true] `value __thingToAddPing`
-[accessible: true] `value pongPong`
-[accessible: false] `method clone()Object`
-[accessible: false] `method finalize()Unit`
+[inaccessible] protected[package lang] def clone(): Object
+[inaccessible] protected[package lang] def finalize(): Unit
+def +(other: String): String
+def ->[B](y: B): (Ping, B)
+def ensuring(cond: Boolean): Ping
+def ensuring(cond: Boolean,msg: => Any): Ping
+def ensuring(cond: Ping => Boolean): Ping
+def ensuring(cond: Ping => Boolean,msg: => Any): Ping
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def loop: Unit
+def name: String
+def poke: Unit
+def →[B](y: B): (Ping, B)
+final def !=(x$1: Any): Boolean
+final def !=(x$1: AnyRef): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def ==(x$1: AnyRef): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+override def toString(): String
+private[this] val __leftOfArrow: Ping
+private[this] val __resultOfEnsuring: Ping
+private[this] val __stringToFormat: Ping
+private[this] val __thingToAdd: Ping
+private[this] val pong: Pong
================================================================================
askType at PingPong.scala(8,10)
================================================================================
-[response] askTypeAt at (8,10)
+[response] askTypeAt (8,10)
def loop: Unit = Ping.this.poke()
================================================================================
askType at PingPong.scala(10,10)
================================================================================
-[response] askTypeAt at (10,10)
+[response] askTypeAt (10,10)
def poke: Unit = Ping.this.pong.poke()
================================================================================
askType at PingPong.scala(17,10)
================================================================================
-[response] askTypeAt at (17,10)
+[response] askTypeAt (17,10)
private[this] val name: String = "pong"
================================================================================
diff --git a/test/files/presentation/random.check b/test/files/presentation/random.check
index fce4b69fb3..fb3500aeea 100644
--- a/test/files/presentation/random.check
+++ b/test/files/presentation/random.check
@@ -2,7 +2,7 @@ reload: Random.scala
askType at Random.scala(18,14)
================================================================================
-[response] askTypeAt at (18,14)
+[response] askTypeAt (18,14)
val filter: Int => Boolean = try {
java.this.lang.Integer.parseInt(args.apply(0)) match {
case 1 => ((x: Int) => x.%(2).!=(0))
@@ -16,12 +16,12 @@ val filter: Int => Boolean = try {
askType at Random.scala(19,30)
================================================================================
-[response] askTypeAt at (19,30)
+[response] askTypeAt (19,30)
0
================================================================================
askType at Random.scala(26,12)
================================================================================
-[response] askTypeAt at (26,12)
+[response] askTypeAt (26,12)
_
================================================================================
diff --git a/test/files/presentation/t1207.check b/test/files/presentation/t1207.check
new file mode 100644
index 0000000000..84bfd79d75
--- /dev/null
+++ b/test/files/presentation/t1207.check
@@ -0,0 +1,53 @@
+reload: Completions.scala
+
+askTypeCompletion at Completions.scala(10,15)
+================================================================================
+[response] askCompletionAt (10,15)
+retrieved 3 members
+final package bongo
+final package lang
+final package util
+================================================================================
+
+askTypeCompletion at Completions.scala(11,16)
+================================================================================
+[response] askCompletionAt (11,16)
+retrieved 3 members
+final package bongo
+final package lang
+final package util
+================================================================================
+
+askTypeCompletion at Completions.scala(12,19)
+================================================================================
+[response] askCompletionAt (12,19)
+retrieved 3 members
+final package bongo
+final package lang
+final package util
+================================================================================
+
+askTypeCompletion at Completions.scala(13,19)
+================================================================================
+[response] askCompletionAt (13,19)
+retrieved 3 members
+final package bongo
+final package lang
+final package util
+================================================================================
+
+askTypeCompletion at Completions.scala(14,23)
+================================================================================
+[response] askCompletionAt (14,23)
+retrieved 3 members
+final package bongo
+final package lang
+final package util
+================================================================================
+
+askTypeCompletion at Completions.scala(15,10)
+================================================================================
+[response] askCompletionAt (15,10)
+retrieved 0 members
+
+================================================================================
diff --git a/test/files/presentation/t1207/Test.scala b/test/files/presentation/t1207/Test.scala
new file mode 100644
index 0000000000..bec1131c4c
--- /dev/null
+++ b/test/files/presentation/t1207/Test.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest \ No newline at end of file
diff --git a/test/files/presentation/t1207/src/Completions.scala b/test/files/presentation/t1207/src/Completions.scala
new file mode 100644
index 0000000000..804d4fdc3d
--- /dev/null
+++ b/test/files/presentation/t1207/src/Completions.scala
@@ -0,0 +1,20 @@
+package other {
+ package bongo { }
+ package lang { }
+ package util {
+ package boogly
+ }
+}
+
+package ticket_1001207 {
+ import other./*!*/
+ import other.u/*!*/
+ import other.uti /*!*/
+ import other.util/*!*/
+ import other.{lang, u/*!*/}
+ import j/*!*/
+
+ class T1207 {
+
+ }
+}
diff --git a/test/files/presentation/t5708.check b/test/files/presentation/t5708.check
index 4fc7a56426..b2cedd689f 100644
--- a/test/files/presentation/t5708.check
+++ b/test/files/presentation/t5708.check
@@ -2,46 +2,46 @@ reload: Completions.scala
askTypeCompletion at Completions.scala(17,9)
================================================================================
-[response] aksTypeCompletion at (17,9)
+[response] askCompletionAt (17,9)
retrieved 43 members
-[accessible: true] `lazy value fooInt`
-[accessible: true] `method !=(x$1: Any)Boolean`
-[accessible: true] `method !=(x$1: AnyRef)Boolean`
-[accessible: true] `method ##()Int`
-[accessible: true] `method +(other: String)String`
-[accessible: true] `method ->[B](y: B)(test.Compat.type, B)`
-[accessible: true] `method ==(x$1: Any)Boolean`
-[accessible: true] `method ==(x$1: AnyRef)Boolean`
-[accessible: true] `method asInstanceOf[T0]=> T0`
-[accessible: true] `method ensuring(cond: Boolean)test.Compat.type`
-[accessible: true] `method ensuring(cond: Boolean, msg: => Any)test.Compat.type`
-[accessible: true] `method ensuring(cond: test.Compat.type => Boolean)test.Compat.type`
-[accessible: true] `method ensuring(cond: test.Compat.type => Boolean, msg: => Any)test.Compat.type`
-[accessible: true] `method eq(x$1: AnyRef)Boolean`
-[accessible: true] `method equals(x$1: Any)Boolean`
-[accessible: true] `method formatted(fmtstr: String)String`
-[accessible: true] `method hashCode()Int`
-[accessible: true] `method isInstanceOf[T0]=> Boolean`
-[accessible: true] `method ne(x$1: AnyRef)Boolean`
-[accessible: true] `method notify()Unit`
-[accessible: true] `method notifyAll()Unit`
-[accessible: true] `method pkgPrivateM=> String`
-[accessible: true] `method synchronized[T0](x$1: T0)T0`
-[accessible: true] `method toString()String`
-[accessible: true] `method wait()Unit`
-[accessible: true] `method wait(x$1: Long)Unit`
-[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method →[B](y: B)(test.Compat.type, B)`
-[accessible: true] `value CONST_STRINGString("constant")`
-[accessible: true] `value __leftOfArrowtest.Compat.type`
-[accessible: true] `value __resultOfEnsuringtest.Compat.type`
-[accessible: true] `value __stringToFormattest.Compat.type`
-[accessible: true] `value __thingToAddtest.Compat.type`
-[accessible: true] `value pkgPrivateVString`
-[accessible: false] `method clone()Object`
-[accessible: false] `method finalize()Unit`
-[accessible: false] `method privateM=> String`
-[accessible: false] `method protectedValM=> String`
-[accessible: false] `value privateVString`
-[accessible: false] `value protectedVString`
+[inaccessible] private def privateM: String
+[inaccessible] private[this] val privateV: String
+[inaccessible] private[this] val protectedV: String
+[inaccessible] protected def protectedValM: String
+[inaccessible] protected[package lang] def clone(): Object
+[inaccessible] protected[package lang] def finalize(): Unit
+def +(other: String): String
+def ->[B](y: B): (test.Compat.type, B)
+def ensuring(cond: Boolean): test.Compat.type
+def ensuring(cond: Boolean,msg: => Any): test.Compat.type
+def ensuring(cond: test.Compat.type => Boolean): test.Compat.type
+def ensuring(cond: test.Compat.type => Boolean,msg: => Any): test.Compat.type
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def toString(): String
+def →[B](y: B): (test.Compat.type, B)
+final def !=(x$1: Any): Boolean
+final def !=(x$1: AnyRef): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def ==(x$1: AnyRef): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+final private[this] val CONST_STRING: String("constant")
+lazy private[this] var foo: Int
+private[package test] def pkgPrivateM: String
+private[this] val __leftOfArrow: test.Compat.type
+private[this] val __resultOfEnsuring: test.Compat.type
+private[this] val __stringToFormat: test.Compat.type
+private[this] val __thingToAdd: test.Compat.type
+private[this] val pkgPrivateV: String
================================================================================
diff --git a/test/files/presentation/visibility.check b/test/files/presentation/visibility.check
index e9b349ac06..4ba7dbaad9 100644
--- a/test/files/presentation/visibility.check
+++ b/test/files/presentation/visibility.check
@@ -2,220 +2,220 @@ reload: Completions.scala
askTypeCompletion at Completions.scala(14,12)
================================================================================
-[response] aksTypeCompletion at (14,12)
+[response] askCompletionAt (14,12)
retrieved 41 members
-[accessible: true] `method !=(x$1: Any)Boolean`
-[accessible: true] `method !=(x$1: AnyRef)Boolean`
-[accessible: true] `method ##()Int`
-[accessible: true] `method +(other: String)String`
-[accessible: true] `method ->[B](y: B)(accessibility.Foo, B)`
-[accessible: true] `method ==(x$1: Any)Boolean`
-[accessible: true] `method ==(x$1: AnyRef)Boolean`
-[accessible: true] `method asInstanceOf[T0]=> T0`
-[accessible: true] `method clone()Object`
-[accessible: true] `method ensuring(cond: Boolean)accessibility.Foo`
-[accessible: true] `method ensuring(cond: Boolean, msg: => Any)accessibility.Foo`
-[accessible: true] `method ensuring(cond: accessibility.Foo => Boolean)accessibility.Foo`
-[accessible: true] `method ensuring(cond: accessibility.Foo => Boolean, msg: => Any)accessibility.Foo`
-[accessible: true] `method eq(x$1: AnyRef)Boolean`
-[accessible: true] `method equals(x$1: Any)Boolean`
-[accessible: true] `method finalize()Unit`
-[accessible: true] `method formatted(fmtstr: String)String`
-[accessible: true] `method hashCode()Int`
-[accessible: true] `method isInstanceOf[T0]=> Boolean`
-[accessible: true] `method ne(x$1: AnyRef)Boolean`
-[accessible: true] `method notify()Unit`
-[accessible: true] `method notifyAll()Unit`
-[accessible: true] `method secretPrivate()Unit`
-[accessible: true] `method secretProtected()Unit`
-[accessible: true] `method secretProtectedInPackage()Unit`
-[accessible: true] `method secretPublic()Unit`
-[accessible: true] `method someTests(other: accessibility.Foo)Unit`
-[accessible: true] `method synchronized[T0](x$1: T0)T0`
-[accessible: true] `method toString()String`
-[accessible: true] `method wait()Unit`
-[accessible: true] `method wait(x$1: Long)Unit`
-[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method →[B](y: B)(accessibility.Foo, B)`
-[accessible: true] `value __leftOfArrowaccessibility.Foo`
-[accessible: true] `value __resultOfEnsuringaccessibility.Foo`
-[accessible: true] `value __stringToFormataccessibility.Foo`
-[accessible: true] `value __thingToAddaccessibility.Foo`
-[accessible: false] `method secretPrivateThis()Unit`
+[inaccessible] private[this] def secretPrivateThis(): Unit
+def +(other: String): String
+def ->[B](y: B): (accessibility.Foo, B)
+def ensuring(cond: Boolean): accessibility.Foo
+def ensuring(cond: Boolean,msg: => Any): accessibility.Foo
+def ensuring(cond: accessibility.Foo => Boolean): accessibility.Foo
+def ensuring(cond: accessibility.Foo => Boolean,msg: => Any): accessibility.Foo
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def secretPublic(): Unit
+def someTests(other: accessibility.Foo): Unit
+def toString(): String
+def →[B](y: B): (accessibility.Foo, B)
+final def !=(x$1: Any): Boolean
+final def !=(x$1: AnyRef): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def ==(x$1: AnyRef): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+private def secretPrivate(): Unit
+private[this] val __leftOfArrow: accessibility.Foo
+private[this] val __resultOfEnsuring: accessibility.Foo
+private[this] val __stringToFormat: accessibility.Foo
+private[this] val __thingToAdd: accessibility.Foo
+protected def secretProtected(): Unit
+protected[package accessibility] def secretProtectedInPackage(): Unit
+protected[package lang] def clone(): Object
+protected[package lang] def finalize(): Unit
================================================================================
askTypeCompletion at Completions.scala(16,11)
================================================================================
-[response] aksTypeCompletion at (16,11)
+[response] askCompletionAt (16,11)
retrieved 41 members
-[accessible: true] `method !=(x$1: Any)Boolean`
-[accessible: true] `method !=(x$1: AnyRef)Boolean`
-[accessible: true] `method ##()Int`
-[accessible: true] `method +(other: String)String`
-[accessible: true] `method ->[B](y: B)(accessibility.Foo, B)`
-[accessible: true] `method ==(x$1: Any)Boolean`
-[accessible: true] `method ==(x$1: AnyRef)Boolean`
-[accessible: true] `method asInstanceOf[T0]=> T0`
-[accessible: true] `method clone()Object`
-[accessible: true] `method ensuring(cond: Boolean)accessibility.Foo`
-[accessible: true] `method ensuring(cond: Boolean, msg: => Any)accessibility.Foo`
-[accessible: true] `method ensuring(cond: accessibility.Foo => Boolean)accessibility.Foo`
-[accessible: true] `method ensuring(cond: accessibility.Foo => Boolean, msg: => Any)accessibility.Foo`
-[accessible: true] `method eq(x$1: AnyRef)Boolean`
-[accessible: true] `method equals(x$1: Any)Boolean`
-[accessible: true] `method finalize()Unit`
-[accessible: true] `method formatted(fmtstr: String)String`
-[accessible: true] `method hashCode()Int`
-[accessible: true] `method isInstanceOf[T0]=> Boolean`
-[accessible: true] `method ne(x$1: AnyRef)Boolean`
-[accessible: true] `method notify()Unit`
-[accessible: true] `method notifyAll()Unit`
-[accessible: true] `method secretPrivate()Unit`
-[accessible: true] `method secretPrivateThis()Unit`
-[accessible: true] `method secretProtected()Unit`
-[accessible: true] `method secretProtectedInPackage()Unit`
-[accessible: true] `method secretPublic()Unit`
-[accessible: true] `method someTests(other: accessibility.Foo)Unit`
-[accessible: true] `method synchronized[T0](x$1: T0)T0`
-[accessible: true] `method toString()String`
-[accessible: true] `method wait()Unit`
-[accessible: true] `method wait(x$1: Long)Unit`
-[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method →[B](y: B)(accessibility.Foo, B)`
-[accessible: true] `value __leftOfArrowaccessibility.Foo`
-[accessible: true] `value __resultOfEnsuringaccessibility.Foo`
-[accessible: true] `value __stringToFormataccessibility.Foo`
-[accessible: true] `value __thingToAddaccessibility.Foo`
+def +(other: String): String
+def ->[B](y: B): (accessibility.Foo, B)
+def ensuring(cond: Boolean): accessibility.Foo
+def ensuring(cond: Boolean,msg: => Any): accessibility.Foo
+def ensuring(cond: accessibility.Foo => Boolean): accessibility.Foo
+def ensuring(cond: accessibility.Foo => Boolean,msg: => Any): accessibility.Foo
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def secretPublic(): Unit
+def someTests(other: accessibility.Foo): Unit
+def toString(): String
+def →[B](y: B): (accessibility.Foo, B)
+final def !=(x$1: Any): Boolean
+final def !=(x$1: AnyRef): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def ==(x$1: AnyRef): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+private def secretPrivate(): Unit
+private[this] def secretPrivateThis(): Unit
+private[this] val __leftOfArrow: accessibility.Foo
+private[this] val __resultOfEnsuring: accessibility.Foo
+private[this] val __stringToFormat: accessibility.Foo
+private[this] val __thingToAdd: accessibility.Foo
+protected def secretProtected(): Unit
+protected[package accessibility] def secretProtectedInPackage(): Unit
+protected[package lang] def clone(): Object
+protected[package lang] def finalize(): Unit
================================================================================
askTypeCompletion at Completions.scala(22,11)
================================================================================
-[response] aksTypeCompletion at (22,11)
+[response] askCompletionAt (22,11)
retrieved 41 members
-[accessible: true] `method !=(x$1: Any)Boolean`
-[accessible: true] `method !=(x$1: AnyRef)Boolean`
-[accessible: true] `method ##()Int`
-[accessible: true] `method +(other: String)String`
-[accessible: true] `method ->[B](y: B)(accessibility.AccessibilityChecks, B)`
-[accessible: true] `method ==(x$1: Any)Boolean`
-[accessible: true] `method ==(x$1: AnyRef)Boolean`
-[accessible: true] `method asInstanceOf[T0]=> T0`
-[accessible: true] `method clone()Object`
-[accessible: true] `method ensuring(cond: Boolean)accessibility.AccessibilityChecks`
-[accessible: true] `method ensuring(cond: Boolean, msg: => Any)accessibility.AccessibilityChecks`
-[accessible: true] `method ensuring(cond: accessibility.AccessibilityChecks => Boolean)accessibility.AccessibilityChecks`
-[accessible: true] `method ensuring(cond: accessibility.AccessibilityChecks => Boolean, msg: => Any)accessibility.AccessibilityChecks`
-[accessible: true] `method eq(x$1: AnyRef)Boolean`
-[accessible: true] `method equals(x$1: Any)Boolean`
-[accessible: true] `method finalize()Unit`
-[accessible: true] `method formatted(fmtstr: String)String`
-[accessible: true] `method hashCode()Int`
-[accessible: true] `method isInstanceOf[T0]=> Boolean`
-[accessible: true] `method ne(x$1: AnyRef)Boolean`
-[accessible: true] `method notify()Unit`
-[accessible: true] `method notifyAll()Unit`
-[accessible: true] `method secretProtected()Unit`
-[accessible: true] `method secretProtectedInPackage()Unit`
-[accessible: true] `method secretPublic()Unit`
-[accessible: true] `method someTests(other: accessibility.Foo)Unit`
-[accessible: true] `method someTests=> Unit`
-[accessible: true] `method synchronized[T0](x$1: T0)T0`
-[accessible: true] `method toString()String`
-[accessible: true] `method wait()Unit`
-[accessible: true] `method wait(x$1: Long)Unit`
-[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method →[B](y: B)(accessibility.AccessibilityChecks, B)`
-[accessible: true] `value __leftOfArrowaccessibility.AccessibilityChecks`
-[accessible: true] `value __resultOfEnsuringaccessibility.AccessibilityChecks`
-[accessible: true] `value __stringToFormataccessibility.AccessibilityChecks`
-[accessible: true] `value __thingToAddaccessibility.AccessibilityChecks`
-[accessible: false] `method secretPrivate()Unit`
+[inaccessible] private def secretPrivate(): Unit
+def +(other: String): String
+def ->[B](y: B): (accessibility.AccessibilityChecks, B)
+def ensuring(cond: Boolean): accessibility.AccessibilityChecks
+def ensuring(cond: Boolean,msg: => Any): accessibility.AccessibilityChecks
+def ensuring(cond: accessibility.AccessibilityChecks => Boolean): accessibility.AccessibilityChecks
+def ensuring(cond: accessibility.AccessibilityChecks => Boolean,msg: => Any): accessibility.AccessibilityChecks
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def secretPublic(): Unit
+def someTests(other: accessibility.Foo): Unit
+def someTests: Unit
+def toString(): String
+def →[B](y: B): (accessibility.AccessibilityChecks, B)
+final def !=(x$1: Any): Boolean
+final def !=(x$1: AnyRef): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def ==(x$1: AnyRef): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+private[this] val __leftOfArrow: accessibility.AccessibilityChecks
+private[this] val __resultOfEnsuring: accessibility.AccessibilityChecks
+private[this] val __stringToFormat: accessibility.AccessibilityChecks
+private[this] val __thingToAdd: accessibility.AccessibilityChecks
+protected def secretProtected(): Unit
+protected[package accessibility] def secretProtectedInPackage(): Unit
+protected[package lang] def clone(): Object
+protected[package lang] def finalize(): Unit
================================================================================
askTypeCompletion at Completions.scala(28,10)
================================================================================
-[response] aksTypeCompletion at (28,10)
+[response] askCompletionAt (28,10)
retrieved 41 members
-[accessible: true] `method !=(x$1: Any)Boolean`
-[accessible: true] `method !=(x$1: AnyRef)Boolean`
-[accessible: true] `method ##()Int`
-[accessible: true] `method +(other: String)String`
-[accessible: true] `method ->[B](y: B)(accessibility.Foo, B)`
-[accessible: true] `method ==(x$1: Any)Boolean`
-[accessible: true] `method ==(x$1: AnyRef)Boolean`
-[accessible: true] `method asInstanceOf[T0]=> T0`
-[accessible: true] `method ensuring(cond: Boolean)accessibility.Foo`
-[accessible: true] `method ensuring(cond: Boolean, msg: => Any)accessibility.Foo`
-[accessible: true] `method ensuring(cond: accessibility.Foo => Boolean)accessibility.Foo`
-[accessible: true] `method ensuring(cond: accessibility.Foo => Boolean, msg: => Any)accessibility.Foo`
-[accessible: true] `method eq(x$1: AnyRef)Boolean`
-[accessible: true] `method equals(x$1: Any)Boolean`
-[accessible: true] `method formatted(fmtstr: String)String`
-[accessible: true] `method hashCode()Int`
-[accessible: true] `method isInstanceOf[T0]=> Boolean`
-[accessible: true] `method ne(x$1: AnyRef)Boolean`
-[accessible: true] `method notify()Unit`
-[accessible: true] `method notifyAll()Unit`
-[accessible: true] `method secretProtectedInPackage()Unit`
-[accessible: true] `method secretPublic()Unit`
-[accessible: true] `method someTests(other: accessibility.Foo)Unit`
-[accessible: true] `method synchronized[T0](x$1: T0)T0`
-[accessible: true] `method toString()String`
-[accessible: true] `method wait()Unit`
-[accessible: true] `method wait(x$1: Long)Unit`
-[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method →[B](y: B)(accessibility.Foo, B)`
-[accessible: true] `value __leftOfArrowaccessibility.Foo`
-[accessible: true] `value __resultOfEnsuringaccessibility.Foo`
-[accessible: true] `value __stringToFormataccessibility.Foo`
-[accessible: true] `value __thingToAddaccessibility.Foo`
-[accessible: false] `method clone()Object`
-[accessible: false] `method finalize()Unit`
-[accessible: false] `method secretPrivate()Unit`
-[accessible: false] `method secretPrivateThis()Unit`
-[accessible: false] `method secretProtected()Unit`
+[inaccessible] private def secretPrivate(): Unit
+[inaccessible] private[this] def secretPrivateThis(): Unit
+[inaccessible] protected def secretProtected(): Unit
+[inaccessible] protected[package lang] def clone(): Object
+[inaccessible] protected[package lang] def finalize(): Unit
+def +(other: String): String
+def ->[B](y: B): (accessibility.Foo, B)
+def ensuring(cond: Boolean): accessibility.Foo
+def ensuring(cond: Boolean,msg: => Any): accessibility.Foo
+def ensuring(cond: accessibility.Foo => Boolean): accessibility.Foo
+def ensuring(cond: accessibility.Foo => Boolean,msg: => Any): accessibility.Foo
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def secretPublic(): Unit
+def someTests(other: accessibility.Foo): Unit
+def toString(): String
+def →[B](y: B): (accessibility.Foo, B)
+final def !=(x$1: Any): Boolean
+final def !=(x$1: AnyRef): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def ==(x$1: AnyRef): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+private[this] val __leftOfArrow: accessibility.Foo
+private[this] val __resultOfEnsuring: accessibility.Foo
+private[this] val __stringToFormat: accessibility.Foo
+private[this] val __thingToAdd: accessibility.Foo
+protected[package accessibility] def secretProtectedInPackage(): Unit
================================================================================
askTypeCompletion at Completions.scala(37,8)
================================================================================
-[response] aksTypeCompletion at (37,8)
+[response] askCompletionAt (37,8)
retrieved 41 members
-[accessible: true] `method !=(x$1: Any)Boolean`
-[accessible: true] `method !=(x$1: AnyRef)Boolean`
-[accessible: true] `method ##()Int`
-[accessible: true] `method +(other: String)String`
-[accessible: true] `method ->[B](y: B)(accessibility.Foo, B)`
-[accessible: true] `method ==(x$1: Any)Boolean`
-[accessible: true] `method ==(x$1: AnyRef)Boolean`
-[accessible: true] `method asInstanceOf[T0]=> T0`
-[accessible: true] `method ensuring(cond: Boolean)accessibility.Foo`
-[accessible: true] `method ensuring(cond: Boolean, msg: => Any)accessibility.Foo`
-[accessible: true] `method ensuring(cond: accessibility.Foo => Boolean)accessibility.Foo`
-[accessible: true] `method ensuring(cond: accessibility.Foo => Boolean, msg: => Any)accessibility.Foo`
-[accessible: true] `method eq(x$1: AnyRef)Boolean`
-[accessible: true] `method equals(x$1: Any)Boolean`
-[accessible: true] `method formatted(fmtstr: String)String`
-[accessible: true] `method hashCode()Int`
-[accessible: true] `method isInstanceOf[T0]=> Boolean`
-[accessible: true] `method ne(x$1: AnyRef)Boolean`
-[accessible: true] `method notify()Unit`
-[accessible: true] `method notifyAll()Unit`
-[accessible: true] `method secretPublic()Unit`
-[accessible: true] `method someTests(other: accessibility.Foo)Unit`
-[accessible: true] `method synchronized[T0](x$1: T0)T0`
-[accessible: true] `method toString()String`
-[accessible: true] `method wait()Unit`
-[accessible: true] `method wait(x$1: Long)Unit`
-[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method →[B](y: B)(accessibility.Foo, B)`
-[accessible: true] `value __leftOfArrowaccessibility.Foo`
-[accessible: true] `value __resultOfEnsuringaccessibility.Foo`
-[accessible: true] `value __stringToFormataccessibility.Foo`
-[accessible: true] `value __thingToAddaccessibility.Foo`
-[accessible: false] `method clone()Object`
-[accessible: false] `method finalize()Unit`
-[accessible: false] `method secretPrivate()Unit`
-[accessible: false] `method secretPrivateThis()Unit`
-[accessible: false] `method secretProtected()Unit`
-[accessible: false] `method secretProtectedInPackage()Unit`
+[inaccessible] private def secretPrivate(): Unit
+[inaccessible] private[this] def secretPrivateThis(): Unit
+[inaccessible] protected def secretProtected(): Unit
+[inaccessible] protected[package accessibility] def secretProtectedInPackage(): Unit
+[inaccessible] protected[package lang] def clone(): Object
+[inaccessible] protected[package lang] def finalize(): Unit
+def +(other: String): String
+def ->[B](y: B): (accessibility.Foo, B)
+def ensuring(cond: Boolean): accessibility.Foo
+def ensuring(cond: Boolean,msg: => Any): accessibility.Foo
+def ensuring(cond: accessibility.Foo => Boolean): accessibility.Foo
+def ensuring(cond: accessibility.Foo => Boolean,msg: => Any): accessibility.Foo
+def equals(x$1: Any): Boolean
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def secretPublic(): Unit
+def someTests(other: accessibility.Foo): Unit
+def toString(): String
+def →[B](y: B): (accessibility.Foo, B)
+final def !=(x$1: Any): Boolean
+final def !=(x$1: AnyRef): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def ==(x$1: AnyRef): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+private[this] val __leftOfArrow: accessibility.Foo
+private[this] val __resultOfEnsuring: accessibility.Foo
+private[this] val __stringToFormat: accessibility.Foo
+private[this] val __thingToAdd: accessibility.Foo
================================================================================
diff --git a/test/files/run/Course-2002-01.check b/test/files/run/Course-2002-01.check
index 17b30bf3c2..16b491d6e2 100644
--- a/test/files/run/Course-2002-01.check
+++ b/test/files/run/Course-2002-01.check
@@ -1,3 +1,6 @@
+Course-2002-01.scala:41: warning: method loop in object M0 does nothing other than call itself recursively
+ def loop: Int = loop;
+ ^
232
667
11
diff --git a/test/files/run/Meter.check b/test/files/run/Meter.check
index b7e2eac125..c79c51a294 100644
--- a/test/files/run/Meter.check
+++ b/test/files/run/Meter.check
@@ -1,3 +1,6 @@
+Meter.scala:72: warning: a.Meter and Int are unrelated: they will never compare equal
+ println("x == 1: "+(x == 1))
+ ^
2.0
4.0m
false
diff --git a/test/files/run/MeterCaseClass.check b/test/files/run/MeterCaseClass.check
index 2528753657..2782704f9f 100644
--- a/test/files/run/MeterCaseClass.check
+++ b/test/files/run/MeterCaseClass.check
@@ -1,3 +1,6 @@
+MeterCaseClass.scala:69: warning: comparing values of types a.Meter and Int using `==' will always yield false
+ println("x == 1: "+(x == 1))
+ ^
2.0
Meter(4.0)
false
diff --git a/test/files/run/Predef.readLine.scala b/test/files/run/Predef.readLine.scala
index 9f07936638..54809f8071 100644
--- a/test/files/run/Predef.readLine.scala
+++ b/test/files/run/Predef.readLine.scala
@@ -1,4 +1,5 @@
import java.io.StringReader
+import scala.io.ReadStdin.readLine
object Test extends App {
Console.withIn(new StringReader("")) {
@@ -7,4 +8,4 @@ object Test extends App {
readLine("%s prompt\n", "fancy")
readLine("%s %s prompt\n", "immensely", "fancy")
}
-} \ No newline at end of file
+}
diff --git a/test/files/run/SymbolsTest.scala b/test/files/run/SymbolsTest.scala
index 53caa5e62f..d5948ea168 100644
--- a/test/files/run/SymbolsTest.scala
+++ b/test/files/run/SymbolsTest.scala
@@ -1,6 +1,5 @@
-
-
+import scala.language.reflectiveCalls
class Slazz {
val s1 = 'myFirstSymbol
diff --git a/test/files/run/abstypetags_serialize.scala b/test/files/run/abstypetags_serialize.scala
index 93fb5dcd06..6ec97105fe 100644
--- a/test/files/run/abstypetags_serialize.scala
+++ b/test/files/run/abstypetags_serialize.scala
@@ -1,3 +1,4 @@
+import scala.language.higherKinds
import java.io._
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{universe => ru}
@@ -30,4 +31,4 @@ object Test extends App {
}
qwe
-} \ No newline at end of file
+}
diff --git a/test/files/run/analyzerPlugins.check b/test/files/run/analyzerPlugins.check
index 297bd36bae..e3ab554d4c 100644
--- a/test/files/run/analyzerPlugins.check
+++ b/test/files/run/analyzerPlugins.check
@@ -7,7 +7,7 @@ annotationsConform(Int @testAnn, Int) [2]
annotationsConform(Int(1) @testAnn, Int) [1]
annotationsConform(Int(1), Int @testAnn) [1]
annotationsConform(Nothing, Int @testAnn) [2]
-annotationsConform(String @testAnn, String) [1]
+annotationsConform(String @testAnn, String) [2]
canAdaptAnnotations(Trees$Ident, String) [1]
canAdaptAnnotations(Trees$Select, ?) [1]
canAdaptAnnotations(Trees$Select, Boolean @testAnn) [1]
@@ -25,13 +25,13 @@ pluginsPt(?, Trees$Assign) [7]
pluginsPt(?, Trees$Block) [4]
pluginsPt(?, Trees$ClassDef) [2]
pluginsPt(?, Trees$DefDef) [14]
-pluginsPt(?, Trees$Ident) [51]
+pluginsPt(?, Trees$Ident) [50]
pluginsPt(?, Trees$If) [2]
pluginsPt(?, Trees$Literal) [16]
pluginsPt(?, Trees$New) [5]
pluginsPt(?, Trees$PackageDef) [1]
pluginsPt(?, Trees$Return) [1]
-pluginsPt(?, Trees$Select) [52]
+pluginsPt(?, Trees$Select) [48]
pluginsPt(?, Trees$Super) [2]
pluginsPt(?, Trees$This) [20]
pluginsPt(?, Trees$TypeApply) [4]
@@ -112,7 +112,6 @@ pluginsTyped(<notype>, Trees$DefDef) [14]
pluginsTyped(<notype>, Trees$PackageDef) [1]
pluginsTyped(<notype>, Trees$TypeDef) [1]
pluginsTyped(<notype>, Trees$ValDef) [21]
-pluginsTyped(<root>, Trees$Ident) [1]
pluginsTyped(=> Boolean @testAnn, Trees$Select) [1]
pluginsTyped(=> Double, Trees$Select) [4]
pluginsTyped(=> Int, Trees$Select) [5]
@@ -151,7 +150,6 @@ pluginsTyped(List[Any], Trees$Apply) [1]
pluginsTyped(List[Any], Trees$Select) [1]
pluginsTyped(List[Any], Trees$TypeTree) [3]
pluginsTyped(Nothing, Trees$Return) [1]
-pluginsTyped(Nothing, Trees$Select) [2]
pluginsTyped(Object, Trees$Apply) [1]
pluginsTyped(String @testAnn, Trees$Ident) [1]
pluginsTyped(String @testAnn, Trees$Select) [1]
@@ -185,8 +183,6 @@ pluginsTyped(scala.annotation.TypeConstraint, Trees$TypeTree) [2]
pluginsTyped(scala.collection.immutable.List.type, Trees$Select) [2]
pluginsTyped(scala.collection.immutable.StringOps, Trees$ApplyImplicitView) [2]
pluginsTyped(scala.collection.mutable.WrappedArray[Any], Trees$Apply) [1]
-pluginsTyped(scala.type, Trees$Ident) [1]
-pluginsTyped(scala.type, Trees$Select) [1]
pluginsTyped(str.type, Trees$Ident) [3]
pluginsTyped(testAnn, Trees$Apply) [5]
pluginsTyped(testAnn, Trees$Ident) [5]
diff --git a/test/files/run/array-existential-bound.scala b/test/files/run/array-existential-bound.scala
index bc442d39f7..b397c98111 100644
--- a/test/files/run/array-existential-bound.scala
+++ b/test/files/run/array-existential-bound.scala
@@ -10,8 +10,8 @@ object Test extends Fooz[Array[Int]] {
def main(args: Array[String]): Unit = {
println(f1.f0(Array[String]("a", "b")))
- println(f2.f0(1 to 1000 toArray))
+ println(f2.f0((1 to 1000).toArray))
println(f3.f0((1 to 1000).toArray[Any]))
- println(f4.f0('a' to 'z' toArray))
+ println(f4.f0(('a' to 'z').toArray))
}
}
diff --git a/test/files/run/arrays.check b/test/files/run/arrays.check
index b1f7fae1d1..c9a3a87268 100644
--- a/test/files/run/arrays.check
+++ b/test/files/run/arrays.check
@@ -1 +1,7 @@
+arrays.scala:248: warning: comparing values of types Unit and Unit using `==' will always yield true
+ check(xs(0) == u0, xs(0), u0);
+ ^
+arrays.scala:249: warning: comparing values of types Unit and Unit using `==' will always yield true
+ check(xs(1) == u1, xs(1), u1);
+ ^
checks: 2302
diff --git a/test/files/run/bitsets.scala b/test/files/run/bitsets.scala
index d55f9e4e83..5d49220749 100644
--- a/test/files/run/bitsets.scala
+++ b/test/files/run/bitsets.scala
@@ -4,6 +4,8 @@
//############################################################################
+import scala.language.postfixOps
+
object TestMutable {
import scala.collection.mutable.BitSet
@@ -115,6 +117,24 @@ object TestMutable3 {
println(s"b0:$b0")
}
+/***
+The memory requirements here are way beyond
+what a test should exercise.
+
+object TestMutable4 {
+ import scala.collection.mutable.BitSet
+
+ val bMax = BitSet(Int.MaxValue)
+ println(s"bMax:$bMax")
+ bMax.foreach(println)
+
+ val bLarge = BitSet(2000000001)
+ println(s"bLarge:$bLarge")
+
+ println(bMax == bLarge)
+}
+***/
+
object TestImmutable {
import scala.collection.immutable.BitSet
@@ -190,6 +210,7 @@ object Test extends App {
TestMutable
TestMutable2
TestMutable3
+ // TestMutable4
TestImmutable
TestImmutable2
}
diff --git a/test/files/run/blame_eye_triple_eee.check b/test/files/run/blame_eye_triple_eee-double.check
index 5e46d91a8f..5e46d91a8f 100644
--- a/test/files/run/blame_eye_triple_eee.check
+++ b/test/files/run/blame_eye_triple_eee-double.check
diff --git a/test/files/run/blame_eye_triple_eee.flags b/test/files/run/blame_eye_triple_eee-double.flags
index c9b68d70dc..c9b68d70dc 100644
--- a/test/files/run/blame_eye_triple_eee.flags
+++ b/test/files/run/blame_eye_triple_eee-double.flags
diff --git a/test/files/run/blame_eye_triple_eee.scala b/test/files/run/blame_eye_triple_eee-double.scala
index 1640aead40..1640aead40 100644
--- a/test/files/run/blame_eye_triple_eee.scala
+++ b/test/files/run/blame_eye_triple_eee-double.scala
diff --git a/test/files/run/blame_eye_triple_eee-float.check b/test/files/run/blame_eye_triple_eee-float.check
new file mode 100644
index 0000000000..5e46d91a8f
--- /dev/null
+++ b/test/files/run/blame_eye_triple_eee-float.check
@@ -0,0 +1,9 @@
+if (NaN == NaN) is good
+if (x == x) is good
+if (x == NaN) is good
+if (NaN != NaN) is good
+if (x != x) is good
+if (NaN != x) is good
+x matching was good
+NaN matching was good
+loop with NaN was goood
diff --git a/test/files/run/blame_eye_triple_eee-float.flags b/test/files/run/blame_eye_triple_eee-float.flags
new file mode 100644
index 0000000000..c9b68d70dc
--- /dev/null
+++ b/test/files/run/blame_eye_triple_eee-float.flags
@@ -0,0 +1 @@
+-optimise
diff --git a/test/files/run/blame_eye_triple_eee-float.scala b/test/files/run/blame_eye_triple_eee-float.scala
new file mode 100644
index 0000000000..4deb9f3d60
--- /dev/null
+++ b/test/files/run/blame_eye_triple_eee-float.scala
@@ -0,0 +1,61 @@
+object Test extends App {
+ import Float.NaN
+
+ // NaN must not equal NaN no matter what optimizations are applied
+ // All the following will seem redundant, but to an optimizer
+ // they can appear different
+
+ val x = NaN
+
+ if (NaN == NaN)
+ println("if (NaN == NaN) is broken")
+ else
+ println("if (NaN == NaN) is good")
+
+ if (x == x)
+ println("if (x == x) is broken")
+ else
+ println("if (x == x) is good")
+
+ if (x == NaN)
+ println("if (x == NaN) is broken")
+ else
+ println("if (x == NaN) is good")
+
+ if (NaN != NaN)
+ println("if (NaN != NaN) is good")
+ else
+ println("if (NaN != NaN) broken")
+
+ if (x != x)
+ println("if (x != x) is good")
+ else
+ println("if (x != x) broken")
+
+ if (NaN != x)
+ println("if (NaN != x) is good")
+ else
+ println("if (NaN != x) is broken")
+
+ x match {
+ case 0.0f => println("x matched 0!")
+ case NaN => println("x matched NaN!")
+ case _ => println("x matching was good")
+ }
+
+ NaN match {
+ case 0.0f => println("NaN matched 0!")
+ case NaN => println("NaN matched NaN!")
+ case _ => println("NaN matching was good")
+ }
+
+ var z = 0.0f
+ var i = 0
+ while (i < 10) {
+ if (i % 2 == 0) z = NaN
+ else z = NaN
+ i += 1
+ }
+ if (z.isNaN && i == 10) println("loop with NaN was goood")
+ else println("loop with NaN was broken")
+}
diff --git a/test/files/run/bridges.scala b/test/files/run/bridges.scala
index fda86eabc6..eb036bd781 100644
--- a/test/files/run/bridges.scala
+++ b/test/files/run/bridges.scala
@@ -3588,7 +3588,7 @@ object Test {
errors = errors + 1;
}
} catch {
- case exception => {
+ case exception: Throwable => {
Console.print(name + " raised exception " + exception);
Console.println;
errors = errors + 1;
diff --git a/test/files/run/bugs.scala b/test/files/run/bugs.scala
index ca598603bb..9d2be42fb7 100644
--- a/test/files/run/bugs.scala
+++ b/test/files/run/bugs.scala
@@ -444,7 +444,7 @@ object Test {
try {
test;
} catch {
- case exception =>
+ case exception: Throwable =>
Console.print("Exception in thread \"" + Thread.currentThread + "\" " + exception);
Console.println;
errors += 1
diff --git a/test/files/run/classfile-format-51.scala b/test/files/run/classfile-format-51.scala
index 9b1e612f4f..378caa7936 100644
--- a/test/files/run/classfile-format-51.scala
+++ b/test/files/run/classfile-format-51.scala
@@ -112,12 +112,12 @@ object Driver {
System.setErr(System.out)
try {
// this test is only valid under JDK 1.7+
- // cheat a little by using 'ScalaVersion' because it can parse java versions just as well
- val requiredJavaVersion = ScalaVersion("1.7")
- val executingJavaVersion = ScalaVersion(System.getProperty("java.specification.version"))
- if (executingJavaVersion >= requiredJavaVersion) {
+ testUnderJavaAtLeast("1.7") {
generateClass()
compile()
+ ()
+ } otherwise {
+ ()
}
}
finally
diff --git a/test/files/run/classfile-format-52.check b/test/files/run/classfile-format-52.check
new file mode 100644
index 0000000000..5d24ef03cc
--- /dev/null
+++ b/test/files/run/classfile-format-52.check
@@ -0,0 +1,2 @@
+hello from publicMethod
+hello from staticMethod
diff --git a/test/files/run/classfile-format-52.scala b/test/files/run/classfile-format-52.scala
new file mode 100644
index 0000000000..7afa09ae0b
--- /dev/null
+++ b/test/files/run/classfile-format-52.scala
@@ -0,0 +1,77 @@
+import java.io.{File, FileOutputStream}
+
+import scala.tools.nsc.settings.ScalaVersion
+import scala.tools.partest._
+import scala.tools.asm
+import asm.{AnnotationVisitor, ClassWriter, FieldVisitor, Handle, MethodVisitor, Opcodes}
+import Opcodes._
+
+// This test ensures that we can read JDK 8 (classfile format 52) files, including those
+// with default methods. To do that it first uses ASM to generate an interface called
+// HasDefaultMethod. Then it runs a normal compile on Scala source that extends that
+// interface. Any failure will be dumped to std out.
+//
+// By its nature the test can only work on JDK 8+ because under JDK 7- the
+// interface won't verify.
+object Test extends DirectTest {
+ override def extraSettings: String = "-optimise -usejavacp -d " + testOutput.path + " -cp " + testOutput.path
+
+ def generateInterface() {
+ val interfaceName = "HasDefaultMethod"
+ val methodType = "()Ljava/lang/String;"
+
+ val cw = new ClassWriter(0)
+ cw.visit(52, ACC_PUBLIC+ACC_ABSTRACT+ACC_INTERFACE, interfaceName, null, "java/lang/Object", null)
+
+ def createMethod(flags:Int, name: String) {
+ val method = cw.visitMethod(flags, name, methodType, null, null)
+ method.visitCode()
+ method.visitLdcInsn(s"hello from $name")
+ method.visitInsn(ARETURN)
+ method.visitMaxs(1, 1)
+ method.visitEnd()
+ }
+
+ createMethod(ACC_PUBLIC, "publicMethod")
+ createMethod(ACC_PUBLIC+ACC_STATIC, "staticMethod")
+ createMethod(ACC_PRIVATE, "privateMethod")
+
+ cw.visitEnd()
+ val bytes = cw.toByteArray()
+
+ val fos = new FileOutputStream(new File(s"${testOutput.path}/$interfaceName.class"))
+ try
+ fos write bytes
+ finally
+ fos.close()
+
+ }
+
+ def code =
+"""
+class Driver extends HasDefaultMethod {
+ println(publicMethod())
+ println(HasDefaultMethod.staticMethod())
+}
+"""
+
+ override def show(): Unit = {
+ // redirect err to out, for logging
+ val prevErr = System.err
+ System.setErr(System.out)
+ try {
+ // this test is only valid under JDK 1.8+
+ testUnderJavaAtLeast("1.8") {
+ generateInterface()
+ compile()
+ Class.forName("Driver").newInstance()
+ ()
+ } otherwise {
+ println("hello from publicMethod")
+ println("hello from staticMethod")
+ }
+ }
+ finally
+ System.setErr(prevErr)
+ }
+}
diff --git a/test/files/run/classmanifests_new_alias.scala b/test/files/run/classmanifests_new_alias.scala
index 12bd93bab6..777bd5dd6d 100644
--- a/test/files/run/classmanifests_new_alias.scala
+++ b/test/files/run/classmanifests_new_alias.scala
@@ -1,5 +1,7 @@
+
+@deprecated("Suppress warnings", since="2.11")
object Test extends App {
type CM[T] = ClassManifest[T]
println(implicitly[CM[Int]])
println(implicitly[CM[Int]] eq Manifest.Int)
-} \ No newline at end of file
+}
diff --git a/test/files/run/classmanifests_new_core.scala b/test/files/run/classmanifests_new_core.scala
index 63dbfab25c..0a9c58e8e1 100644
--- a/test/files/run/classmanifests_new_core.scala
+++ b/test/files/run/classmanifests_new_core.scala
@@ -1,4 +1,5 @@
+@deprecated("Suppress warnings", since="2.11")
object Test extends App {
println(classManifest[Int])
println(classManifest[Int] eq Manifest.Int)
-} \ No newline at end of file
+}
diff --git a/test/files/run/collections.scala b/test/files/run/collections.scala
index 69c40fae80..1e4e8791ac 100644
--- a/test/files/run/collections.scala
+++ b/test/files/run/collections.scala
@@ -1,5 +1,6 @@
-import collection._
+import scala.collection._
import scala.compat.Platform.currentTime
+import scala.language.postfixOps
object Test extends App {
diff --git a/test/files/run/colltest1.scala b/test/files/run/colltest1.scala
index 54adeb7cda..9d77fc413a 100644
--- a/test/files/run/colltest1.scala
+++ b/test/files/run/colltest1.scala
@@ -1,4 +1,8 @@
-import collection._
+/*
+ * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ */
+import scala.collection._
+import scala.language.postfixOps
object Test extends App {
diff --git a/test/files/run/comparable-comparator.scala b/test/files/run/comparable-comparator.scala
index ac943c63bb..1707fb6e61 100644
--- a/test/files/run/comparable-comparator.scala
+++ b/test/files/run/comparable-comparator.scala
@@ -15,7 +15,8 @@ object Test {
def compare(p1: C2, p2: C2) = p2.s compareTo p1.s
}
- val strs = "zip foo bar baz aggle bing bong" split ' ' toList
+ val words = "zip foo bar baz aggle bing bong" split ' '
+ val strs = words.toList
val c1s = strs map (x => new C1(x))
val c2s = strs map (x => new C2(x))
diff --git a/test/files/run/compiler-asSeenFrom.scala b/test/files/run/compiler-asSeenFrom.scala
index 0d4b504d3c..bd3db0bf66 100644
--- a/test/files/run/compiler-asSeenFrom.scala
+++ b/test/files/run/compiler-asSeenFrom.scala
@@ -1,6 +1,10 @@
+/*
+ * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ */
import scala.tools.nsc._
import scala.tools.partest.CompilerTest
import scala.collection.{ mutable, immutable, generic }
+import scala.language.postfixOps
/** It's too messy but it's better than not having it.
*/
@@ -117,6 +121,5 @@ package ll {
println(sigs.mkString(x + " { // after " + ph + "\n ", "\n ", "\n}\n"))
}
}
- true
}
}
diff --git a/test/files/run/concurrent-stream.scala b/test/files/run/concurrent-stream.scala
index 42c695964e..9d5ba0428e 100644
--- a/test/files/run/concurrent-stream.scala
+++ b/test/files/run/concurrent-stream.scala
@@ -1,32 +1,33 @@
// test concurrent calls to Stream.tail
+@deprecated("Suppress warnings", since="2.11")
object Test {
-def slowRange(from: Int, until: Int, cons: (Int, => Stream[Int]) => Stream[Int]): Stream[Int] = {
- var current = from
- def next: Stream[Int] = {
- Thread.sleep(100)
- if (current >= until) Stream.empty
- else {
- val stream = cons(current, next)
- current += 1
- stream
+ def slowRange(from: Int, until: Int, cons: (Int, => Stream[Int]) => Stream[Int]): Stream[Int] = {
+ var current = from
+ def next: Stream[Int] = {
+ Thread.sleep(100)
+ if (current >= until) Stream.empty
+ else {
+ val stream = cons(current, next)
+ current += 1
+ stream
+ }
}
+ next
}
- next
-}
-def testCons(cons: (Int, => Stream[Int]) => Stream[Int]): Unit = {
- import scala.actors.Actor._
+ def testCons(cons: (Int, => Stream[Int]) => Stream[Int]): Unit = {
+ import scala.actors.Actor._
- val stream = slowRange(0, 10, cons)
- val main = self
- actor { main ! stream.toList }
- actor { main ! stream.toList }
- val eval0 = receive { case list: List[Int] => list }
- val eval1 = receive { case list: List[Int] => list }
- println("Evaluation 0: " + eval0)
- println("Evaluation 1: " + eval1)
-}
+ val stream = slowRange(0, 10, cons)
+ val main = self
+ actor { main ! stream.toList }
+ actor { main ! stream.toList }
+ val eval0 = receive { case list: List[Int @unchecked] => list }
+ val eval1 = receive { case list: List[Int @unchecked] => list }
+ println("Evaluation 0: " + eval0)
+ println("Evaluation 1: " + eval1)
+ }
def main(args: Array[String]) {
println("Testing standard cons.")
diff --git a/test/files/run/contrib674.check b/test/files/run/contrib674.check
new file mode 100644
index 0000000000..78325c1810
--- /dev/null
+++ b/test/files/run/contrib674.check
@@ -0,0 +1,3 @@
+contrib674.scala:15: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 1
+ ^
diff --git a/test/files/run/contrib674.scala b/test/files/run/contrib674.scala
index f6b46d13c6..45c9871fc4 100644
--- a/test/files/run/contrib674.scala
+++ b/test/files/run/contrib674.scala
@@ -5,11 +5,11 @@ object Test extends App {
try {
1
} catch {
- case e =>
+ case e: Throwable =>
} finally {
try {
} catch {
- case e =>
+ case e: Throwable =>
}
}
1
diff --git a/test/files/run/ctries-new/main.scala b/test/files/run/ctries-new/main.scala
index d7fe087e4d..34f3ec2ccf 100644
--- a/test/files/run/ctries-new/main.scala
+++ b/test/files/run/ctries-new/main.scala
@@ -21,6 +21,9 @@ object Test {
trait Spec {
+ implicit def implicitously = scala.language.implicitConversions
+ implicit def reflectively = scala.language.reflectiveCalls
+
implicit def str2ops(s: String) = new {
def in[U](body: =>U) {
// just execute body
@@ -37,11 +40,11 @@ trait Spec {
var produced = false
try body
catch {
- case e => if (e.getClass == implicitly[ClassTag[T]].runtimeClass) produced = true
+ case e: Throwable => if (e.getClass == implicitly[ClassTag[T]].runtimeClass) produced = true
} finally {
assert(produced, "Did not produce exception of type: " + implicitly[ClassTag[T]])
}
}
}
-} \ No newline at end of file
+}
diff --git a/test/files/run/ctries-old/concmap.scala b/test/files/run/ctries-old/concmap.scala
index 3ec0256afb..fccd4bed3e 100644
--- a/test/files/run/ctries-old/concmap.scala
+++ b/test/files/run/ctries-old/concmap.scala
@@ -2,6 +2,7 @@
import collection.concurrent.TrieMap
+import Test.Spec
object ConcurrentMapSpec extends Spec {
diff --git a/test/files/run/ctries-old/iterator.scala b/test/files/run/ctries-old/iterator.scala
index b953a40e00..585bdc0e48 100644
--- a/test/files/run/ctries-old/iterator.scala
+++ b/test/files/run/ctries-old/iterator.scala
@@ -5,6 +5,7 @@
import collection._
import collection.concurrent.TrieMap
+import Test.Spec
object IteratorSpec extends Spec {
diff --git a/test/files/run/ctries-old/lnode.scala b/test/files/run/ctries-old/lnode.scala
index 92a31088e5..3ff5414ac0 100644
--- a/test/files/run/ctries-old/lnode.scala
+++ b/test/files/run/ctries-old/lnode.scala
@@ -3,6 +3,7 @@
import collection.concurrent.TrieMap
+import Test.Spec
object LNodeSpec extends Spec {
diff --git a/test/files/run/ctries-old/main.scala b/test/files/run/ctries-old/main.scala
index 78ba7f0db1..f714bcdcdc 100644
--- a/test/files/run/ctries-old/main.scala
+++ b/test/files/run/ctries-old/main.scala
@@ -5,6 +5,7 @@
+@deprecated("Suppress warnings", since="2.11")
object Test {
def main(args: Array[String]) {
@@ -14,11 +15,13 @@ object Test {
SnapshotSpec.test()
}
-}
trait Spec {
+ implicit def implicitously = scala.language.implicitConversions
+ implicit def reflectively = scala.language.reflectiveCalls
+
implicit def str2ops(s: String) = new {
def in[U](body: =>U) {
// just execute body
@@ -35,7 +38,7 @@ trait Spec {
var produced = false
try body
catch {
- case e => if (e.getClass == implicitly[ClassManifest[T]].erasure) produced = true
+ case e: Throwable => if (e.getClass == implicitly[ClassManifest[T]].erasure) produced = true
} finally {
assert(produced, "Did not produce exception of type: " + implicitly[ClassManifest[T]])
}
@@ -43,3 +46,4 @@ trait Spec {
}
}
+}
diff --git a/test/files/run/ctries-old/snapshot.scala b/test/files/run/ctries-old/snapshot.scala
index 5fe77d445b..768b588f81 100644
--- a/test/files/run/ctries-old/snapshot.scala
+++ b/test/files/run/ctries-old/snapshot.scala
@@ -5,6 +5,7 @@
import collection._
import collection.concurrent.TrieMap
+import Test.Spec
object SnapshotSpec extends Spec {
diff --git a/test/files/run/delay-bad.check b/test/files/run/delay-bad.check
index 9d9c828a03..2ae88267c5 100644
--- a/test/files/run/delay-bad.check
+++ b/test/files/run/delay-bad.check
@@ -1,3 +1,9 @@
+delay-bad.scala:53: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ f(new C { 5 })
+ ^
+delay-bad.scala:73: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ f(new { val x = 5 } with E() { 5 })
+ ^
// new C { }
diff --git a/test/files/run/delay-good.check b/test/files/run/delay-good.check
index 8eb04c7cff..b4f6b04af7 100644
--- a/test/files/run/delay-good.check
+++ b/test/files/run/delay-good.check
@@ -1,3 +1,9 @@
+delay-good.scala:53: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ f(new C { 5 })
+ ^
+delay-good.scala:73: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ f(new { val x = 5 } with E() { 5 })
+ ^
// new C { }
diff --git a/test/files/run/distinct.scala b/test/files/run/distinct.scala
index 09e5a0734a..37bbe7805a 100644
--- a/test/files/run/distinct.scala
+++ b/test/files/run/distinct.scala
@@ -3,7 +3,7 @@
*/
object Test {
val alphabet = 'a' to 'z' mkString ""
- val alphaList = 'a' to 'z' toList
+ val alphaList = ('a' to 'z').toList
def shuffled = util.Random.shuffle(alphaList)
def main(args: Array[String]): Unit = {
diff --git a/test/files/run/duration-coarsest.scala b/test/files/run/duration-coarsest.scala
new file mode 100644
index 0000000000..51cb79287a
--- /dev/null
+++ b/test/files/run/duration-coarsest.scala
@@ -0,0 +1,28 @@
+import scala.concurrent.duration._
+import scala.language.postfixOps
+
+object Test extends App {
+ List(
+ (60 minutes, 1 hour),
+ (2000 millis, 2 seconds),
+ (2000 micros, 2 millis),
+ (2000 nanos, 2 micros),
+ (2000000 nanos, 2 millis),
+ (48 hours, 2 days),
+ (5 seconds, 5 seconds),
+ (1 second, 1 second)
+ ) foreach {
+ case (x, expected) =>
+ val actual = x.toCoarsest
+ assert(actual.unit == expected.unit, s"$actual, $expected")
+ assert(actual.length == expected.length, s"$actual, $expected")
+ }
+
+ List(
+ 45 minutes,
+ 500 millis,
+ 1500 millis,
+ 23 hours,
+ 40 days
+ ) foreach (x => assert(x == x.toCoarsest, x))
+} \ No newline at end of file
diff --git a/test/files/run/enrich-gentraversable.scala b/test/files/run/enrich-gentraversable.scala
index 52eded55fd..0f79a47030 100644
--- a/test/files/run/enrich-gentraversable.scala
+++ b/test/files/run/enrich-gentraversable.scala
@@ -1,3 +1,6 @@
+import scala.language.implicitConversions
+import scala.language.postfixOps
+
object Test extends App {
import scala.collection.{GenTraversableOnce,GenTraversableLike}
import scala.collection.generic._
diff --git a/test/files/run/eta-expand-star2.check b/test/files/run/eta-expand-star2.check
index ce01362503..cbf4781255 100644
--- a/test/files/run/eta-expand-star2.check
+++ b/test/files/run/eta-expand-star2.check
@@ -1 +1,2 @@
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
hello
diff --git a/test/files/run/exceptions-2.check b/test/files/run/exceptions-2.check
index 9a3044cd4f..4f8244800a 100644
--- a/test/files/run/exceptions-2.check
+++ b/test/files/run/exceptions-2.check
@@ -1,3 +1,6 @@
+exceptions-2.scala:267: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ try { 1 } catch { case e: java.io.IOException => () }
+ ^
nested1:
Innermost finally
Outermost finally
diff --git a/test/files/run/exceptions-2.scala b/test/files/run/exceptions-2.scala
index f5bbcca210..8d755c3809 100644
--- a/test/files/run/exceptions-2.scala
+++ b/test/files/run/exceptions-2.scala
@@ -42,7 +42,7 @@ object NoExcep {
def method4 = try {
Console.println("..");
} catch {
- case _ => sys.error("..");
+ case _: Throwable => sys.error("..");
}
}
@@ -136,7 +136,7 @@ object Test {
try {
sys.error("a");
} catch {
- case _ => Console.println("Silently ignore exception in finally");
+ case _: Throwable => Console.println("Silently ignore exception in finally");
}
}
}
@@ -146,7 +146,7 @@ object Test {
} finally {
val fin = "Abc";
Console.println(fin);
- };
+ }
def tryAndValInFinally: Unit =
try {
@@ -154,8 +154,8 @@ object Test {
val fin = "Abc";
try {
Console.println(fin);
- } catch { case _ => () }
- };
+ } catch { case _: Throwable => () }
+ }
def returnInBody: Unit = try {
try {
@@ -249,7 +249,7 @@ object Test {
def execute(f: => Unit) = try {
f;
} catch {
- case _ => ();
+ case _: Throwable => ()
}
diff --git a/test/files/run/exceptions-nest.scala b/test/files/run/exceptions-nest.scala
index 841e6b1c67..432d600d13 100644
--- a/test/files/run/exceptions-nest.scala
+++ b/test/files/run/exceptions-nest.scala
@@ -5,9 +5,9 @@ object Test extends App {
println(test3)
println(test4)
println(test5)
- try { println(test6) } catch { case _ => println("OK") }
+ try { println(test6) } catch { case _: Throwable => println("OK") }
println(test7)
- try { println(test8) } catch { case _ => println("OK") }
+ try { println(test8) } catch { case _: Throwable => println("OK") }
println(test9)
println(test10)
println(test11)
@@ -19,7 +19,7 @@ object Test extends App {
x = 2
} catch {
case _: NullPointerException => x = 3
- case _ => x = 4
+ case _: Throwable => x = 4
}
x
}
@@ -31,12 +31,12 @@ object Test extends App {
try {
x = 21
} catch {
- case _ => x = 22
+ case _: Throwable => x = 22
}
x = 23
} catch {
case _: NullPointerException => x = 3
- case _ => x = 4
+ case _: Throwable => x = 4
}
x
}
@@ -44,10 +44,10 @@ object Test extends App {
def test3 = {
var x = 1
try {
- try{x = 2} catch { case _ => x = 4 }
+ try{x = 2} catch { case _: Throwable => x = 4 }
} catch {
case _: NullPointerException => x = 3
- case _ => x = 4
+ case _: Throwable => x = 4
}
x
}
@@ -58,7 +58,7 @@ object Test extends App {
x = 2
} catch {
case _: NullPointerException => x = 3
- case _ => x = 4
+ case _: Throwable => x = 4
}
try {
x = 5
@@ -73,8 +73,8 @@ object Test extends App {
try {
x = 2
} catch {
- case _: NullPointerException => try { x = 3 } catch { case f => throw f }
- case _ => x = 4; try { x = 41 } catch { case _: Exception => x = 42 }; x = 43
+ case _: NullPointerException => try { x = 3 } catch { case f: Throwable => throw f }
+ case _: Throwable => x = 4; try { x = 41 } catch { case _: Exception => x = 42 }; x = 43
}
x
}
@@ -87,7 +87,7 @@ object Test extends App {
} catch {
case e: NullPointerException =>
throw e
- case _ =>
+ case _: Throwable =>
x = 3
return 1000
} finally {
@@ -105,7 +105,7 @@ object Test extends App {
try {
x = 4
} catch {
- case _ => x = 5
+ case _: Throwable => x = 5
}
}
x
@@ -116,7 +116,7 @@ object Test extends App {
try {
throw new NullPointerException
} catch {
- case e => throw e
+ case e: Throwable => throw e
}
x
}
@@ -124,7 +124,7 @@ object Test extends App {
def test9 = {
try { "" match {
case s: String => 10
- }} catch { case _ => 20 }
+ }} catch { case _: Throwable => 20 }
}
var x10 = 1
@@ -135,7 +135,7 @@ object Test extends App {
def test11 {
try { () }
- catch { case e => () }
+ catch { case e: Throwable => () }
}
class E1 extends Exception
diff --git a/test/files/run/exceptions.scala b/test/files/run/exceptions.scala
index 90f681e3c5..f0fe76946b 100644
--- a/test/files/run/exceptions.scala
+++ b/test/files/run/exceptions.scala
@@ -32,7 +32,7 @@ object exceptions {
val value = try {
map.lookup(key)
} catch {
- case e => e.getMessage()
+ case e: Throwable => e.getMessage()
}
check("lookup(" + key + ")", value, "KO");
}
diff --git a/test/files/run/existentials-in-compiler.scala b/test/files/run/existentials-in-compiler.scala
index 14c25849cb..f5a0aa98d0 100644
--- a/test/files/run/existentials-in-compiler.scala
+++ b/test/files/run/existentials-in-compiler.scala
@@ -1,3 +1,6 @@
+/*
+ * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ */
import scala.tools.nsc._
import scala.tools.partest.CompilerTest
import scala.collection.{ mutable, immutable, generic }
@@ -71,14 +74,13 @@ package extest {
}
"""
- def check(source: String, unit: global.CompilationUnit) = {
- getRequiredPackage("extest").moduleClass.info.decls.toList.filter(_.isType).map(_.initialize).sortBy(_.name.toString) foreach { clazz =>
+ override def check(source: String, unit: global.CompilationUnit) {
+ getPackage("extest").moduleClass.info.decls.toList.filter(_.isType).map(_.initialize).sortBy(_.name.toString) foreach { clazz =>
exitingTyper {
clazz.info
println(clazz.defString)
println(" " + classExistentialType(clazz) + "\n")
}
}
- true
}
}
diff --git a/test/files/run/existentials.scala b/test/files/run/existentials.scala
index 3977d47417..65acfd2ac0 100644
--- a/test/files/run/existentials.scala
+++ b/test/files/run/existentials.scala
@@ -1,3 +1,6 @@
+import scala.language.existentials
+import scala.language.reflectiveCalls
+
class Foo {
class Line {
case class Cell[T](var x: T)
diff --git a/test/files/run/existentials3-new.scala b/test/files/run/existentials3-new.scala
index 110c8eff7a..6112a7b856 100644
--- a/test/files/run/existentials3-new.scala
+++ b/test/files/run/existentials3-new.scala
@@ -1,3 +1,4 @@
+import scala.language.existentials
import scala.reflect.runtime.universe._
object Test {
@@ -77,4 +78,4 @@ object Misc {
}
def g1 = o1.f1 _
def g2 = o1.f2 _
-} \ No newline at end of file
+}
diff --git a/test/files/run/existentials3-old.scala b/test/files/run/existentials3-old.scala
index 944160ff12..c021c0e71e 100644
--- a/test/files/run/existentials3-old.scala
+++ b/test/files/run/existentials3-old.scala
@@ -1,3 +1,5 @@
+import scala.language.existentials
+
object Test {
trait ToS { final override def toString = getClass.getName }
diff --git a/test/files/run/finally.scala b/test/files/run/finally.scala
index 635123cc4d..01180fd217 100644
--- a/test/files/run/finally.scala
+++ b/test/files/run/finally.scala
@@ -7,7 +7,7 @@ object Test extends App {
try {
bar
} catch {
- case e => println(e)
+ case e: Throwable => println(e)
}
}
@@ -17,7 +17,7 @@ object Test extends App {
println("hi")
}
catch {
- case e => println("SHOULD NOT GET HERE")
+ case e: Throwable => println("SHOULD NOT GET HERE")
}
finally {
println("In Finally")
@@ -30,7 +30,7 @@ object Test extends App {
try {
throw new Exception
} catch {
- case e =>
+ case e: Throwable =>
println(e);
return
} finally println("in finally")
@@ -41,7 +41,7 @@ object Test extends App {
try {
throw new Exception
} catch {
- case e =>
+ case e: Throwable =>
println(e);
throw e
} finally println("in finally")
@@ -52,7 +52,7 @@ object Test extends App {
try {
return
} catch {
- case e =>
+ case e: Throwable =>
println(e);
throw e
} finally println("in finally")
@@ -63,7 +63,7 @@ object Test extends App {
try {
throw new Exception
} catch {
- case e =>
+ case e: Throwable =>
println(e);
} finally println("in finally")
}
@@ -89,7 +89,7 @@ object Test extends App {
throw new Exception
}
} catch {
- case e => println(e)
+ case e: Throwable => println(e)
}
}
@@ -99,7 +99,7 @@ object Test extends App {
try {
return 10
} finally {
- try { () } catch { case _ => () }
+ try { () } catch { case _: Throwable => () }
println("in finally 1")
}
} finally {
@@ -111,7 +111,7 @@ object Test extends App {
try {
m
} catch {
- case e => println("COUGHT: " + e)
+ case e: Throwable => println("COUGHT: " + e)
}
println("-" * 40)
}
diff --git a/test/files/run/genericValueClass.scala b/test/files/run/genericValueClass.scala
index 768e1f86a5..9398390a80 100644
--- a/test/files/run/genericValueClass.scala
+++ b/test/files/run/genericValueClass.scala
@@ -1,4 +1,6 @@
+import scala.language.implicitConversions
+
object Test extends App {
class ArrowAssocClass[A](val __leftOfArrow: A) extends AnyVal {
@inline def -> [B](y: B): Tuple2[A, B] = Tuple2(__leftOfArrow, y)
diff --git a/test/files/run/getClassTest-old.scala b/test/files/run/getClassTest-old.scala
index 951cc8d931..789dd4d162 100644
--- a/test/files/run/getClassTest-old.scala
+++ b/test/files/run/getClassTest-old.scala
@@ -50,6 +50,7 @@ class MoreAnyRefs {
def f4 = (new A { def bippy() = 5 }).getClass()
}
+@deprecated("Suppress warnings", since="2.11")
object Test {
def returnTypes[T: Manifest] = (
manifest[T].erasure.getMethods.toList
diff --git a/test/files/run/global-showdef.scala b/test/files/run/global-showdef.scala
index 71ba7b8bb3..2a00ee51bd 100644
--- a/test/files/run/global-showdef.scala
+++ b/test/files/run/global-showdef.scala
@@ -1,6 +1,7 @@
import scala.tools.nsc._
-import io.{ AbstractFile }
-import util.{ SourceFile, BatchSourceFile, stringFromStream }
+import scala.reflect.io.AbstractFile
+import scala.tools.nsc.util.stringFromStream
+import scala.reflect.internal.util.{ SourceFile, BatchSourceFile }
import scala.tools.nsc.reporters.ConsoleReporter
object Test {
@@ -53,7 +54,7 @@ object Bippy {
val run = new compiler.Run()
run.compileSources(List(src))
}
- output split "\\n" toList
+ (output split "\\n").toList
}
def showClass(name: String) = lines("-Yshow:typer", "-Xshow-class", name)
def showObject(name: String) = lines("-Yshow:typer", "-Xshow-object", name)
diff --git a/test/files/run/impconvtimes.scala b/test/files/run/impconvtimes.scala
index 8c5ab61779..477a16a890 100644
--- a/test/files/run/impconvtimes.scala
+++ b/test/files/run/impconvtimes.scala
@@ -1,3 +1,5 @@
+import scala.language.implicitConversions
+
object Test {
abstract class Unit
object NoUnit extends Unit
diff --git a/test/files/run/implicits.scala b/test/files/run/implicits.scala
index a30f60f6d0..5681a9d484 100644
--- a/test/files/run/implicits.scala
+++ b/test/files/run/implicits.scala
@@ -1,3 +1,5 @@
+import scala.language.implicitConversions
+
object A {
object B {
implicit def int2string(x: Int) = "["+x.toString+"]"
diff --git a/test/files/run/indexedSeq.scala b/test/files/run/indexedSeq.scala
index 9744f47f63..29a8be701d 100644
--- a/test/files/run/indexedSeq.scala
+++ b/test/files/run/indexedSeq.scala
@@ -1,10 +1,11 @@
object Test {
- import scala.collection.{ mutable, immutable, generic }
+ import scala.collection.immutable
def checkIdentity[A](xs: immutable.IndexedSeq[A]) = assert(xs.toIndexedSeq eq xs)
def main(args: Array[String]): Unit = {
- checkIdentity(immutable.Vector(1 to 10: _*))
- checkIdentity(1 to 10 toIndexedSeq)
+ def r = 1 to 10
+ checkIdentity(immutable.Vector(r: _*))
+ checkIdentity(r.toIndexedSeq)
}
}
diff --git a/test/files/run/inner-obj-auto.scala b/test/files/run/inner-obj-auto.scala
index aa2e29326f..35ca56ba3e 100644
--- a/test/files/run/inner-obj-auto.scala
+++ b/test/files/run/inner-obj-auto.scala
@@ -25,7 +25,7 @@ class Class2_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -54,7 +54,7 @@ object Object3_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -83,7 +83,7 @@ trait Trait4_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -112,7 +112,7 @@ class Class6_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -141,7 +141,7 @@ object Object7_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -170,7 +170,7 @@ trait Trait8_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -199,7 +199,7 @@ class Class10_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -228,7 +228,7 @@ object Object11_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -257,7 +257,7 @@ trait Trait12_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -286,7 +286,7 @@ class Class14_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -315,7 +315,7 @@ object Object15_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -344,7 +344,7 @@ trait Trait16_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -373,7 +373,7 @@ class Class18_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -402,7 +402,7 @@ object Object19_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -431,7 +431,7 @@ trait Trait20_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -460,7 +460,7 @@ class Class22_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -489,7 +489,7 @@ object Object23_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -518,7 +518,7 @@ trait Trait24_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -548,7 +548,7 @@ class Class26_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -579,7 +579,7 @@ object Object27_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -610,7 +610,7 @@ trait Trait28_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -641,7 +641,7 @@ class Class30_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -672,7 +672,7 @@ object Object31_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -703,7 +703,7 @@ trait Trait32_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -733,7 +733,7 @@ class Class34_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -762,7 +762,7 @@ object Object35_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -791,7 +791,7 @@ trait Trait36_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -820,7 +820,7 @@ class Class38_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -849,7 +849,7 @@ object Object39_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -878,7 +878,7 @@ trait Trait40_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -907,7 +907,7 @@ class Class42_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -936,7 +936,7 @@ object Object43_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -965,7 +965,7 @@ trait Trait44_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -994,7 +994,7 @@ class Class46_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -1023,7 +1023,7 @@ object Object47_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -1052,7 +1052,7 @@ trait Trait48_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -1081,7 +1081,7 @@ class Class50_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -1110,7 +1110,7 @@ object Object51_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -1139,7 +1139,7 @@ trait Trait52_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("failed "); e.printStackTrace()
+ case e: Throwable => print("failed "); e.printStackTrace()
}
}
@@ -1172,7 +1172,7 @@ class Class54_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1205,7 +1205,7 @@ object Object55_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1238,7 +1238,7 @@ trait Trait56_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1271,7 +1271,7 @@ class Class58_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1304,7 +1304,7 @@ object Object59_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1337,7 +1337,7 @@ trait Trait60_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1370,7 +1370,7 @@ class Class62_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1403,7 +1403,7 @@ object Object63_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1436,7 +1436,7 @@ trait Trait64_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1469,7 +1469,7 @@ class Class66_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1502,7 +1502,7 @@ object Object67_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1535,7 +1535,7 @@ trait Trait68_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1568,7 +1568,7 @@ class Class70_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1601,7 +1601,7 @@ object Object71_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1634,7 +1634,7 @@ trait Trait72_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1667,7 +1667,7 @@ class Class74_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1700,7 +1700,7 @@ object Object75_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1733,7 +1733,7 @@ trait Trait76_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1767,7 +1767,7 @@ class Class78_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1802,7 +1802,7 @@ object Object79_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1837,7 +1837,7 @@ trait Trait80_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1872,7 +1872,7 @@ class Class82_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1907,7 +1907,7 @@ object Object83_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1942,7 +1942,7 @@ trait Trait84_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -1976,7 +1976,7 @@ class Class90_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
@@ -2009,7 +2009,7 @@ trait Trait92_1 {
assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
println("ok")
} catch {
- case e => print("multi-threaded failed "); e.printStackTrace()
+ case e: Throwable => print("multi-threaded failed "); e.printStackTrace()
}
}
diff --git a/test/files/run/interop_classtags_are_classmanifests.scala b/test/files/run/interop_classtags_are_classmanifests.scala
index 91b9d89c6e..62d85c3ce3 100644
--- a/test/files/run/interop_classtags_are_classmanifests.scala
+++ b/test/files/run/interop_classtags_are_classmanifests.scala
@@ -1,5 +1,6 @@
-import scala.reflect.{ClassTag, classTag}
+import scala.reflect.ClassTag
+@deprecated("Suppress warnings", since="2.11")
object Test extends App {
def classTagIsClassManifest[T: ClassTag] = {
println(classManifest[T])
@@ -8,4 +9,4 @@ object Test extends App {
classTagIsClassManifest[Int]
classTagIsClassManifest[String]
classTagIsClassManifest[Array[Int]]
-} \ No newline at end of file
+}
diff --git a/test/files/run/interop_manifests_are_classtags.scala b/test/files/run/interop_manifests_are_classtags.scala
index 03479e527a..705038ece7 100644
--- a/test/files/run/interop_manifests_are_classtags.scala
+++ b/test/files/run/interop_manifests_are_classtags.scala
@@ -1,5 +1,6 @@
import scala.reflect.{ClassTag, classTag}
+@deprecated("Suppress warnings", since="2.11")
object Test extends App {
def classManifestIsClassTag[T: ClassManifest] = {
println(classTag[T])
@@ -20,4 +21,4 @@ object Test extends App {
manifestIsClassTag[Int]
manifestIsClassTag[String]
manifestIsClassTag[Array[Int]]
-} \ No newline at end of file
+}
diff --git a/test/files/run/interpolation.scala b/test/files/run/interpolation.scala
index f443bd5feb..14d9819348 100644
--- a/test/files/run/interpolation.scala
+++ b/test/files/run/interpolation.scala
@@ -13,7 +13,7 @@ object Test extends App {
println(s"Best price: $f")
println(f"Best price: $f%.2f")
println(s"$f% discount included")
- println(f"$f%3.2f% discount included")
+ println(f"$f%3.2f%% discount included")
}
test1(1)
diff --git a/test/files/run/interpolationArgs.scala b/test/files/run/interpolationArgs.scala
index eb13767907..ffb254b63f 100644
--- a/test/files/run/interpolationArgs.scala
+++ b/test/files/run/interpolationArgs.scala
@@ -1,5 +1,5 @@
object Test extends App {
- try { scala.StringContext("p1", "p2", "p3").s("e1") } catch { case ex => println(ex) }
- try { scala.StringContext("p1").s("e1") } catch { case ex => println(ex) }
+ try { scala.StringContext("p1", "p2", "p3").s("e1") } catch { case ex: Throwable => println(ex) }
+ try { scala.StringContext("p1").s("e1") } catch { case ex: Throwable => println(ex) }
}
diff --git a/test/files/run/interpolationMultiline1.scala b/test/files/run/interpolationMultiline1.scala
index 437aed44b0..db634e7775 100644
--- a/test/files/run/interpolationMultiline1.scala
+++ b/test/files/run/interpolationMultiline1.scala
@@ -13,7 +13,7 @@ object Test extends App {
println(s"""Best price: $f""")
println(f"""Best price: $f%.2f""")
println(s"""$f% discount included""")
- println(f"""$f%3.2f% discount included""")
+ println(f"""$f%3.2f%% discount included""")
}
test1(1)
diff --git a/test/files/run/interpolationMultiline2.scala b/test/files/run/interpolationMultiline2.scala
index f6a682c3ce..2de4c4b22e 100644
--- a/test/files/run/interpolationMultiline2.scala
+++ b/test/files/run/interpolationMultiline2.scala
@@ -2,14 +2,15 @@ object Test extends App {
def test1(n: Int) = {
val old = "old"
- try { println(s"""Bob is ${s"$n"} years ${s"$old"}!""") } catch { case ex => println(ex) }
- try { println(s"""Bob is ${f"$n"} years ${s"$old"}!""") } catch { case ex => println(ex) }
- try { println(f"""Bob is ${s"$n"} years ${s"$old"}!""") } catch { case ex => println(ex) }
- try { println(f"""Bob is ${f"$n"} years ${s"$old"}!""") } catch { case ex => println(ex) }
- try { println(f"""Bob is ${f"$n%2d"} years ${s"$old"}!""") } catch { case ex => println(ex) }
- try { println(f"""Bob is ${s"$n%2d"} years ${s"$old"}!""") } catch { case ex => println(ex) }
- try { println(s"""Bob is ${f"$n%2d"} years ${s"$old"}!""") } catch { case ex => println(ex) }
- try { println(s"""Bob is ${s"$n%2d"} years ${s"$old"}!""") } catch { case ex => println(ex) }
+ val catcher: PartialFunction[Throwable, Unit] = { case e => println(e) }
+ try { println(s"""Bob is ${s"$n"} years ${s"$old"}!""") } catch catcher
+ try { println(s"""Bob is ${f"$n"} years ${s"$old"}!""") } catch catcher
+ try { println(f"""Bob is ${s"$n"} years ${s"$old"}!""") } catch catcher
+ try { println(f"""Bob is ${f"$n"} years ${s"$old"}!""") } catch catcher
+ try { println(f"""Bob is ${f"$n%2d"} years ${s"$old"}!""") } catch catcher
+ try { println(f"""Bob is ${s"$n%2d"} years ${s"$old"}!""") } catch catcher
+ try { println(s"""Bob is ${f"$n%2d"} years ${s"$old"}!""") } catch catcher
+ try { println(s"""Bob is ${s"$n%2d"} years ${s"$old"}!""") } catch catcher
}
test1(1)
diff --git a/test/files/run/io-position.scala b/test/files/run/io-position.scala
index 1093704fa4..b227846fb4 100644
--- a/test/files/run/io-position.scala
+++ b/test/files/run/io-position.scala
@@ -1,7 +1,5 @@
object Test {
- Console.setErr(Console.out)
-
- def main(args: Array[String]): Unit = {
+ def main(args: Array[String]): Unit = Console.withErr(Console.out) {
try {
xml.parsing.ConstructingParser.fromSource(io.Source.fromString("<foo>"), false).document()
} catch {
diff --git a/test/files/run/is-valid-num.scala b/test/files/run/is-valid-num.scala
index 402eff99d6..19a3b9c7c0 100644
--- a/test/files/run/is-valid-num.scala
+++ b/test/files/run/is-valid-num.scala
@@ -1,3 +1,6 @@
+/*
+ * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ */
object Test {
def x = BigInt("10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")
def y = BigDecimal("" + (Short.MaxValue + 1) + ".0")
diff --git a/test/files/run/issue192.scala b/test/files/run/issue192.scala
index d8db8b5816..3c3b44a8fb 100644
--- a/test/files/run/issue192.scala
+++ b/test/files/run/issue192.scala
@@ -1,3 +1,5 @@
+import scala.language.reflectiveCalls
+
object Test extends App {
def f1(p: Any{def unary_+ : Int}) = +p
diff --git a/test/files/run/iterator-from.scala b/test/files/run/iterator-from.scala
index 8dc6ae4e51..fb33f6b5ed 100644
--- a/test/files/run/iterator-from.scala
+++ b/test/files/run/iterator-from.scala
@@ -1,4 +1,6 @@
-// This file tests iteratorFrom, keysIteratorFrom, and valueIteratorFrom on various sorted sets and maps
+/* This file tests iteratorFrom, keysIteratorFrom, and valueIteratorFrom on various sorted sets and maps
+ * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ */
import scala.util.{Random => R}
import scala.collection._
diff --git a/test/files/run/iterator-iterate-lazy.scala b/test/files/run/iterator-iterate-lazy.scala
index 73886f192b..92b170062e 100644
--- a/test/files/run/iterator-iterate-lazy.scala
+++ b/test/files/run/iterator-iterate-lazy.scala
@@ -1,5 +1,5 @@
object Test {
def main(args: Array[String]): Unit = {
- Iterator.iterate(1 to 5 toList)(_.tail).takeWhile(_.nonEmpty).map(_.head).toList
+ Iterator.iterate((1 to 5).toList)(_.tail).takeWhile(_.nonEmpty).map(_.head).toList
}
}
diff --git a/test/files/run/iterators.scala b/test/files/run/iterators.scala
index b85291cd72..e2d0f93a05 100644
--- a/test/files/run/iterators.scala
+++ b/test/files/run/iterators.scala
@@ -4,6 +4,8 @@
//############################################################################
+import scala.language.postfixOps
+
object Test {
def check_from: Int = {
diff --git a/test/files/run/json.scala b/test/files/run/json.scala
index a81f12564c..36e86ac5bb 100644
--- a/test/files/run/json.scala
+++ b/test/files/run/json.scala
@@ -1,13 +1,17 @@
+/*
+ * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ */
import scala.util.parsing.json._
import scala.collection.immutable.TreeMap
+@deprecated("Suppress warnings", since="2.11")
object Test extends App {
/* This method converts parsed JSON back into real JSON notation with objects in
* sorted-key order. Not required by the spec, but it allows us to do a stable
* toString comparison. */
def jsonToString(in : Any) : String = in match {
case l : List[_] => "[" + l.map(jsonToString).mkString(", ") + "]"
- case m : Map[String,_] => "{" + m.iterator.toList
+ case m : Map[String @unchecked,_] => "{" + m.iterator.toList
.sortWith({ (x,y) => x._1 < y._1 })
.map({ case (k,v) => "\"" + k + "\": " + jsonToString(v) })
.mkString(", ") + "}"
@@ -20,7 +24,7 @@ object Test extends App {
*/
def sortJSON(in : Any) : Any = in match {
case l : List[_] => l.map(sortJSON)
- case m : Map[String,_] => TreeMap(m.mapValues(sortJSON).iterator.toSeq : _*)
+ case m : Map[String @unchecked,_] => TreeMap(m.mapValues(sortJSON).iterator.toSeq : _*)
// For the object versions, sort their contents, ugly casts and all...
case JSONObject(data) => JSONObject(sortJSON(data).asInstanceOf[Map[String,Any]])
case JSONArray(data) => JSONArray(sortJSON(data).asInstanceOf[List[Any]])
diff --git a/test/files/run/kind-repl-command.check b/test/files/run/kind-repl-command.check
new file mode 100644
index 0000000000..afa32acdae
--- /dev/null
+++ b/test/files/run/kind-repl-command.check
@@ -0,0 +1,32 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> :kind scala.Option
+scala.Option's kind is F[+A]
+
+scala> :k (Int, Int) => Int
+scala.Function2's kind is F[-A1,-A2,+A3]
+
+scala> :k -v Either
+scala.util.Either's kind is F[+A1,+A2]
+* -(+)-> * -(+)-> *
+This is a type constructor: a 1st-order-kinded type.
+
+scala> :k -v scala.collection.generic.ImmutableSortedMapFactory
+scala.collection.generic.ImmutableSortedMapFactory's kind is X[CC[A,B] <: scala.collection.immutable.SortedMap[A,B] with scala.collection.SortedMapLike[A,B,CC[A,B]]]
+(* -> * -> *(scala.collection.immutable.SortedMap[A,B] with scala.collection.SortedMapLike[A,B,CC[A,B]])) -> *
+This is a type constructor that takes type constructor(s): a higher-kinded type.
+
+scala> :k new { def empty = false }
+AnyRef{def empty: Boolean}'s kind is A
+
+scala> :k Nonexisting
+<console>:8: error: not found: value Nonexisting
+ Nonexisting
+ ^
+
+scala>
+
+scala>
diff --git a/test/files/run/kind-repl-command.scala b/test/files/run/kind-repl-command.scala
new file mode 100644
index 0000000000..df1fafb667
--- /dev/null
+++ b/test/files/run/kind-repl-command.scala
@@ -0,0 +1,12 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ |:kind scala.Option
+ |:k (Int, Int) => Int
+ |:k -v Either
+ |:k -v scala.collection.generic.ImmutableSortedMapFactory
+ |:k new { def empty = false }
+ |:k Nonexisting
+ """.stripMargin
+}
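
The :kind command exercised above reports the shape of a type constructor. A brief sketch (illustrative only) of the distinction between a proper type, a first-order constructor, and a higher-kinded parameter:

object KindSketch extends App {
  // Int has kind *, Option has kind * -> * (reported as F[+A]), and Functor below
  // abstracts over any unary type constructor, so it is higher-kinded.
  trait Functor[F[_]] {
    def map[A, B](fa: F[A])(f: A => B): F[B]
  }
  val optionFunctor = new Functor[Option] {
    def map[A, B](fa: Option[A])(f: A => B): Option[B] = fa map f
  }
  println(optionFunctor.map(Option(2))(_ + 1)) // Some(3)
}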
diff --git a/test/files/run/lazy-exprs.check b/test/files/run/lazy-exprs.check
index e77d204251..2efb8ceb4a 100644
--- a/test/files/run/lazy-exprs.check
+++ b/test/files/run/lazy-exprs.check
@@ -1,3 +1,11 @@
+lazy-exprs.scala:38: warning: match may not be exhaustive.
+It would fail on the following input: Some((x: String forSome x not in Z1))
+ t match {
+ ^
+lazy-exprs.scala:62: warning: match may not be exhaustive.
+It would fail on the following input: Some((x: String forSome x not in LazyField))
+ t match {
+ ^
forced <z1>
lazy val in scrutinee: ok
forced <z1>
diff --git a/test/files/run/lazy-locals.check b/test/files/run/lazy-locals.check
index d1cc754f2c..9e88a55d18 100644
--- a/test/files/run/lazy-locals.check
+++ b/test/files/run/lazy-locals.check
@@ -1,3 +1,9 @@
+lazy-locals.scala:153: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ {
+ ^
+lazy-locals.scala:159: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ {
+ ^
forced lazy val q
q = 10
forced lazy val t
diff --git a/test/files/run/list_map.scala b/test/files/run/list_map.scala
new file mode 100755
index 0000000000..fba3aae228
--- /dev/null
+++ b/test/files/run/list_map.scala
@@ -0,0 +1,26 @@
+import collection.immutable.ListMap
+
+object Test {
+ def testImmutableMinus() {
+ val empty = ListMap.empty[Int, Int]
+
+ val m0 = ListMap(1 -> 1, 2 -> 2)
+ val m1 = m0 - 3
+ assert (m1 eq m0)
+ val m2 = m0 - 1
+ assert (m2.size == 1)
+ val m3 = m2 - 2
+ assert (m3 eq empty)
+
+ val m4 = ListMap(1 -> 1, 2 -> 2, 3 -> 3)
+ val m5 = m4 - 1
+ assert (m5 == ListMap(2 -> 2, 3 -> 3))
+ assert (m5.toList == (2, 2)::(3, 3)::Nil)
+
+ assert ((empty - 1) eq empty)
+ }
+
+ def main(args: Array[String]) {
+ testImmutableMinus()
+ }
+}
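
For reference, a minimal sketch (not part of the patch) of the ListMap behaviour the new test exercises: insertion order is preserved, and removing an absent key can return the receiver unchanged.

import scala.collection.immutable.ListMap

object ListMapSketch extends App {
  val m = ListMap("a" -> 1, "b" -> 2, "c" -> 3)
  println(m.toList)             // List((a,1), (b,2), (c,3)), insertion order kept
  println((m - "b").toList)     // List((a,1), (c,3))
  println((m - "missing") eq m) // true once the optimisation under test is in place
}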
diff --git a/test/files/run/lists-run.scala b/test/files/run/lists-run.scala
index ccfe5bc260..5ff3ee353e 100644
--- a/test/files/run/lists-run.scala
+++ b/test/files/run/lists-run.scala
@@ -2,6 +2,8 @@
*
* @author Stephane Micheloud
*/
+import scala.language.postfixOps
+
object Test {
def main(args: Array[String]) {
Test_multiset.run() // multiset operations: union, intersect, diff
@@ -153,7 +155,7 @@ object Test3 {
List.range(1, 10, 0)
} catch {
case e: IllegalArgumentException => ()
- case _ => throw new Error("List.range(1, 10, 0)")
+ case _: Throwable => throw new Error("List.range(1, 10, 0)")
}
assert(List.range(10, 0, -2) == List(10, 8, 6, 4, 2))
}
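
The change from a bare case _ to case _: Throwable above reflects that an untyped wildcard in a catch clause draws a warning from Scala 2.10 on, because it also traps control-flow throwables. A hedged sketch of the preferred shape:

object CatchWildcardSketch extends App {
  def tryRange(start: Int, end: Int, step: Int): List[Int] =
    try List.range(start, end, step)
    catch {
      case _: IllegalArgumentException => Nil
      // An explicit `_: Throwable` makes the catch-all intent visible instead of
      // silently matching everything with a bare `case _`.
      case _: Throwable => throw new Error(s"List.range($start, $end, $step) failed")
    }

  println(tryRange(10, 0, -2)) // List(10, 8, 6, 4, 2)
  println(tryRange(1, 10, 0))  // Nil (a zero step is rejected)
}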
diff --git a/test/files/run/literals.check b/test/files/run/literals.check
index f53c879dea..6be5137994 100644
--- a/test/files/run/literals.check
+++ b/test/files/run/literals.check
@@ -1,3 +1,4 @@
+warning: there were 13 deprecation warning(s); re-run with -deprecation for details
test '\u0024' == '$' was successful
test '\u005f' == '_' was successful
test 65.asInstanceOf[Char] == 'A' was successful
@@ -43,7 +44,6 @@ test 0x8000000000000000L == -9223372036854775808L was successful
test 0xffffffffffffffffL == -1L was successful
test 1e1f == 10.0f was successful
-test 2.f == 2.0f was successful
test .3f == 0.3f was successful
test 0f == 0.0f was successful
test 3.14f == 3.14f was successful
@@ -53,8 +53,6 @@ test 1.asInstanceOf[Float] == 1.0 was successful
test 1l.asInstanceOf[Float] == 1.0 was successful
test 1e1 == 10.0 was successful
-test 2. == 2.0 was successful
-test 2.d == 2.0 was successful
test .3 == 0.3 was successful
test 0.0 == 0.0 was successful
test 0d == 0.0 was successful
diff --git a/test/files/run/literals.scala b/test/files/run/literals.scala
index 32bc29fda8..7676125339 100644
--- a/test/files/run/literals.scala
+++ b/test/files/run/literals.scala
@@ -105,7 +105,7 @@ object Test {
// float
check_success("1e1f == 10.0f", 1e1f, 10.0f)
- check_success("2.f == 2.0f", 2.f, 2.0f)
+ //check_success("2.f == 2.0f", 2.f, 2.0f)
check_success(".3f == 0.3f", .3f, 0.3f)
check_success("0f == 0.0f", 0f, 0.0f)
check_success("3.14f == 3.14f", 3.14f, 3.14f)
@@ -118,8 +118,8 @@ object Test {
// double
check_success("1e1 == 10.0", 1e1, 10.0)
- check_success("2. == 2.0", 2., 2.0)
- check_success("2.d == 2.0", 2.d, 2.0)
+ //check_success("2. == 2.0", 2., 2.0)
+ //check_success("2.d == 2.0", 2.d, 2.0)
check_success(".3 == 0.3", .3, 0.3)
check_success("0.0 == 0.0", 0.0, 0.0)
check_success("0d == 0.0", 0d, 0.0)
diff --git a/test/files/run/lub-visibility.check b/test/files/run/lub-visibility.check
index f3a6bef215..50a0cadebf 100644
--- a/test/files/run/lub-visibility.check
+++ b/test/files/run/lub-visibility.check
@@ -8,7 +8,8 @@ scala> // should infer List[scala.collection.immutable.Seq[Nothing]]
scala> // but reverted that for SI-5534.
scala> val x = List(List(), Vector())
-x: List[scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing]{def dropRight(n: Int): scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing]; def takeRight(n: Int): scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing]; def drop(n: Int): scala.collecti...
+x: List[scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing] with java.io.Serializable] = List(List(), Vector())
+
scala>
scala>
diff --git a/test/files/run/macro-bodyexpandstoimpl/Impls_1.scala b/test/files/run/macro-bodyexpandstoimpl/Impls_1.scala
index 9c1e4ee46d..56c5252f31 100644
--- a/test/files/run/macro-bodyexpandstoimpl/Impls_1.scala
+++ b/test/files/run/macro-bodyexpandstoimpl/Impls_1.scala
@@ -7,6 +7,8 @@ object Impls {
def refToFoo_impl(c: Ctx)(dummy: c.Expr[Int]) = {
import c.universe._
val body = Select(Ident(TermName("Impls")), TermName("foo"))
+ val global = c.universe.asInstanceOf[scala.tools.nsc.Global]
+ global.analyzer.markMacroImplRef(body.asInstanceOf[global.Tree])
c.Expr[Int](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-bundle.check b/test/files/run/macro-bundle.check
new file mode 100644
index 0000000000..2107454960
--- /dev/null
+++ b/test/files/run/macro-bundle.check
@@ -0,0 +1,3 @@
+()
+Int
+()
diff --git a/test/files/neg/macro-invalidimpl-i.flags b/test/files/run/macro-bundle.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/neg/macro-invalidimpl-i.flags
+++ b/test/files/run/macro-bundle.flags
diff --git a/test/files/run/macro-bundle/Impls_Macros_1.scala b/test/files/run/macro-bundle/Impls_Macros_1.scala
new file mode 100644
index 0000000000..3f651c9a43
--- /dev/null
+++ b/test/files/run/macro-bundle/Impls_Macros_1.scala
@@ -0,0 +1,13 @@
+import scala.reflect.macros.Context
+import scala.reflect.macros.Macro
+
+trait Impl extends Macro {
+ def mono = c.literalUnit
+ def poly[T: c.WeakTypeTag] = c.literal(c.weakTypeOf[T].toString)
+ def weird = macro mono
+}
+
+object Macros {
+ def mono = macro Impl.mono
+ def poly[T] = macro Impl.poly[T]
+} \ No newline at end of file
diff --git a/test/files/run/macro-bundle/Test_2.scala b/test/files/run/macro-bundle/Test_2.scala
new file mode 100644
index 0000000000..428f809f9d
--- /dev/null
+++ b/test/files/run/macro-bundle/Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ println(Macros.mono)
+ println(Macros.poly[Int])
+ println(new Impl{val c = ???}.weird)
+} \ No newline at end of file
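
Macro bundles, exercised above, let the implementation live in a trait extending Macro so that the context c is a member rather than a parameter of every method. A hypothetical sketch in the same pre-release shape as the test (this bundle API was experimental and changed before it stabilised):

import scala.language.experimental.macros
import scala.reflect.macros.{Context, Macro}

trait ShowTypeBundle extends Macro {
  // `c` comes from the Macro trait, so helpers can use it without threading it around.
  def showType[T: c.WeakTypeTag]: c.Expr[String] = c.literal(c.weakTypeOf[T].toString)
}

object ShowType {
  def apply[T]: String = macro ShowTypeBundle.showType[T]
}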
diff --git a/test/files/run/macro-def-infer-return-type-a.check b/test/files/run/macro-def-infer-return-type-a.check
deleted file mode 100644
index f70d7bba4a..0000000000
--- a/test/files/run/macro-def-infer-return-type-a.check
+++ /dev/null
@@ -1 +0,0 @@
-42 \ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-a.flags b/test/files/run/macro-def-infer-return-type-a.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-def-infer-return-type-a.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-a/Impls_1.scala b/test/files/run/macro-def-infer-return-type-a/Impls_1.scala
deleted file mode 100644
index 52c9f9c3e9..0000000000
--- a/test/files/run/macro-def-infer-return-type-a/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo(c: Ctx)(x: c.Expr[Int]) = x
-}
diff --git a/test/files/run/macro-def-infer-return-type-a/Macros_Test_2.scala b/test/files/run/macro-def-infer-return-type-a/Macros_Test_2.scala
deleted file mode 100644
index 60fe9dc1c2..0000000000
--- a/test/files/run/macro-def-infer-return-type-a/Macros_Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
- def foo(x: Int) = macro Impls.foo
- println(foo(42))
-} \ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-b.flags b/test/files/run/macro-def-infer-return-type-b.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-def-infer-return-type-b.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-b/Impls_Macros_1.scala b/test/files/run/macro-def-infer-return-type-b/Impls_Macros_1.scala
deleted file mode 100644
index 8a0f18c01b..0000000000
--- a/test/files/run/macro-def-infer-return-type-b/Impls_Macros_1.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo[T](c: Ctx)(x: c.Expr[T]) =
- throw new Error("an implementation is missing")
-}
-
-object Macros {
- def foo[T](x: T) = macro Impls.foo[T]
-} \ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-b/Test_2.scala b/test/files/run/macro-def-infer-return-type-b/Test_2.scala
deleted file mode 100644
index 9e57b90b57..0000000000
--- a/test/files/run/macro-def-infer-return-type-b/Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Test extends App {
- import scala.reflect.runtime.universe._
- import scala.reflect.runtime.{currentMirror => cm}
- import scala.tools.reflect.ToolBox
- val tree = Apply(Select(Ident(TermName("Macros")), TermName("foo")), List(Literal(Constant(42))))
- try cm.mkToolBox().eval(tree)
- catch { case ex: Throwable => println(ex.getMessage) }
-}
diff --git a/test/files/run/macro-def-infer-return-type-c.check b/test/files/run/macro-def-infer-return-type-c.check
deleted file mode 100644
index f70d7bba4a..0000000000
--- a/test/files/run/macro-def-infer-return-type-c.check
+++ /dev/null
@@ -1 +0,0 @@
-42 \ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-c.flags b/test/files/run/macro-def-infer-return-type-c.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-def-infer-return-type-c.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-c/Impls_1.scala b/test/files/run/macro-def-infer-return-type-c/Impls_1.scala
deleted file mode 100644
index 78db67eebf..0000000000
--- a/test/files/run/macro-def-infer-return-type-c/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo[T](c: Ctx)(x: c.Expr[T]): c.Expr[T] = x
-}
diff --git a/test/files/run/macro-def-infer-return-type-c/Macros_Test_2.scala b/test/files/run/macro-def-infer-return-type-c/Macros_Test_2.scala
deleted file mode 100644
index 967d16f6de..0000000000
--- a/test/files/run/macro-def-infer-return-type-c/Macros_Test_2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
- def foo[T](x: T) = macro Impls.foo[T]
- println(foo(42))
-} \ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-b.check b/test/files/run/macro-def-infer-return-type.check
index ae2dc7a06f..308e881960 100644
--- a/test/files/run/macro-def-infer-return-type-b.check
+++ b/test/files/run/macro-def-infer-return-type.check
@@ -1,6 +1,8 @@
+42
reflective compilation has failed:
exception during macro expansion:
java.lang.Error: an implementation is missing
- at Impls$.foo(Impls_Macros_1.scala:5)
+ at Impls2$.foo(Impls_1.scala:9)
+42
diff --git a/test/files/neg/macro-invalidret-nontree.flags b/test/files/run/macro-def-infer-return-type.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/neg/macro-invalidret-nontree.flags
+++ b/test/files/run/macro-def-infer-return-type.flags
diff --git a/test/files/run/macro-def-infer-return-type/Impls_1.scala b/test/files/run/macro-def-infer-return-type/Impls_1.scala
new file mode 100644
index 0000000000..f8636fe725
--- /dev/null
+++ b/test/files/run/macro-def-infer-return-type/Impls_1.scala
@@ -0,0 +1,14 @@
+import scala.reflect.macros.Context
+
+object Impls1 {
+ def foo(c: Context)(x: c.Expr[Int]) = x
+}
+
+object Impls2 {
+ def foo[T](c: Context)(x: c.Expr[T]) =
+ throw new Error("an implementation is missing")
+}
+
+object Impls3 {
+ def foo[T](c: Context)(x: c.Expr[T]): c.Expr[T] = x
+}
diff --git a/test/files/run/macro-def-infer-return-type/Macros_Test_2.scala b/test/files/run/macro-def-infer-return-type/Macros_Test_2.scala
new file mode 100644
index 0000000000..f579586b7f
--- /dev/null
+++ b/test/files/run/macro-def-infer-return-type/Macros_Test_2.scala
@@ -0,0 +1,24 @@
+object Macros1 {
+ def foo(x: Int) = macro Impls1.foo
+}
+
+object Macros2 {
+ def foo[T](x: T) = macro Impls2.foo[T]
+}
+
+object Macros3 {
+ def foo[T](x: T) = macro Impls3.foo[T]
+}
+
+object Test extends App {
+ println(Macros1.foo(42))
+
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.tools.reflect.ToolBox
+ val tree = Apply(Select(Ident(TermName("Macros2")), TermName("foo")), List(Literal(Constant(42))))
+ try cm.mkToolBox().eval(tree)
+ catch { case ex: Throwable => println(ex.getMessage) }
+
+ println(Macros3.foo(42))
+}
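
The consolidated test above covers return-type inference for macro defs: when the def omits its result type, it is taken from the impl's c.Expr[...]. A minimal hypothetical sketch:

import scala.language.experimental.macros
import scala.reflect.macros.Context

object IdentityMacro {
  def impl(c: Context)(x: c.Expr[Int]): c.Expr[Int] = x

  // No result type on the macro def: Int is inferred from the impl's c.Expr[Int].
  def identity(x: Int) = macro impl
}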
diff --git a/test/files/run/macro-def-path-dependent-a.flags b/test/files/run/macro-def-path-dependent-a.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-def-path-dependent-a.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-b.check b/test/files/run/macro-def-path-dependent-b.check
deleted file mode 100644
index 7658ad2c24..0000000000
--- a/test/files/run/macro-def-path-dependent-b.check
+++ /dev/null
@@ -1 +0,0 @@
-it works
diff --git a/test/files/run/macro-def-path-dependent-b.flags b/test/files/run/macro-def-path-dependent-b.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-def-path-dependent-b.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-c.check b/test/files/run/macro-def-path-dependent-c.check
deleted file mode 100644
index 7658ad2c24..0000000000
--- a/test/files/run/macro-def-path-dependent-c.check
+++ /dev/null
@@ -1 +0,0 @@
-it works
diff --git a/test/files/run/macro-def-path-dependent-c.flags b/test/files/run/macro-def-path-dependent-c.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-def-path-dependent-c.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d1.check b/test/files/run/macro-def-path-dependent-d1.check
deleted file mode 100644
index 7658ad2c24..0000000000
--- a/test/files/run/macro-def-path-dependent-d1.check
+++ /dev/null
@@ -1 +0,0 @@
-it works
diff --git a/test/files/run/macro-def-path-dependent-d1.flags b/test/files/run/macro-def-path-dependent-d1.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-def-path-dependent-d1.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d2.check b/test/files/run/macro-def-path-dependent-d2.check
deleted file mode 100644
index 7658ad2c24..0000000000
--- a/test/files/run/macro-def-path-dependent-d2.check
+++ /dev/null
@@ -1 +0,0 @@
-it works
diff --git a/test/files/run/macro-def-path-dependent-d2.flags b/test/files/run/macro-def-path-dependent-d2.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-def-path-dependent-d2.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d2/Test_3.scala b/test/files/run/macro-def-path-dependent-d2/Test_3.scala
deleted file mode 100644
index 7dffc5107d..0000000000
--- a/test/files/run/macro-def-path-dependent-d2/Test_3.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends App {
- println("it works")
-} \ No newline at end of file
diff --git a/test/files/run/macro-declared-in-annotation.check b/test/files/run/macro-def-path-dependent.check
index 7658ad2c24..7658ad2c24 100644
--- a/test/files/run/macro-declared-in-annotation.check
+++ b/test/files/run/macro-def-path-dependent.check
diff --git a/test/files/neg/macro-invalidret-nonuniversetree.flags b/test/files/run/macro-def-path-dependent.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/neg/macro-invalidret-nonuniversetree.flags
+++ b/test/files/run/macro-def-path-dependent.flags
diff --git a/test/files/run/macro-def-path-dependent-a/Test_2.scala b/test/files/run/macro-def-path-dependent/Dummy.scala
index 7dffc5107d..7dffc5107d 100644
--- a/test/files/run/macro-def-path-dependent-a/Test_2.scala
+++ b/test/files/run/macro-def-path-dependent/Dummy.scala
diff --git a/test/files/run/macro-def-path-dependent-a/Impls_Macros_1.scala b/test/files/run/macro-def-path-dependent/Test_1.scala
index 3a91e41ff9..06c15e16c9 100644
--- a/test/files/run/macro-def-path-dependent-a/Impls_Macros_1.scala
+++ b/test/files/run/macro-def-path-dependent/Test_1.scala
@@ -1,3 +1,5 @@
+package test1
+
import scala.reflect.macros.{Context => Ctx}
trait Exprs {
diff --git a/test/files/run/macro-def-path-dependent-b/Impls_Macros_1.scala b/test/files/run/macro-def-path-dependent/Test_2.scala
index cf9f9ebd0e..f1e9909981 100644
--- a/test/files/run/macro-def-path-dependent-b/Impls_Macros_1.scala
+++ b/test/files/run/macro-def-path-dependent/Test_2.scala
@@ -1,3 +1,5 @@
+package test2
+
import scala.reflect.macros.{Context => Ctx}
trait Exprs {
diff --git a/test/files/run/macro-def-path-dependent-c/Impls_Macros_1.scala b/test/files/run/macro-def-path-dependent/Test_3.scala
index 6cb374d9ba..9f5efe5e47 100644
--- a/test/files/run/macro-def-path-dependent-c/Impls_Macros_1.scala
+++ b/test/files/run/macro-def-path-dependent/Test_3.scala
@@ -1,3 +1,5 @@
+package test3
+
import scala.reflect.macros.{Context => Ctx}
trait Exprs {
diff --git a/test/files/run/macro-def-path-dependent-d1/Impls_Macros_1.scala b/test/files/run/macro-def-path-dependent/Test_4.scala
index 69d9708b2a..3af920d739 100644
--- a/test/files/run/macro-def-path-dependent-d1/Impls_Macros_1.scala
+++ b/test/files/run/macro-def-path-dependent/Test_4.scala
@@ -1,3 +1,5 @@
+package test4
+
import scala.reflect.runtime.universe._
import scala.reflect.macros.Context
import scala.reflect.api.Universe
diff --git a/test/files/run/macro-def-path-dependent-d2/Impls_1.scala b/test/files/run/macro-def-path-dependent/Test_5.scala
index 7fa9c3579e..bc32fb92de 100644
--- a/test/files/run/macro-def-path-dependent-d2/Impls_1.scala
+++ b/test/files/run/macro-def-path-dependent/Test_5.scala
@@ -1,3 +1,5 @@
+package test56
+
import scala.reflect.runtime.universe._
import scala.reflect.macros.Context
import scala.reflect.api.Universe
diff --git a/test/files/run/macro-def-path-dependent-d2/Macros_2.scala b/test/files/run/macro-def-path-dependent/Test_6.scala
index 65ce4d8bd2..6267743473 100644
--- a/test/files/run/macro-def-path-dependent-d2/Macros_2.scala
+++ b/test/files/run/macro-def-path-dependent/Test_6.scala
@@ -1,3 +1,5 @@
+package test56
+
import scala.reflect.runtime.universe._
import scala.reflect.macros.Context
import scala.reflect.api.Universe
diff --git a/test/files/run/macro-divergence-spurious.check b/test/files/run/macro-divergence-spurious.check
new file mode 100644
index 0000000000..19765bd501
--- /dev/null
+++ b/test/files/run/macro-divergence-spurious.check
@@ -0,0 +1 @@
+null
diff --git a/test/files/run/macro-divergence-spurious/Impls_Macros_1.scala b/test/files/run/macro-divergence-spurious/Impls_Macros_1.scala
new file mode 100644
index 0000000000..bc4a9fded7
--- /dev/null
+++ b/test/files/run/macro-divergence-spurious/Impls_Macros_1.scala
@@ -0,0 +1,23 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+trait Complex[T]
+
+class Foo(val bar: Bar)
+class Bar(val s: String)
+
+object Complex {
+ def impl[T: c.WeakTypeTag](c: Context): c.Expr[Complex[T]] = {
+ import c.universe._
+ val tpe = weakTypeOf[T]
+ for (f <- tpe.declarations.collect{case f: TermSymbol if f.isParamAccessor && !f.isMethod => f}) {
+ val trecur = appliedType(typeOf[Complex[_]], List(f.typeSignature))
+ val recur = c.inferImplicitValue(trecur, silent = true)
+ if (recur == EmptyTree) c.abort(c.enclosingPosition, s"couldn't synthesize $trecur")
+ }
+ c.literalNull
+ }
+
+ implicit object ComplexString extends Complex[String]
+ implicit def genComplex[T]: Complex[T] = macro impl[T]
+}
diff --git a/test/files/run/macro-divergence-spurious/Test_2.scala b/test/files/run/macro-divergence-spurious/Test_2.scala
new file mode 100644
index 0000000000..dcc4593335
--- /dev/null
+++ b/test/files/run/macro-divergence-spurious/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ println(implicitly[Complex[Foo]])
+} \ No newline at end of file
diff --git a/test/files/run/macro-duplicate.check b/test/files/run/macro-duplicate.check
index e69de29bb2..58781b719a 100644
--- a/test/files/run/macro-duplicate.check
+++ b/test/files/run/macro-duplicate.check
@@ -0,0 +1,3 @@
+Test_2.scala:5: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ Macros.foo
+ ^
diff --git a/test/files/run/macro-expand-implicit-macro-is-implicit/Macros_Test_2.scala b/test/files/run/macro-expand-implicit-macro-is-implicit/Macros_Test_2.scala
index 81ebd63c5f..22047eeb36 100644
--- a/test/files/run/macro-expand-implicit-macro-is-implicit/Macros_Test_2.scala
+++ b/test/files/run/macro-expand-implicit-macro-is-implicit/Macros_Test_2.scala
@@ -1,4 +1,5 @@
object Macros {
+ import scala.language.implicitConversions
implicit def foo(x: String): Option[Int] = macro Impls.foo
}
@@ -7,4 +8,4 @@ object Test extends App {
println("2": Option[Int])
val s: Int = "2" getOrElse 0
println(s)
-} \ No newline at end of file
+}
diff --git a/test/files/run/macro-expand-implicit-macro-is-view.flags b/test/files/run/macro-expand-implicit-macro-is-view.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-expand-implicit-macro-is-view.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-macro-is-view/Macros_Test_2.scala b/test/files/run/macro-expand-implicit-macro-is-view/Macros_Test_2.scala
index 0ff1fb80ca..0d99f32d7e 100644
--- a/test/files/run/macro-expand-implicit-macro-is-view/Macros_Test_2.scala
+++ b/test/files/run/macro-expand-implicit-macro-is-view/Macros_Test_2.scala
@@ -1,4 +1,7 @@
+
object Macros {
+ import scala.language.experimental.macros
+ import scala.language.implicitConversions
implicit def foo(x: String): Option[Int] = macro Impls.foo
}
@@ -6,4 +9,4 @@ object Test extends App {
import Macros._
def bar[T <% Option[Int]](x: T) = println(x)
println("2")
-} \ No newline at end of file
+}
diff --git a/test/files/run/macro-expand-nullary-generic.check b/test/files/run/macro-expand-nullary-generic.check
index 133840c469..0470d239dc 100644
--- a/test/files/run/macro-expand-nullary-generic.check
+++ b/test/files/run/macro-expand-nullary-generic.check
@@ -1,6 +1,6 @@
-it works TypeTag[Int]
-it works TypeTag[Int]
-it works TypeTag[Int]
-it works TypeTag[Int]
-it works TypeTag[Int]
+fooNullary[Int]
+fooEmpty[Int]
+fooEmpty[Int]
+barNullary[Int]
+barEmpty[Int]
kkthxbai
diff --git a/test/files/run/macro-expand-nullary-generic/Impls_1.scala b/test/files/run/macro-expand-nullary-generic/Impls_1.scala
index 39a9db0e14..5dfdd5c539 100644
--- a/test/files/run/macro-expand-nullary-generic/Impls_1.scala
+++ b/test/files/run/macro-expand-nullary-generic/Impls_1.scala
@@ -2,14 +2,14 @@ import scala.reflect.runtime.universe._
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def impl[T: c.WeakTypeTag](c: Ctx) = {
+ def impl[T: c.WeakTypeTag](c: Ctx)(meth: String) = {
import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works " + implicitly[c.WeakTypeTag[T]]))))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(s"$meth[${c.weakTypeOf[T]}]"))))
c.Expr[Unit](body)
}
- def fooNullary[T: c.WeakTypeTag](c: Ctx) = impl[T](c)
- def fooEmpty[T: c.WeakTypeTag](c: Ctx)() = impl[T](c)
- def barNullary[T: c.WeakTypeTag](c: Ctx)(x: c.Expr[Int]) = impl[T](c)
- def barEmpty[T: c.WeakTypeTag](c: Ctx)(x: c.Expr[Int])() = impl[T](c)
+ def fooNullary[T: c.WeakTypeTag](c: Ctx) = impl[T](c)("fooNullary")
+ def fooEmpty[T: c.WeakTypeTag](c: Ctx)() = impl[T](c)("fooEmpty")
+ def barNullary[T: c.WeakTypeTag](c: Ctx)(x: c.Expr[Int]) = impl[T](c)("barNullary")
+ def barEmpty[T: c.WeakTypeTag](c: Ctx)(x: c.Expr[Int])() = impl[T](c)("barEmpty")
} \ No newline at end of file
diff --git a/test/files/run/macro-expand-nullary-nongeneric.check b/test/files/run/macro-expand-nullary-nongeneric.check
index 9ab5f3a2bc..cb7e766394 100644
--- a/test/files/run/macro-expand-nullary-nongeneric.check
+++ b/test/files/run/macro-expand-nullary-nongeneric.check
@@ -1,6 +1,6 @@
-it works
-it works
-it works
-it works
-it works
+fooNullary
+fooEmpty
+fooEmpty
+barNullary
+barEmpty
kkthxbai
diff --git a/test/files/run/macro-expand-nullary-nongeneric/Impls_1.scala b/test/files/run/macro-expand-nullary-nongeneric/Impls_1.scala
index 41e50acc86..d23c671c84 100644
--- a/test/files/run/macro-expand-nullary-nongeneric/Impls_1.scala
+++ b/test/files/run/macro-expand-nullary-nongeneric/Impls_1.scala
@@ -1,14 +1,15 @@
+import scala.reflect.runtime.universe._
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def impl(c: Ctx) = {
+ def impl(c: Ctx)(meth: String) = {
import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works"))))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(meth))))
c.Expr[Unit](body)
}
- def fooNullary(c: Ctx) = impl(c)
- def fooEmpty(c: Ctx)() = impl(c)
- def barNullary(c: Ctx)(x: c.Expr[Int]) = impl(c)
- def barEmpty(c: Ctx)(x: c.Expr[Int])() = impl(c)
+ def fooNullary(c: Ctx) = impl(c)("fooNullary")
+ def fooEmpty(c: Ctx)() = impl(c)("fooEmpty")
+ def barNullary(c: Ctx)(x: c.Expr[Int]) = impl(c)("barNullary")
+ def barEmpty(c: Ctx)(x: c.Expr[Int])() = impl(c)("barEmpty")
} \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-bounds-a.flags b/test/files/run/macro-expand-tparams-bounds-a.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-expand-tparams-bounds-a.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-bounds-a/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-bounds-a/Macros_Test_2.scala
deleted file mode 100644
index b498e6f65b..0000000000
--- a/test/files/run/macro-expand-tparams-bounds-a/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
- def foo[U <: String] = macro Impls.foo[U]
-}
-
-object Test extends App {
- import Macros._
- foo[String]
-} \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-bounds-b.flags b/test/files/run/macro-expand-tparams-bounds-b.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-expand-tparams-bounds-b.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-bounds-b/Impls_1.scala b/test/files/run/macro-expand-tparams-bounds-b/Impls_1.scala
deleted file mode 100644
index c11c89151c..0000000000
--- a/test/files/run/macro-expand-tparams-bounds-b/Impls_1.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-class C
-
-object Impls {
- def foo[U <: C](c: Ctx): c.Expr[Unit] = c.literalUnit
-}
diff --git a/test/files/run/macro-expand-tparams-bounds-b/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-bounds-b/Macros_Test_2.scala
deleted file mode 100644
index 1a261e9f73..0000000000
--- a/test/files/run/macro-expand-tparams-bounds-b/Macros_Test_2.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-class D extends C
-
-object Macros {
- def foo[T <: D] = macro Impls.foo[T]
-}
-
-object Test extends App {
- import Macros._
- foo[D]
-} \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-bounds.check b/test/files/run/macro-expand-tparams-bounds.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-bounds.check
diff --git a/test/files/neg/macro-invalidshape-a.flags b/test/files/run/macro-expand-tparams-bounds.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/neg/macro-invalidshape-a.flags
+++ b/test/files/run/macro-expand-tparams-bounds.flags
diff --git a/test/files/run/macro-expand-tparams-bounds/Impls_1.scala b/test/files/run/macro-expand-tparams-bounds/Impls_1.scala
new file mode 100644
index 0000000000..f9103aaf8f
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-bounds/Impls_1.scala
@@ -0,0 +1,12 @@
+import scala.reflect.macros.Context
+
+object Impls1 {
+ def foo[U <: String](c: Context): c.Expr[Unit] = c.literalUnit
+}
+
+class C
+class D extends C
+
+object Impls2 {
+ def foo[U <: C](c: Context): c.Expr[Unit] = c.literalUnit
+}
diff --git a/test/files/run/macro-expand-tparams-bounds/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-bounds/Macros_Test_2.scala
new file mode 100644
index 0000000000..37a4bcb2b9
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-bounds/Macros_Test_2.scala
@@ -0,0 +1,12 @@
+object Macros1 {
+ def foo[U <: String] = macro Impls1.foo[U]
+}
+
+object Macros2 {
+ def foo[T <: D] = macro Impls2.foo[T]
+}
+
+object Test extends App {
+ Macros1.foo[String]
+ Macros2.foo[D]
+}
diff --git a/test/files/run/macro-expand-tparams-explicit.check b/test/files/run/macro-expand-tparams-explicit.check
index e7e6718406..b6b4f6fa3a 100644
--- a/test/files/run/macro-expand-tparams-explicit.check
+++ b/test/files/run/macro-expand-tparams-explicit.check
@@ -1 +1 @@
-TypeTag[Int]
+WeakTypeTag[Int]
diff --git a/test/files/run/macro-expand-tparams-implicit.check b/test/files/run/macro-expand-tparams-implicit.check
index fa6b335afb..a9bf55423e 100644
--- a/test/files/run/macro-expand-tparams-implicit.check
+++ b/test/files/run/macro-expand-tparams-implicit.check
@@ -1,2 +1,2 @@
-TypeTag[Int]
+WeakTypeTag[Int]
WeakTypeTag[String]
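
The expected output changes from TypeTag to WeakTypeTag because macro impls now receive weak tags for their type arguments, which also cover abstract or partially known types. For comparison, a small runtime-reflection sketch (illustrative only):

import scala.reflect.runtime.universe._

object TagSketch extends App {
  // A TypeTag requires a fully known type; a WeakTypeTag also tolerates abstract
  // or unresolved type parameters, which is what macro impls need for their T's.
  def weak[T: WeakTypeTag]: String = implicitly[WeakTypeTag[T]].toString

  println(weak[Int])    // WeakTypeTag[Int]
  println(weak[String]) // WeakTypeTag[String]
}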
diff --git a/test/files/run/macro-expand-tparams-only-in-impl.flags b/test/files/run/macro-expand-tparams-only-in-impl.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-expand-tparams-only-in-impl.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-only-in-impl/Impls_1.scala b/test/files/run/macro-expand-tparams-only-in-impl/Impls_1.scala
deleted file mode 100644
index 9b8dafaa97..0000000000
--- a/test/files/run/macro-expand-tparams-only-in-impl/Impls_1.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo[U <: String](c: Ctx): c.Expr[Unit] = c.literalUnit
-}
diff --git a/test/files/run/macro-expand-tparams-only-in-impl/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-only-in-impl/Macros_Test_2.scala
deleted file mode 100644
index 218c7aec7f..0000000000
--- a/test/files/run/macro-expand-tparams-only-in-impl/Macros_Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Macros {
- def foo = macro Impls.foo[String]
-}
-
-object Test extends App {
- import Macros._
- foo
-} \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-optional.flags b/test/files/run/macro-expand-tparams-optional.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-expand-tparams-optional.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-a.check b/test/files/run/macro-expand-tparams-prefix-a.check
deleted file mode 100644
index 0bf3c55bbe..0000000000
--- a/test/files/run/macro-expand-tparams-prefix-a.check
+++ /dev/null
@@ -1,4 +0,0 @@
-TypeTag[Int]
-TypeTag[Int]
-WeakTypeTag[String]
-TypeTag[Boolean]
diff --git a/test/files/run/macro-expand-tparams-prefix-a.flags b/test/files/run/macro-expand-tparams-prefix-a.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-expand-tparams-prefix-a.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-a/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix-a/Impls_1.scala
deleted file mode 100644
index c729aada51..0000000000
--- a/test/files/run/macro-expand-tparams-prefix-a/Impls_1.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo[U: c.WeakTypeTag](c: Ctx)(x: c.Expr[U]) = {
- import c.universe._
- val U = implicitly[c.WeakTypeTag[U]]
- val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(U.toString))))
- c.Expr[Unit](body)
- }
-} \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-a/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-prefix-a/Macros_Test_2.scala
deleted file mode 100644
index 81ccb7ff42..0000000000
--- a/test/files/run/macro-expand-tparams-prefix-a/Macros_Test_2.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-object Test extends App {
- class C[T] {
- def foo[U](x: U) = macro Impls.foo[U]
- }
-
- new C[Int]().foo(42)
- new C[Boolean]().foo(42)
- new C[Int]().foo("42")
- new C[String]().foo(true)
-} \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-b.check b/test/files/run/macro-expand-tparams-prefix-b.check
deleted file mode 100644
index 77c2ee9051..0000000000
--- a/test/files/run/macro-expand-tparams-prefix-b.check
+++ /dev/null
@@ -1,2 +0,0 @@
-TypeTag[Boolean] TypeTag[Int]
-TypeTag[Boolean] WeakTypeTag[String]
diff --git a/test/files/run/macro-expand-tparams-prefix-b.flags b/test/files/run/macro-expand-tparams-prefix-b.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-expand-tparams-prefix-b.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-b/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix-b/Impls_1.scala
deleted file mode 100644
index 8880d13b04..0000000000
--- a/test/files/run/macro-expand-tparams-prefix-b/Impls_1.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo[T: c.WeakTypeTag, U: c.WeakTypeTag](c: Ctx)(x: c.Expr[U]) = {
- import c.universe._
- val T = implicitly[c.WeakTypeTag[T]]
- val U = implicitly[c.WeakTypeTag[U]]
- val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(T.toString + " " + U.toString))))
- c.Expr[Unit](body)
- }
-} \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-b/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-prefix-b/Macros_Test_2.scala
deleted file mode 100644
index a4a0acfe8b..0000000000
--- a/test/files/run/macro-expand-tparams-prefix-b/Macros_Test_2.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-object Test extends App {
- class C[T] {
- def foo[U](x: U) = macro Impls.foo[T, U]
- }
-
- object D extends C[Boolean]
-
- D.foo(42)
- D.foo("42")
-} \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-c1.check b/test/files/run/macro-expand-tparams-prefix-c1.check
deleted file mode 100644
index f0dd5b9cd8..0000000000
--- a/test/files/run/macro-expand-tparams-prefix-c1.check
+++ /dev/null
@@ -1,3 +0,0 @@
-TypeTag[Int]
-WeakTypeTag[String]
-TypeTag[Boolean]
diff --git a/test/files/run/macro-expand-tparams-prefix-c1.flags b/test/files/run/macro-expand-tparams-prefix-c1.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-expand-tparams-prefix-c1.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-c1/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix-c1/Impls_1.scala
deleted file mode 100644
index 2df42e969f..0000000000
--- a/test/files/run/macro-expand-tparams-prefix-c1/Impls_1.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
- import c.universe._
- c.Expr(Block(List(
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(T.toString)))),
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(V.toString))))),
- Literal(Constant(()))))
- }
-} \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-c1/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-prefix-c1/Macros_Test_2.scala
deleted file mode 100644
index 4fa0c8cb33..0000000000
--- a/test/files/run/macro-expand-tparams-prefix-c1/Macros_Test_2.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-class D[T] {
- class C[U] {
- def foo[V] = macro Impls.foo[T, U, V]
- }
-}
-
-object Test extends App {
- val outer1 = new D[Int]
- val outer2 = new outer1.C[String]
- outer2.foo[Boolean]
-} \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-c2.check b/test/files/run/macro-expand-tparams-prefix-c2.check
deleted file mode 100644
index f0dd5b9cd8..0000000000
--- a/test/files/run/macro-expand-tparams-prefix-c2.check
+++ /dev/null
@@ -1,3 +0,0 @@
-TypeTag[Int]
-WeakTypeTag[String]
-TypeTag[Boolean]
diff --git a/test/files/run/macro-expand-tparams-prefix-c2.flags b/test/files/run/macro-expand-tparams-prefix-c2.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-expand-tparams-prefix-c2.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-c2/Impls_Macros_1.scala b/test/files/run/macro-expand-tparams-prefix-c2/Impls_Macros_1.scala
deleted file mode 100644
index 08817708d4..0000000000
--- a/test/files/run/macro-expand-tparams-prefix-c2/Impls_Macros_1.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
- import c.universe._
- c.Expr(Block(List(
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(T.toString)))),
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(V.toString))))),
- Literal(Constant(()))))
- }
-}
-
-class D[T] {
- class C[U] {
- def foo[V] = macro Impls.foo[T, U, V]
- }
-} \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-c2/Test_2.scala b/test/files/run/macro-expand-tparams-prefix-c2/Test_2.scala
deleted file mode 100644
index e729d4a536..0000000000
--- a/test/files/run/macro-expand-tparams-prefix-c2/Test_2.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-object Test extends App {
- val outer1 = new D[Int]
- val outer2 = new outer1.C[String]
- outer2.foo[Boolean]
-} \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-d1.check b/test/files/run/macro-expand-tparams-prefix-d1.check
deleted file mode 100644
index c5aaaf5a09..0000000000
--- a/test/files/run/macro-expand-tparams-prefix-d1.check
+++ /dev/null
@@ -1,3 +0,0 @@
-WeakTypeTag[T]
-WeakTypeTag[U]
-TypeTag[Boolean]
diff --git a/test/files/run/macro-expand-tparams-prefix-d1.flags b/test/files/run/macro-expand-tparams-prefix-d1.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-expand-tparams-prefix-d1.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-d1/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix-d1/Impls_1.scala
deleted file mode 100644
index 2df42e969f..0000000000
--- a/test/files/run/macro-expand-tparams-prefix-d1/Impls_1.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.macros.{Context => Ctx}
-
-object Impls {
- def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
- import c.universe._
- c.Expr(Block(List(
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(T.toString)))),
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(V.toString))))),
- Literal(Constant(()))))
- }
-} \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-d1/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-prefix-d1/Macros_Test_2.scala
deleted file mode 100644
index 8222a6d1e8..0000000000
--- a/test/files/run/macro-expand-tparams-prefix-d1/Macros_Test_2.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-object Test extends App {
- class D[T] {
- class C[U] {
- def foo[V] = macro Impls.foo[T, U, V]
- foo[Boolean]
- }
- }
-
- val outer1 = new D[Int]
- new outer1.C[String]
-} \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix.check b/test/files/run/macro-expand-tparams-prefix.check
new file mode 100644
index 0000000000..7397958066
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix.check
@@ -0,0 +1,20 @@
+===Macros1===
+WeakTypeTag[Int]
+WeakTypeTag[Int]
+WeakTypeTag[String]
+WeakTypeTag[Boolean]
+===Macros2===
+WeakTypeTag[Boolean] WeakTypeTag[Int]
+WeakTypeTag[Boolean] WeakTypeTag[String]
+===Macros3===
+WeakTypeTag[Int]
+WeakTypeTag[String]
+WeakTypeTag[Boolean]
+===Macros4===
+WeakTypeTag[Int]
+WeakTypeTag[String]
+WeakTypeTag[Boolean]
+===Macros5===
+WeakTypeTag[T]
+WeakTypeTag[U]
+WeakTypeTag[Boolean]
diff --git a/test/files/neg/macro-invalidshape-b.flags b/test/files/run/macro-expand-tparams-prefix.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/neg/macro-invalidshape-b.flags
+++ b/test/files/run/macro-expand-tparams-prefix.flags
diff --git a/test/files/run/macro-expand-tparams-prefix/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix/Impls_1.scala
new file mode 100644
index 0000000000..e92396d1b4
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix/Impls_1.scala
@@ -0,0 +1,40 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.Context
+
+object Impls1 {
+ def foo[U: c.WeakTypeTag](c: Context)(x: c.Expr[U]) = {
+ import c.universe._
+ val U = implicitly[c.WeakTypeTag[U]]
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(U.toString))))
+ c.Expr[Unit](body)
+ }
+}
+
+object Impls2 {
+ def foo[T: c.WeakTypeTag, U: c.WeakTypeTag](c: Context)(x: c.Expr[U]) = {
+ import c.universe._
+ val T = implicitly[c.WeakTypeTag[T]]
+ val U = implicitly[c.WeakTypeTag[U]]
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(T.toString + " " + U.toString))))
+ c.Expr[Unit](body)
+ }
+}
+
+object Impls345 {
+ def foo[T, U: c.WeakTypeTag, V](c: Context)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
+ import c.universe._
+ c.Expr(Block(List(
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(T.toString)))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(V.toString))))),
+ Literal(Constant(()))))
+ }
+}
+
+object Macros4 {
+ class D[T] {
+ class C[U] {
+ def foo[V] = macro Impls345.foo[T, U, V]
+ }
+ }
+}
diff --git a/test/files/run/macro-expand-tparams-prefix/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-prefix/Macros_Test_2.scala
new file mode 100644
index 0000000000..2b1730d36e
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix/Macros_Test_2.scala
@@ -0,0 +1,57 @@
+object Macros1 {
+ class C[T] {
+ def foo[U](x: U) = macro Impls1.foo[U]
+ }
+}
+
+object Macros2 {
+ class C[T] {
+ def foo[U](x: U) = macro Impls2.foo[T, U]
+ }
+}
+
+object Macros3 {
+ class D[T] {
+ class C[U] {
+ def foo[V] = macro Impls345.foo[T, U, V]
+ }
+ }
+}
+
+// object Macros4 is declared in Impls_1.scala
+
+object Macros5 {
+ class D[T] {
+ class C[U] {
+ def foo[V] = macro Impls345.foo[T, U, V]
+ foo[Boolean]
+ }
+ }
+}
+
+object Test extends App {
+ println("===Macros1===")
+ new Macros1.C[Int]().foo(42)
+ new Macros1.C[Boolean]().foo(42)
+ new Macros1.C[Int]().foo("42")
+ new Macros1.C[String]().foo(true)
+
+ println("===Macros2===")
+ object D2 extends Macros2.C[Boolean]
+ D2.foo(42)
+ D2.foo("42")
+
+ println("===Macros3===")
+ val outer31 = new Macros3.D[Int]
+ val outer32 = new outer31.C[String]
+ outer32.foo[Boolean]
+
+ println("===Macros4===")
+ val outer41 = new Macros4.D[Int]
+ val outer42 = new outer41.C[String]
+ outer42.foo[Boolean]
+
+ println("===Macros5===")
+ val outer1 = new Macros5.D[Int]
+ new outer1.C[String]
+} \ No newline at end of file
diff --git a/test/files/run/macro-impl-relaxed.check b/test/files/run/macro-impl-relaxed.check
new file mode 100644
index 0000000000..487b116534
--- /dev/null
+++ b/test/files/run/macro-impl-relaxed.check
@@ -0,0 +1,4 @@
+2
+2
+2
+2
diff --git a/test/files/run/macro-impl-relaxed/Macros_1.scala b/test/files/run/macro-impl-relaxed/Macros_1.scala
new file mode 100644
index 0000000000..af62646b4e
--- /dev/null
+++ b/test/files/run/macro-impl-relaxed/Macros_1.scala
@@ -0,0 +1,14 @@
+import language.experimental.macros
+import scala.reflect.macros.Context
+
+object Macros {
+ def implUU(c: Context)(x: c.Tree): c.Tree = x
+ def implTU(c: Context)(x: c.Expr[Int]): c.Tree = x.tree
+ def implUT(c: Context)(x: c.Tree): c.Expr[Int] = c.Expr[Int](x)
+ def implTT(c: Context)(x: c.Expr[Int]): c.Expr[Int] = x
+
+ def fooUU(x: Int): Int = macro implUU
+ def fooTU(x: Int): Int = macro implTU
+ def fooUT(x: Int): Int = macro implUT
+ def fooTT(x: Int): Int = macro implTT
+} \ No newline at end of file
diff --git a/test/files/run/macro-impl-relaxed/Test_2.scala b/test/files/run/macro-impl-relaxed/Test_2.scala
new file mode 100644
index 0000000000..2eaeef0fd0
--- /dev/null
+++ b/test/files/run/macro-impl-relaxed/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ println(Macros.fooUU(2))
+ println(Macros.fooTU(2))
+ println(Macros.fooUT(2))
+ println(Macros.fooTT(2))
+} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidshape-c.flags b/test/files/run/macro-impl-tparam-only-in-impl.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/neg/macro-invalidshape-c.flags
+++ b/test/files/run/macro-impl-tparam-only-in-impl.flags
diff --git a/test/files/run/macro-expand-tparams-bounds-a/Impls_1.scala b/test/files/run/macro-impl-tparam-only-in-impl/Impls_1.scala
index 9b8dafaa97..9b8dafaa97 100644
--- a/test/files/run/macro-expand-tparams-bounds-a/Impls_1.scala
+++ b/test/files/run/macro-impl-tparam-only-in-impl/Impls_1.scala
diff --git a/test/files/neg/macro-invalidimpl-h/Macros_Test_2.scala b/test/files/run/macro-impl-tparam-only-in-impl/Macros_Test_2.scala
index 218c7aec7f..218c7aec7f 100644
--- a/test/files/neg/macro-invalidimpl-h/Macros_Test_2.scala
+++ b/test/files/run/macro-impl-tparam-only-in-impl/Macros_Test_2.scala
diff --git a/test/files/run/macro-expand-tparams-optional.check b/test/files/run/macro-impl-tparam-typetag-is-optional.check
index b4a0f394c1..b4a0f394c1 100644
--- a/test/files/run/macro-expand-tparams-optional.check
+++ b/test/files/run/macro-impl-tparam-typetag-is-optional.check
diff --git a/test/files/neg/macro-invalidusage-badbounds-a.flags b/test/files/run/macro-impl-tparam-typetag-is-optional.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/neg/macro-invalidusage-badbounds-a.flags
+++ b/test/files/run/macro-impl-tparam-typetag-is-optional.flags
diff --git a/test/files/run/macro-expand-tparams-optional/Impls_1.scala b/test/files/run/macro-impl-tparam-typetag-is-optional/Impls_1.scala
index ace7a6cd26..ace7a6cd26 100644
--- a/test/files/run/macro-expand-tparams-optional/Impls_1.scala
+++ b/test/files/run/macro-impl-tparam-typetag-is-optional/Impls_1.scala
diff --git a/test/files/run/macro-expand-tparams-optional/Macros_Test_2.scala b/test/files/run/macro-impl-tparam-typetag-is-optional/Macros_Test_2.scala
index e72c27881a..e72c27881a 100644
--- a/test/files/run/macro-expand-tparams-optional/Macros_Test_2.scala
+++ b/test/files/run/macro-impl-tparam-typetag-is-optional/Macros_Test_2.scala
diff --git a/test/files/run/macro-invalidusage-partialapplication-with-tparams.check b/test/files/run/macro-invalidusage-partialapplication-with-tparams.check
index f1d5e925fa..326f3e08ca 100644
--- a/test/files/run/macro-invalidusage-partialapplication-with-tparams.check
+++ b/test/files/run/macro-invalidusage-partialapplication-with-tparams.check
@@ -1,3 +1,3 @@
reflective compilation has failed:
-macros cannot be partially applied
+too few argument lists for macro invocation
diff --git a/test/files/run/macro-invalidusage-partialapplication.check b/test/files/run/macro-invalidusage-partialapplication.check
index f1d5e925fa..326f3e08ca 100644
--- a/test/files/run/macro-invalidusage-partialapplication.check
+++ b/test/files/run/macro-invalidusage-partialapplication.check
@@ -1,3 +1,3 @@
reflective compilation has failed:
-macros cannot be partially applied
+too few argument lists for macro invocation
diff --git a/test/files/run/macro-sip19-revised/Impls_Macros_1.scala b/test/files/run/macro-sip19-revised/Impls_Macros_1.scala
index 5f3f61ca3f..8d7d3b5d3d 100644
--- a/test/files/run/macro-sip19-revised/Impls_Macros_1.scala
+++ b/test/files/run/macro-sip19-revised/Impls_Macros_1.scala
@@ -7,7 +7,7 @@ object Macros {
val inscope = c.inferImplicitValue(c.mirror.staticClass("SourceLocation").toType)
val outer = c.Expr[SourceLocation](if (!inscope.isEmpty) inscope else Literal(Constant(null)))
- val Apply(fun, args) = c.enclosingImplicits(0)._2
+ val Apply(fun, args) = c.enclosingImplicits(0).tree
val fileName = fun.pos.source.file.file.getName
val line = fun.pos.line
val charOffset = fun.pos.point
diff --git a/test/files/run/macro-sip19/Impls_Macros_1.scala b/test/files/run/macro-sip19/Impls_Macros_1.scala
index 535ec2ccf0..4c165ed1b8 100644
--- a/test/files/run/macro-sip19/Impls_Macros_1.scala
+++ b/test/files/run/macro-sip19/Impls_Macros_1.scala
@@ -3,7 +3,7 @@ import scala.reflect.macros.Context
object Macros {
def impl(c: Context) = {
import c.universe._
- val Apply(fun, args) = c.enclosingImplicits(0)._2
+ val Apply(fun, args) = c.enclosingImplicits(0).tree
val fileName = fun.pos.source.file.file.getName
val line = fun.pos.line
val charOffset = fun.pos.point
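
The change from ._2 to .tree above reflects that c.enclosingImplicits entries are no longer bare pairs; each entry exposes the expected type and the triggering tree as named fields. A hypothetical impl skeleton using the new accessor, mirroring the SIP-19 test above:

import scala.reflect.macros.Context

object SourceInfoSketch {
  def impl(c: Context): c.Expr[String] = {
    import c.universe._
    // Take the implicit search that triggered this macro and inspect its call tree.
    val Apply(fun, _) = c.enclosingImplicits(0).tree
    c.literal(fun.pos.source.file.file.getName + ":" + fun.pos.line)
  }
}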
diff --git a/test/files/run/macro-system-properties.check b/test/files/run/macro-system-properties.check
new file mode 100644
index 0000000000..a2da4aaf60
--- /dev/null
+++ b/test/files/run/macro-system-properties.check
@@ -0,0 +1,26 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import language.experimental._, reflect.macros.Context
+import language.experimental._
+import reflect.macros.Context
+
+scala> object GrabContext {
+ def lastContext = Option(System.getProperties.get("lastContext").asInstanceOf[reflect.macros.runtime.Context])
+ // System.properties lets you stash true globals (unlike statics which are classloader scoped)
+ def impl(c: Context)() = { System.getProperties.put("lastContext", c); c.literalUnit }
+ def grab() = macro impl
+ }
+defined object GrabContext
+
+scala> object Test { class C(implicit a: Any) { GrabContext.grab } }
+defined object Test
+
+scala> object Test { class C(implicit a: Any) { GrabContext.grab } }
+defined object Test
+
+scala>
+
+scala>
diff --git a/test/files/run/macro-system-properties.scala b/test/files/run/macro-system-properties.scala
new file mode 100644
index 0000000000..e182defc81
--- /dev/null
+++ b/test/files/run/macro-system-properties.scala
@@ -0,0 +1,16 @@
+import scala.tools.nsc._
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ import language.experimental._, reflect.macros.Context
+ object GrabContext {
+ def lastContext = Option(System.getProperties.get("lastContext").asInstanceOf[reflect.macros.runtime.Context])
+ // System.properties lets you stash true globals (unlike statics which are classloader scoped)
+ def impl(c: Context)() = { System.getProperties.put("lastContext", c); c.literalUnit }
+ def grab() = macro impl
+ }
+ object Test { class C(implicit a: Any) { GrabContext.grab } }
+ object Test { class C(implicit a: Any) { GrabContext.grab } }
+ """
+}
diff --git a/test/files/run/macro-def-path-dependent-a.check b/test/files/run/macro-term-declared-in-annotation.check
index 7658ad2c24..7658ad2c24 100644
--- a/test/files/run/macro-def-path-dependent-a.check
+++ b/test/files/run/macro-term-declared-in-annotation.check
diff --git a/test/files/run/macro-declared-in-annotation.flags b/test/files/run/macro-term-declared-in-annotation.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/run/macro-declared-in-annotation.flags
+++ b/test/files/run/macro-term-declared-in-annotation.flags
diff --git a/test/files/run/macro-declared-in-annotation/Impls_1.scala b/test/files/run/macro-term-declared-in-annotation/Impls_1.scala
index 1ea06de679..1ea06de679 100644
--- a/test/files/run/macro-declared-in-annotation/Impls_1.scala
+++ b/test/files/run/macro-term-declared-in-annotation/Impls_1.scala
diff --git a/test/files/run/macro-declared-in-annotation/Macros_2.scala b/test/files/run/macro-term-declared-in-annotation/Macros_2.scala
index 40d71c62fb..40d71c62fb 100644
--- a/test/files/run/macro-declared-in-annotation/Macros_2.scala
+++ b/test/files/run/macro-term-declared-in-annotation/Macros_2.scala
diff --git a/test/files/run/macro-declared-in-annotation/Test_3.scala b/test/files/run/macro-term-declared-in-annotation/Test_3.scala
index 866487f028..866487f028 100644
--- a/test/files/run/macro-declared-in-annotation/Test_3.scala
+++ b/test/files/run/macro-term-declared-in-annotation/Test_3.scala
diff --git a/test/files/run/macro-declared-in-anonymous.check b/test/files/run/macro-term-declared-in-anonymous.check
index 09b8d015a6..09b8d015a6 100644
--- a/test/files/run/macro-declared-in-anonymous.check
+++ b/test/files/run/macro-term-declared-in-anonymous.check
diff --git a/test/files/run/macro-declared-in-anonymous.flags b/test/files/run/macro-term-declared-in-anonymous.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/run/macro-declared-in-anonymous.flags
+++ b/test/files/run/macro-term-declared-in-anonymous.flags
diff --git a/test/files/run/macro-declared-in-anonymous/Impls_1.scala b/test/files/run/macro-term-declared-in-anonymous/Impls_1.scala
index 348f3420f2..348f3420f2 100644
--- a/test/files/run/macro-declared-in-anonymous/Impls_1.scala
+++ b/test/files/run/macro-term-declared-in-anonymous/Impls_1.scala
diff --git a/test/files/run/macro-term-declared-in-anonymous/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-anonymous/Macros_Test_2.scala
new file mode 100644
index 0000000000..88cd29ae4f
--- /dev/null
+++ b/test/files/run/macro-term-declared-in-anonymous/Macros_Test_2.scala
@@ -0,0 +1,6 @@
+import scala.language.reflectiveCalls
+
+object Test extends App {
+ val macros = new { def foo = macro Impls.foo }
+ macros.foo
+}
diff --git a/test/files/run/macro-declared-in-block.check b/test/files/run/macro-term-declared-in-block.check
index 5e687db8bf..5e687db8bf 100644
--- a/test/files/run/macro-declared-in-block.check
+++ b/test/files/run/macro-term-declared-in-block.check
diff --git a/test/files/run/macro-declared-in-block.flags b/test/files/run/macro-term-declared-in-block.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/run/macro-declared-in-block.flags
+++ b/test/files/run/macro-term-declared-in-block.flags
diff --git a/test/files/run/macro-declared-in-block/Impls_1.scala b/test/files/run/macro-term-declared-in-block/Impls_1.scala
index 348f3420f2..348f3420f2 100644
--- a/test/files/run/macro-declared-in-block/Impls_1.scala
+++ b/test/files/run/macro-term-declared-in-block/Impls_1.scala
diff --git a/test/files/run/macro-declared-in-block/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-block/Macros_Test_2.scala
index 69088e24bc..69088e24bc 100644
--- a/test/files/run/macro-declared-in-block/Macros_Test_2.scala
+++ b/test/files/run/macro-term-declared-in-block/Macros_Test_2.scala
diff --git a/test/files/run/macro-declared-in-class-class.check b/test/files/run/macro-term-declared-in-class-class.check
index 47248d7af7..47248d7af7 100644
--- a/test/files/run/macro-declared-in-class-class.check
+++ b/test/files/run/macro-term-declared-in-class-class.check
diff --git a/test/files/run/macro-declared-in-class-class.flags b/test/files/run/macro-term-declared-in-class-class.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/run/macro-declared-in-class-class.flags
+++ b/test/files/run/macro-term-declared-in-class-class.flags
diff --git a/test/files/run/macro-declared-in-class-class/Impls_1.scala b/test/files/run/macro-term-declared-in-class-class/Impls_1.scala
index 348f3420f2..348f3420f2 100644
--- a/test/files/run/macro-declared-in-class-class/Impls_1.scala
+++ b/test/files/run/macro-term-declared-in-class-class/Impls_1.scala
diff --git a/test/files/run/macro-declared-in-class-class/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-class-class/Macros_Test_2.scala
index 871857a97f..871857a97f 100644
--- a/test/files/run/macro-declared-in-class-class/Macros_Test_2.scala
+++ b/test/files/run/macro-term-declared-in-class-class/Macros_Test_2.scala
diff --git a/test/files/run/macro-declared-in-class-object.check b/test/files/run/macro-term-declared-in-class-object.check
index 35af59e40f..35af59e40f 100644
--- a/test/files/run/macro-declared-in-class-object.check
+++ b/test/files/run/macro-term-declared-in-class-object.check
diff --git a/test/files/run/macro-declared-in-class-object.flags b/test/files/run/macro-term-declared-in-class-object.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/run/macro-declared-in-class-object.flags
+++ b/test/files/run/macro-term-declared-in-class-object.flags
diff --git a/test/files/run/macro-declared-in-class-object/Impls_1.scala b/test/files/run/macro-term-declared-in-class-object/Impls_1.scala
index 348f3420f2..348f3420f2 100644
--- a/test/files/run/macro-declared-in-class-object/Impls_1.scala
+++ b/test/files/run/macro-term-declared-in-class-object/Impls_1.scala
diff --git a/test/files/run/macro-declared-in-class-object/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-class-object/Macros_Test_2.scala
index 994f9fe935..994f9fe935 100644
--- a/test/files/run/macro-declared-in-class-object/Macros_Test_2.scala
+++ b/test/files/run/macro-term-declared-in-class-object/Macros_Test_2.scala
diff --git a/test/files/run/macro-declared-in-class.check b/test/files/run/macro-term-declared-in-class.check
index a1c1d7af8b..a1c1d7af8b 100644
--- a/test/files/run/macro-declared-in-class.check
+++ b/test/files/run/macro-term-declared-in-class.check
diff --git a/test/files/run/macro-declared-in-class.flags b/test/files/run/macro-term-declared-in-class.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/run/macro-declared-in-class.flags
+++ b/test/files/run/macro-term-declared-in-class.flags
diff --git a/test/files/run/macro-declared-in-class/Impls_1.scala b/test/files/run/macro-term-declared-in-class/Impls_1.scala
index 348f3420f2..348f3420f2 100644
--- a/test/files/run/macro-declared-in-class/Impls_1.scala
+++ b/test/files/run/macro-term-declared-in-class/Impls_1.scala
diff --git a/test/files/run/macro-declared-in-class/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-class/Macros_Test_2.scala
index 1b9d13e775..1b9d13e775 100644
--- a/test/files/run/macro-declared-in-class/Macros_Test_2.scala
+++ b/test/files/run/macro-term-declared-in-class/Macros_Test_2.scala
diff --git a/test/files/run/macro-declared-in-default-param.check b/test/files/run/macro-term-declared-in-default-param.check
index 6decd7aa4d..6decd7aa4d 100644
--- a/test/files/run/macro-declared-in-default-param.check
+++ b/test/files/run/macro-term-declared-in-default-param.check
diff --git a/test/files/run/macro-declared-in-default-param.flags b/test/files/run/macro-term-declared-in-default-param.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/run/macro-declared-in-default-param.flags
+++ b/test/files/run/macro-term-declared-in-default-param.flags
diff --git a/test/files/run/macro-declared-in-default-param/Impls_1.scala b/test/files/run/macro-term-declared-in-default-param/Impls_1.scala
index 4380f40b04..4380f40b04 100644
--- a/test/files/run/macro-declared-in-default-param/Impls_1.scala
+++ b/test/files/run/macro-term-declared-in-default-param/Impls_1.scala
diff --git a/test/files/run/macro-declared-in-default-param/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-default-param/Macros_Test_2.scala
index 356029e63e..356029e63e 100644
--- a/test/files/run/macro-declared-in-default-param/Macros_Test_2.scala
+++ b/test/files/run/macro-term-declared-in-default-param/Macros_Test_2.scala
diff --git a/test/files/run/macro-declared-in-implicit-class.check b/test/files/run/macro-term-declared-in-implicit-class.check
index 5dc968c08c..5dc968c08c 100644
--- a/test/files/run/macro-declared-in-implicit-class.check
+++ b/test/files/run/macro-term-declared-in-implicit-class.check
diff --git a/test/files/run/macro-declared-in-implicit-class.flags b/test/files/run/macro-term-declared-in-implicit-class.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/run/macro-declared-in-implicit-class.flags
+++ b/test/files/run/macro-term-declared-in-implicit-class.flags
diff --git a/test/files/run/macro-declared-in-implicit-class/Impls_Macros_1.scala b/test/files/run/macro-term-declared-in-implicit-class/Impls_Macros_1.scala
index 4c009cc367..4c009cc367 100644
--- a/test/files/run/macro-declared-in-implicit-class/Impls_Macros_1.scala
+++ b/test/files/run/macro-term-declared-in-implicit-class/Impls_Macros_1.scala
diff --git a/test/files/run/macro-declared-in-implicit-class/Test_2.scala b/test/files/run/macro-term-declared-in-implicit-class/Test_2.scala
index d0bc9cc38c..d0bc9cc38c 100644
--- a/test/files/run/macro-declared-in-implicit-class/Test_2.scala
+++ b/test/files/run/macro-term-declared-in-implicit-class/Test_2.scala
diff --git a/test/files/run/macro-declared-in-method.check b/test/files/run/macro-term-declared-in-method.check
index 5e687db8bf..5e687db8bf 100644
--- a/test/files/run/macro-declared-in-method.check
+++ b/test/files/run/macro-term-declared-in-method.check
diff --git a/test/files/run/macro-declared-in-method.flags b/test/files/run/macro-term-declared-in-method.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/run/macro-declared-in-method.flags
+++ b/test/files/run/macro-term-declared-in-method.flags
diff --git a/test/files/run/macro-declared-in-method/Impls_1.scala b/test/files/run/macro-term-declared-in-method/Impls_1.scala
index 348f3420f2..348f3420f2 100644
--- a/test/files/run/macro-declared-in-method/Impls_1.scala
+++ b/test/files/run/macro-term-declared-in-method/Impls_1.scala
diff --git a/test/files/run/macro-declared-in-method/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-method/Macros_Test_2.scala
index ed5c8b7c43..ed5c8b7c43 100644
--- a/test/files/run/macro-declared-in-method/Macros_Test_2.scala
+++ b/test/files/run/macro-term-declared-in-method/Macros_Test_2.scala
diff --git a/test/files/run/macro-declared-in-object-class.check b/test/files/run/macro-term-declared-in-object-class.check
index 47248d7af7..47248d7af7 100644
--- a/test/files/run/macro-declared-in-object-class.check
+++ b/test/files/run/macro-term-declared-in-object-class.check
diff --git a/test/files/run/macro-declared-in-object-class.flags b/test/files/run/macro-term-declared-in-object-class.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/run/macro-declared-in-object-class.flags
+++ b/test/files/run/macro-term-declared-in-object-class.flags
diff --git a/test/files/run/macro-declared-in-object-class/Impls_1.scala b/test/files/run/macro-term-declared-in-object-class/Impls_1.scala
index 348f3420f2..348f3420f2 100644
--- a/test/files/run/macro-declared-in-object-class/Impls_1.scala
+++ b/test/files/run/macro-term-declared-in-object-class/Impls_1.scala
diff --git a/test/files/run/macro-declared-in-object-class/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-object-class/Macros_Test_2.scala
index 204deed61c..204deed61c 100644
--- a/test/files/run/macro-declared-in-object-class/Macros_Test_2.scala
+++ b/test/files/run/macro-term-declared-in-object-class/Macros_Test_2.scala
diff --git a/test/files/run/macro-declared-in-object-object.check b/test/files/run/macro-term-declared-in-object-object.check
index 35af59e40f..35af59e40f 100644
--- a/test/files/run/macro-declared-in-object-object.check
+++ b/test/files/run/macro-term-declared-in-object-object.check
diff --git a/test/files/run/macro-declared-in-object-object.flags b/test/files/run/macro-term-declared-in-object-object.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/run/macro-declared-in-object-object.flags
+++ b/test/files/run/macro-term-declared-in-object-object.flags
diff --git a/test/files/run/macro-declared-in-object-object/Impls_1.scala b/test/files/run/macro-term-declared-in-object-object/Impls_1.scala
index 348f3420f2..348f3420f2 100644
--- a/test/files/run/macro-declared-in-object-object/Impls_1.scala
+++ b/test/files/run/macro-term-declared-in-object-object/Impls_1.scala
diff --git a/test/files/run/macro-declared-in-object-object/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-object-object/Macros_Test_2.scala
index e261a50f3d..e261a50f3d 100644
--- a/test/files/run/macro-declared-in-object-object/Macros_Test_2.scala
+++ b/test/files/run/macro-term-declared-in-object-object/Macros_Test_2.scala
diff --git a/test/files/run/macro-declared-in-object.check b/test/files/run/macro-term-declared-in-object.check
index 4d955a96b1..4d955a96b1 100644
--- a/test/files/run/macro-declared-in-object.check
+++ b/test/files/run/macro-term-declared-in-object.check
diff --git a/test/files/run/macro-declared-in-object.flags b/test/files/run/macro-term-declared-in-object.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/run/macro-declared-in-object.flags
+++ b/test/files/run/macro-term-declared-in-object.flags
diff --git a/test/files/run/macro-declared-in-object/Impls_1.scala b/test/files/run/macro-term-declared-in-object/Impls_1.scala
index 348f3420f2..348f3420f2 100644
--- a/test/files/run/macro-declared-in-object/Impls_1.scala
+++ b/test/files/run/macro-term-declared-in-object/Impls_1.scala
diff --git a/test/files/run/macro-declared-in-object/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-object/Macros_Test_2.scala
index a5a4862ba0..a5a4862ba0 100644
--- a/test/files/run/macro-declared-in-object/Macros_Test_2.scala
+++ b/test/files/run/macro-term-declared-in-object/Macros_Test_2.scala
diff --git a/test/files/run/macro-declared-in-package-object.check b/test/files/run/macro-term-declared-in-package-object.check
index bc0069178d..bc0069178d 100644
--- a/test/files/run/macro-declared-in-package-object.check
+++ b/test/files/run/macro-term-declared-in-package-object.check
diff --git a/test/files/run/macro-declared-in-package-object.flags b/test/files/run/macro-term-declared-in-package-object.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/run/macro-declared-in-package-object.flags
+++ b/test/files/run/macro-term-declared-in-package-object.flags
diff --git a/test/files/run/macro-declared-in-package-object/Impls_1.scala b/test/files/run/macro-term-declared-in-package-object/Impls_1.scala
index 348f3420f2..348f3420f2 100644
--- a/test/files/run/macro-declared-in-package-object/Impls_1.scala
+++ b/test/files/run/macro-term-declared-in-package-object/Impls_1.scala
diff --git a/test/files/run/macro-declared-in-package-object/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-package-object/Macros_Test_2.scala
index 54a5962e80..54a5962e80 100644
--- a/test/files/run/macro-declared-in-package-object/Macros_Test_2.scala
+++ b/test/files/run/macro-term-declared-in-package-object/Macros_Test_2.scala
diff --git a/test/files/run/macro-declared-in-refinement.check b/test/files/run/macro-term-declared-in-refinement.check
index 09b8d015a6..09b8d015a6 100644
--- a/test/files/run/macro-declared-in-refinement.check
+++ b/test/files/run/macro-term-declared-in-refinement.check
diff --git a/test/files/run/macro-declared-in-refinement.flags b/test/files/run/macro-term-declared-in-refinement.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/run/macro-declared-in-refinement.flags
+++ b/test/files/run/macro-term-declared-in-refinement.flags
diff --git a/test/files/run/macro-declared-in-refinement/Impls_1.scala b/test/files/run/macro-term-declared-in-refinement/Impls_1.scala
index 348f3420f2..348f3420f2 100644
--- a/test/files/run/macro-declared-in-refinement/Impls_1.scala
+++ b/test/files/run/macro-term-declared-in-refinement/Impls_1.scala
diff --git a/test/files/run/macro-declared-in-refinement/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-refinement/Macros_Test_2.scala
index f746c2da57..ff9a66d58a 100644
--- a/test/files/run/macro-declared-in-refinement/Macros_Test_2.scala
+++ b/test/files/run/macro-term-declared-in-refinement/Macros_Test_2.scala
@@ -1,6 +1,9 @@
+
+import scala.language.reflectiveCalls
+
class Base
object Test extends App {
val macros = new Base { def foo = macro Impls.foo }
macros.foo
-} \ No newline at end of file
+}
diff --git a/test/files/run/macro-declared-in-trait.check b/test/files/run/macro-term-declared-in-trait.check
index 0d70ac74f3..0d70ac74f3 100644
--- a/test/files/run/macro-declared-in-trait.check
+++ b/test/files/run/macro-term-declared-in-trait.check
diff --git a/test/files/run/macro-declared-in-trait.flags b/test/files/run/macro-term-declared-in-trait.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/run/macro-declared-in-trait.flags
+++ b/test/files/run/macro-term-declared-in-trait.flags
diff --git a/test/files/run/macro-declared-in-trait/Impls_1.scala b/test/files/run/macro-term-declared-in-trait/Impls_1.scala
index 348f3420f2..348f3420f2 100644
--- a/test/files/run/macro-declared-in-trait/Impls_1.scala
+++ b/test/files/run/macro-term-declared-in-trait/Impls_1.scala
diff --git a/test/files/run/macro-declared-in-trait/Macros_Test_2.scala b/test/files/run/macro-term-declared-in-trait/Macros_Test_2.scala
index f75906b636..f75906b636 100644
--- a/test/files/run/macro-declared-in-trait/Macros_Test_2.scala
+++ b/test/files/run/macro-term-declared-in-trait/Macros_Test_2.scala
diff --git a/test/files/run/macro-toplevel-companion-b/Test_2.scala b/test/files/run/macro-toplevel-companion-b/Test_2.scala
index ca202d053f..4e766bde89 100644
--- a/test/files/run/macro-toplevel-companion-b/Test_2.scala
+++ b/test/files/run/macro-toplevel-companion-b/Test_2.scala
@@ -7,5 +7,5 @@ import Macros._
object Test extends App {
val tb = cm.mkToolBox()
try tb.compile(Select(Ident(TermName("Macros")), TermName("foo")))
- catch { case ToolBoxError(message, _) => println("""macroSynthetic-.*?\.scala""".r.replaceAllIn(message, "<synthetic file name>")) }
+ catch { case ToolBoxError(message, _) => println("""(Found in|and) .*?compileLateSynthetic-.*?\.scala""".r.replaceAllIn(message, m => m.group(1) + " <synthetic file name>")) }
} \ No newline at end of file
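
The replacement regex above keeps the "Found in"/"and" prefix via a capture group while masking the generated file name. A minimal, self-contained sketch of Regex.replaceAllIn with a Match-based replacer (the input string is made up for illustration):

    import scala.util.matching.Regex.Match

    object MaskSyntheticNameSketch {
      def main(args: Array[String]): Unit = {
        val message = "Found in compileLateSynthetic-12345.scala and newSource1.scala"
        val masked = """(Found in|and) .*?compileLateSynthetic-.*?\.scala""".r
          .replaceAllIn(message, (m: Match) => m.group(1) + " <synthetic file name>")
        println(masked) // Found in <synthetic file name> and newSource1.scala
      }
    }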
diff --git a/test/files/run/macro-toplevel-companion-c.check b/test/files/run/macro-toplevel-companion-c.check
index 8b422c2061..4052c472f8 100644
--- a/test/files/run/macro-toplevel-companion-c.check
+++ b/test/files/run/macro-toplevel-companion-c.check
@@ -1,3 +1,3 @@
error: Companions 'class C' and 'object C' must be defined in same file:
- Found in <synthetic file name> and newSource1
+ Found in <synthetic file name> and newSource1.scala
diff --git a/test/files/run/macro-toplevel-companion-c.scala b/test/files/run/macro-toplevel-companion-c.scala
index 0e99903158..c315f8b942 100644
--- a/test/files/run/macro-toplevel-companion-c.scala
+++ b/test/files/run/macro-toplevel-companion-c.scala
@@ -45,7 +45,7 @@ object Test extends DirectTest {
log("Compiling Test_2...")
if (compileTest()) log("Success!") else log("Failed...")
}
- println("""macroSynthetic-.*?\.scala""".r.replaceAllIn(baos.toString, "<synthetic file name>"))
+ println("""(Found in|and) .*?compileLateSynthetic-.*?\.scala""".r.replaceAllIn(baos.toString, m => m.group(1) + " <synthetic file name>"))
System.setErr(prevErr)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-typecheck-macrosdisabled.check b/test/files/run/macro-typecheck-macrosdisabled.check
index 29a881f8b1..e0e880ab66 100644
--- a/test/files/run/macro-typecheck-macrosdisabled.check
+++ b/test/files/run/macro-typecheck-macrosdisabled.check
@@ -7,7 +7,7 @@
$treecreator1.super.<init>();
()
};
- def apply[U >: Nothing <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Tree = {
+ def apply[U <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Tree = {
val $u: U = $m$untyped.universe;
val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
$u.Literal.apply($u.Constant.apply(2))
@@ -20,7 +20,7 @@
$typecreator2.super.<init>();
()
};
- def apply[U >: Nothing <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = {
+ def apply[U <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = {
val $u: U = $m$untyped.universe;
val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
$u.ConstantType.apply($u.Constant.apply(2))
diff --git a/test/files/run/macro-typecheck-macrosdisabled2.check b/test/files/run/macro-typecheck-macrosdisabled2.check
index 75fd693722..347dfec1dc 100644
--- a/test/files/run/macro-typecheck-macrosdisabled2.check
+++ b/test/files/run/macro-typecheck-macrosdisabled2.check
@@ -7,7 +7,7 @@
$treecreator1.super.<init>();
()
};
- def apply[U >: Nothing <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Tree = {
+ def apply[U <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Tree = {
val $u: U = $m$untyped.universe;
val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
$u.Apply.apply($u.Select.apply($u.build.Ident($m.staticModule("scala.Array")), $u.TermName.apply("apply")), scala.collection.immutable.List.apply[$u.Literal]($u.Literal.apply($u.Constant.apply(2))))
@@ -20,7 +20,7 @@
$typecreator2.super.<init>();
()
};
- def apply[U >: Nothing <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = {
+ def apply[U <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = {
val $u: U = $m$untyped.universe;
val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
$u.TypeRef.apply($u.ThisType.apply($m.staticPackage("scala").asModule.moduleClass), $m.staticClass("scala.Array"), scala.collection.immutable.List.apply[$u.Type]($m.staticClass("scala.Int").asType.toTypeConstructor))
diff --git a/test/files/run/macro-undetparams-consfromsls.check b/test/files/run/macro-undetparams-consfromsls.check
index b10a90043e..3fee58d9c1 100644
--- a/test/files/run/macro-undetparams-consfromsls.check
+++ b/test/files/run/macro-undetparams-consfromsls.check
@@ -1,5 +1,5 @@
-A = TypeTag[Int]
-B = TypeTag[Nothing]
+A = WeakTypeTag[Int]
+B = WeakTypeTag[Nothing]
List(1)
-A = TypeTag[Any]
+A = WeakTypeTag[Any]
List(abc, 1)
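
The expected output now prints WeakTypeTag where type parameters remain undetermined at expansion time. A minimal sketch of the TypeTag vs WeakTypeTag distinction that the check file reflects (object and method names are illustrative):

    import scala.reflect.runtime.universe._

    object TagSketch {
      // typeTag requires a fully known type; weakTypeTag can also be materialized
      // when a type parameter is abstract or not yet determined.
      def tagOf[T: TypeTag] = typeTag[T]
      def weakTagOf[T: WeakTypeTag] = weakTypeTag[T]

      def inGenericContext[T]: WeakTypeTag[List[T]] =
        weakTagOf[List[T]] // compiles: T may stay abstract; typeTag[List[T]] would not

      def main(args: Array[String]): Unit = {
        println(tagOf[Int])            // a TypeTag for Int
        println(inGenericContext[Int]) // a WeakTypeTag mentioning the abstract T
      }
    }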
diff --git a/test/files/run/macro-undetparams-macroitself.check b/test/files/run/macro-undetparams-macroitself.check
index fa6b335afb..a9bf55423e 100644
--- a/test/files/run/macro-undetparams-macroitself.check
+++ b/test/files/run/macro-undetparams-macroitself.check
@@ -1,2 +1,2 @@
-TypeTag[Int]
+WeakTypeTag[Int]
WeakTypeTag[String]
diff --git a/test/files/run/manifests-new.scala b/test/files/run/manifests-new.scala
index f1596dee81..8b42e3ca73 100644
--- a/test/files/run/manifests-new.scala
+++ b/test/files/run/manifests-new.scala
@@ -1,3 +1,6 @@
+
+
+import scala.language.{ higherKinds, postfixOps }
import scala.reflect.runtime.universe._
object Test
@@ -146,4 +149,4 @@ object Test
}
def main(args: Array[String]): Unit = runAllTests
-} \ No newline at end of file
+}
diff --git a/test/files/run/manifests-old.scala b/test/files/run/manifests-old.scala
index 621689a254..d8b1e751d4 100644
--- a/test/files/run/manifests-old.scala
+++ b/test/files/run/manifests-old.scala
@@ -1,3 +1,6 @@
+import scala.language.{ higherKinds, postfixOps }
+
+@deprecated("Suppress warnings", since="2.11")
object Test
{
object Variances extends Enumeration {
@@ -144,4 +147,4 @@ object Test
}
def main(args: Array[String]): Unit = runAllTests
-} \ No newline at end of file
+}
diff --git a/test/files/run/mapConserve.scala b/test/files/run/mapConserve.scala
index 013095b1de..330fb34ca1 100644
--- a/test/files/run/mapConserve.scala
+++ b/test/files/run/mapConserve.scala
@@ -1,3 +1,6 @@
+/*
+ * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ */
import scala.annotation.tailrec
import scala.collection.mutable.ListBuffer
@@ -50,4 +53,4 @@ object Test {
checkStackOverflow();
}
-} \ No newline at end of file
+}
diff --git a/test/files/run/memberpos.check b/test/files/run/memberpos.check
new file mode 100644
index 0000000000..9e3a807f5a
--- /dev/null
+++ b/test/files/run/memberpos.check
@@ -0,0 +1,11 @@
+newSource1.scala
+2,4 class A
+6,28 object A
+ 7,10 def bippy
+ 8 def hello
+ 11,27 class Dingo
+ 12,26 def foooooz
+ 22 val a
+30 class B
+ 30 def f
+
diff --git a/test/files/run/memberpos.scala b/test/files/run/memberpos.scala
new file mode 100644
index 0000000000..f2b79c0ec1
--- /dev/null
+++ b/test/files/run/memberpos.scala
@@ -0,0 +1,39 @@
+import scala.tools.partest._
+
+// Simple sanity test for -Yshow-member-pos.
+object Test extends DirectTest {
+ override def extraSettings: String = "-usejavacp -Ystop-after:parser -Yshow-member-pos \"\" -d " + testOutput.path
+ override def show() = compile()
+ override def code = """
+class A(val a: Int = 1) {
+
+}
+
+object A {
+ def bippy = {
+ def hello = 55
+ "" + hello
+ }
+ class Dingo {
+ def foooooz = /****
+
+
+
+
+
+ ****/ {
+
+
+
+ val a = 1
+
+
+ a
+ }
+ }
+}
+
+class B { def f = 1 }
+
+"""
+}
diff --git a/test/files/run/misc.check b/test/files/run/misc.check
index 9fa7b72d50..56116f8104 100644
--- a/test/files/run/misc.check
+++ b/test/files/run/misc.check
@@ -1,3 +1,27 @@
+misc.scala:46: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 42;
+ ^
+misc.scala:47: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 42l;
+ ^
+misc.scala:48: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 23.5f;
+ ^
+misc.scala:49: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 23.5;
+ ^
+misc.scala:50: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ "Hello";
+ ^
+misc.scala:51: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 32 + 45;
+ ^
+misc.scala:62: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ x;
+ ^
+misc.scala:74: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 1 < 2;
+ ^
### Hello
### 17
### Bye
diff --git a/test/files/run/names-defaults.check b/test/files/run/names-defaults.check
index f253de71d6..0037822f3b 100644
--- a/test/files/run/names-defaults.check
+++ b/test/files/run/names-defaults.check
@@ -1,3 +1,7 @@
+names-defaults.scala:269: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ spawn(b = { val ttt = 1; ttt }, a = 0)
+ ^
+warning: there were 4 deprecation warning(s); re-run with -deprecation for details
1: @
get: $
get: 2
diff --git a/test/files/run/names-defaults.scala b/test/files/run/names-defaults.scala
index 220414f02a..05cd4a540c 100644
--- a/test/files/run/names-defaults.scala
+++ b/test/files/run/names-defaults.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ higherKinds, existentials }
+
object Test extends App {
def get[T](x: T) = { println("get: "+ x); x }
diff --git a/test/files/run/no-pickle-skolems/Test_2.scala b/test/files/run/no-pickle-skolems/Test_2.scala
index 90bb4c4f88..8fd6016aea 100644
--- a/test/files/run/no-pickle-skolems/Test_2.scala
+++ b/test/files/run/no-pickle-skolems/Test_2.scala
@@ -1,3 +1,5 @@
+
+import scala.language.reflectiveCalls
import scala.reflect.runtime.universe._
object Test {
diff --git a/test/files/run/option-fold.scala b/test/files/run/option-fold.scala
index d554ba4f9b..7e21403c2e 100644
--- a/test/files/run/option-fold.scala
+++ b/test/files/run/option-fold.scala
@@ -7,13 +7,14 @@ object Test {
def g(x: Option[A]) = x.fold(-1) {
case B => 0
case C(x) => x
+ case _ => ???
}
def main(args: Array[String]): Unit = {
- println(f(None))
- println(f(Some(5)))
- println(g(None))
- println(g(Some(B)))
- println(g(Some(C(1))))
+ println(f(None)) //List()
+ println(f(Some(5))) //List(5)
+ println(g(None)) //-1
+ println(g(Some(B))) //0
+ println(g(Some(C(1)))) //1
}
}
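
The added wildcard case keeps the pattern-matching argument of Option.fold total over an open class hierarchy. A minimal sketch of the same shape (class and method names are illustrative):

    object OptionFoldSketch {
      class A
      case object B extends A
      case class C(x: Int) extends A

      // Option.fold takes a default for None and a function for Some; since A is
      // open, a wildcard case keeps the match total, as in the patched test.
      def g(x: Option[A]): Int = x.fold(-1) {
        case B    => 0
        case C(n) => n
        case _    => -2
      }

      def main(args: Array[String]): Unit = {
        println(g(None))       // -1
        println(g(Some(B)))    // 0
        println(g(Some(C(7)))) // 7
      }
    }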
diff --git a/test/files/run/patmat-exprs.scala b/test/files/run/patmat-exprs.scala
index dfc78e2ca5..56e4e01598 100644
--- a/test/files/run/patmat-exprs.scala
+++ b/test/files/run/patmat-exprs.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ implicitConversions }
import runtime.ScalaRunTime
object Test {
@@ -576,4 +578,4 @@ trait Pattern {
implicit def long2Constant[T](l: Long)(implicit num: NumericOps[T]): Leaf[T] =
const(num.fromDouble(l.toDouble))
}
-} \ No newline at end of file
+}
diff --git a/test/files/run/patmat_unapp_abstype-new.check b/test/files/run/patmat_unapp_abstype-new.check
index 42c54631d2..c5511e9516 100644
--- a/test/files/run/patmat_unapp_abstype-new.check
+++ b/test/files/run/patmat_unapp_abstype-new.check
@@ -1,3 +1,9 @@
+patmat_unapp_abstype-new.scala:21: warning: abstract type pattern TypesUser.this.TypeRef is unchecked since it is eliminated by erasure
+ case TypeRef(x) => println("TypeRef")
+ ^
+patmat_unapp_abstype-new.scala:53: warning: abstract type pattern Intermed.this.Foo is unchecked since it is eliminated by erasure
+ case Foo(x) => println("Foo")
+ ^
TypeRef
MethodType
Bar
diff --git a/test/files/run/patmatnew.check b/test/files/run/patmatnew.check
index e69de29bb2..56b8ac2f4f 100644
--- a/test/files/run/patmatnew.check
+++ b/test/files/run/patmatnew.check
@@ -0,0 +1,15 @@
+patmatnew.scala:351: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ case 1 => "OK"
+ ^
+patmatnew.scala:352: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ case 2 => assert(false); "KO"
+ ^
+patmatnew.scala:353: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ case 3 => assert(false); "KO"
+ ^
+patmatnew.scala:670: warning: This catches all Throwables. If this is really intended, use `case e : Throwable` to clear this warning.
+ case e => {
+ ^
+patmatnew.scala:489: warning: unreachable code
+ case _ if false =>
+ ^
diff --git a/test/files/run/patmatnew.scala b/test/files/run/patmatnew.scala
index a6f8199457..fb49e0c924 100644
--- a/test/files/run/patmatnew.scala
+++ b/test/files/run/patmatnew.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ postfixOps }
+
object Test {
def main(args: Array[String]) {
diff --git a/test/files/run/pc-conversions.scala b/test/files/run/pc-conversions.scala
index 60ee59c3c4..b1ef3d963e 100644
--- a/test/files/run/pc-conversions.scala
+++ b/test/files/run/pc-conversions.scala
@@ -1,4 +1,6 @@
-
+/*
+ * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ */
import collection._
diff --git a/test/files/run/pf-catch.scala b/test/files/run/pf-catch.scala
index ba0781fe89..dfe43dc750 100644
--- a/test/files/run/pf-catch.scala
+++ b/test/files/run/pf-catch.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ postfixOps }
object Test {
def shortName(x: AnyRef) = x.getClass.getName split '.' last
type Handler[+T] = PartialFunction[Throwable, T]
@@ -27,7 +29,7 @@ object Test {
def main(args: Array[String]): Unit = {
try f1
- catch { case x => println(shortName(x) + " slipped by.") }
+ catch { case x: Throwable => println(shortName(x) + " slipped by.") }
f2
}
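
Annotating the catch pattern with a type makes the handler's scope explicit instead of silently catching everything; NonFatal is a stricter alternative. A minimal sketch of both forms (names are illustrative):

    object CatchSketch {
      import scala.util.control.NonFatal

      def shortName(x: AnyRef) = x.getClass.getName.split('.').last

      // Mirrors the change above: the pattern carries an explicit type instead of
      // the old bare `case x =>`.
      def report(body: => Unit): Unit =
        try body
        catch { case x: Throwable => println(shortName(x) + " slipped by.") }

      // NonFatal skips control-flow and fatal throwables.
      def reportNonFatal(body: => Unit): Unit =
        try body
        catch { case NonFatal(x) => println(shortName(x) + " slipped by.") }

      def main(args: Array[String]): Unit = {
        report(sys.error("boom")) // RuntimeException slipped by.
        reportNonFatal(1 / 0)     // ArithmeticException slipped by.
      }
    }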
diff --git a/test/files/run/preinits.check b/test/files/run/preinits.check
index 5584ab261e..e97a14b77f 100644
--- a/test/files/run/preinits.check
+++ b/test/files/run/preinits.check
@@ -1,3 +1,9 @@
+preinits.scala:2: warning: Implementation restriction: early definitions in traits are not initialized before the super class is initialized.
+trait B extends { override val x = 1 } with A { println("B") }
+ ^
+preinits.scala:3: warning: Implementation restriction: early definitions in traits are not initialized before the super class is initialized.
+trait C extends { override val x = 2 } with A
+ ^
A
B
2
diff --git a/test/files/run/primitive-sigs-2-new.scala b/test/files/run/primitive-sigs-2-new.scala
index cf6de9c81b..1f39667b18 100644
--- a/test/files/run/primitive-sigs-2-new.scala
+++ b/test/files/run/primitive-sigs-2-new.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ postfixOps }
import scala.reflect.{ClassTag, classTag}
import java.{ lang => jl }
@@ -29,4 +31,4 @@ object Test {
println(new C f)
c3m.sorted foreach println
}
-} \ No newline at end of file
+}
diff --git a/test/files/run/primitive-sigs-2-old.scala b/test/files/run/primitive-sigs-2-old.scala
index b7152f7e3d..0a4be93f22 100644
--- a/test/files/run/primitive-sigs-2-old.scala
+++ b/test/files/run/primitive-sigs-2-old.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ postfixOps }
import java.{ lang => jl }
trait T[A] {
diff --git a/test/files/run/private-inline.check b/test/files/run/private-inline.check
index 209e3ef4b6..eee7f2973b 100644
--- a/test/files/run/private-inline.check
+++ b/test/files/run/private-inline.check
@@ -1 +1,7 @@
+private-inline.scala:24: warning: Could not inline required method wrapper1 because callee contains exception handlers / finally clause, and is invoked with non-empty operand stack.
+ def f1b() = identity(wrapper1(5))
+ ^
+private-inline.scala:29: warning: Could not inline required method wrapper2 because callee contains exception handlers / finally clause, and is invoked with non-empty operand stack.
+ def f2b() = identity(wrapper2(5))
+ ^
20
diff --git a/test/files/run/private-inline.flags b/test/files/run/private-inline.flags
index eb4d19bcb9..00d3643fd4 100644
--- a/test/files/run/private-inline.flags
+++ b/test/files/run/private-inline.flags
@@ -1 +1 @@
--optimise \ No newline at end of file
+-optimise -Yinline-warnings
diff --git a/test/files/run/private-inline.scala b/test/files/run/private-inline.scala
index a62007779c..72cabaeff0 100644
--- a/test/files/run/private-inline.scala
+++ b/test/files/run/private-inline.scala
@@ -43,7 +43,7 @@ object Test {
val foundClass = (
try Class.forName(clazz)
- catch { case _ => null }
+ catch { case _: Throwable => null }
)
assert(foundClass == null, foundClass)
diff --git a/test/files/run/programmatic-main.scala b/test/files/run/programmatic-main.scala
index 7bc5c5dfcf..08335ca6f6 100644
--- a/test/files/run/programmatic-main.scala
+++ b/test/files/run/programmatic-main.scala
@@ -1,3 +1,5 @@
+
+import scala.language.postfixOps
import scala.tools.nsc._
import io.Path
diff --git a/test/files/run/range.scala b/test/files/run/range.scala
index f08b2105d3..b81e67921a 100644
--- a/test/files/run/range.scala
+++ b/test/files/run/range.scala
@@ -16,6 +16,17 @@ object Test {
catch { case _: IllegalArgumentException => true }
)
assert(caught)
+ // #7432
+ val noElemAtMin = (
+ try { (10 until 10).min ; false }
+ catch { case _: NoSuchElementException => true }
+ )
+ assert(noElemAtMin)
+ val noElemAtMax = (
+ try { (10 until 10).max ; false }
+ catch { case _: NoSuchElementException => true }
+ )
+ assert(noElemAtMax)
}
case class GR[T](val x: T)(implicit val num: Integral[T]) {
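
The new assertions pin down that min and max on an empty range throw NoSuchElementException (SI-7432). A non-throwing alternative for possibly-empty ranges, as a minimal sketch:

    object EmptyRangeSketch {
      // reduceOption returns None instead of throwing when the range is empty.
      def safeMin(r: Range): Option[Int] = r.reduceOption(_ min _)
      def safeMax(r: Range): Option[Int] = r.reduceOption(_ max _)

      def main(args: Array[String]): Unit = {
        println(safeMin(10 until 10)) // None
        println(safeMax(1 to 5))      // Some(5)
      }
    }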
diff --git a/test/files/run/records.scala b/test/files/run/records.scala
index 96b0b4cb0f..bae0d943ff 100644
--- a/test/files/run/records.scala
+++ b/test/files/run/records.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ reflectiveCalls }
+
trait C {
def f: Int
}
diff --git a/test/files/run/reflection-allmirrors-tostring.scala b/test/files/run/reflection-allmirrors-tostring.scala
index 0ca387a6b1..41bab5ac0e 100644
--- a/test/files/run/reflection-allmirrors-tostring.scala
+++ b/test/files/run/reflection-allmirrors-tostring.scala
@@ -1,3 +1,5 @@
+
+import scala.language.higherKinds
import scala.reflect.runtime.universe._
class C {
@@ -40,4 +42,4 @@ object Test extends App {
val cc = typeOf[C].member(TypeName("C")).asClass
println(cm.reflectClass(c).reflectConstructor(c.typeSignature.member(nme.CONSTRUCTOR).asMethod))
println(im.reflectClass(cc).reflectConstructor(cc.typeSignature.member(nme.CONSTRUCTOR).asMethod))
-} \ No newline at end of file
+}
diff --git a/test/files/run/reflection-fieldmirror-getsetval.check b/test/files/run/reflection-fieldmirror-getsetval.check
index 82fef37c25..1e959a9900 100644
--- a/test/files/run/reflection-fieldmirror-getsetval.check
+++ b/test/files/run/reflection-fieldmirror-getsetval.check
@@ -1,2 +1,2 @@
42
-cannot set an immutable field x
+2
diff --git a/test/files/run/reflection-fieldmirror-getsetval.scala b/test/files/run/reflection-fieldmirror-getsetval.scala
index 9cacb7080b..6a88dc3118 100644
--- a/test/files/run/reflection-fieldmirror-getsetval.scala
+++ b/test/files/run/reflection-fieldmirror-getsetval.scala
@@ -12,13 +12,7 @@ object Test extends App {
val cs = im.symbol
val f = cs.typeSignature.declaration(TermName("x" + nme.LOCAL_SUFFIX_STRING)).asTerm
val fm: FieldMirror = im.reflectField(f)
- try {
- println(fm.get)
- fm.set(2)
- println(fm.get)
- println("this indicates a failure")
- } catch {
- case ex: Throwable =>
- println(ex.getMessage)
- }
+ println(fm.get)
+ fm.set(2)
+ println(fm.get)
}
diff --git a/test/files/run/reflection-implicit.scala b/test/files/run/reflection-implicit.scala
index 0bcb0bc3a0..f2b3ba960c 100644
--- a/test/files/run/reflection-implicit.scala
+++ b/test/files/run/reflection-implicit.scala
@@ -1,3 +1,5 @@
+
+import scala.language.implicitConversions
import scala.reflect.runtime.universe._
class C {
@@ -12,4 +14,4 @@ object Test extends App {
val param = decls.find(_.name.toString == "d").get.asMethod.paramss.last.head
param.typeSignature
println(param.isImplicit)
-} \ No newline at end of file
+}
diff --git a/test/files/run/reflection-magicsymbols-vanilla.scala b/test/files/run/reflection-magicsymbols-vanilla.scala
index 26b70460eb..2bde3d8874 100644
--- a/test/files/run/reflection-magicsymbols-vanilla.scala
+++ b/test/files/run/reflection-magicsymbols-vanilla.scala
@@ -1,3 +1,5 @@
+import scala.language.postfixOps
+
class A {
def foo1(x: Int*) = ???
def foo2(x: => Int) = ???
diff --git a/test/files/run/reify-each-node-type.check b/test/files/run/reify-each-node-type.check
new file mode 100644
index 0000000000..afc65add7a
--- /dev/null
+++ b/test/files/run/reify-each-node-type.check
@@ -0,0 +1,35 @@
+ 1 s Ident
+ 2 r.List Select
+ 3 r.List.apply() Apply
+ 4 r.List.apply(1) Literal
+ 5 r.List.apply[Int]() TypeApply
+ 6 (1: Int) Typed
+ 7 (null: r.List[Int]) AppliedTypeTree
+ 8 { (); () } Block
+ 9 { val x: Int = 0; () } ValDef
+10 { val x = 0; () } TypeTree
+11 if (true) () else () If
+12 { def f: Unit = (); () } DefDef
+13 { def m = NN.super.q; () } Super
+14 { abstract trait A extends AnyRef; () } ClassDef Template
+15 { def f(x: Any): Unit = (); () } EmptyTree
+16 (null: r.D with r.E) CompoundTypeTree
+17 { type T = Int; () } TypeDef
+18 { type CC[T <: r.D] = r.C[T]; () } TypeBoundsTree
+19 try { 0 } finally Predef.println("") Try
+20 ((x: Int) => x) Function
+21 { var v = 1; v = 2 } Assign
+22 { class A extends AnyRef { def <init>() = { super.<init>(); This
+23 new r.List[Int]() New
+24 0: @unchecked Annotated
+25 (null: r.Outer#Inner) SelectFromTypeTree
+26 (null: Nil.type) SingletonTypeTree
+27 (null: T forSome { type T }) ExistentialTypeTree
+28 { import r.{A, B=>C}; () } Import
+29 { def f: Int = return 0; () } Return
+30 { object x extends AnyRef { def <init>() = { super.<init>(); ModuleDef
+31 throw new Exception() Throw
+32 0 match { case _ => 0 } Match CaseDef
+33 0 match { case (1| 2) => 0 } Alternative
+34 NN.q match { case (x @ r.List) => 0 } Bind
+35 NN.q match { case r.UnSeq(1, (_)*) => 0 } Star
diff --git a/test/files/run/reify-each-node-type.scala b/test/files/run/reify-each-node-type.scala
new file mode 100644
index 0000000000..425061f955
--- /dev/null
+++ b/test/files/run/reify-each-node-type.scala
@@ -0,0 +1,110 @@
+
+import scala.language.{ existentials, postfixOps }
+import scala.reflect.runtime.universe._
+
+object r {
+ class A
+ class B
+ class List[+A]
+ object List { def apply[A](xs: A*): List[A] = new List[A] }
+ object Nil extends List[Nothing]
+
+ trait OuterP[A] {
+ trait Inner
+ trait InnerP[B]
+ }
+ trait Outer {
+ trait Inner
+ trait InnerP[B]
+ }
+ object Un { def unapply(x: Any) = Some(5) }
+ object UnSeq { def unapplySeq(x: Any) = Some(Seq(5)) }
+ class C[T]
+ class D
+ trait E
+
+ trait SN {
+ def q: Any = null
+ }
+}
+
+object s {
+ import r._
+
+ trait NN extends SN {
+ def act[T](expr: Expr[T]): Unit
+
+ act(reify { s /* Ident */ })
+ act(reify { r.List /* Select */ })
+ act(reify { List() /* Apply */ })
+ act(reify { List(1) /* Literal */ })
+ act(reify { List[Int]() /* TypeApply */ })
+ act(reify { 1: Int /* Typed */ })
+ act(reify { null: List[Int] /* AppliedTypeTree */ })
+ act(reify { () ; () /* Block */ })
+ act(reify { val x: Int = 0 /* ValDef */ })
+ act(reify { val x = 0 /* TypeTree */ })
+ act(reify { if (true) () /* If */ })
+ act(reify { def f { } /* DefDef */ })
+ act(reify { def m = super.q /* Super */ })
+ act(reify { trait A /* ClassDef Template */ })
+ act(reify { def f(x: Any) { } /* EmptyTree */ })
+ act(reify { null: D with E /* CompoundTypeTree */ })
+ act(reify { type T = Int /* TypeDef */ })
+ act(reify { type CC[T <: D] = C[T] /* TypeBoundsTree */ })
+ act(reify { try 0 finally println("") /* Try */ })
+ act(reify { (x: Int) => x /* Function */ })
+ act(reify { var v = 1 ; v = 2 /* Assign */ })
+ act(reify { class A() { def this(x: A) = this() } /* This */ })
+ act(reify { new List[Int] /* New */ })
+ act(reify { 0: @unchecked /* Annotated */ })
+ act(reify { null: Outer#Inner /* SelectFromTypeTree */ })
+ act(reify { null: Nil.type /* SingletonTypeTree */ })
+ act(reify { null: (T forSome { type T }) /* ExistentialTypeTree */ })
+ act(reify { import r.{ A, B => C }; /* Import */ })
+ act(reify { def f: Int = return 0 /* Return */ })
+ act(reify { object x /* ModuleDef */ })
+ act(reify { throw new java.lang.Exception /* Throw */ })
+ act(reify { 0 match { case _ => 0 } /* Match CaseDef */ })
+ act(reify { 0 match { case 1 | 2 => 0 } /* Alternative */ })
+ act(reify { q match { case x @ List => 0 } /* Bind */ })
+ act(reify { q match { case UnSeq(1, _*) => 0 } /* Star */ })
+
+ // ``unexpected: bound type that doesn't have a tpe: Ident(newTypeName("Int"))''
+ // act(reify { r.List[T forSome { type T <: Int }]() }) // Was crashing , no longer
+ //
+ // error: exception during macro expansion:
+ // scala.MatchError: collection.this.Seq.unapplySeq[A] (of class scala.reflect.internal.Trees$TypeApply)
+ // at scala.reflect.reify.phases.Reshape$$anon$1.extractExtractor$1(Reshape.scala:73)
+ // at scala.reflect.reify.phases.Reshape$$anon$1.transform(Reshape.scala:82)
+ // at scala.reflect.reify.phases.Reshape$$anon$1.transform(Reshape.scala:24)
+ // at scala.reflect.internal.Trees$class.itransform(Trees.scala:1290)
+ //
+ // act(reify { r.List[Any]() match { case Seq(1, _*) => 1 } } )
+
+ // act(reify { List[OuterP[Int]#InnerP[Byte]]() })
+ //
+ // SI-7243
+ //
+ // test/files/run/reify-each-node-type.scala:85: error: Cannot materialize r.List.apply[r.OuterP[Int]#InnerP[Byte]]() as { ... } because:
+ // scala.reflect.macros.TypecheckException: value TypeTreeWithDeferredRefCheck is not a member of type parameter U
+ // act(reify { List[OuterP[Int]#InnerP[Byte]]() })
+ // ^
+ // one error found
+ }
+}
+
+object Test {
+ var idx = 0
+ val seen = scala.collection.mutable.Set[String]()
+
+ object N extends s.NN {
+ def act[T](expr: Expr[T]): Unit = {
+ idx += 1
+ val ts = expr.tree filter (_ => true) map (_.getClass.getName split "[.$]" last) filterNot seen distinct;
+ println("%2d %60s %s".format(idx, expr.tree.toString.replaceAll("""\s+""", " ").take(60), ts mkString " "))
+ seen ++= ts
+ }
+ }
+ def main(args: Array[String]): Unit = N
+}
diff --git a/test/files/run/reify-repl-fail-gracefully.check b/test/files/run/reify-repl-fail-gracefully.check
index 1b0f3f2162..18cfd5a7ef 100644
--- a/test/files/run/reify-repl-fail-gracefully.check
+++ b/test/files/run/reify-repl-fail-gracefully.check
@@ -12,7 +12,7 @@ import scala.reflect.runtime.universe._
scala>
scala> reify
-<console>:12: error: macros cannot be partially applied
+<console>:12: error: too few argument lists for macro invocation
reify
^
diff --git a/test/files/run/reify_ann1a.check b/test/files/run/reify_ann1a.check
index 99a966f38b..71841ff83b 100644
--- a/test/files/run/reify_ann1a.check
+++ b/test/files/run/reify_ann1a.check
@@ -1,5 +1,5 @@
{
- @new ann(List.apply("1a")) @new ann(List.apply("1b")) class C[@new ann(List.apply("2a")) @new ann(List.apply("2b")) T >: Nothing <: Any] extends AnyRef {
+ @new ann(List.apply("1a")) @new ann(List.apply("1b")) class C[@new ann(List.apply("2a")) @new ann(List.apply("2b")) T] extends AnyRef {
@new ann(List.apply("3a")) @new ann(List.apply("3b")) <paramaccessor> private[this] val x: T @ann(List.apply("4a")) @ann(List.apply("4b")) = _;
def <init>(@new ann(List.apply("3a")) @new ann(List.apply("3b")) x: T @ann(List.apply("4a")) @ann(List.apply("4b"))) = {
super.<init>();
diff --git a/test/files/run/reify_ann1b.check b/test/files/run/reify_ann1b.check
index 6a5f32a492..a046dafeab 100644
--- a/test/files/run/reify_ann1b.check
+++ b/test/files/run/reify_ann1b.check
@@ -1,5 +1,10 @@
+reify_ann1b.scala:6: warning: Implementation restriction: subclassing Classfile does not
+make your annotation visible at runtime. If that is what
+you want, you must write the annotation class in Java.
+class ann(bar: String) extends annotation.ClassfileAnnotation
+ ^
{
- @new ann(bar = "1a") @new ann(bar = "1b") class C[@new ann(bar = "2a") @new ann(bar = "2b") T >: Nothing <: Any] extends AnyRef {
+ @new ann(bar = "1a") @new ann(bar = "1b") class C[@new ann(bar = "2a") @new ann(bar = "2b") T] extends AnyRef {
@new ann(bar = "3a") @new ann(bar = "3b") <paramaccessor> private[this] val x: T @ann(bar = "4a") @ann(bar = "4b") = _;
def <init>(@new ann(bar = "3a") @new ann(bar = "3b") x: T @ann(bar = "4a") @ann(bar = "4b")) = {
super.<init>();
diff --git a/test/files/run/reify_ann2a.check b/test/files/run/reify_ann2a.check
index ccbcb4c31e..a26fa42045 100644
--- a/test/files/run/reify_ann2a.check
+++ b/test/files/run/reify_ann2a.check
@@ -6,7 +6,7 @@
()
}
};
- @new ann(List.apply("1a")) @new ann(List.apply("1b")) class C[@new ann(List.apply("2a")) @new ann(List.apply("2b")) T >: Nothing <: Any] extends AnyRef {
+ @new ann(List.apply("1a")) @new ann(List.apply("1b")) class C[@new ann(List.apply("2a")) @new ann(List.apply("2b")) T] extends AnyRef {
@new ann(List.apply("3a")) @new ann(List.apply("3b")) <paramaccessor> private[this] val x: T @ann(List.apply("4a")) @ann(List.apply("4b")) = _;
def <init>(@new ann(List.apply("3a")) @new ann(List.apply("3b")) x: T @ann(List.apply("4a")) @ann(List.apply("4b"))) = {
super.<init>();
diff --git a/test/files/run/reify_ann3.check b/test/files/run/reify_ann3.check
index 8caceb2696..d4cf660758 100644
--- a/test/files/run/reify_ann3.check
+++ b/test/files/run/reify_ann3.check
@@ -1,5 +1,5 @@
{
- class Tree[A >: Nothing <: Any, B >: Nothing <: Any] extends AnyRef {
+ class Tree[A, B] extends AnyRef {
@new inline @getter() final <paramaccessor> val key: A = _;
def <init>(key: A) = {
super.<init>();
diff --git a/test/files/run/reify_classfileann_a.check b/test/files/run/reify_classfileann_a.check
index 0c919020a8..51f255b232 100644
--- a/test/files/run/reify_classfileann_a.check
+++ b/test/files/run/reify_classfileann_a.check
@@ -1,3 +1,8 @@
+reify_classfileann_a.scala:6: warning: Implementation restriction: subclassing Classfile does not
+make your annotation visible at runtime. If that is what
+you want, you must write the annotation class in Java.
+class ann(bar: String, quux: Array[String] = Array(), baz: ann = null) extends annotation.ClassfileAnnotation
+ ^
{
@new ann(bar = "1", quux = Array("2", "3"), baz = new ann(bar = "4")) class C extends AnyRef {
def <init>() = {
diff --git a/test/files/run/reify_classfileann_b.check b/test/files/run/reify_classfileann_b.check
index c204fa8dc0..05f2e5bfc6 100644
--- a/test/files/run/reify_classfileann_b.check
+++ b/test/files/run/reify_classfileann_b.check
@@ -1,3 +1,8 @@
+reify_classfileann_b.scala:6: warning: Implementation restriction: subclassing Classfile does not
+make your annotation visible at runtime. If that is what
+you want, you must write the annotation class in Java.
+class ann(bar: String, quux: Array[String] = Array(), baz: ann = null) extends annotation.ClassfileAnnotation
+ ^
{
class C extends AnyRef {
def <init>() = {
diff --git a/test/files/run/reify_extendbuiltins.scala b/test/files/run/reify_extendbuiltins.scala
index a2d546579d..46d5b7e55e 100644
--- a/test/files/run/reify_extendbuiltins.scala
+++ b/test/files/run/reify_extendbuiltins.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ implicitConversions, postfixOps }
import scala.reflect.runtime.universe._
import scala.tools.reflect.Eval
@@ -12,4 +14,4 @@ object Test extends App {
println("10! = " + (10!))
}.eval
-} \ No newline at end of file
+}
diff --git a/test/files/run/reify_implicits-new.scala b/test/files/run/reify_implicits-new.scala
index 42a1deef26..1d90d907ea 100644
--- a/test/files/run/reify_implicits-new.scala
+++ b/test/files/run/reify_implicits-new.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ implicitConversions, reflectiveCalls }
import scala.reflect.{ClassTag, classTag}
import scala.reflect.runtime.universe._
import scala.tools.reflect.Eval
@@ -13,4 +15,4 @@ object Test extends App {
val x = Array(2, 3, 1, 4)
println("x = "+ x.sort((x: Int, y: Int) => x < y).toList)
}.eval
-} \ No newline at end of file
+}
diff --git a/test/files/run/reify_implicits-old.scala b/test/files/run/reify_implicits-old.scala
index 8ff256d2d4..a4e90488e5 100644
--- a/test/files/run/reify_implicits-old.scala
+++ b/test/files/run/reify_implicits-old.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ implicitConversions, reflectiveCalls }
import scala.reflect.runtime.universe._
import scala.tools.reflect.Eval
@@ -12,4 +14,4 @@ object Test extends App {
val x = Array(2, 3, 1, 4)
println("x = "+ x.sort((x: Int, y: Int) => x < y).toList)
}.eval
-} \ No newline at end of file
+}
diff --git a/test/files/run/reify_lazyevaluation.scala b/test/files/run/reify_lazyevaluation.scala
index 5b310d95f7..3f2530ddee 100644
--- a/test/files/run/reify_lazyevaluation.scala
+++ b/test/files/run/reify_lazyevaluation.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ implicitConversions }
import scala.reflect.runtime.universe._
import scala.tools.reflect.Eval
diff --git a/test/files/run/reify_lazyunit.check b/test/files/run/reify_lazyunit.check
index 1b46c909be..579ecfe8aa 100644
--- a/test/files/run/reify_lazyunit.check
+++ b/test/files/run/reify_lazyunit.check
@@ -1,3 +1,6 @@
+reify_lazyunit.scala:6: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ lazy val x = { 0; println("12")}
+ ^
12
one
two
diff --git a/test/files/run/reify_printf.check b/test/files/run/reify_printf.check
index e69de29bb2..3b18e512db 100644
--- a/test/files/run/reify_printf.check
+++ b/test/files/run/reify_printf.check
@@ -0,0 +1 @@
+hello world
diff --git a/test/files/run/reify_printf.scala b/test/files/run/reify_printf.scala
index 9932a58dfa..c4ade79837 100644
--- a/test/files/run/reify_printf.scala
+++ b/test/files/run/reify_printf.scala
@@ -9,13 +9,13 @@ import scala.reflect.internal.Types
import scala.util.matching.Regex
object Test extends App {
- val output = new ByteArrayOutputStream()
- Console.setOut(new PrintStream(output))
+ //val output = new ByteArrayOutputStream()
+ //Console.setOut(new PrintStream(output))
val toolbox = cm.mkToolBox()
val tree = tree_printf(reify("hello %s").tree, reify("world").tree)
val evaluated = toolbox.eval(tree)
- assert(output.toString() == "hello world", output.toString() +" == hello world")
+ //assert(output.toString() == "hello world", output.toString() +" == hello world")
/*
// upd. Oh, good old times, our very-very first experiments with macros :)
@@ -68,4 +68,4 @@ object Test extends App {
): Tree
Block((evals ++ prints).toList, Literal(Constant(())))
}
-} \ No newline at end of file
+}
diff --git a/test/files/run/reify_this.scala b/test/files/run/reify_this.scala
index ecbf394bba..c385da6360 100644
--- a/test/files/run/reify_this.scala
+++ b/test/files/run/reify_this.scala
@@ -1,11 +1,11 @@
import scala.reflect.runtime.universe._
import scala.tools.reflect.Eval
-trait Eval {
+trait Transvaal {
def eval(tree: Expr[_]) = tree.eval
}
-object Test extends App with Eval {
+object Test extends App with Transvaal {
// select a value from package
eval(reify{println("foo")})
eval(reify{println((new Object).toString == (new Object).toString)})
@@ -17,4 +17,4 @@ object Test extends App with Eval {
// select a value from module
val x = 2
eval(reify{println(x)})
-} \ No newline at end of file
+}
diff --git a/test/files/run/repl-backticks.scala b/test/files/run/repl-backticks.scala
index 5eaa1ec4c1..ec2691d9c5 100644
--- a/test/files/run/repl-backticks.scala
+++ b/test/files/run/repl-backticks.scala
@@ -8,7 +8,7 @@ object Test {
`yield`
</code>.text
- def main(args: Array[String]) = {
+ def main(args: Array[String]) {
val settings = new Settings()
settings.classpath.value = System.getProperty("java.class.path")
val repl = new interpreter.IMain(settings)
diff --git a/test/files/run/repl-colon-type.check b/test/files/run/repl-colon-type.check
index 27be3eb67d..002316fd54 100644
--- a/test/files/run/repl-colon-type.check
+++ b/test/files/run/repl-colon-type.check
@@ -79,7 +79,7 @@ TypeRef(
)
TypeRef(
TypeSymbol(
- sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]]
+ sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]] with Serializable
)
args = List(
@@ -146,7 +146,7 @@ TypeRef(
args = List(
TypeRef(
TypeSymbol(
- sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]]
+ sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]] with Serializable
)
args = List(
@@ -179,7 +179,7 @@ PolyType(
args = List(
TypeRef(
TypeSymbol(
- sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]]
+ sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]] with Serializable
)
args = List(TypeParamTypeRef(TypeParam(T <: AnyVal)))
@@ -202,7 +202,7 @@ PolyType(
params = List(TermSymbol(x: T), TermSymbol(y: List[U]))
resultType = TypeRef(
TypeSymbol(
- sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]]
+ sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]] with Serializable
)
args = List(TypeParamTypeRef(TypeParam(U >: T)))
diff --git a/test/files/run/repl-power.check b/test/files/run/repl-power.check
index 9d63ecde94..1abb8e9d14 100644
--- a/test/files/run/repl-power.check
+++ b/test/files/run/repl-power.check
@@ -14,6 +14,7 @@ scala> global.emptyValDef // "it is imported twice in the same scope by ..."
res0: $r.global.emptyValDef.type = private val _ = _
scala> val tp = ArrayClass[scala.util.Random] // magic with tags
+warning: there were 1 feature warning(s); re-run with -feature for details
tp: $r.global.Type = Array[scala.util.Random]
scala> tp.memberType(Array_apply) // evidence
diff --git a/test/files/run/repl-term-macros.check b/test/files/run/repl-term-macros.check
new file mode 100644
index 0000000000..eae489c294
--- /dev/null
+++ b/test/files/run/repl-term-macros.check
@@ -0,0 +1,44 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import scala.reflect.macros.Context
+import scala.reflect.macros.Context
+
+scala> import language.experimental.macros
+import language.experimental.macros
+
+scala>
+
+scala> def impl1(c: Context) = c.literalUnit
+impl1: (c: scala.reflect.macros.Context)c.Expr[Unit]
+
+scala> def foo1 = macro impl1
+defined term macro foo1: Unit
+
+scala> foo1
+
+scala>
+
+scala> def impl2(c: Context)() = c.literalUnit
+impl2: (c: scala.reflect.macros.Context)()c.Expr[Unit]
+
+scala> def foo2() = macro impl2
+defined term macro foo2: ()Unit
+
+scala> foo2()
+
+scala>
+
+scala> def impl3(c: Context)(x: c.Expr[Int])(y: c.Expr[Int]) = c.literalUnit
+impl3: (c: scala.reflect.macros.Context)(x: c.Expr[Int])(y: c.Expr[Int])c.Expr[Unit]
+
+scala> def foo3(x: Int)(y: Int) = macro impl3
+defined term macro foo3: (x: Int)(y: Int)Unit
+
+scala> foo3(2)(3)
+
+scala>
+
+scala>
diff --git a/test/files/run/repl-term-macros.scala b/test/files/run/repl-term-macros.scala
new file mode 100644
index 0000000000..f826259be9
--- /dev/null
+++ b/test/files/run/repl-term-macros.scala
@@ -0,0 +1,20 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ import scala.reflect.macros.Context
+import language.experimental.macros
+
+def impl1(c: Context) = c.literalUnit
+def foo1 = macro impl1
+foo1
+
+def impl2(c: Context)() = c.literalUnit
+def foo2() = macro impl2
+foo2()
+
+def impl3(c: Context)(x: c.Expr[Int])(y: c.Expr[Int]) = c.literalUnit
+def foo3(x: Int)(y: Int) = macro impl3
+foo3(2)(3)
+ """
+} \ No newline at end of file
diff --git a/test/files/run/resetattrs-this.scala b/test/files/run/resetattrs-this.scala
index 12afa3d712..2a55437273 100644
--- a/test/files/run/resetattrs-this.scala
+++ b/test/files/run/resetattrs-this.scala
@@ -4,8 +4,8 @@ import scala.tools.reflect.ToolBox
object Test extends App {
val tb = cm.mkToolBox()
- val tree = Select(This(cm.staticPackage("scala").moduleClass), newTermName("Predef"))
+ val tree = Select(This(cm.staticPackage("scala").moduleClass), TermName("Predef"))
val ttree = tb.typeCheck(tree)
val rttree = tb.resetAllAttrs(ttree)
println(tb.eval(rttree) == Predef)
-} \ No newline at end of file
+}
diff --git a/test/files/run/richs.scala b/test/files/run/richs.scala
index 5ee573673d..4b5345757c 100644
--- a/test/files/run/richs.scala
+++ b/test/files/run/richs.scala
@@ -11,8 +11,8 @@ trait RichTest {
val cn = this.getClass().getName()
cn.substring(0, cn.length-1)
}
- def length[A](it: Iterator[A]) = it.toList length
- def length[A](it: Iterable[A]) = it.toList length
+ def length[A](it: Iterator[A]) = it.toList.length
+ def length[A](it: Iterable[A]) = it.toList.length
def run: Unit
}
object RichCharTest1 extends RichTest {
diff --git a/test/files/run/runtime.check b/test/files/run/runtime.check
index 990a087da0..d613c9bd42 100644
--- a/test/files/run/runtime.check
+++ b/test/files/run/runtime.check
@@ -1,3 +1,9 @@
+runtime.scala:141: warning: comparing values of types Null and Null using `eq' will always yield true
+ check(true , null eq null, null ne null);
+ ^
+runtime.scala:141: warning: comparing values of types Null and Null using `ne' will always yield false
+ check(true , null eq null, null ne null);
+ ^
<<< Test0
[false,true]
[0,1,2]
diff --git a/test/files/run/runtime.scala b/test/files/run/runtime.scala
index a2ac204e8a..89348b294d 100644
--- a/test/files/run/runtime.scala
+++ b/test/files/run/runtime.scala
@@ -171,7 +171,7 @@ object Test {
try {
test;
} catch {
- case exception => {
+ case exception: Throwable => {
//val name: String = Thread.currentThread().getName();
Console.print("Exception in thread \"" + name + "\" " + exception);
Console.println;
diff --git a/test/files/run/sequenceComparisons.scala b/test/files/run/sequenceComparisons.scala
index 5d7958bc7e..f8ef17d77d 100644
--- a/test/files/run/sequenceComparisons.scala
+++ b/test/files/run/sequenceComparisons.scala
@@ -82,7 +82,7 @@ object Test {
val sameElementsInputs = (
List(List(1,2,3,4,5)),
- List(Nil, List(1), List(1,2), List(2,3,4), List(2,3,4,5), List(2,3,4,5,1), List(1,2,3,5,4), seq reverse)
+ List(Nil, List(1), List(1,2), List(2,3,4), List(2,3,4,5), List(2,3,4,5,1), List(1,2,3,5,4), seq.reverse)
)
}
diff --git a/test/files/run/settings-parse.scala b/test/files/run/settings-parse.scala
index 2b04f55b24..2754feb972 100644
--- a/test/files/run/settings-parse.scala
+++ b/test/files/run/settings-parse.scala
@@ -1,3 +1,5 @@
+
+import scala.language.postfixOps
import scala.tools.nsc._
object Test {
diff --git a/test/files/run/si5045.scala b/test/files/run/si5045.scala
index e198b101f3..b0c3a4ddc4 100644
--- a/test/files/run/si5045.scala
+++ b/test/files/run/si5045.scala
@@ -1,3 +1,6 @@
+
+import scala.language.postfixOps
+
object Test extends App {
import scala.util.matching.{ Regex, UnanchoredRegex }
diff --git a/test/files/run/slices.scala b/test/files/run/slices.scala
index e31ea4046f..107b8e658a 100644
--- a/test/files/run/slices.scala
+++ b/test/files/run/slices.scala
@@ -1,3 +1,6 @@
+
+import scala.language.postfixOps
+
object Test extends App {
// lists
diff --git a/test/files/run/spec-nlreturn.scala b/test/files/run/spec-nlreturn.scala
index ec5e7229d3..a84160ecca 100644
--- a/test/files/run/spec-nlreturn.scala
+++ b/test/files/run/spec-nlreturn.scala
@@ -1,7 +1,8 @@
+
object Test {
def f(): Int = {
try {
- val g = 1 to 10 map { i => return 16 ; i } sum;
+ val g = (1 to 10 map { i => return 16 ; i }).sum
g
}
catch { case x: runtime.NonLocalReturnControl[_] =>
diff --git a/test/files/run/stream_flatmap_odds.scala b/test/files/run/stream_flatmap_odds.scala
index 6fb202ca68..1935253595 100644
--- a/test/files/run/stream_flatmap_odds.scala
+++ b/test/files/run/stream_flatmap_odds.scala
@@ -1,4 +1,4 @@
object Test extends App {
lazy val odds: Stream[Int] = Stream(1) append ( odds flatMap {x => Stream(x + 2)} )
- println(odds take 42 force)
+ Console println (odds take 42).force
}
diff --git a/test/files/run/stringbuilder-drop.scala b/test/files/run/stringbuilder-drop.scala
index a9e5a71762..422fb2bc7c 100644
--- a/test/files/run/stringbuilder-drop.scala
+++ b/test/files/run/stringbuilder-drop.scala
@@ -1,7 +1,7 @@
object Test {
def main(args: Array[String]): Unit = {
- val s = (new StringBuilder ++= "hello world") dropRight 1 toString;
- assert(s == "hello worl")
+ val s = (new StringBuilder ++= "hello world") dropRight 1
+ assert("" + s == "hello worl")
}
}
diff --git a/test/files/run/stringbuilder.scala b/test/files/run/stringbuilder.scala
index ef85fc02a6..a88ea38b92 100644
--- a/test/files/run/stringbuilder.scala
+++ b/test/files/run/stringbuilder.scala
@@ -1,3 +1,6 @@
+
+import scala.language.reflectiveCalls
+
object Test extends App {
val str = "ABCDEFGHIJKLMABCDEFGHIJKLM"
val surrogateStr = "an old Turkic letter: \uD803\uDC22"
@@ -33,8 +36,8 @@ object Test extends App {
sameAnswers(_.lastIndexOf("KLM", 22))
// testing that the "reverse" implementation avoids reversing surrogate pairs
- val jsb = new JavaStringBuilder(surrogateStr) reverse
- val ssb = new ScalaStringBuilder(surrogateStr) reverseContents ;
+ val jsb = new JavaStringBuilder(surrogateStr).reverse
+ val ssb = new ScalaStringBuilder(surrogateStr).reverseContents
assert(jsb.toString == ssb.toString)
}
diff --git a/test/files/run/stringinterpolation_macro-run.scala b/test/files/run/stringinterpolation_macro-run.scala
index 9c59c334f8..1138cd0860 100644
--- a/test/files/run/stringinterpolation_macro-run.scala
+++ b/test/files/run/stringinterpolation_macro-run.scala
@@ -1,3 +1,6 @@
+/*
+ * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ */
object Test extends App {
// 'b' / 'B' (category: general)
diff --git a/test/files/run/structural.scala b/test/files/run/structural.scala
index 3a703d2cf1..2788717ec2 100644
--- a/test/files/run/structural.scala
+++ b/test/files/run/structural.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ reflectiveCalls }
+
object test1 {
val o1 = new Object { override def toString = "ohone" }
@@ -164,7 +167,7 @@ object test3 {
}
catch {
case e: Exc => println("caught")
- case e => println(e)
+ case e: Throwable => println(e)
}
m(Rec)
diff --git a/test/files/run/synchronized.check b/test/files/run/synchronized.check
index dd9f4ef424..6e99739633 100644
--- a/test/files/run/synchronized.check
+++ b/test/files/run/synchronized.check
@@ -1,3 +1,4 @@
+warning: there were 14 inliner warning(s); re-run with -Yinline-warnings for details
.|. c1.f1: OK
.|. c1.fi: OK
.|... c1.fv: OK
diff --git a/test/files/run/synchronized.flags b/test/files/run/synchronized.flags
index 1182725e86..49d036a887 100644
--- a/test/files/run/synchronized.flags
+++ b/test/files/run/synchronized.flags
@@ -1 +1 @@
--optimize \ No newline at end of file
+-optimize
diff --git a/test/files/run/t0325.scala b/test/files/run/t0325.scala
index 92331ab05f..048303206c 100644
--- a/test/files/run/t0325.scala
+++ b/test/files/run/t0325.scala
@@ -24,7 +24,7 @@ object Test {
else
println(ret)
} catch {
- case e@_ => println(which + " failed with " + e.getClass)
+ case e: Throwable => println(which + " failed with " + e.getClass)
}
}
diff --git a/test/files/run/t0421-old.scala b/test/files/run/t0421-old.scala
index 8d51013924..9b88b4789b 100644
--- a/test/files/run/t0421-old.scala
+++ b/test/files/run/t0421-old.scala
@@ -1,4 +1,6 @@
// ticket #421
+
+@deprecated("Suppress warnings", since="2.11")
object Test extends App {
def transpose[A: ClassManifest](xss: Array[Array[A]]) = {
diff --git a/test/files/run/t0432.scala b/test/files/run/t0432.scala
index 8ba9015d81..b860a0874f 100644
--- a/test/files/run/t0432.scala
+++ b/test/files/run/t0432.scala
@@ -1,3 +1,6 @@
+
+import scala.language.reflectiveCalls
+
object Test {
type valueType = { def value: this.type }
diff --git a/test/files/run/t0528.scala b/test/files/run/t0528.scala
index a76f602ed3..68a9975173 100644
--- a/test/files/run/t0528.scala
+++ b/test/files/run/t0528.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ existentials }
trait Sequ[A] {
def toArray: Array[T forSome {type T <: A}]
}
diff --git a/test/files/run/t0677-old.scala b/test/files/run/t0677-old.scala
index 6c8a3a7e99..c0f1abae53 100644
--- a/test/files/run/t0677-old.scala
+++ b/test/files/run/t0677-old.scala
@@ -1,3 +1,6 @@
+
+
+@deprecated("Suppress warnings", since="2.11")
object Test extends App {
class X[T: ClassManifest] {
val a = Array.ofDim[T](3, 4)
diff --git a/test/files/run/t1005.scala b/test/files/run/t1005.scala
index 60129bcc51..575ec43565 100644
--- a/test/files/run/t1005.scala
+++ b/test/files/run/t1005.scala
@@ -1,19 +1,20 @@
+import scala.language.postfixOps
object Test
{
class Foo[T](x : Array[AnyRef]) { def bar = x.asInstanceOf[Array[T]] }
class Bar[T](x : Array[T]) { def bar = x.asInstanceOf[Array[AnyRef]] }
object FromMono{
- def main(args : Array[String]) = (new Foo[AnyRef](Array[AnyRef]("Halp!"))).bar
+ def mainer(args : Array[String]) = (new Foo[AnyRef](Array[AnyRef]("Halp!"))).bar
}
object FromPoly{
- def main(args : Array[String]) = (new Bar[AnyRef](Array[AnyRef]("Halp!"))).bar
+ def mainer(args : Array[String]) = (new Bar[AnyRef](Array[AnyRef]("Halp!"))).bar
}
def main(args: Array[String]): Unit = {
- println(FromMono main null mkString)
- println(FromPoly main null mkString)
+ println(FromMono mainer null mkString)
+ println(FromPoly mainer null mkString)
}
}
diff --git a/test/files/run/t1110.scala b/test/files/run/t1110.scala
index 824643868c..81917789c8 100644
--- a/test/files/run/t1110.scala
+++ b/test/files/run/t1110.scala
@@ -1,8 +1,12 @@
+
+
+import scala.language.{ reflectiveCalls }
+
class Stuff {
def zoop(p: Any{def &(q: Int): Int}) = p & 7
def floop = new { def & = "Hello" }
- assert((floop &) == "Hello")
+ assert((floop.&) == "Hello")
assert(zoop(10) == 2)
}
diff --git a/test/files/run/t1141.scala b/test/files/run/t1141.scala
index ee4f2e7fd2..b907b4798f 100644
--- a/test/files/run/t1141.scala
+++ b/test/files/run/t1141.scala
@@ -1,3 +1,7 @@
+
+
+import scala.language.reflectiveCalls
+
object Test extends App {
val foo = new {
def apply(args : String*) = args foreach println
diff --git a/test/files/run/t1195-new.scala b/test/files/run/t1195-new.scala
index 0f62b140c9..4f068c7d42 100644
--- a/test/files/run/t1195-new.scala
+++ b/test/files/run/t1195-new.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ existentials }
import scala.reflect.runtime.universe._
object Test {
@@ -25,4 +27,4 @@ class A1[T] {
class B1[U] {
def f = { case class D(x: Int) extends A1[String] ; new D(5) }
}
-} \ No newline at end of file
+}
diff --git a/test/files/run/t1195-old.scala b/test/files/run/t1195-old.scala
index b46a3b70f5..f80734c228 100644
--- a/test/files/run/t1195-old.scala
+++ b/test/files/run/t1195-old.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ existentials }
+
object Test {
def f() = { case class Bar(x: Int); Bar }
def g() = { case class Bar(x: Int); Bar(5) }
diff --git a/test/files/run/t1368.check b/test/files/run/t1368.check
new file mode 100644
index 0000000000..581e8a4039
--- /dev/null
+++ b/test/files/run/t1368.check
@@ -0,0 +1,3 @@
+t1368.scala:7: warning: Reference to uninitialized value blurp
+ def go3 = (new AnyRef with Happy with Sad { override val status = blurp ; val blurp = "happysad" }).status
+ ^
diff --git a/test/files/run/t1427.check b/test/files/run/t1427.check
new file mode 100644
index 0000000000..11a3d2f4a9
--- /dev/null
+++ b/test/files/run/t1427.check
@@ -0,0 +1,3 @@
+t1427.scala:6: warning: abstract type X in type pattern Bob[_[_] <: Any] is unchecked since it is eliminated by erasure
+ case x: (Bob[X] forSome { type X[_] }) => true
+ ^
diff --git a/test/files/run/t1427.scala b/test/files/run/t1427.scala
index 21bd71ffef..eeb2603c26 100644
--- a/test/files/run/t1427.scala
+++ b/test/files/run/t1427.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ higherKinds }
+
class Bob[K[_]] {
def foo(other: Any) = other match {
case x: (Bob[X] forSome { type X[_] }) => true
diff --git a/test/files/run/t1500.scala b/test/files/run/t1500.scala
index 6d2e7ee05f..75a6e31cdf 100644
--- a/test/files/run/t1500.scala
+++ b/test/files/run/t1500.scala
@@ -16,7 +16,7 @@ object Test {
</code>.text
- def main(args: Array[String]) = {
+ def main(args: Array[String]) {
val settings = new Settings()
settings.classpath.value = System.getProperty("java.class.path")
diff --git a/test/files/run/t1501.scala b/test/files/run/t1501.scala
index a2f7bb3a65..71ad0aeb5c 100644
--- a/test/files/run/t1501.scala
+++ b/test/files/run/t1501.scala
@@ -27,7 +27,7 @@ object Test {
</code>.text
- def main(args: Array[String]) = {
+ def main(args: Array[String]) {
val settings = new Settings()
settings.classpath.value = System.getProperty("java.class.path")
val tool = new interpreter.IMain(settings)
diff --git a/test/files/run/t153.scala b/test/files/run/t153.scala
index 82492fdbe6..3fdb423865 100644
--- a/test/files/run/t153.scala
+++ b/test/files/run/t153.scala
@@ -1,5 +1,5 @@
object Test extends App {
def powers(x: Int) = if ((x&(x-1))==0) Some(x) else None
val res = (Stream.range(1, 500000) flatMap powers).reverse
- println(res take 42 force)
+ println((res take 42).force)
}
diff --git a/test/files/run/t1766.scala b/test/files/run/t1766.scala
index 2afd883755..7d12451092 100644
--- a/test/files/run/t1766.scala
+++ b/test/files/run/t1766.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ reflectiveCalls }
+
object Test extends App {
class C(s: String) {
diff --git a/test/files/run/t2106.check b/test/files/run/t2106.check
new file mode 100644
index 0000000000..b14e9d1c41
--- /dev/null
+++ b/test/files/run/t2106.check
@@ -0,0 +1,3 @@
+t2106.scala:7: warning: Could not inline required method foo because access level required by callee not matched by caller.
+ def main(args: Array[String]): Unit = x.foo
+ ^
diff --git a/test/files/run/t2106.flags b/test/files/run/t2106.flags
index eb4d19bcb9..00d3643fd4 100644
--- a/test/files/run/t2106.flags
+++ b/test/files/run/t2106.flags
@@ -1 +1 @@
--optimise \ No newline at end of file
+-optimise -Yinline-warnings
diff --git a/test/files/run/t2106.scala b/test/files/run/t2106.scala
index e8124dabab..55b89da805 100644
--- a/test/files/run/t2106.scala
+++ b/test/files/run/t2106.scala
@@ -4,5 +4,5 @@ class A extends Cloneable {
object Test {
val x = new A
- def main(args: Array[String]) = x.foo
+ def main(args: Array[String]): Unit = x.foo
}
diff --git a/test/files/run/t2251b.check b/test/files/run/t2251b.check
index 42b0be457a..4231fc6ea6 100644
--- a/test/files/run/t2251b.check
+++ b/test/files/run/t2251b.check
@@ -1,11 +1,11 @@
-TypeTag[List[scala.collection.immutable.LinearSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with scala.collection.AbstractSeq[B[_ >: D with C <: B[_ >: D with C <: A]]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def dropRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]}; def dropRight(n: Int): scala.collection.immutable.LinearSeq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def dropRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]}; def takeRight(n: Int): scala.collection.immutable.LinearSeq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def dropRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]}; def drop(n: Int): scala.collection.immutable.LinearSeq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def dropRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]}; def take(n: Int): scala.collection.immutable.LinearSeq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def dropRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]}; def slice(from: Int,until: Int): scala.collection.immutable.LinearSeq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def dropRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]}; def splitAt(n: Int): (scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A], scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A])}]]
+TypeTag[List[scala.collection.immutable.LinearSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with scala.collection.AbstractSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with java.io.Serializable]]
TypeTag[List[scala.collection.immutable.Iterable[B[_ >: F with E with D with C <: B[_ >: F with E with D with C <: A]]] with F with Int => Any]]
-TypeTag[List[scala.collection.immutable.Seq[B[_ >: D with C <: B[_ >: D with C <: A]]] with scala.collection.AbstractSeq[B[_ >: D with C <: B[_ >: D with C <: A]]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def init: scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]}; def takeRight(n: Int): scala.collection.immutable.Seq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def init: scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]}; def drop(n: Int): scala.collection.immutable.Seq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def init: scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]}; def take(n: Int): scala.collection.immutable.Seq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def init: scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]}; def slice(from: Int,until: Int): scala.collection.immutable.Seq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def init: scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]}; def splitAt(n: Int): (scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A], scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]); def init: scala.collection.immutable.Seq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def init: scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]}}]]
+TypeTag[List[scala.collection.immutable.Seq[B[_ >: D with C <: B[_ >: D with C <: A]]] with scala.collection.AbstractSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with Serializable]]
TypeTag[List[scala.collection.Set[_ >: G with F <: B[_ >: G with F <: B[_ >: G with F <: A]]]]]
TypeTag[List[scala.collection.Set[_ >: G with F <: B[_ >: G with F <: B[_ >: G with F <: A]]]]]
TypeTag[List[scala.collection.Set[_ >: G with F <: B[_ >: G with F <: B[_ >: G with F <: A]]]]]
TypeTag[List[Seq[B[_ >: G with F <: B[_ >: G with F <: A]]]]]
TypeTag[List[scala.collection.Map[_ >: F with C <: B[_ >: F with C <: B[_ >: F with C <: A]], B[_ >: G with D <: B[_ >: G with D <: A]]]]]
-TypeTag[List[scala.collection.AbstractSeq[B[_ >: G with F <: B[_ >: G with F <: A]]] with scala.collection.LinearSeq[B[_ >: G with F <: B[_ >: G with F <: A]]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.AbstractSeq with scala.collection.LinearSeq]; def dropRight(n: Int): scala.collection.AbstractSeq[B[_ >: G with F <: A]] with scala.collection.LinearSeq[B[_ >: G with F <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.AbstractSeq with scala.collection.LinearSeq]; def dropRight(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def drop(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def take(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def slice(from: Int,until: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]}; def drop(n: Int): scala.collection.AbstractSeq[B[_ >: G with F <: A]] with scala.collection.LinearSeq[B[_ >: G with F <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.AbstractSeq with scala.collection.LinearSeq]; def dropRight(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def drop(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def take(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def slice(from: Int,until: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]}; def take(n: Int): scala.collection.AbstractSeq[B[_ >: G with F <: A]] with scala.collection.LinearSeq[B[_ >: G with F <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.AbstractSeq with scala.collection.LinearSeq]; def dropRight(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def drop(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def take(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def slice(from: Int,until: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]}; def slice(from: Int,until: Int): scala.collection.AbstractSeq[B[_ >: G with F <: A]] with scala.collection.LinearSeq[B[_ >: G with F <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.AbstractSeq with scala.collection.LinearSeq]; def dropRight(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def drop(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def take(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def slice(from: Int,until: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]}}]]
+TypeTag[List[scala.collection.AbstractSeq[B[_ >: G with F <: B[_ >: G with F <: A]]] with scala.collection.LinearSeq[B[_ >: G with F <: B[_ >: G with F <: A]]] with java.io.Serializable]]
TypeTag[List[Seq[B[_ >: G with F <: B[_ >: G with F <: A]]]]]
TypeTag[List[Seq[B[_ >: G with F <: B[_ >: G with F <: A]]]]]
diff --git a/test/files/run/t2308a.scala b/test/files/run/t2308a.scala
index abb568064a..200dee1ef6 100644
--- a/test/files/run/t2308a.scala
+++ b/test/files/run/t2308a.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ higherKinds }
object Test {
trait T[M[_]]
diff --git a/test/files/run/t2318.scala b/test/files/run/t2318.scala
index 47d083eb9d..d7fd9fdd41 100644
--- a/test/files/run/t2318.scala
+++ b/test/files/run/t2318.scala
@@ -1,5 +1,7 @@
import java.security._
+import scala.language.{ reflectiveCalls }
+
object Test {
trait Bar { def bar: Unit }
diff --git a/test/files/run/t2333.scala b/test/files/run/t2333.scala
index da43386572..7dc7a92df2 100644
--- a/test/files/run/t2333.scala
+++ b/test/files/run/t2333.scala
@@ -1,7 +1,7 @@
class A {
def whatever() {
lazy val a = 1
- lazy val b = try { 2 } catch { case _ => 0 }
+ lazy val b = try { 2 } catch { case _: Throwable => 0 }
a
b
@@ -13,4 +13,4 @@ object Test {
val a = new A
a.whatever
}
-} \ No newline at end of file
+}
diff --git a/test/files/run/t2417.scala b/test/files/run/t2417.scala
index 2d0bc2d405..18b6c326b5 100644
--- a/test/files/run/t2417.scala
+++ b/test/files/run/t2417.scala
@@ -8,7 +8,7 @@ object Test {
try {
block
} catch {
- case x => failure = x
+ case x: Throwable => failure = x
}
}
})
diff --git a/test/files/run/t2464/Annotated.java b/test/files/run/t2464/Annotated.java
new file mode 100644
index 0000000000..d022f9852c
--- /dev/null
+++ b/test/files/run/t2464/Annotated.java
@@ -0,0 +1,5 @@
+package test;
+
+@Connect(loadStyle = Connect.LoadStyle.EAGER)
+public class Annotated {
+}
diff --git a/test/files/run/t2464/Connect.java b/test/files/run/t2464/Connect.java
new file mode 100644
index 0000000000..59349f94c8
--- /dev/null
+++ b/test/files/run/t2464/Connect.java
@@ -0,0 +1,20 @@
+package test;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface Connect {
+
+ LoadStyle loadStyle() default LoadStyle.EAGER;
+
+ public enum LoadStyle {
+ EAGER,
+ DEFERRED,
+ LAZY
+ }
+}
diff --git a/test/files/run/t2464/Test.scala b/test/files/run/t2464/Test.scala
new file mode 100644
index 0000000000..90e1a03c17
--- /dev/null
+++ b/test/files/run/t2464/Test.scala
@@ -0,0 +1,35 @@
+import scala.reflect.io.Streamable
+import scala.tools.asm.{ClassWriter, ClassReader}
+import scala.tools.asm.tree.ClassNode
+import scala.tools.partest._
+import scala.tools.partest.BytecodeTest.modifyClassFile
+import java.io.{FileOutputStream, FileInputStream, File}
+
+object Test extends DirectTest {
+ def code = ???
+
+ def compileCode(code: String) = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code)
+ }
+
+ def app = """
+ object O {
+ new test.Annotated
+ }
+ """
+
+ def show(): Unit = {
+ compileCode(app)
+ modifyClassFile(new File(testOutput.toFile, "test/Annotated.class")) {
+ (cn: ClassNode) =>
+ // As investigated https://issues.scala-lang.org/browse/SI-2464?focusedCommentId=64521&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-64521
+ // classfiles in the wild sometimes lack the required InnerClass attribute for nested enums that
+ // are referenced in an annotation. I don't know what compiler or bytecode processor leaves things
+ // that way, but this test makes sure we don't crash.
+ cn.innerClasses.clear()
+ cn
+ }
+ compileCode(app)
+ }
+}
diff --git a/test/files/run/t2514.scala b/test/files/run/t2514.scala
index 21c4afb472..cf5fa0ea02 100644
--- a/test/files/run/t2514.scala
+++ b/test/files/run/t2514.scala
@@ -1,3 +1,7 @@
+
+
+import scala.language.{ implicitConversions, postfixOps, reflectiveCalls }
+
object Test
{
implicit def x[A](a: A) = new { def xx = a }
@@ -12,4 +16,4 @@ object Test
assert(r5 == 24)
}
-} \ No newline at end of file
+}
diff --git a/test/files/run/t2594_tcpoly.scala b/test/files/run/t2594_tcpoly.scala
index e759ca8b0f..a9d26693f9 100644
--- a/test/files/run/t2594_tcpoly.scala
+++ b/test/files/run/t2594_tcpoly.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ higherKinds }
+
trait Monad[M[_]] {
def foo[A](a: M[A]): M[A]
}
@@ -15,4 +18,4 @@ object Test {
}
def main(as: Array[String]) { BarMonad[Int] foo (new Bar[Int, Int]) }
-} \ No newline at end of file
+}
diff --git a/test/files/run/t2636.scala b/test/files/run/t2636.scala
index 3271f79ffc..6ae2248a26 100644
--- a/test/files/run/t2636.scala
+++ b/test/files/run/t2636.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ reflectiveCalls }
+
object Test
{
type Foo = { def update(x: Int, value: String): Unit }
@@ -32,4 +35,4 @@ object Test
assert(arrApply(arr, 1) == "o")
assert(arrApply(new { def apply(x: Int) = "tom" }, -100) == "tom")
}
-} \ No newline at end of file
+}
diff --git a/test/files/run/t3038d.scala b/test/files/run/t3038d.scala
index 9550165235..56cfcdb174 100644
--- a/test/files/run/t3038d.scala
+++ b/test/files/run/t3038d.scala
@@ -21,7 +21,7 @@ class Bar extends Foo with Serializable {
def size = a
@transient var second: Any = null
- def checkMember { first }
+ def checkMember { if (first == null) print("") }
private def writeObject(out: java.io.ObjectOutputStream) {
serializeTo(out)
diff --git a/test/files/run/t3050.scala b/test/files/run/t3050.scala
index ca9d91e191..65b2674b69 100644
--- a/test/files/run/t3050.scala
+++ b/test/files/run/t3050.scala
@@ -2,7 +2,7 @@ object Test {
def main(args: Array[String]): Unit = {
val x =
try { ("": Any) match { case List(_*) => true } }
- catch { case _ => false }
+ catch { case _: Throwable => false }
assert(!x)
}
diff --git a/test/files/run/t3175.scala b/test/files/run/t3175.scala
index aff2e67d0d..5c6daead38 100644
--- a/test/files/run/t3175.scala
+++ b/test/files/run/t3175.scala
@@ -1,6 +1,9 @@
/** A bit down the road this test will examine
* the bytecode.
*/
+
+import scala.language.reflectiveCalls
+
object Test {
def len(x:{ def length: Int }) = x.length
def f1(x:{ def apply(x: Int): Long }) = x(0)
diff --git a/test/files/run/t3232.scala b/test/files/run/t3232.scala
index feff7e7089..4c6cb1e35c 100644
--- a/test/files/run/t3232.scala
+++ b/test/files/run/t3232.scala
@@ -15,7 +15,7 @@ object Test {
// exception required
List(e1, e2) foreach { f =>
try { f() ; assert(false) }
- catch { case _ => () }
+ catch { case _: Throwable => () }
}
}
}
diff --git a/test/files/run/t3242.scala b/test/files/run/t3242.scala
index f8defaa5cd..0a449d51f9 100644
--- a/test/files/run/t3242.scala
+++ b/test/files/run/t3242.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ higherKinds }
+
object Test {
def benchmarkA(num: Int) {
diff --git a/test/files/run/t3361.scala b/test/files/run/t3361.scala
index 7fbc6777f2..f739d17b86 100644
--- a/test/files/run/t3361.scala
+++ b/test/files/run/t3361.scala
@@ -57,7 +57,7 @@ object Test extends App {
DoubleLinkedList().insert(ten)
} catch {
case _: IllegalArgumentException => require(true)
- case _ => require(false)
+ case _: Throwable => require(false)
}
val zero = DoubleLinkedList(0)
zero.insert(ten)
@@ -87,7 +87,7 @@ object Test extends App {
DoubleLinkedList().append(ten)
} catch {
case _: IllegalArgumentException => require(true)
- case _ => require(false)
+ case _: Throwable => require(false)
}
val zero = DoubleLinkedList(0)
zero.append(ten)
diff --git a/test/files/run/t3425.check b/test/files/run/t3425.check
new file mode 100644
index 0000000000..5be779bd74
--- /dev/null
+++ b/test/files/run/t3425.check
@@ -0,0 +1,4 @@
+123
+456
+789
+789
diff --git a/test/files/run/t3425.scala b/test/files/run/t3425.scala
new file mode 100644
index 0000000000..c61d1071a5
--- /dev/null
+++ b/test/files/run/t3425.scala
@@ -0,0 +1,41 @@
+import scala.language.reflectiveCalls
+object Other {
+ abstract class Foo {
+ type R1 <: { def x: Any }
+ type R2 <: R1 { def x: Int }
+
+ def f(x: R2) = x.x
+ }
+
+ abstract class Bar {
+ trait R0 { def x: Any }
+ type R1 <: R0 { def x: AnyVal }
+ type R2 <: R1 { def x: Int }
+
+ def f(x: R2) = x.x
+ }
+}
+object Test {
+ trait A
+ trait B
+ def x(a: (A { val y: Int }) with B { val y: Int }) = a.y
+
+ class C extends A with B {
+ val y = 456
+ }
+
+ class Bippy { def x: Int = 789 }
+
+ def main(args: Array[String]): Unit = {
+ println(x(new A with B { val y = 123 }))
+ println(x(new C))
+
+ { val foo = new Other.Foo { type R1 = Bippy ; type R2 = Bippy }
+ println(foo.f(new Bippy))
+ }
+ { val bar = new Other.Bar { type R1 = Bippy with R0 ; type R2 = R1 }
+ println(bar.f(new Bippy with bar.R0))
+ }
+ }
+}
+
diff --git a/test/files/run/t3425b.check b/test/files/run/t3425b.check
new file mode 100644
index 0000000000..5d34c43de3
--- /dev/null
+++ b/test/files/run/t3425b.check
@@ -0,0 +1,152 @@
+==== Direct Calls ====
+
+Any{val y: P} with C{val y: P}
+Any{val y: P} with C{val y: Q}
+Any{val y: P} with C{val y: R forSome { type R <: P with Q }}
+Any{val y: Q} with C{val y: P}
+Any{val y: Q} with C{val y: Q}
+Any{val y: Q} with C{val y: R forSome { type R <: P with Q }}
+Any{val y: R forSome { type R <: P with Q }} with C{val y: P}
+Any{val y: R forSome { type R <: P with Q }} with C{val y: Q}
+Any{val y: R forSome { type R <: P with Q }} with C{val y: R forSome { type R <: P with Q }}
+A{val y: P} with C{val y: P}
+A{val y: P} with C{val y: Q}
+A{val y: P} with C{val y: R forSome { type R <: P with Q }}
+A{val y: Q} with C{val y: P}
+A{val y: Q} with C{val y: Q}
+A{val y: Q} with C{val y: R forSome { type R <: P with Q }}
+A{val y: R forSome { type R <: P with Q }} with C{val y: P}
+A{val y: R forSome { type R <: P with Q }} with C{val y: Q}
+A{val y: R forSome { type R <: P with Q }} with C{val y: R forSome { type R <: P with Q }}
+B{val y: P} with C{val y: P}
+B{val y: P} with C{val y: Q}
+B{val y: P} with C{val y: R forSome { type R <: P with Q }}
+B{val y: Q} with C{val y: P}
+B{val y: Q} with C{val y: Q}
+B{val y: Q} with C{val y: R forSome { type R <: P with Q }}
+B{val y: R forSome { type R <: P with Q }} with C{val y: P}
+B{val y: R forSome { type R <: P with Q }} with C{val y: Q}
+B{val y: R forSome { type R <: P with Q }} with C{val y: R forSome { type R <: P with Q }}
+C{val y: P} with C{val y: P}
+C{val y: P} with C{val y: Q}
+C{val y: P} with C{val y: R forSome { type R <: P with Q }}
+C{val y: Q} with C{val y: P}
+C{val y: Q} with C{val y: Q}
+C{val y: Q} with C{val y: R forSome { type R <: P with Q }}
+C{val y: R forSome { type R <: P with Q }} with C{val y: P}
+C{val y: R forSome { type R <: P with Q }} with C{val y: Q}
+C{val y: R forSome { type R <: P with Q }} with C{val y: R forSome { type R <: P with Q }}
+
+
+==== Reflective Calls ====
+
+Any{val y: P} with Any{val y: P}
+Any{val y: P} with Any{val y: Q}
+Any{val y: P} with Any{val y: R forSome { type R <: P with Q }}
+Any{val y: P} with A{val y: P}
+Any{val y: P} with A{val y: Q}
+Any{val y: P} with A{val y: R forSome { type R <: P with Q }}
+Any{val y: P} with B{val y: P}
+Any{val y: P} with B{val y: Q}
+Any{val y: P} with B{val y: R forSome { type R <: P with Q }}
+Any{val y: Q} with Any{val y: P}
+Any{val y: Q} with Any{val y: Q}
+Any{val y: Q} with Any{val y: R forSome { type R <: P with Q }}
+Any{val y: Q} with A{val y: P}
+Any{val y: Q} with A{val y: Q}
+Any{val y: Q} with A{val y: R forSome { type R <: P with Q }}
+Any{val y: Q} with B{val y: P}
+Any{val y: Q} with B{val y: Q}
+Any{val y: Q} with B{val y: R forSome { type R <: P with Q }}
+Any{val y: R forSome { type R <: P with Q }} with Any{val y: P}
+Any{val y: R forSome { type R <: P with Q }} with Any{val y: Q}
+Any{val y: R forSome { type R <: P with Q }} with Any{val y: R forSome { type R <: P with Q }}
+Any{val y: R forSome { type R <: P with Q }} with A{val y: P}
+Any{val y: R forSome { type R <: P with Q }} with A{val y: Q}
+Any{val y: R forSome { type R <: P with Q }} with A{val y: R forSome { type R <: P with Q }}
+Any{val y: R forSome { type R <: P with Q }} with B{val y: P}
+Any{val y: R forSome { type R <: P with Q }} with B{val y: Q}
+Any{val y: R forSome { type R <: P with Q }} with B{val y: R forSome { type R <: P with Q }}
+A{val y: P} with Any{val y: P}
+A{val y: P} with Any{val y: Q}
+A{val y: P} with Any{val y: R forSome { type R <: P with Q }}
+A{val y: P} with A{val y: P}
+A{val y: P} with A{val y: Q}
+A{val y: P} with A{val y: R forSome { type R <: P with Q }}
+A{val y: P} with B{val y: P}
+A{val y: P} with B{val y: Q}
+A{val y: P} with B{val y: R forSome { type R <: P with Q }}
+A{val y: Q} with Any{val y: P}
+A{val y: Q} with Any{val y: Q}
+A{val y: Q} with Any{val y: R forSome { type R <: P with Q }}
+A{val y: Q} with A{val y: P}
+A{val y: Q} with A{val y: Q}
+A{val y: Q} with A{val y: R forSome { type R <: P with Q }}
+A{val y: Q} with B{val y: P}
+A{val y: Q} with B{val y: Q}
+A{val y: Q} with B{val y: R forSome { type R <: P with Q }}
+A{val y: R forSome { type R <: P with Q }} with Any{val y: P}
+A{val y: R forSome { type R <: P with Q }} with Any{val y: Q}
+A{val y: R forSome { type R <: P with Q }} with Any{val y: R forSome { type R <: P with Q }}
+A{val y: R forSome { type R <: P with Q }} with A{val y: P}
+A{val y: R forSome { type R <: P with Q }} with A{val y: Q}
+A{val y: R forSome { type R <: P with Q }} with A{val y: R forSome { type R <: P with Q }}
+A{val y: R forSome { type R <: P with Q }} with B{val y: P}
+A{val y: R forSome { type R <: P with Q }} with B{val y: Q}
+A{val y: R forSome { type R <: P with Q }} with B{val y: R forSome { type R <: P with Q }}
+B{val y: P} with Any{val y: P}
+B{val y: P} with Any{val y: Q}
+B{val y: P} with Any{val y: R forSome { type R <: P with Q }}
+B{val y: P} with A{val y: P}
+B{val y: P} with A{val y: Q}
+B{val y: P} with A{val y: R forSome { type R <: P with Q }}
+B{val y: P} with B{val y: P}
+B{val y: P} with B{val y: Q}
+B{val y: P} with B{val y: R forSome { type R <: P with Q }}
+B{val y: Q} with Any{val y: P}
+B{val y: Q} with Any{val y: Q}
+B{val y: Q} with Any{val y: R forSome { type R <: P with Q }}
+B{val y: Q} with A{val y: P}
+B{val y: Q} with A{val y: Q}
+B{val y: Q} with A{val y: R forSome { type R <: P with Q }}
+B{val y: Q} with B{val y: P}
+B{val y: Q} with B{val y: Q}
+B{val y: Q} with B{val y: R forSome { type R <: P with Q }}
+B{val y: R forSome { type R <: P with Q }} with Any{val y: P}
+B{val y: R forSome { type R <: P with Q }} with Any{val y: Q}
+B{val y: R forSome { type R <: P with Q }} with Any{val y: R forSome { type R <: P with Q }}
+B{val y: R forSome { type R <: P with Q }} with A{val y: P}
+B{val y: R forSome { type R <: P with Q }} with A{val y: Q}
+B{val y: R forSome { type R <: P with Q }} with A{val y: R forSome { type R <: P with Q }}
+B{val y: R forSome { type R <: P with Q }} with B{val y: P}
+B{val y: R forSome { type R <: P with Q }} with B{val y: Q}
+B{val y: R forSome { type R <: P with Q }} with B{val y: R forSome { type R <: P with Q }}
+C{val y: P} with Any{val y: P}
+C{val y: P} with Any{val y: Q}
+C{val y: P} with Any{val y: R forSome { type R <: P with Q }}
+C{val y: P} with A{val y: P}
+C{val y: P} with A{val y: Q}
+C{val y: P} with A{val y: R forSome { type R <: P with Q }}
+C{val y: P} with B{val y: P}
+C{val y: P} with B{val y: Q}
+C{val y: P} with B{val y: R forSome { type R <: P with Q }}
+C{val y: Q} with Any{val y: P}
+C{val y: Q} with Any{val y: Q}
+C{val y: Q} with Any{val y: R forSome { type R <: P with Q }}
+C{val y: Q} with A{val y: P}
+C{val y: Q} with A{val y: Q}
+C{val y: Q} with A{val y: R forSome { type R <: P with Q }}
+C{val y: Q} with B{val y: P}
+C{val y: Q} with B{val y: Q}
+C{val y: Q} with B{val y: R forSome { type R <: P with Q }}
+C{val y: R forSome { type R <: P with Q }} with Any{val y: P}
+C{val y: R forSome { type R <: P with Q }} with Any{val y: Q}
+C{val y: R forSome { type R <: P with Q }} with Any{val y: R forSome { type R <: P with Q }}
+C{val y: R forSome { type R <: P with Q }} with A{val y: P}
+C{val y: R forSome { type R <: P with Q }} with A{val y: Q}
+C{val y: R forSome { type R <: P with Q }} with A{val y: R forSome { type R <: P with Q }}
+C{val y: R forSome { type R <: P with Q }} with B{val y: P}
+C{val y: R forSome { type R <: P with Q }} with B{val y: Q}
+C{val y: R forSome { type R <: P with Q }} with B{val y: R forSome { type R <: P with Q }}
+
+
diff --git a/test/files/run/t3425b/Base_1.scala b/test/files/run/t3425b/Base_1.scala
new file mode 100644
index 0000000000..5a660a89b2
--- /dev/null
+++ b/test/files/run/t3425b/Base_1.scala
@@ -0,0 +1,89 @@
+trait P { def reflected: Boolean }
+trait Q { def reflected: Boolean }
+class PQ(val reflected: Boolean) extends P with Q { }
+
+trait A
+trait B
+trait C { val y: P }
+class ABC extends A with B with C {
+ private def reflected = (
+ Thread.currentThread.getStackTrace
+ takeWhile (_.getMethodName != "main")
+ exists (_.toString contains "sun.reflect.")
+ )
+ lazy val y: PQ = new PQ(reflected)
+}
+
+/*** The source used to generate the second file
+ Not otherwise used in the test except that compiling
+ it helps make sure it still compiles.
+
+****/
+
+object Gen {
+ case class Tp(outer: String, elem: String) {
+ override def toString = s"$outer { val y: $elem }"
+ }
+ case class Pair(tp1: Tp, tp2: Tp) {
+ def expr = s"((new ABC): $tp)"
+ def tp = s"($tp1) with ($tp2)"
+ }
+ val traits = Vector("Any", "A", "B", "C") map ("%6s" format _)
+ val types = Vector("P", "Q", "R forSome { type R <: P with Q }")
+ val allTypes = for (c <- traits ; tp <- types) yield Tp(c, tp)
+ val pairs = allTypes flatMap (t1 => allTypes map (t2 => Pair(t1, t2)))
+ val indices = pairs.indices
+
+ def aliases(idx: Int) = {
+ val p = pairs(idx)
+ import p._
+ List(
+ s"type R1_$idx = $tp",
+ s"type R2_$idx = R1_$idx { val y: (${tp1.elem}) with (${tp2.elem}) }"
+ )
+ }
+
+ def mkMethodContent(pre: String)(f: Int => String) =
+ indices map (i => s"def $pre$i${f(i)}") mkString "\n "
+
+ def content = List(
+ indices flatMap aliases mkString "\n ",
+ mkMethodContent("f")(i => s" = { val x = ${pairs(i).expr} ; x.y.reflected -> whatis(x).toString }"),
+ mkMethodContent("g")(i => s"""(x: R1_$i) = x.y"""),
+ mkMethodContent("h")(i => s"""(x: R2_$i) = x.y""")
+ ) mkString "\n "
+
+ def fCalls = indices map ("f" + _) mkString ("\n ", ",\n ", "\n ")
+
+ def main(args: Array[String]): Unit = {
+ // One cannot attain proper appreciation for the inadequacies of
+ // string interpolation without becoming one with the newline.
+ val nl = "\\n"
+
+ println(s"""
+ |import scala.reflect.runtime.universe._
+ |import scala.language._
+ |
+ |object Test {
+ | def whatis[T: TypeTag](x: T) = typeOf[T]
+ | def sshow(label: String, xs: Traversable[Any]) {
+ | println("==== " + label + " ====$nl")
+ | xs.toList.map("" + _).sorted foreach println
+ | println("$nl")
+ | }
+ |
+ | $content
+ | lazy val fcalls = List($fCalls)
+ |
+ | def main(args: Array[String]) {
+ | sshow("Direct Calls", fcalls collect { case (false, n) => n })
+ | sshow("Reflective Calls", fcalls collect { case (true, n) => n })
+ | // For a good time try printing this - have to fix bugs in
+ | // reflection before that's going to be a good idea
+ | // println(typeOf[Test.type].typeSymbol.asClass.typeSignature)
+ | }
+ |}
+ """.stripMargin.trim
+ )
+ }
+}
diff --git a/test/files/run/t3425b/Generated_2.scala b/test/files/run/t3425b/Generated_2.scala
new file mode 100644
index 0000000000..f1699636f6
--- /dev/null
+++ b/test/files/run/t3425b/Generated_2.scala
@@ -0,0 +1,886 @@
+import scala.reflect.runtime.universe._
+import scala.language._
+
+object Test {
+ def whatis[T: TypeTag](x: T) = typeOf[T]
+ def sshow(label: String, xs: Traversable[Any]) {
+ println("==== " + label + " ====\n")
+ xs.toList.map("" + _).sorted foreach println
+ println("\n")
+ }
+
+ type R1_0 = ( Any { val y: P }) with ( Any { val y: P })
+ type R2_0 = R1_0 { val y: (P) with (P) }
+ type R1_1 = ( Any { val y: P }) with ( Any { val y: Q })
+ type R2_1 = R1_1 { val y: (P) with (Q) }
+ type R1_2 = ( Any { val y: P }) with ( Any { val y: R forSome { type R <: P with Q } })
+ type R2_2 = R1_2 { val y: (P) with (R forSome { type R <: P with Q }) }
+ type R1_3 = ( Any { val y: P }) with ( A { val y: P })
+ type R2_3 = R1_3 { val y: (P) with (P) }
+ type R1_4 = ( Any { val y: P }) with ( A { val y: Q })
+ type R2_4 = R1_4 { val y: (P) with (Q) }
+ type R1_5 = ( Any { val y: P }) with ( A { val y: R forSome { type R <: P with Q } })
+ type R2_5 = R1_5 { val y: (P) with (R forSome { type R <: P with Q }) }
+ type R1_6 = ( Any { val y: P }) with ( B { val y: P })
+ type R2_6 = R1_6 { val y: (P) with (P) }
+ type R1_7 = ( Any { val y: P }) with ( B { val y: Q })
+ type R2_7 = R1_7 { val y: (P) with (Q) }
+ type R1_8 = ( Any { val y: P }) with ( B { val y: R forSome { type R <: P with Q } })
+ type R2_8 = R1_8 { val y: (P) with (R forSome { type R <: P with Q }) }
+ type R1_9 = ( Any { val y: P }) with ( C { val y: P })
+ type R2_9 = R1_9 { val y: (P) with (P) }
+ type R1_10 = ( Any { val y: P }) with ( C { val y: Q })
+ type R2_10 = R1_10 { val y: (P) with (Q) }
+ type R1_11 = ( Any { val y: P }) with ( C { val y: R forSome { type R <: P with Q } })
+ type R2_11 = R1_11 { val y: (P) with (R forSome { type R <: P with Q }) }
+ type R1_12 = ( Any { val y: Q }) with ( Any { val y: P })
+ type R2_12 = R1_12 { val y: (Q) with (P) }
+ type R1_13 = ( Any { val y: Q }) with ( Any { val y: Q })
+ type R2_13 = R1_13 { val y: (Q) with (Q) }
+ type R1_14 = ( Any { val y: Q }) with ( Any { val y: R forSome { type R <: P with Q } })
+ type R2_14 = R1_14 { val y: (Q) with (R forSome { type R <: P with Q }) }
+ type R1_15 = ( Any { val y: Q }) with ( A { val y: P })
+ type R2_15 = R1_15 { val y: (Q) with (P) }
+ type R1_16 = ( Any { val y: Q }) with ( A { val y: Q })
+ type R2_16 = R1_16 { val y: (Q) with (Q) }
+ type R1_17 = ( Any { val y: Q }) with ( A { val y: R forSome { type R <: P with Q } })
+ type R2_17 = R1_17 { val y: (Q) with (R forSome { type R <: P with Q }) }
+ type R1_18 = ( Any { val y: Q }) with ( B { val y: P })
+ type R2_18 = R1_18 { val y: (Q) with (P) }
+ type R1_19 = ( Any { val y: Q }) with ( B { val y: Q })
+ type R2_19 = R1_19 { val y: (Q) with (Q) }
+ type R1_20 = ( Any { val y: Q }) with ( B { val y: R forSome { type R <: P with Q } })
+ type R2_20 = R1_20 { val y: (Q) with (R forSome { type R <: P with Q }) }
+ type R1_21 = ( Any { val y: Q }) with ( C { val y: P })
+ type R2_21 = R1_21 { val y: (Q) with (P) }
+ type R1_22 = ( Any { val y: Q }) with ( C { val y: Q })
+ type R2_22 = R1_22 { val y: (Q) with (Q) }
+ type R1_23 = ( Any { val y: Q }) with ( C { val y: R forSome { type R <: P with Q } })
+ type R2_23 = R1_23 { val y: (Q) with (R forSome { type R <: P with Q }) }
+ type R1_24 = ( Any { val y: R forSome { type R <: P with Q } }) with ( Any { val y: P })
+ type R2_24 = R1_24 { val y: (R forSome { type R <: P with Q }) with (P) }
+ type R1_25 = ( Any { val y: R forSome { type R <: P with Q } }) with ( Any { val y: Q })
+ type R2_25 = R1_25 { val y: (R forSome { type R <: P with Q }) with (Q) }
+ type R1_26 = ( Any { val y: R forSome { type R <: P with Q } }) with ( Any { val y: R forSome { type R <: P with Q } })
+ type R2_26 = R1_26 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+ type R1_27 = ( Any { val y: R forSome { type R <: P with Q } }) with ( A { val y: P })
+ type R2_27 = R1_27 { val y: (R forSome { type R <: P with Q }) with (P) }
+ type R1_28 = ( Any { val y: R forSome { type R <: P with Q } }) with ( A { val y: Q })
+ type R2_28 = R1_28 { val y: (R forSome { type R <: P with Q }) with (Q) }
+ type R1_29 = ( Any { val y: R forSome { type R <: P with Q } }) with ( A { val y: R forSome { type R <: P with Q } })
+ type R2_29 = R1_29 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+ type R1_30 = ( Any { val y: R forSome { type R <: P with Q } }) with ( B { val y: P })
+ type R2_30 = R1_30 { val y: (R forSome { type R <: P with Q }) with (P) }
+ type R1_31 = ( Any { val y: R forSome { type R <: P with Q } }) with ( B { val y: Q })
+ type R2_31 = R1_31 { val y: (R forSome { type R <: P with Q }) with (Q) }
+ type R1_32 = ( Any { val y: R forSome { type R <: P with Q } }) with ( B { val y: R forSome { type R <: P with Q } })
+ type R2_32 = R1_32 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+ type R1_33 = ( Any { val y: R forSome { type R <: P with Q } }) with ( C { val y: P })
+ type R2_33 = R1_33 { val y: (R forSome { type R <: P with Q }) with (P) }
+ type R1_34 = ( Any { val y: R forSome { type R <: P with Q } }) with ( C { val y: Q })
+ type R2_34 = R1_34 { val y: (R forSome { type R <: P with Q }) with (Q) }
+ type R1_35 = ( Any { val y: R forSome { type R <: P with Q } }) with ( C { val y: R forSome { type R <: P with Q } })
+ type R2_35 = R1_35 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+ type R1_36 = ( A { val y: P }) with ( Any { val y: P })
+ type R2_36 = R1_36 { val y: (P) with (P) }
+ type R1_37 = ( A { val y: P }) with ( Any { val y: Q })
+ type R2_37 = R1_37 { val y: (P) with (Q) }
+ type R1_38 = ( A { val y: P }) with ( Any { val y: R forSome { type R <: P with Q } })
+ type R2_38 = R1_38 { val y: (P) with (R forSome { type R <: P with Q }) }
+ type R1_39 = ( A { val y: P }) with ( A { val y: P })
+ type R2_39 = R1_39 { val y: (P) with (P) }
+ type R1_40 = ( A { val y: P }) with ( A { val y: Q })
+ type R2_40 = R1_40 { val y: (P) with (Q) }
+ type R1_41 = ( A { val y: P }) with ( A { val y: R forSome { type R <: P with Q } })
+ type R2_41 = R1_41 { val y: (P) with (R forSome { type R <: P with Q }) }
+ type R1_42 = ( A { val y: P }) with ( B { val y: P })
+ type R2_42 = R1_42 { val y: (P) with (P) }
+ type R1_43 = ( A { val y: P }) with ( B { val y: Q })
+ type R2_43 = R1_43 { val y: (P) with (Q) }
+ type R1_44 = ( A { val y: P }) with ( B { val y: R forSome { type R <: P with Q } })
+ type R2_44 = R1_44 { val y: (P) with (R forSome { type R <: P with Q }) }
+ type R1_45 = ( A { val y: P }) with ( C { val y: P })
+ type R2_45 = R1_45 { val y: (P) with (P) }
+ type R1_46 = ( A { val y: P }) with ( C { val y: Q })
+ type R2_46 = R1_46 { val y: (P) with (Q) }
+ type R1_47 = ( A { val y: P }) with ( C { val y: R forSome { type R <: P with Q } })
+ type R2_47 = R1_47 { val y: (P) with (R forSome { type R <: P with Q }) }
+ type R1_48 = ( A { val y: Q }) with ( Any { val y: P })
+ type R2_48 = R1_48 { val y: (Q) with (P) }
+ type R1_49 = ( A { val y: Q }) with ( Any { val y: Q })
+ type R2_49 = R1_49 { val y: (Q) with (Q) }
+ type R1_50 = ( A { val y: Q }) with ( Any { val y: R forSome { type R <: P with Q } })
+ type R2_50 = R1_50 { val y: (Q) with (R forSome { type R <: P with Q }) }
+ type R1_51 = ( A { val y: Q }) with ( A { val y: P })
+ type R2_51 = R1_51 { val y: (Q) with (P) }
+ type R1_52 = ( A { val y: Q }) with ( A { val y: Q })
+ type R2_52 = R1_52 { val y: (Q) with (Q) }
+ type R1_53 = ( A { val y: Q }) with ( A { val y: R forSome { type R <: P with Q } })
+ type R2_53 = R1_53 { val y: (Q) with (R forSome { type R <: P with Q }) }
+ type R1_54 = ( A { val y: Q }) with ( B { val y: P })
+ type R2_54 = R1_54 { val y: (Q) with (P) }
+ type R1_55 = ( A { val y: Q }) with ( B { val y: Q })
+ type R2_55 = R1_55 { val y: (Q) with (Q) }
+ type R1_56 = ( A { val y: Q }) with ( B { val y: R forSome { type R <: P with Q } })
+ type R2_56 = R1_56 { val y: (Q) with (R forSome { type R <: P with Q }) }
+ type R1_57 = ( A { val y: Q }) with ( C { val y: P })
+ type R2_57 = R1_57 { val y: (Q) with (P) }
+ type R1_58 = ( A { val y: Q }) with ( C { val y: Q })
+ type R2_58 = R1_58 { val y: (Q) with (Q) }
+ type R1_59 = ( A { val y: Q }) with ( C { val y: R forSome { type R <: P with Q } })
+ type R2_59 = R1_59 { val y: (Q) with (R forSome { type R <: P with Q }) }
+ type R1_60 = ( A { val y: R forSome { type R <: P with Q } }) with ( Any { val y: P })
+ type R2_60 = R1_60 { val y: (R forSome { type R <: P with Q }) with (P) }
+ type R1_61 = ( A { val y: R forSome { type R <: P with Q } }) with ( Any { val y: Q })
+ type R2_61 = R1_61 { val y: (R forSome { type R <: P with Q }) with (Q) }
+ type R1_62 = ( A { val y: R forSome { type R <: P with Q } }) with ( Any { val y: R forSome { type R <: P with Q } })
+ type R2_62 = R1_62 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+ type R1_63 = ( A { val y: R forSome { type R <: P with Q } }) with ( A { val y: P })
+ type R2_63 = R1_63 { val y: (R forSome { type R <: P with Q }) with (P) }
+ type R1_64 = ( A { val y: R forSome { type R <: P with Q } }) with ( A { val y: Q })
+ type R2_64 = R1_64 { val y: (R forSome { type R <: P with Q }) with (Q) }
+ type R1_65 = ( A { val y: R forSome { type R <: P with Q } }) with ( A { val y: R forSome { type R <: P with Q } })
+ type R2_65 = R1_65 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+ type R1_66 = ( A { val y: R forSome { type R <: P with Q } }) with ( B { val y: P })
+ type R2_66 = R1_66 { val y: (R forSome { type R <: P with Q }) with (P) }
+ type R1_67 = ( A { val y: R forSome { type R <: P with Q } }) with ( B { val y: Q })
+ type R2_67 = R1_67 { val y: (R forSome { type R <: P with Q }) with (Q) }
+ type R1_68 = ( A { val y: R forSome { type R <: P with Q } }) with ( B { val y: R forSome { type R <: P with Q } })
+ type R2_68 = R1_68 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+ type R1_69 = ( A { val y: R forSome { type R <: P with Q } }) with ( C { val y: P })
+ type R2_69 = R1_69 { val y: (R forSome { type R <: P with Q }) with (P) }
+ type R1_70 = ( A { val y: R forSome { type R <: P with Q } }) with ( C { val y: Q })
+ type R2_70 = R1_70 { val y: (R forSome { type R <: P with Q }) with (Q) }
+ type R1_71 = ( A { val y: R forSome { type R <: P with Q } }) with ( C { val y: R forSome { type R <: P with Q } })
+ type R2_71 = R1_71 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+ type R1_72 = ( B { val y: P }) with ( Any { val y: P })
+ type R2_72 = R1_72 { val y: (P) with (P) }
+ type R1_73 = ( B { val y: P }) with ( Any { val y: Q })
+ type R2_73 = R1_73 { val y: (P) with (Q) }
+ type R1_74 = ( B { val y: P }) with ( Any { val y: R forSome { type R <: P with Q } })
+ type R2_74 = R1_74 { val y: (P) with (R forSome { type R <: P with Q }) }
+ type R1_75 = ( B { val y: P }) with ( A { val y: P })
+ type R2_75 = R1_75 { val y: (P) with (P) }
+ type R1_76 = ( B { val y: P }) with ( A { val y: Q })
+ type R2_76 = R1_76 { val y: (P) with (Q) }
+ type R1_77 = ( B { val y: P }) with ( A { val y: R forSome { type R <: P with Q } })
+ type R2_77 = R1_77 { val y: (P) with (R forSome { type R <: P with Q }) }
+ type R1_78 = ( B { val y: P }) with ( B { val y: P })
+ type R2_78 = R1_78 { val y: (P) with (P) }
+ type R1_79 = ( B { val y: P }) with ( B { val y: Q })
+ type R2_79 = R1_79 { val y: (P) with (Q) }
+ type R1_80 = ( B { val y: P }) with ( B { val y: R forSome { type R <: P with Q } })
+ type R2_80 = R1_80 { val y: (P) with (R forSome { type R <: P with Q }) }
+ type R1_81 = ( B { val y: P }) with ( C { val y: P })
+ type R2_81 = R1_81 { val y: (P) with (P) }
+ type R1_82 = ( B { val y: P }) with ( C { val y: Q })
+ type R2_82 = R1_82 { val y: (P) with (Q) }
+ type R1_83 = ( B { val y: P }) with ( C { val y: R forSome { type R <: P with Q } })
+ type R2_83 = R1_83 { val y: (P) with (R forSome { type R <: P with Q }) }
+ type R1_84 = ( B { val y: Q }) with ( Any { val y: P })
+ type R2_84 = R1_84 { val y: (Q) with (P) }
+ type R1_85 = ( B { val y: Q }) with ( Any { val y: Q })
+ type R2_85 = R1_85 { val y: (Q) with (Q) }
+ type R1_86 = ( B { val y: Q }) with ( Any { val y: R forSome { type R <: P with Q } })
+ type R2_86 = R1_86 { val y: (Q) with (R forSome { type R <: P with Q }) }
+ type R1_87 = ( B { val y: Q }) with ( A { val y: P })
+ type R2_87 = R1_87 { val y: (Q) with (P) }
+ type R1_88 = ( B { val y: Q }) with ( A { val y: Q })
+ type R2_88 = R1_88 { val y: (Q) with (Q) }
+ type R1_89 = ( B { val y: Q }) with ( A { val y: R forSome { type R <: P with Q } })
+ type R2_89 = R1_89 { val y: (Q) with (R forSome { type R <: P with Q }) }
+ type R1_90 = ( B { val y: Q }) with ( B { val y: P })
+ type R2_90 = R1_90 { val y: (Q) with (P) }
+ type R1_91 = ( B { val y: Q }) with ( B { val y: Q })
+ type R2_91 = R1_91 { val y: (Q) with (Q) }
+ type R1_92 = ( B { val y: Q }) with ( B { val y: R forSome { type R <: P with Q } })
+ type R2_92 = R1_92 { val y: (Q) with (R forSome { type R <: P with Q }) }
+ type R1_93 = ( B { val y: Q }) with ( C { val y: P })
+ type R2_93 = R1_93 { val y: (Q) with (P) }
+ type R1_94 = ( B { val y: Q }) with ( C { val y: Q })
+ type R2_94 = R1_94 { val y: (Q) with (Q) }
+ type R1_95 = ( B { val y: Q }) with ( C { val y: R forSome { type R <: P with Q } })
+ type R2_95 = R1_95 { val y: (Q) with (R forSome { type R <: P with Q }) }
+ type R1_96 = ( B { val y: R forSome { type R <: P with Q } }) with ( Any { val y: P })
+ type R2_96 = R1_96 { val y: (R forSome { type R <: P with Q }) with (P) }
+ type R1_97 = ( B { val y: R forSome { type R <: P with Q } }) with ( Any { val y: Q })
+ type R2_97 = R1_97 { val y: (R forSome { type R <: P with Q }) with (Q) }
+ type R1_98 = ( B { val y: R forSome { type R <: P with Q } }) with ( Any { val y: R forSome { type R <: P with Q } })
+ type R2_98 = R1_98 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+ type R1_99 = ( B { val y: R forSome { type R <: P with Q } }) with ( A { val y: P })
+ type R2_99 = R1_99 { val y: (R forSome { type R <: P with Q }) with (P) }
+ type R1_100 = ( B { val y: R forSome { type R <: P with Q } }) with ( A { val y: Q })
+ type R2_100 = R1_100 { val y: (R forSome { type R <: P with Q }) with (Q) }
+ type R1_101 = ( B { val y: R forSome { type R <: P with Q } }) with ( A { val y: R forSome { type R <: P with Q } })
+ type R2_101 = R1_101 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+ type R1_102 = ( B { val y: R forSome { type R <: P with Q } }) with ( B { val y: P })
+ type R2_102 = R1_102 { val y: (R forSome { type R <: P with Q }) with (P) }
+ type R1_103 = ( B { val y: R forSome { type R <: P with Q } }) with ( B { val y: Q })
+ type R2_103 = R1_103 { val y: (R forSome { type R <: P with Q }) with (Q) }
+ type R1_104 = ( B { val y: R forSome { type R <: P with Q } }) with ( B { val y: R forSome { type R <: P with Q } })
+ type R2_104 = R1_104 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+ type R1_105 = ( B { val y: R forSome { type R <: P with Q } }) with ( C { val y: P })
+ type R2_105 = R1_105 { val y: (R forSome { type R <: P with Q }) with (P) }
+ type R1_106 = ( B { val y: R forSome { type R <: P with Q } }) with ( C { val y: Q })
+ type R2_106 = R1_106 { val y: (R forSome { type R <: P with Q }) with (Q) }
+ type R1_107 = ( B { val y: R forSome { type R <: P with Q } }) with ( C { val y: R forSome { type R <: P with Q } })
+ type R2_107 = R1_107 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+ type R1_108 = ( C { val y: P }) with ( Any { val y: P })
+ type R2_108 = R1_108 { val y: (P) with (P) }
+ type R1_109 = ( C { val y: P }) with ( Any { val y: Q })
+ type R2_109 = R1_109 { val y: (P) with (Q) }
+ type R1_110 = ( C { val y: P }) with ( Any { val y: R forSome { type R <: P with Q } })
+ type R2_110 = R1_110 { val y: (P) with (R forSome { type R <: P with Q }) }
+ type R1_111 = ( C { val y: P }) with ( A { val y: P })
+ type R2_111 = R1_111 { val y: (P) with (P) }
+ type R1_112 = ( C { val y: P }) with ( A { val y: Q })
+ type R2_112 = R1_112 { val y: (P) with (Q) }
+ type R1_113 = ( C { val y: P }) with ( A { val y: R forSome { type R <: P with Q } })
+ type R2_113 = R1_113 { val y: (P) with (R forSome { type R <: P with Q }) }
+ type R1_114 = ( C { val y: P }) with ( B { val y: P })
+ type R2_114 = R1_114 { val y: (P) with (P) }
+ type R1_115 = ( C { val y: P }) with ( B { val y: Q })
+ type R2_115 = R1_115 { val y: (P) with (Q) }
+ type R1_116 = ( C { val y: P }) with ( B { val y: R forSome { type R <: P with Q } })
+ type R2_116 = R1_116 { val y: (P) with (R forSome { type R <: P with Q }) }
+ type R1_117 = ( C { val y: P }) with ( C { val y: P })
+ type R2_117 = R1_117 { val y: (P) with (P) }
+ type R1_118 = ( C { val y: P }) with ( C { val y: Q })
+ type R2_118 = R1_118 { val y: (P) with (Q) }
+ type R1_119 = ( C { val y: P }) with ( C { val y: R forSome { type R <: P with Q } })
+ type R2_119 = R1_119 { val y: (P) with (R forSome { type R <: P with Q }) }
+ type R1_120 = ( C { val y: Q }) with ( Any { val y: P })
+ type R2_120 = R1_120 { val y: (Q) with (P) }
+ type R1_121 = ( C { val y: Q }) with ( Any { val y: Q })
+ type R2_121 = R1_121 { val y: (Q) with (Q) }
+ type R1_122 = ( C { val y: Q }) with ( Any { val y: R forSome { type R <: P with Q } })
+ type R2_122 = R1_122 { val y: (Q) with (R forSome { type R <: P with Q }) }
+ type R1_123 = ( C { val y: Q }) with ( A { val y: P })
+ type R2_123 = R1_123 { val y: (Q) with (P) }
+ type R1_124 = ( C { val y: Q }) with ( A { val y: Q })
+ type R2_124 = R1_124 { val y: (Q) with (Q) }
+ type R1_125 = ( C { val y: Q }) with ( A { val y: R forSome { type R <: P with Q } })
+ type R2_125 = R1_125 { val y: (Q) with (R forSome { type R <: P with Q }) }
+ type R1_126 = ( C { val y: Q }) with ( B { val y: P })
+ type R2_126 = R1_126 { val y: (Q) with (P) }
+ type R1_127 = ( C { val y: Q }) with ( B { val y: Q })
+ type R2_127 = R1_127 { val y: (Q) with (Q) }
+ type R1_128 = ( C { val y: Q }) with ( B { val y: R forSome { type R <: P with Q } })
+ type R2_128 = R1_128 { val y: (Q) with (R forSome { type R <: P with Q }) }
+ type R1_129 = ( C { val y: Q }) with ( C { val y: P })
+ type R2_129 = R1_129 { val y: (Q) with (P) }
+ type R1_130 = ( C { val y: Q }) with ( C { val y: Q })
+ type R2_130 = R1_130 { val y: (Q) with (Q) }
+ type R1_131 = ( C { val y: Q }) with ( C { val y: R forSome { type R <: P with Q } })
+ type R2_131 = R1_131 { val y: (Q) with (R forSome { type R <: P with Q }) }
+ type R1_132 = ( C { val y: R forSome { type R <: P with Q } }) with ( Any { val y: P })
+ type R2_132 = R1_132 { val y: (R forSome { type R <: P with Q }) with (P) }
+ type R1_133 = ( C { val y: R forSome { type R <: P with Q } }) with ( Any { val y: Q })
+ type R2_133 = R1_133 { val y: (R forSome { type R <: P with Q }) with (Q) }
+ type R1_134 = ( C { val y: R forSome { type R <: P with Q } }) with ( Any { val y: R forSome { type R <: P with Q } })
+ type R2_134 = R1_134 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+ type R1_135 = ( C { val y: R forSome { type R <: P with Q } }) with ( A { val y: P })
+ type R2_135 = R1_135 { val y: (R forSome { type R <: P with Q }) with (P) }
+ type R1_136 = ( C { val y: R forSome { type R <: P with Q } }) with ( A { val y: Q })
+ type R2_136 = R1_136 { val y: (R forSome { type R <: P with Q }) with (Q) }
+ type R1_137 = ( C { val y: R forSome { type R <: P with Q } }) with ( A { val y: R forSome { type R <: P with Q } })
+ type R2_137 = R1_137 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+ type R1_138 = ( C { val y: R forSome { type R <: P with Q } }) with ( B { val y: P })
+ type R2_138 = R1_138 { val y: (R forSome { type R <: P with Q }) with (P) }
+ type R1_139 = ( C { val y: R forSome { type R <: P with Q } }) with ( B { val y: Q })
+ type R2_139 = R1_139 { val y: (R forSome { type R <: P with Q }) with (Q) }
+ type R1_140 = ( C { val y: R forSome { type R <: P with Q } }) with ( B { val y: R forSome { type R <: P with Q } })
+ type R2_140 = R1_140 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+ type R1_141 = ( C { val y: R forSome { type R <: P with Q } }) with ( C { val y: P })
+ type R2_141 = R1_141 { val y: (R forSome { type R <: P with Q }) with (P) }
+ type R1_142 = ( C { val y: R forSome { type R <: P with Q } }) with ( C { val y: Q })
+ type R2_142 = R1_142 { val y: (R forSome { type R <: P with Q }) with (Q) }
+ type R1_143 = ( C { val y: R forSome { type R <: P with Q } }) with ( C { val y: R forSome { type R <: P with Q } })
+ type R2_143 = R1_143 { val y: (R forSome { type R <: P with Q }) with (R forSome { type R <: P with Q }) }
+ def f0 = { val x = ((new ABC): ( Any { val y: P }) with ( Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f1 = { val x = ((new ABC): ( Any { val y: P }) with ( Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f2 = { val x = ((new ABC): ( Any { val y: P }) with ( Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f3 = { val x = ((new ABC): ( Any { val y: P }) with ( A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f4 = { val x = ((new ABC): ( Any { val y: P }) with ( A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f5 = { val x = ((new ABC): ( Any { val y: P }) with ( A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f6 = { val x = ((new ABC): ( Any { val y: P }) with ( B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f7 = { val x = ((new ABC): ( Any { val y: P }) with ( B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f8 = { val x = ((new ABC): ( Any { val y: P }) with ( B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f9 = { val x = ((new ABC): ( Any { val y: P }) with ( C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f10 = { val x = ((new ABC): ( Any { val y: P }) with ( C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f11 = { val x = ((new ABC): ( Any { val y: P }) with ( C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f12 = { val x = ((new ABC): ( Any { val y: Q }) with ( Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f13 = { val x = ((new ABC): ( Any { val y: Q }) with ( Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f14 = { val x = ((new ABC): ( Any { val y: Q }) with ( Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f15 = { val x = ((new ABC): ( Any { val y: Q }) with ( A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f16 = { val x = ((new ABC): ( Any { val y: Q }) with ( A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f17 = { val x = ((new ABC): ( Any { val y: Q }) with ( A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f18 = { val x = ((new ABC): ( Any { val y: Q }) with ( B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f19 = { val x = ((new ABC): ( Any { val y: Q }) with ( B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f20 = { val x = ((new ABC): ( Any { val y: Q }) with ( B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f21 = { val x = ((new ABC): ( Any { val y: Q }) with ( C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f22 = { val x = ((new ABC): ( Any { val y: Q }) with ( C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f23 = { val x = ((new ABC): ( Any { val y: Q }) with ( C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f24 = { val x = ((new ABC): ( Any { val y: R forSome { type R <: P with Q } }) with ( Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f25 = { val x = ((new ABC): ( Any { val y: R forSome { type R <: P with Q } }) with ( Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f26 = { val x = ((new ABC): ( Any { val y: R forSome { type R <: P with Q } }) with ( Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f27 = { val x = ((new ABC): ( Any { val y: R forSome { type R <: P with Q } }) with ( A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f28 = { val x = ((new ABC): ( Any { val y: R forSome { type R <: P with Q } }) with ( A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f29 = { val x = ((new ABC): ( Any { val y: R forSome { type R <: P with Q } }) with ( A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f30 = { val x = ((new ABC): ( Any { val y: R forSome { type R <: P with Q } }) with ( B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f31 = { val x = ((new ABC): ( Any { val y: R forSome { type R <: P with Q } }) with ( B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f32 = { val x = ((new ABC): ( Any { val y: R forSome { type R <: P with Q } }) with ( B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f33 = { val x = ((new ABC): ( Any { val y: R forSome { type R <: P with Q } }) with ( C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f34 = { val x = ((new ABC): ( Any { val y: R forSome { type R <: P with Q } }) with ( C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f35 = { val x = ((new ABC): ( Any { val y: R forSome { type R <: P with Q } }) with ( C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f36 = { val x = ((new ABC): ( A { val y: P }) with ( Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f37 = { val x = ((new ABC): ( A { val y: P }) with ( Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f38 = { val x = ((new ABC): ( A { val y: P }) with ( Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f39 = { val x = ((new ABC): ( A { val y: P }) with ( A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f40 = { val x = ((new ABC): ( A { val y: P }) with ( A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f41 = { val x = ((new ABC): ( A { val y: P }) with ( A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f42 = { val x = ((new ABC): ( A { val y: P }) with ( B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f43 = { val x = ((new ABC): ( A { val y: P }) with ( B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f44 = { val x = ((new ABC): ( A { val y: P }) with ( B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f45 = { val x = ((new ABC): ( A { val y: P }) with ( C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f46 = { val x = ((new ABC): ( A { val y: P }) with ( C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f47 = { val x = ((new ABC): ( A { val y: P }) with ( C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f48 = { val x = ((new ABC): ( A { val y: Q }) with ( Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f49 = { val x = ((new ABC): ( A { val y: Q }) with ( Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f50 = { val x = ((new ABC): ( A { val y: Q }) with ( Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f51 = { val x = ((new ABC): ( A { val y: Q }) with ( A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f52 = { val x = ((new ABC): ( A { val y: Q }) with ( A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f53 = { val x = ((new ABC): ( A { val y: Q }) with ( A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f54 = { val x = ((new ABC): ( A { val y: Q }) with ( B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f55 = { val x = ((new ABC): ( A { val y: Q }) with ( B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f56 = { val x = ((new ABC): ( A { val y: Q }) with ( B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f57 = { val x = ((new ABC): ( A { val y: Q }) with ( C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f58 = { val x = ((new ABC): ( A { val y: Q }) with ( C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f59 = { val x = ((new ABC): ( A { val y: Q }) with ( C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f60 = { val x = ((new ABC): ( A { val y: R forSome { type R <: P with Q } }) with ( Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f61 = { val x = ((new ABC): ( A { val y: R forSome { type R <: P with Q } }) with ( Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f62 = { val x = ((new ABC): ( A { val y: R forSome { type R <: P with Q } }) with ( Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f63 = { val x = ((new ABC): ( A { val y: R forSome { type R <: P with Q } }) with ( A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f64 = { val x = ((new ABC): ( A { val y: R forSome { type R <: P with Q } }) with ( A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f65 = { val x = ((new ABC): ( A { val y: R forSome { type R <: P with Q } }) with ( A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f66 = { val x = ((new ABC): ( A { val y: R forSome { type R <: P with Q } }) with ( B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f67 = { val x = ((new ABC): ( A { val y: R forSome { type R <: P with Q } }) with ( B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f68 = { val x = ((new ABC): ( A { val y: R forSome { type R <: P with Q } }) with ( B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f69 = { val x = ((new ABC): ( A { val y: R forSome { type R <: P with Q } }) with ( C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f70 = { val x = ((new ABC): ( A { val y: R forSome { type R <: P with Q } }) with ( C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f71 = { val x = ((new ABC): ( A { val y: R forSome { type R <: P with Q } }) with ( C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f72 = { val x = ((new ABC): ( B { val y: P }) with ( Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f73 = { val x = ((new ABC): ( B { val y: P }) with ( Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f74 = { val x = ((new ABC): ( B { val y: P }) with ( Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f75 = { val x = ((new ABC): ( B { val y: P }) with ( A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f76 = { val x = ((new ABC): ( B { val y: P }) with ( A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f77 = { val x = ((new ABC): ( B { val y: P }) with ( A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f78 = { val x = ((new ABC): ( B { val y: P }) with ( B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f79 = { val x = ((new ABC): ( B { val y: P }) with ( B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f80 = { val x = ((new ABC): ( B { val y: P }) with ( B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f81 = { val x = ((new ABC): ( B { val y: P }) with ( C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f82 = { val x = ((new ABC): ( B { val y: P }) with ( C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f83 = { val x = ((new ABC): ( B { val y: P }) with ( C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f84 = { val x = ((new ABC): ( B { val y: Q }) with ( Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f85 = { val x = ((new ABC): ( B { val y: Q }) with ( Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f86 = { val x = ((new ABC): ( B { val y: Q }) with ( Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f87 = { val x = ((new ABC): ( B { val y: Q }) with ( A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f88 = { val x = ((new ABC): ( B { val y: Q }) with ( A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f89 = { val x = ((new ABC): ( B { val y: Q }) with ( A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f90 = { val x = ((new ABC): ( B { val y: Q }) with ( B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f91 = { val x = ((new ABC): ( B { val y: Q }) with ( B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f92 = { val x = ((new ABC): ( B { val y: Q }) with ( B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f93 = { val x = ((new ABC): ( B { val y: Q }) with ( C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f94 = { val x = ((new ABC): ( B { val y: Q }) with ( C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f95 = { val x = ((new ABC): ( B { val y: Q }) with ( C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f96 = { val x = ((new ABC): ( B { val y: R forSome { type R <: P with Q } }) with ( Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f97 = { val x = ((new ABC): ( B { val y: R forSome { type R <: P with Q } }) with ( Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f98 = { val x = ((new ABC): ( B { val y: R forSome { type R <: P with Q } }) with ( Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f99 = { val x = ((new ABC): ( B { val y: R forSome { type R <: P with Q } }) with ( A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f100 = { val x = ((new ABC): ( B { val y: R forSome { type R <: P with Q } }) with ( A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f101 = { val x = ((new ABC): ( B { val y: R forSome { type R <: P with Q } }) with ( A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f102 = { val x = ((new ABC): ( B { val y: R forSome { type R <: P with Q } }) with ( B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f103 = { val x = ((new ABC): ( B { val y: R forSome { type R <: P with Q } }) with ( B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f104 = { val x = ((new ABC): ( B { val y: R forSome { type R <: P with Q } }) with ( B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f105 = { val x = ((new ABC): ( B { val y: R forSome { type R <: P with Q } }) with ( C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f106 = { val x = ((new ABC): ( B { val y: R forSome { type R <: P with Q } }) with ( C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f107 = { val x = ((new ABC): ( B { val y: R forSome { type R <: P with Q } }) with ( C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f108 = { val x = ((new ABC): ( C { val y: P }) with ( Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f109 = { val x = ((new ABC): ( C { val y: P }) with ( Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f110 = { val x = ((new ABC): ( C { val y: P }) with ( Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f111 = { val x = ((new ABC): ( C { val y: P }) with ( A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f112 = { val x = ((new ABC): ( C { val y: P }) with ( A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f113 = { val x = ((new ABC): ( C { val y: P }) with ( A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f114 = { val x = ((new ABC): ( C { val y: P }) with ( B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f115 = { val x = ((new ABC): ( C { val y: P }) with ( B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f116 = { val x = ((new ABC): ( C { val y: P }) with ( B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f117 = { val x = ((new ABC): ( C { val y: P }) with ( C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f118 = { val x = ((new ABC): ( C { val y: P }) with ( C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f119 = { val x = ((new ABC): ( C { val y: P }) with ( C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f120 = { val x = ((new ABC): ( C { val y: Q }) with ( Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f121 = { val x = ((new ABC): ( C { val y: Q }) with ( Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f122 = { val x = ((new ABC): ( C { val y: Q }) with ( Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f123 = { val x = ((new ABC): ( C { val y: Q }) with ( A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f124 = { val x = ((new ABC): ( C { val y: Q }) with ( A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f125 = { val x = ((new ABC): ( C { val y: Q }) with ( A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f126 = { val x = ((new ABC): ( C { val y: Q }) with ( B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f127 = { val x = ((new ABC): ( C { val y: Q }) with ( B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f128 = { val x = ((new ABC): ( C { val y: Q }) with ( B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f129 = { val x = ((new ABC): ( C { val y: Q }) with ( C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f130 = { val x = ((new ABC): ( C { val y: Q }) with ( C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f131 = { val x = ((new ABC): ( C { val y: Q }) with ( C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f132 = { val x = ((new ABC): ( C { val y: R forSome { type R <: P with Q } }) with ( Any { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f133 = { val x = ((new ABC): ( C { val y: R forSome { type R <: P with Q } }) with ( Any { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f134 = { val x = ((new ABC): ( C { val y: R forSome { type R <: P with Q } }) with ( Any { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f135 = { val x = ((new ABC): ( C { val y: R forSome { type R <: P with Q } }) with ( A { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f136 = { val x = ((new ABC): ( C { val y: R forSome { type R <: P with Q } }) with ( A { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f137 = { val x = ((new ABC): ( C { val y: R forSome { type R <: P with Q } }) with ( A { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f138 = { val x = ((new ABC): ( C { val y: R forSome { type R <: P with Q } }) with ( B { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f139 = { val x = ((new ABC): ( C { val y: R forSome { type R <: P with Q } }) with ( B { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f140 = { val x = ((new ABC): ( C { val y: R forSome { type R <: P with Q } }) with ( B { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def f141 = { val x = ((new ABC): ( C { val y: R forSome { type R <: P with Q } }) with ( C { val y: P })) ; x.y.reflected -> whatis(x).toString }
+ def f142 = { val x = ((new ABC): ( C { val y: R forSome { type R <: P with Q } }) with ( C { val y: Q })) ; x.y.reflected -> whatis(x).toString }
+ def f143 = { val x = ((new ABC): ( C { val y: R forSome { type R <: P with Q } }) with ( C { val y: R forSome { type R <: P with Q } })) ; x.y.reflected -> whatis(x).toString }
+ def g0(x: R1_0) = x.y
+ def g1(x: R1_1) = x.y
+ def g2(x: R1_2) = x.y
+ def g3(x: R1_3) = x.y
+ def g4(x: R1_4) = x.y
+ def g5(x: R1_5) = x.y
+ def g6(x: R1_6) = x.y
+ def g7(x: R1_7) = x.y
+ def g8(x: R1_8) = x.y
+ def g9(x: R1_9) = x.y
+ def g10(x: R1_10) = x.y
+ def g11(x: R1_11) = x.y
+ def g12(x: R1_12) = x.y
+ def g13(x: R1_13) = x.y
+ def g14(x: R1_14) = x.y
+ def g15(x: R1_15) = x.y
+ def g16(x: R1_16) = x.y
+ def g17(x: R1_17) = x.y
+ def g18(x: R1_18) = x.y
+ def g19(x: R1_19) = x.y
+ def g20(x: R1_20) = x.y
+ def g21(x: R1_21) = x.y
+ def g22(x: R1_22) = x.y
+ def g23(x: R1_23) = x.y
+ def g24(x: R1_24) = x.y
+ def g25(x: R1_25) = x.y
+ def g26(x: R1_26) = x.y
+ def g27(x: R1_27) = x.y
+ def g28(x: R1_28) = x.y
+ def g29(x: R1_29) = x.y
+ def g30(x: R1_30) = x.y
+ def g31(x: R1_31) = x.y
+ def g32(x: R1_32) = x.y
+ def g33(x: R1_33) = x.y
+ def g34(x: R1_34) = x.y
+ def g35(x: R1_35) = x.y
+ def g36(x: R1_36) = x.y
+ def g37(x: R1_37) = x.y
+ def g38(x: R1_38) = x.y
+ def g39(x: R1_39) = x.y
+ def g40(x: R1_40) = x.y
+ def g41(x: R1_41) = x.y
+ def g42(x: R1_42) = x.y
+ def g43(x: R1_43) = x.y
+ def g44(x: R1_44) = x.y
+ def g45(x: R1_45) = x.y
+ def g46(x: R1_46) = x.y
+ def g47(x: R1_47) = x.y
+ def g48(x: R1_48) = x.y
+ def g49(x: R1_49) = x.y
+ def g50(x: R1_50) = x.y
+ def g51(x: R1_51) = x.y
+ def g52(x: R1_52) = x.y
+ def g53(x: R1_53) = x.y
+ def g54(x: R1_54) = x.y
+ def g55(x: R1_55) = x.y
+ def g56(x: R1_56) = x.y
+ def g57(x: R1_57) = x.y
+ def g58(x: R1_58) = x.y
+ def g59(x: R1_59) = x.y
+ def g60(x: R1_60) = x.y
+ def g61(x: R1_61) = x.y
+ def g62(x: R1_62) = x.y
+ def g63(x: R1_63) = x.y
+ def g64(x: R1_64) = x.y
+ def g65(x: R1_65) = x.y
+ def g66(x: R1_66) = x.y
+ def g67(x: R1_67) = x.y
+ def g68(x: R1_68) = x.y
+ def g69(x: R1_69) = x.y
+ def g70(x: R1_70) = x.y
+ def g71(x: R1_71) = x.y
+ def g72(x: R1_72) = x.y
+ def g73(x: R1_73) = x.y
+ def g74(x: R1_74) = x.y
+ def g75(x: R1_75) = x.y
+ def g76(x: R1_76) = x.y
+ def g77(x: R1_77) = x.y
+ def g78(x: R1_78) = x.y
+ def g79(x: R1_79) = x.y
+ def g80(x: R1_80) = x.y
+ def g81(x: R1_81) = x.y
+ def g82(x: R1_82) = x.y
+ def g83(x: R1_83) = x.y
+ def g84(x: R1_84) = x.y
+ def g85(x: R1_85) = x.y
+ def g86(x: R1_86) = x.y
+ def g87(x: R1_87) = x.y
+ def g88(x: R1_88) = x.y
+ def g89(x: R1_89) = x.y
+ def g90(x: R1_90) = x.y
+ def g91(x: R1_91) = x.y
+ def g92(x: R1_92) = x.y
+ def g93(x: R1_93) = x.y
+ def g94(x: R1_94) = x.y
+ def g95(x: R1_95) = x.y
+ def g96(x: R1_96) = x.y
+ def g97(x: R1_97) = x.y
+ def g98(x: R1_98) = x.y
+ def g99(x: R1_99) = x.y
+ def g100(x: R1_100) = x.y
+ def g101(x: R1_101) = x.y
+ def g102(x: R1_102) = x.y
+ def g103(x: R1_103) = x.y
+ def g104(x: R1_104) = x.y
+ def g105(x: R1_105) = x.y
+ def g106(x: R1_106) = x.y
+ def g107(x: R1_107) = x.y
+ def g108(x: R1_108) = x.y
+ def g109(x: R1_109) = x.y
+ def g110(x: R1_110) = x.y
+ def g111(x: R1_111) = x.y
+ def g112(x: R1_112) = x.y
+ def g113(x: R1_113) = x.y
+ def g114(x: R1_114) = x.y
+ def g115(x: R1_115) = x.y
+ def g116(x: R1_116) = x.y
+ def g117(x: R1_117) = x.y
+ def g118(x: R1_118) = x.y
+ def g119(x: R1_119) = x.y
+ def g120(x: R1_120) = x.y
+ def g121(x: R1_121) = x.y
+ def g122(x: R1_122) = x.y
+ def g123(x: R1_123) = x.y
+ def g124(x: R1_124) = x.y
+ def g125(x: R1_125) = x.y
+ def g126(x: R1_126) = x.y
+ def g127(x: R1_127) = x.y
+ def g128(x: R1_128) = x.y
+ def g129(x: R1_129) = x.y
+ def g130(x: R1_130) = x.y
+ def g131(x: R1_131) = x.y
+ def g132(x: R1_132) = x.y
+ def g133(x: R1_133) = x.y
+ def g134(x: R1_134) = x.y
+ def g135(x: R1_135) = x.y
+ def g136(x: R1_136) = x.y
+ def g137(x: R1_137) = x.y
+ def g138(x: R1_138) = x.y
+ def g139(x: R1_139) = x.y
+ def g140(x: R1_140) = x.y
+ def g141(x: R1_141) = x.y
+ def g142(x: R1_142) = x.y
+ def g143(x: R1_143) = x.y
+ def h0(x: R2_0) = x.y
+ def h1(x: R2_1) = x.y
+ def h2(x: R2_2) = x.y
+ def h3(x: R2_3) = x.y
+ def h4(x: R2_4) = x.y
+ def h5(x: R2_5) = x.y
+ def h6(x: R2_6) = x.y
+ def h7(x: R2_7) = x.y
+ def h8(x: R2_8) = x.y
+ def h9(x: R2_9) = x.y
+ def h10(x: R2_10) = x.y
+ def h11(x: R2_11) = x.y
+ def h12(x: R2_12) = x.y
+ def h13(x: R2_13) = x.y
+ def h14(x: R2_14) = x.y
+ def h15(x: R2_15) = x.y
+ def h16(x: R2_16) = x.y
+ def h17(x: R2_17) = x.y
+ def h18(x: R2_18) = x.y
+ def h19(x: R2_19) = x.y
+ def h20(x: R2_20) = x.y
+ def h21(x: R2_21) = x.y
+ def h22(x: R2_22) = x.y
+ def h23(x: R2_23) = x.y
+ def h24(x: R2_24) = x.y
+ def h25(x: R2_25) = x.y
+ def h26(x: R2_26) = x.y
+ def h27(x: R2_27) = x.y
+ def h28(x: R2_28) = x.y
+ def h29(x: R2_29) = x.y
+ def h30(x: R2_30) = x.y
+ def h31(x: R2_31) = x.y
+ def h32(x: R2_32) = x.y
+ def h33(x: R2_33) = x.y
+ def h34(x: R2_34) = x.y
+ def h35(x: R2_35) = x.y
+ def h36(x: R2_36) = x.y
+ def h37(x: R2_37) = x.y
+ def h38(x: R2_38) = x.y
+ def h39(x: R2_39) = x.y
+ def h40(x: R2_40) = x.y
+ def h41(x: R2_41) = x.y
+ def h42(x: R2_42) = x.y
+ def h43(x: R2_43) = x.y
+ def h44(x: R2_44) = x.y
+ def h45(x: R2_45) = x.y
+ def h46(x: R2_46) = x.y
+ def h47(x: R2_47) = x.y
+ def h48(x: R2_48) = x.y
+ def h49(x: R2_49) = x.y
+ def h50(x: R2_50) = x.y
+ def h51(x: R2_51) = x.y
+ def h52(x: R2_52) = x.y
+ def h53(x: R2_53) = x.y
+ def h54(x: R2_54) = x.y
+ def h55(x: R2_55) = x.y
+ def h56(x: R2_56) = x.y
+ def h57(x: R2_57) = x.y
+ def h58(x: R2_58) = x.y
+ def h59(x: R2_59) = x.y
+ def h60(x: R2_60) = x.y
+ def h61(x: R2_61) = x.y
+ def h62(x: R2_62) = x.y
+ def h63(x: R2_63) = x.y
+ def h64(x: R2_64) = x.y
+ def h65(x: R2_65) = x.y
+ def h66(x: R2_66) = x.y
+ def h67(x: R2_67) = x.y
+ def h68(x: R2_68) = x.y
+ def h69(x: R2_69) = x.y
+ def h70(x: R2_70) = x.y
+ def h71(x: R2_71) = x.y
+ def h72(x: R2_72) = x.y
+ def h73(x: R2_73) = x.y
+ def h74(x: R2_74) = x.y
+ def h75(x: R2_75) = x.y
+ def h76(x: R2_76) = x.y
+ def h77(x: R2_77) = x.y
+ def h78(x: R2_78) = x.y
+ def h79(x: R2_79) = x.y
+ def h80(x: R2_80) = x.y
+ def h81(x: R2_81) = x.y
+ def h82(x: R2_82) = x.y
+ def h83(x: R2_83) = x.y
+ def h84(x: R2_84) = x.y
+ def h85(x: R2_85) = x.y
+ def h86(x: R2_86) = x.y
+ def h87(x: R2_87) = x.y
+ def h88(x: R2_88) = x.y
+ def h89(x: R2_89) = x.y
+ def h90(x: R2_90) = x.y
+ def h91(x: R2_91) = x.y
+ def h92(x: R2_92) = x.y
+ def h93(x: R2_93) = x.y
+ def h94(x: R2_94) = x.y
+ def h95(x: R2_95) = x.y
+ def h96(x: R2_96) = x.y
+ def h97(x: R2_97) = x.y
+ def h98(x: R2_98) = x.y
+ def h99(x: R2_99) = x.y
+ def h100(x: R2_100) = x.y
+ def h101(x: R2_101) = x.y
+ def h102(x: R2_102) = x.y
+ def h103(x: R2_103) = x.y
+ def h104(x: R2_104) = x.y
+ def h105(x: R2_105) = x.y
+ def h106(x: R2_106) = x.y
+ def h107(x: R2_107) = x.y
+ def h108(x: R2_108) = x.y
+ def h109(x: R2_109) = x.y
+ def h110(x: R2_110) = x.y
+ def h111(x: R2_111) = x.y
+ def h112(x: R2_112) = x.y
+ def h113(x: R2_113) = x.y
+ def h114(x: R2_114) = x.y
+ def h115(x: R2_115) = x.y
+ def h116(x: R2_116) = x.y
+ def h117(x: R2_117) = x.y
+ def h118(x: R2_118) = x.y
+ def h119(x: R2_119) = x.y
+ def h120(x: R2_120) = x.y
+ def h121(x: R2_121) = x.y
+ def h122(x: R2_122) = x.y
+ def h123(x: R2_123) = x.y
+ def h124(x: R2_124) = x.y
+ def h125(x: R2_125) = x.y
+ def h126(x: R2_126) = x.y
+ def h127(x: R2_127) = x.y
+ def h128(x: R2_128) = x.y
+ def h129(x: R2_129) = x.y
+ def h130(x: R2_130) = x.y
+ def h131(x: R2_131) = x.y
+ def h132(x: R2_132) = x.y
+ def h133(x: R2_133) = x.y
+ def h134(x: R2_134) = x.y
+ def h135(x: R2_135) = x.y
+ def h136(x: R2_136) = x.y
+ def h137(x: R2_137) = x.y
+ def h138(x: R2_138) = x.y
+ def h139(x: R2_139) = x.y
+ def h140(x: R2_140) = x.y
+ def h141(x: R2_141) = x.y
+ def h142(x: R2_142) = x.y
+ def h143(x: R2_143) = x.y
+ lazy val fcalls = List(
+ f0,
+ f1,
+ f2,
+ f3,
+ f4,
+ f5,
+ f6,
+ f7,
+ f8,
+ f9,
+ f10,
+ f11,
+ f12,
+ f13,
+ f14,
+ f15,
+ f16,
+ f17,
+ f18,
+ f19,
+ f20,
+ f21,
+ f22,
+ f23,
+ f24,
+ f25,
+ f26,
+ f27,
+ f28,
+ f29,
+ f30,
+ f31,
+ f32,
+ f33,
+ f34,
+ f35,
+ f36,
+ f37,
+ f38,
+ f39,
+ f40,
+ f41,
+ f42,
+ f43,
+ f44,
+ f45,
+ f46,
+ f47,
+ f48,
+ f49,
+ f50,
+ f51,
+ f52,
+ f53,
+ f54,
+ f55,
+ f56,
+ f57,
+ f58,
+ f59,
+ f60,
+ f61,
+ f62,
+ f63,
+ f64,
+ f65,
+ f66,
+ f67,
+ f68,
+ f69,
+ f70,
+ f71,
+ f72,
+ f73,
+ f74,
+ f75,
+ f76,
+ f77,
+ f78,
+ f79,
+ f80,
+ f81,
+ f82,
+ f83,
+ f84,
+ f85,
+ f86,
+ f87,
+ f88,
+ f89,
+ f90,
+ f91,
+ f92,
+ f93,
+ f94,
+ f95,
+ f96,
+ f97,
+ f98,
+ f99,
+ f100,
+ f101,
+ f102,
+ f103,
+ f104,
+ f105,
+ f106,
+ f107,
+ f108,
+ f109,
+ f110,
+ f111,
+ f112,
+ f113,
+ f114,
+ f115,
+ f116,
+ f117,
+ f118,
+ f119,
+ f120,
+ f121,
+ f122,
+ f123,
+ f124,
+ f125,
+ f126,
+ f127,
+ f128,
+ f129,
+ f130,
+ f131,
+ f132,
+ f133,
+ f134,
+ f135,
+ f136,
+ f137,
+ f138,
+ f139,
+ f140,
+ f141,
+ f142,
+ f143
+ )
+
+ def main(args: Array[String]) {
+ sshow("Direct Calls", fcalls collect { case (false, n) => n })
+ sshow("Reflective Calls", fcalls collect { case (true, n) => n })
+ // For a good time try printing this - have to fix bugs in
+ // reflection before that's going to be a good idea
+ // println(typeOf[Test.type].typeSymbol.asClass.typeSignature)
+ }
+}
diff --git a/test/files/run/t3488.check b/test/files/run/t3488.check
index 0d66ea1aee..314dfc7838 100644
--- a/test/files/run/t3488.check
+++ b/test/files/run/t3488.check
@@ -1,2 +1,8 @@
+t3488.scala:4: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ println(foo { val List(_*)=List(0); 1 } ())
+ ^
+t3488.scala:5: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ println(foo { val List(_*)=List(0); 1 } (1))
+ ^
0
1
diff --git a/test/files/run/t3507-new.scala b/test/files/run/t3507-new.scala
index f045755b8f..bd16849e8a 100644
--- a/test/files/run/t3507-new.scala
+++ b/test/files/run/t3507-new.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ existentials }
import scala.reflect.runtime.universe._
class A {
@@ -14,4 +16,4 @@ object Test extends App {
def mani[T: TypeTag](x: T) = println(typeOf[T])
mani/*[object _1.b.c]*/(c) // kaboom in manifestOfType / TreeGen.mkAttributedQualifier
// --> _1 is not in scope here
-}
\ No newline at end of file
+}
diff --git a/test/files/run/t3529.scala b/test/files/run/t3529.scala
index bb82424bf6..a5977d0a6c 100644
--- a/test/files/run/t3529.scala
+++ b/test/files/run/t3529.scala
@@ -1,3 +1,4 @@
+import scala.language.postfixOps
object Test {
def main(args: Array[String]): Unit = {
assert(1 to 10 drop 10 isEmpty)
diff --git a/test/files/run/t3651.scala b/test/files/run/t3651.scala
index 49ae173249..3a6dda0de3 100644
--- a/test/files/run/t3651.scala
+++ b/test/files/run/t3651.scala
@@ -5,6 +5,6 @@ class LongKlass( override val a: Long ) extends Klass[Long](a)
object Test {
def main(args: Array[String]) {
val lk = new LongKlass(10)
- lk.a
+ val a = lk.a
}
}
diff --git a/test/files/run/t3705.scala b/test/files/run/t3705.scala
index fcc020f28c..3ebf6fc95d 100644
--- a/test/files/run/t3705.scala
+++ b/test/files/run/t3705.scala
@@ -2,16 +2,16 @@
import scala.xml._
object Test {
+ // guard caused verifyerror in oldpatmat
def updateNodes(ns: Seq[Node]): Seq[Node] =
for(subnode <- ns) yield subnode match {
case <d>{_}</d> if true => <d>abc</d>
case Elem(prefix, label, attribs, scope, children @ _*) =>
- Elem(prefix, label, attribs, scope, updateNodes(children) : _*)
+ Elem(prefix, label, attribs, scope, minimizeEmpty = true, updateNodes(children) : _*)
case other => other
}
def main(args: Array[String]): Unit = {
updateNodes(<b />)
-
}
}
diff --git a/test/files/run/t3758-old.scala b/test/files/run/t3758-old.scala
index f00254afee..d5e4a6cc1c 100644
--- a/test/files/run/t3758-old.scala
+++ b/test/files/run/t3758-old.scala
@@ -1,3 +1,5 @@
+
+@deprecated("Suppress warnings", since="2.11")
object Test {
def main(args: Array[String]): Unit = {
assert(classManifest[Array[String]].typeArguments contains classManifest[String])
@@ -7,4 +9,4 @@ object Test {
assert(manifest[Array[Int]].typeArguments contains manifest[Int])
assert(manifest[Array[Float]].typeArguments contains manifest[Float])
}
-}
\ No newline at end of file
+}
diff --git a/test/files/run/t3855.scala b/test/files/run/t3855.scala
index e55714201f..d8029e16f0 100644
--- a/test/files/run/t3855.scala
+++ b/test/files/run/t3855.scala
@@ -3,11 +3,11 @@ object Test {
def closure[A](f: () => A) = f()
def f1(s: String) = {
- var n = try { s.toInt } catch { case _ => 1 }
+ var n = try { s.toInt } catch { case _: Throwable => 1 }
byval(n)
}
def f2(s: String) = {
- var n = try { s.toInt } catch { case _ => 1 }
+ var n = try { s.toInt } catch { case _: Throwable => 1 }
closure(() => n)
}
diff --git a/test/files/run/t3888.check b/test/files/run/t3888.check
new file mode 100644
index 0000000000..844ca54682
--- /dev/null
+++ b/test/files/run/t3888.check
@@ -0,0 +1 @@
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
diff --git a/test/files/run/t3888.scala b/test/files/run/t3888.scala
index e0f1453b1d..1914072599 100644
--- a/test/files/run/t3888.scala
+++ b/test/files/run/t3888.scala
@@ -1,3 +1,6 @@
+
+// in a match, which notion of equals prevails?
+// extending Tuple doesn't seem to be at issue here.
object Test {
val T1 = new P
@@ -23,4 +26,4 @@ object Test {
class P extends Pair(1, 1) {
override def equals(x: Any) = true
-}
\ No newline at end of file
+}
diff --git a/test/files/run/t3935.scala b/test/files/run/t3935.scala
index c66b1b0599..fa1bbf8d3f 100644
--- a/test/files/run/t3935.scala
+++ b/test/files/run/t3935.scala
@@ -9,7 +9,7 @@ object Test {
try {
assert(q.front != null)
} catch {
- case _ =>
+ case _: Throwable =>
}
}
}
diff --git a/test/files/run/t3964.scala b/test/files/run/t3964.scala
index 80ba361a20..68ff9a44c4 100644
--- a/test/files/run/t3964.scala
+++ b/test/files/run/t3964.scala
@@ -1,3 +1,6 @@
+
+import scala.language.implicitConversions
+
object Test {
class Base
object Bob extends Base
diff --git a/test/files/run/t4023.scala b/test/files/run/t4023.scala
index 4846fa31b4..38190cfa5c 100644
--- a/test/files/run/t4023.scala
+++ b/test/files/run/t4023.scala
@@ -7,17 +7,28 @@ object Test {
object B5 extends B1
private object B6 extends B2
- val valuesTry1 = this.getClass.getDeclaredClasses
- val valuesTry2 = C.getClass.getDeclaredClasses
- val valuesTry3 = getClass.getDeclaredClasses
+ val classes1 = this.getClass.getDeclaredClasses
+ val classes2 = C.getClass .getDeclaredClasses
+ val classes3 = getClass .getDeclaredClasses
+ }
+
+ // sortBy(_.getName) introduces additional classes which we don't want to see in C,
+ // so we call sortBy outside of C.
+ object TestHelper {
+ val valuesTry1 = C.classes1.sortBy(_.getName)
+ val valuesTry2 = C.classes2.sortBy(_.getName)
+ val valuesTry3 = C.classes3.sortBy(_.getName)
}
def main(args: Array[String]) {
- println("Try 1: (" + C.valuesTry1.length + " classes)")
- C.valuesTry1.foreach(println)
- println("Try 2: (" + C.valuesTry2.length + " classes)")
- C.valuesTry2.foreach(println)
- println("Try 3: (" + C.valuesTry3.length + " classes)")
- C.valuesTry3.foreach(println)
+ println("Try 1: (" + TestHelper.valuesTry1.length + " classes)")
+ TestHelper.valuesTry1.foreach(println)
+ println("Try 2: (" + TestHelper.valuesTry2.length + " classes)")
+ TestHelper.valuesTry2.foreach(println)
+ println("Try 3: (" + TestHelper.valuesTry3.length + " classes)")
+ TestHelper.valuesTry3.foreach(println)
}
-}
\ No newline at end of file
+
+
+}
+
diff --git a/test/files/run/t4047.check b/test/files/run/t4047.check
index 2a942a70e0..3c41e6e244 100644
--- a/test/files/run/t4047.check
+++ b/test/files/run/t4047.check
@@ -1,3 +1,15 @@
+t4047.scala:23: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ a.foo
+ ^
+t4047.scala:24: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ a.foo
+ ^
+t4047.scala:26: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ b.foo
+ ^
+t4047.scala:27: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ b.foo
+ ^
Unit: called A.foo
Unit: called B.foo
Unit: called C.foo
diff --git a/test/files/run/t4072.scala b/test/files/run/t4072.scala
index 872072a4ce..bff2e0c7d2 100644
--- a/test/files/run/t4072.scala
+++ b/test/files/run/t4072.scala
@@ -1,5 +1,7 @@
import scala.tools.nsc._
+import scala.language.{ reflectiveCalls }
+
object Test {
class DryRun {
val compiler = new Global(new Settings()) {
diff --git a/test/files/run/t4080.scala b/test/files/run/t4080.scala
index 92740ed776..1d1418847d 100644
--- a/test/files/run/t4080.scala
+++ b/test/files/run/t4080.scala
@@ -1,4 +1,5 @@
import scala.collection.mutable.LinkedList
+import java.util.NoSuchElementException
object Test {
def main(args: Array[String]) {
@@ -6,7 +7,7 @@ object Test {
ll.insert(LinkedList(0))
println(ll)
val ll2 = LinkedList[Int]()
- try println(ll2.head)
- catch { case _ => () }
+ try println("Empty head? " + ll2.head)
+ catch { case _: NoSuchElementException => () }
}
}
diff --git a/test/files/run/t4148.scala b/test/files/run/t4148.scala
index 0739403594..6de0c8fe7a 100644
--- a/test/files/run/t4148.scala
+++ b/test/files/run/t4148.scala
@@ -1,7 +1,7 @@
object Test {
- val x1 = try { "aaa".asInstanceOf[Int] } catch { case _ => "cce1" }
- val x2 = try { (5: Any).asInstanceOf[Int] } catch { case _ => "cce2" }
- val x3 = try { (new java.lang.Short(100.toShort).asInstanceOf[Int]) } catch { case _ => "cce3" }
+ val x1 = try { "aaa".asInstanceOf[Int] } catch { case _: Throwable => "cce1" }
+ val x2 = try { (5: Any).asInstanceOf[Int] } catch { case _: Throwable => "cce2" }
+ val x3 = try { (new java.lang.Short(100.toShort).asInstanceOf[Int]) } catch { case _: Throwable => "cce3" }
def main(args: Array[String]): Unit = {
List(x1, x2, x3) foreach println
diff --git a/test/files/run/t4171.scala b/test/files/run/t4171.scala
index fba2fb5ed6..7f6dfd48ce 100644
--- a/test/files/run/t4171.scala
+++ b/test/files/run/t4171.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ reflectiveCalls }
+
object Test {
val c = { class C; new C { def foo = 1 } }
val a = { class B { def bar = 5 }; class C extends B; new C }
diff --git a/test/files/run/t4398.scala b/test/files/run/t4398.scala
index 1d57eb688d..87dc870afc 100644
--- a/test/files/run/t4398.scala
+++ b/test/files/run/t4398.scala
@@ -1,5 +1,7 @@
+import scala.language.{ postfixOps }
+
object Test {
def main(args: Array[String]) {
val x = 1 to 10 toSet
diff --git a/test/files/run/t4560.scala b/test/files/run/t4560.scala
index 9979199067..ee657e47eb 100644
--- a/test/files/run/t4560.scala
+++ b/test/files/run/t4560.scala
@@ -7,6 +7,9 @@
// TEST 1
// self-type is other trait
+
+import scala.language.{ reflectiveCalls }
+
trait Aa
trait Ab
diff --git a/test/files/run/t4660.scala b/test/files/run/t4660.scala
index e57bb4bf25..9aac10ddfd 100644
--- a/test/files/run/t4660.scala
+++ b/test/files/run/t4660.scala
@@ -3,7 +3,7 @@ object Test {
val traversable = 1 to 20 map (_.toString)
def normalize(m: Map[Char, Traversable[String]]) = m.map { case (k,v) => (k, v.toList) }
- val groupedFromView = (traversable view).groupBy(_(0))
+ val groupedFromView = traversable.view.groupBy(_(0))
val groupedFromStrict = traversable.groupBy(_(0))
assert(normalize(groupedFromView) == normalize(groupedFromStrict))
diff --git a/test/files/run/t4680.check b/test/files/run/t4680.check
index b5cfc651f2..b2e5209dc5 100644
--- a/test/files/run/t4680.check
+++ b/test/files/run/t4680.check
@@ -1,3 +1,9 @@
+t4680.scala:51: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ new C { 5 }
+ ^
+t4680.scala:69: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ new { val x = 5 } with E() { 5 }
+ ^
// new C { }
diff --git a/test/files/run/t4729/S_2.scala b/test/files/run/t4729/S_2.scala
index a80afb0257..f823433ded 100644
--- a/test/files/run/t4729/S_2.scala
+++ b/test/files/run/t4729/S_2.scala
@@ -1,3 +1,4 @@
+import scala.language.reflectiveCalls
// Scala class:
class ScalaVarArgs extends J_1 {
// -- no problem on overriding it using ordinary class
diff --git a/test/files/run/t4766.scala b/test/files/run/t4766.scala
index c2a864ddb2..d67431f50e 100644
--- a/test/files/run/t4766.scala
+++ b/test/files/run/t4766.scala
@@ -1,3 +1,7 @@
+
+import scala.language.postfixOps
+import scala.language.reflectiveCalls
+
object Test extends App {
val x = new {
def > = 1
diff --git a/test/files/run/t4777.scala b/test/files/run/t4777.scala
index 4a811d3b9a..6c7b856e39 100644
--- a/test/files/run/t4777.scala
+++ b/test/files/run/t4777.scala
@@ -3,6 +3,6 @@ class DefaultsTest(x: Int = 25) extends A(28)
object DefaultsTest extends DefaultsTest(12)
object Test extends App {
- println(new DefaultsTest() a)
- println(DefaultsTest a)
+ println(new DefaultsTest().a)
+ println(DefaultsTest.a)
}
diff --git a/test/files/run/t4794.scala b/test/files/run/t4794.scala
index afe89fa429..720906f507 100644
--- a/test/files/run/t4794.scala
+++ b/test/files/run/t4794.scala
@@ -7,6 +7,7 @@ class Arr[@specialized A](val arr: Array[A]) {
object Test {
def main(args: Array[String]): Unit = {
- println(classOf[Arr[_]].getMethods filter (_.getName contains "quux") size) // expect 10, not 1
+ def quuxae = classOf[Arr[_]].getMethods filter (_.getName contains "quux")
+ println(quuxae.size) // expect 10, not 1
}
}
diff --git a/test/files/run/t4929.scala b/test/files/run/t4929.scala
index 3208cd1b09..1b0e8672d5 100644
--- a/test/files/run/t4929.scala
+++ b/test/files/run/t4929.scala
@@ -2,6 +2,7 @@ import scala.util.parsing.json._
import java.util.concurrent._
import collection.JavaConversions._
+@deprecated("Suppress warnings", since="2.11")
object Test extends App {
val LIMIT = 2000
diff --git a/test/files/run/t498.scala b/test/files/run/t498.scala
index b4ede951f0..5c10e6630f 100644
--- a/test/files/run/t498.scala
+++ b/test/files/run/t498.scala
@@ -1,3 +1,6 @@
+
+import scala.language.postfixOps
+
object Test extends App {
// the function passed to flatMap produces lots of empty streams, but this should not overflow the stack
val res = Stream.from(1).flatMap(i => if (i < 3000) Stream.empty else List(1))
diff --git a/test/files/run/t5053.scala b/test/files/run/t5053.scala
index e46dad5ac6..233edf6780 100644
--- a/test/files/run/t5053.scala
+++ b/test/files/run/t5053.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ existentials }
+
object Test extends App {
{
val (left, right) = Seq((1, "a"), (1, "a"), (1, "a"), (3, "c")).view.unzip
diff --git a/test/files/run/t5080.scala b/test/files/run/t5080.scala
index ce72d13a54..acb6167f46 100644
--- a/test/files/run/t5080.scala
+++ b/test/files/run/t5080.scala
@@ -1,3 +1,7 @@
+
+import scala.language.implicitConversions
+import scala.language.reflectiveCalls
+
object Test extends App {
abstract class Value {
diff --git a/test/files/run/t5224.check b/test/files/run/t5224.check
index e15c1c90eb..b11480acdf 100644
--- a/test/files/run/t5224.check
+++ b/test/files/run/t5224.check
@@ -1,3 +1,8 @@
+t5224.scala:3: warning: Implementation restriction: subclassing Classfile does not
+make your annotation visible at runtime. If that is what
+you want, you must write the annotation class in Java.
+class Foo(bar: String) extends annotation.ClassfileAnnotation
+ ^
{
@new Foo(bar = "qwe") class C extends AnyRef {
def <init>() = {
diff --git a/test/files/run/t5277_1.scala b/test/files/run/t5277_1.scala
index a2d546579d..65232967c2 100644
--- a/test/files/run/t5277_1.scala
+++ b/test/files/run/t5277_1.scala
@@ -1,6 +1,7 @@
import scala.reflect.runtime.universe._
import scala.tools.reflect.Eval
+import scala.language.{ implicitConversions, postfixOps }
object Test extends App {
reify {
def fact(n: Int): BigInt =
@@ -12,4 +13,4 @@ object Test extends App {
println("10! = " + (10!))
}.eval
-} \ No newline at end of file
+}
diff --git a/test/files/run/t5284b.check b/test/files/run/t5284b.check
index 98d9bcb75a..71426ad0b7 100644
--- a/test/files/run/t5284b.check
+++ b/test/files/run/t5284b.check
@@ -1 +1,4 @@
+t5284b.scala:27: warning: type S is unused or used in non-specializable positions.
+ def bar[@specialized(Int) W <: T, @specialized(Int) S](w: W) = id(w)
+ ^
17
diff --git a/test/files/run/t5284c.check b/test/files/run/t5284c.check
index 00750edc07..cf578ad102 100644
--- a/test/files/run/t5284c.check
+++ b/test/files/run/t5284c.check
@@ -1 +1,4 @@
+t5284c.scala:29: warning: type W is unused or used in non-specializable positions.
+ def bar[@specialized(Int) W <: T](ws: List[W]) = len(ws)
+ ^
3
diff --git a/test/files/run/t5313.scala b/test/files/run/t5313.scala
index 7da8726a1f..febfd9c3ed 100644
--- a/test/files/run/t5313.scala
+++ b/test/files/run/t5313.scala
@@ -7,7 +7,7 @@ object Test extends IcodeTest {
override def code =
"""class Foo {
- def randomBoolean = util.Random.nextInt % 2 == 0
+ def randomBoolean = scala.util.Random.nextInt % 2 == 0
def bar = {
var kept1 = new Object
val result = new java.lang.ref.WeakReference(kept1)
diff --git a/test/files/run/t5353.scala b/test/files/run/t5353.scala
deleted file mode 100644
index 5208fe527f..0000000000
--- a/test/files/run/t5353.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-object Test extends App {
- def f(x: Boolean) = if (x) Array("abc") else Array()
- try {
- println(f(true).length)
- println(f(false).length)
- } catch {
- case ex: Throwable => println(ex.getMessage)
- }
-}
diff --git a/test/files/run/t5356.scala b/test/files/run/t5356.scala
index ec17e036ad..dabb9ef855 100644
--- a/test/files/run/t5356.scala
+++ b/test/files/run/t5356.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ reflectiveCalls }
object Test {
def f(x: Any { def toInt: Int }) = println(x.toInt + " " + x.getClass.getName)
diff --git a/test/files/run/t5375.check b/test/files/run/t5375.check
index 7d3002ffda..b1a57eeeec 100644
--- a/test/files/run/t5375.check
+++ b/test/files/run/t5375.check
@@ -1 +1 @@
-Composite throwable \ No newline at end of file
+Runtime exception
diff --git a/test/files/run/t5375.scala b/test/files/run/t5375.scala
index e4b329deae..826ecd841e 100644
--- a/test/files/run/t5375.scala
+++ b/test/files/run/t5375.scala
@@ -1,19 +1,8 @@
-
-
-
-import collection.parallel.CompositeThrowable
-
-
-
-object Test {
-
- def main(args: Array[String]) {
- val foos = (1 to 1000) toSeq;
- try {
- foos.par.map(i => if (i % 37 == 0) sys.error("i div 37") else i)
- } catch {
- case CompositeThrowable(thr) => println("Composite throwable")
- }
+object Test extends App {
+ val foos = (1 to 1000).toSeq
+ try
+ foos.par.map(i => if (i % 37 == 0) sys.error("i div 37") else i)
+ catch {
+ case ex: RuntimeException => println("Runtime exception")
}
-
}
diff --git a/test/files/run/t5380.check b/test/files/run/t5380.check
new file mode 100644
index 0000000000..731a798301
--- /dev/null
+++ b/test/files/run/t5380.check
@@ -0,0 +1,9 @@
+t5380.scala:3: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ val f = () => return try { 1 } catch { case _: Throwable => 0 }
+ ^
+t5380.scala:3: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ val f = () => return try { 1 } catch { case _: Throwable => 0 }
+ ^
+t5380.scala:3: warning: enclosing method main has result type Unit: return value discarded
+ val f = () => return try { 1 } catch { case _: Throwable => 0 }
+ ^
diff --git a/test/files/run/t5380.scala b/test/files/run/t5380.scala
index 6083161a9b..66d12a0ca6 100644
--- a/test/files/run/t5380.scala
+++ b/test/files/run/t5380.scala
@@ -1,6 +1,6 @@
object Test {
def main(args: Array[String]) {
- val f = () => return try { 1 } catch { case _ => 0 }
+ val f = () => return try { 1 } catch { case _: Throwable => 0 }
f()
}
}
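This hunk, like several below (t5629b, t5942, t6863), tightens bare catch-alls: from 2.10 on, an untyped `case _ =>` inside a catch draws a warning because it also intercepts control-flow throwables. Where a genuine catch-all is wanted outside a test like this, the usual replacement is scala.util.control.NonFatal; a minimal sketch (parsePort and its fallback value are invented):

    import scala.util.control.NonFatal

    object CatchDemo extends App {
      def parsePort(s: String): Int =
        try s.toInt
        catch {
          // NonFatal lets VirtualMachineError, InterruptedException and
          // ControlThrowable propagate, unlike a bare `case _: Throwable`.
          case NonFatal(_) => 8080
        }

      println(parsePort("9000")) // 9000
      println(parsePort("oops")) // 8080
    }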
diff --git a/test/files/run/t5428.scala b/test/files/run/t5428.scala
index 106bb7fc31..fb58cbbe24 100644
--- a/test/files/run/t5428.scala
+++ b/test/files/run/t5428.scala
@@ -23,7 +23,7 @@ object Test {
println(a)
- a pop
+ a.pop
}
}
diff --git a/test/files/run/t5568.flags b/test/files/run/t5568.flags
new file mode 100644
index 0000000000..ad51758c39
--- /dev/null
+++ b/test/files/run/t5568.flags
@@ -0,0 +1 @@
+-nowarn
diff --git a/test/files/run/t5603.check b/test/files/run/t5603.check
index 3b2eb55313..188f39ff82 100644
--- a/test/files/run/t5603.check
+++ b/test/files/run/t5603.check
@@ -1,4 +1,4 @@
-[[syntax trees at end of parser]] // newSource1
+[[syntax trees at end of parser]] // newSource1.scala
[0:241]package [0:0]<empty> {
[0:82]abstract trait Greeting extends [15:82][83]scala.AnyRef {
[15]def $init$() = [15]{
diff --git a/test/files/run/t5629b.scala b/test/files/run/t5629b.scala
index 6c908081b9..adb9ca5498 100644
--- a/test/files/run/t5629b.scala
+++ b/test/files/run/t5629b.scala
@@ -33,7 +33,7 @@ object Test extends App {
val pf = newPF(1)
println("=== pf(1):")
- try { pf(1) } catch { case x => println(x) }
+ try { pf(1) } catch { case x: Throwable => println(x) }
println("=== pf(42):")
pf(42)
println("=== done")
diff --git a/test/files/run/t5699.scala b/test/files/run/t5699.scala
index 5cef67e3b1..ec3b1d26b4 100755
--- a/test/files/run/t5699.scala
+++ b/test/files/run/t5699.scala
@@ -1,5 +1,5 @@
import scala.tools.partest.DirectTest
-import scala.tools.nsc.util.BatchSourceFile
+import scala.reflect.internal.util.BatchSourceFile
object Test extends DirectTest {
// Java code
diff --git a/test/files/run/t576.scala b/test/files/run/t576.scala
index 756a241572..5c8c9a90cb 100644
--- a/test/files/run/t576.scala
+++ b/test/files/run/t576.scala
@@ -1,3 +1,5 @@
+import scala.language.reflectiveCalls
+
class A {
override def equals(other: Any) = other match {
case _: this.type => true
diff --git a/test/files/run/t5881.scala b/test/files/run/t5881.scala
index 01bee29181..04b24b713d 100644
--- a/test/files/run/t5881.scala
+++ b/test/files/run/t5881.scala
@@ -1,6 +1,7 @@
+import scala.language.existentials
import scala.reflect.ClassTag
object Test extends App {
println(implicitly[ClassTag[List[T forSome {type T <: List[T]}]]])
println(implicitly[ClassTag[List[Any]]])
-} \ No newline at end of file
+}
diff --git a/test/files/run/t5912.scala b/test/files/run/t5912.scala
index 7710d04396..9418e946d0 100644
--- a/test/files/run/t5912.scala
+++ b/test/files/run/t5912.scala
@@ -1,6 +1,7 @@
+import scala.language.existentials
object Test extends App{
import scala.reflect.runtime.{currentMirror=>cm}
import scala.tools.reflect._
import scala.reflect.runtime.universe._
val tree = cm.mkToolBox().typeCheck( Literal(Constant("test")) )
-} \ No newline at end of file
+}
diff --git a/test/files/run/t5923a.check b/test/files/run/t5923a.check
new file mode 100644
index 0000000000..7165b734ac
--- /dev/null
+++ b/test/files/run/t5923a.check
@@ -0,0 +1,3 @@
+C(Int)
+C(String)
+C(Nothing)
diff --git a/test/files/run/t5923a/Macros_1.scala b/test/files/run/t5923a/Macros_1.scala
new file mode 100644
index 0000000000..6d21362c4d
--- /dev/null
+++ b/test/files/run/t5923a/Macros_1.scala
@@ -0,0 +1,14 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+case class C[T](t: String)
+object C {
+ implicit def foo[T]: C[T] = macro Macros.impl[T]
+}
+
+object Macros {
+ def impl[T: c.WeakTypeTag](c: Context) = {
+ import c.universe._
+ reify(C[T](c.literal(weakTypeOf[T].toString).splice))
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t5923a/Test_2.scala b/test/files/run/t5923a/Test_2.scala
new file mode 100644
index 0000000000..001ff9aea8
--- /dev/null
+++ b/test/files/run/t5923a/Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ println(implicitly[C[Int]])
+ println(implicitly[C[String]])
+ println(implicitly[C[Nothing]])
+} \ No newline at end of file
diff --git a/test/files/run/t5923b.check b/test/files/run/t5923b.check
new file mode 100644
index 0000000000..d56076f84e
--- /dev/null
+++ b/test/files/run/t5923b.check
@@ -0,0 +1,3 @@
+class [Ljava.lang.Object;
+class [Ljava.lang.Object;
+class [Ljava.lang.Object;
diff --git a/test/files/run/t5923b/Test.scala b/test/files/run/t5923b/Test.scala
new file mode 100644
index 0000000000..7c2627462a
--- /dev/null
+++ b/test/files/run/t5923b/Test.scala
@@ -0,0 +1,7 @@
+object Test extends App {
+ import scala.collection.generic.CanBuildFrom
+ val cbf = implicitly[CanBuildFrom[Nothing, Nothing, Array[Nothing]]]
+ println(cbf().result.getClass)
+ println(new Array[Nothing](0).getClass)
+ println(Array[Nothing]().getClass)
+} \ No newline at end of file
diff --git a/test/files/run/t5942.scala b/test/files/run/t5942.scala
index 44a8be93f6..c90d29e1ca 100644
--- a/test/files/run/t5942.scala
+++ b/test/files/run/t5942.scala
@@ -5,6 +5,6 @@ import scala.tools.reflect._
object Test extends App {
val tb = cm.mkToolBox()
tb.parse("def x = {}")
- try { tb.parse("def x = {") } catch { case _ => }
+ try { tb.parse("def x = {") } catch { case _: Throwable => }
tb.parse("def x = {}")
}
diff --git a/test/files/run/t6011c.check b/test/files/run/t6011c.check
new file mode 100644
index 0000000000..088e6fdaae
--- /dev/null
+++ b/test/files/run/t6011c.check
@@ -0,0 +1,3 @@
+t6011c.scala:11: warning: unreachable code
+ case 1 => 3 // crash
+ ^
diff --git a/test/files/run/t6028.check b/test/files/run/t6028.check
index 2ec639fce2..46974589d3 100644
--- a/test/files/run/t6028.check
+++ b/test/files/run/t6028.check
@@ -1,4 +1,4 @@
-[[syntax trees at end of lambdalift]] // newSource1
+[[syntax trees at end of lambdalift]] // newSource1.scala
package <empty> {
class T extends Object {
<paramaccessor> val classParam: Int = _;
@@ -15,11 +15,11 @@ package <empty> {
}
};
def bar(barParam: Int): Object = {
- @volatile var MethodLocalObject$module: runtime.VolatileObjectRef = new runtime.VolatileObjectRef(null);
+ @volatile var MethodLocalObject$module: runtime.VolatileObjectRef = scala.runtime.VolatileObjectRef.zero();
T.this.MethodLocalObject$1(barParam, MethodLocalObject$module)
};
def tryy(tryyParam: Int): Function0 = {
- var tryyLocal: runtime.IntRef = new runtime.IntRef(0);
+ var tryyLocal: runtime.IntRef = scala.runtime.IntRef.create(0);
{
(new anonymous class $anonfun$tryy$1(T.this, tryyParam, tryyLocal): Function0)
}
diff --git a/test/files/run/t603.scala b/test/files/run/t603.scala
index b8825c933b..54fb8e6974 100644
--- a/test/files/run/t603.scala
+++ b/test/files/run/t603.scala
@@ -1,4 +1,6 @@
object forceDelay {
+ import scala.language.implicitConversions
+
class Susp[+A](lazyValue: => A) extends Function0[A] {
private var func: () => Any = () => lazyValue
private var value: Any = null
diff --git a/test/files/run/t6102.check b/test/files/run/t6102.check
index b6fc4c620b..97e455647b 100644
--- a/test/files/run/t6102.check
+++ b/test/files/run/t6102.check
@@ -1 +1,29 @@
-hello \ No newline at end of file
+[running phase parser on t6102.scala]
+[running phase namer on t6102.scala]
+[running phase packageobjects on t6102.scala]
+[running phase typer on t6102.scala]
+[running phase patmat on t6102.scala]
+[running phase superaccessors on t6102.scala]
+[running phase extmethods on t6102.scala]
+[running phase pickler on t6102.scala]
+[running phase refchecks on t6102.scala]
+[running phase uncurry on t6102.scala]
+[running phase tailcalls on t6102.scala]
+[running phase specialize on t6102.scala]
+[running phase explicitouter on t6102.scala]
+[running phase erasure on t6102.scala]
+[running phase posterasure on t6102.scala]
+[running phase lazyvals on t6102.scala]
+[running phase lambdalift on t6102.scala]
+[running phase constructors on t6102.scala]
+[running phase flatten on t6102.scala]
+[running phase mixin on t6102.scala]
+[running phase cleanup on t6102.scala]
+[running phase icode on t6102.scala]
+[running phase inliner on t6102.scala]
+[running phase inlinehandlers on t6102.scala]
+[running phase closelim on t6102.scala]
+[running phase constopt on t6102.scala]
+[running phase dce on t6102.scala]
+[running phase jvm on icode]
+hello
diff --git a/test/files/run/t6113.scala b/test/files/run/t6113.scala
index 321cae86a3..b77a56029a 100644
--- a/test/files/run/t6113.scala
+++ b/test/files/run/t6113.scala
@@ -1,6 +1,8 @@
+import scala.language.higherKinds
+
trait Foo[C[_]]
object Test extends App {
import scala.reflect.runtime.universe._
println(typeOf[Foo[({type l[X] = (Int, X)})#l]])
-} \ No newline at end of file
+}
diff --git a/test/files/run/t6146b.check b/test/files/run/t6146b.check
index 49ff70697e..2333b9d986 100644
--- a/test/files/run/t6146b.check
+++ b/test/files/run/t6146b.check
@@ -1,3 +1,7 @@
+t6146b.scala:15: warning: match may not be exhaustive.
+It would fail on the following inputs: S2(), S3()
+ def foo(f: F[Int]) = f match { case X.S1 => }
+ ^
Type in expressions to have them evaluated.
Type :help for more information.
diff --git a/test/files/run/t6246.scala b/test/files/run/t6246.scala
index 28765e1adf..2db87aa6d2 100644
--- a/test/files/run/t6246.scala
+++ b/test/files/run/t6246.scala
@@ -1,5 +1,6 @@
import scala.reflect.{ClassTag, classTag}
+@deprecated("Suppress warnings", since="2.11")
object Test extends App {
def testValueClass(tag: ClassTag[_]) {
println(s"runtimeClass = ${tag.runtimeClass}, toString = ${tag.toString}")
@@ -23,4 +24,4 @@ object Test extends App {
testValueClass(ClassTag.Double)
testValueClass(ClassTag.Unit)
testValueClass(ClassTag.Boolean)
-} \ No newline at end of file
+}
diff --git a/test/files/run/t6288.check b/test/files/run/t6288.check
index 4465b6302c..a032a10de6 100644
--- a/test/files/run/t6288.check
+++ b/test/files/run/t6288.check
@@ -1,4 +1,4 @@
-[[syntax trees at end of patmat]] // newSource1
+[[syntax trees at end of patmat]] // newSource1.scala
[7]package [7]<empty> {
[7]object Case3 extends [13][106]scala.AnyRef {
[106]def <init>(): [13]Case3.type = [106]{
diff --git a/test/files/run/t6308.check b/test/files/run/t6308.check
new file mode 100644
index 0000000000..e2577db72a
--- /dev/null
+++ b/test/files/run/t6308.check
@@ -0,0 +1,16 @@
+- Unspecialized type args
+// Specialized
+f1 f1$mIc$sp
+f2 f2$mIc$sp
+f3 f3$mIc$sp
+f4 f4$mIc$sp
+f5 f5$mIc$sp
+
+// Unspecialized type args
+f4(Boolean) f4
+f4(String) f4
+
+// Ideally these would be specialized
+todo1 todo1
+todo2 todo2
+todo3 todo3
diff --git a/test/files/run/t6308.scala b/test/files/run/t6308.scala
new file mode 100644
index 0000000000..bcd89359b0
--- /dev/null
+++ b/test/files/run/t6308.scala
@@ -0,0 +1,41 @@
+import scala.{specialized => sp}
+
+object Test {
+ def caller = new Exception().getStackTrace()(1).getMethodName
+ def f1[@sp(Int) A](a: A, b: Any) = caller
+ def f2[@sp(Int) A, B](a: A, b: String) = caller
+ def f3[B, @sp(Int) A](a: A, b: List[B]) = caller
+ def f4[B, @sp(Int) A](a: A, b: List[(A, B)]) = caller
+
+ def f5[@sp(Int) A, B <: Object](a: A, b: B) = caller
+
+ // `uncurryTreeType` calls a TypeMap on the call to this method and we end up with new
+ // type parameter symbols, which are not found in `TypeEnv.includes(typeEnv(member), env)`
+ // in `specSym`. (One of `uncurry`'s tasks is to expand type aliases in signatures.)
+ type T = Object
+ def todo1[@sp(Int) A, B <: T](a: A, b: String) = caller
+ def todo2[@sp(Int) A, B <: AnyRef](a: A, b: String) = caller
+ def todo3[B <: List[A], @specialized(Int) A](a: A, b: B) = caller
+
+ def main(args: Array[String]) {
+ val s = ""
+ val result =
+ s"""|- Unspecialized type args
+ |// Specialized
+ |f1 ${f1(1,"some ref")}
+ |f2 ${f2(1,"some ref")}
+ |f3 ${f3(1,List("some ref"))}
+ |f4 ${f4(1,Nil)}
+ |f5 ${f5(1,s)}
+ |
+ |// Unspecialized type args
+ |f4(Boolean) ${f4(Boolean,Nil)}
+ |f4(String) ${f4("",Nil)}
+ |
+ |// Ideally these would be specialized
+ |todo1 ${todo1(1,s)}
+ |todo2 ${todo2(1,s)}
+ |todo3 ${todo3(1,List(0))}""".stripMargin
+ println(result)
+ }
+}
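The new t6308 check file records which call sites land on compiler-generated specializations (names such as f1$mIc$sp for the Int variant) and which fall back to the generic method. A small, separate sketch of the same inspection done with Java reflection, in the spirit of t4794 earlier in this patch; Box and get are invented names and the exact mangled suffixes should be treated as illustrative:

    import scala.{specialized => sp}

    class Box[@sp(Int, Double) A] {
      def get(a: A): A = a
    }

    object SpecializationDemo extends App {
      // Besides the erased generic `get`, the class carries specialized variants
      // (typically named along the lines of get$mcI$sp and get$mcD$sp).
      classOf[Box[_]].getMethods.map(_.getName).filter(_.startsWith("get")).sorted.foreach(println)
    }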
diff --git a/test/files/run/t6309.check b/test/files/run/t6309.check
new file mode 100644
index 0000000000..7f8f011eb7
--- /dev/null
+++ b/test/files/run/t6309.check
@@ -0,0 +1 @@
+7
diff --git a/test/files/run/t6309.scala b/test/files/run/t6309.scala
new file mode 100644
index 0000000000..7bbca63c2a
--- /dev/null
+++ b/test/files/run/t6309.scala
@@ -0,0 +1,16 @@
+trait A {
+ def a: Int
+}
+
+object Test {
+ def f(a: Int) = new {
+ //private val b = a
+ private[this] val b = a // crashes, sorry scalac
+ } with A {
+ def a = b
+ }
+
+ def main(args: Array[String]) {
+ println(f(7).a)
+ }
+}
diff --git a/test/files/run/t6329_vanilla.scala b/test/files/run/t6329_vanilla.scala
index f2d843896d..ec84f1f938 100644
--- a/test/files/run/t6329_vanilla.scala
+++ b/test/files/run/t6329_vanilla.scala
@@ -1,5 +1,6 @@
import scala.reflect.classTag
+@deprecated("Suppress warnings", since="2.11")
object Test extends App {
println(classManifest[scala.List[_]])
println(classTag[scala.List[_]])
diff --git a/test/files/run/t6440.check b/test/files/run/t6440.check
index 69c253eab4..806279fb74 100644
--- a/test/files/run/t6440.check
+++ b/test/files/run/t6440.check
@@ -1,4 +1,4 @@
-pos: source-newSource1,line-9,offset=109 bad symbolic reference. A signature in U.class refers to term pack1
+pos: source-newSource1.scala,line-9,offset=109 bad symbolic reference. A signature in U.class refers to term pack1
in package <root> which is not available.
It may be completely missing from the current classpath, or the version on
the classpath might be incompatible with the version used when compiling U.class. ERROR
diff --git a/test/files/run/t6443.scala b/test/files/run/t6443.scala
index 67fe2cab22..3ab8c345fe 100644
--- a/test/files/run/t6443.scala
+++ b/test/files/run/t6443.scala
@@ -1,3 +1,5 @@
+import scala.language.existentials
+
class Base
class Derived extends Base
diff --git a/test/files/run/t6481.check b/test/files/run/t6481.check
new file mode 100644
index 0000000000..7ec29631b1
--- /dev/null
+++ b/test/files/run/t6481.check
@@ -0,0 +1,4 @@
+delayed init
+new foo(1, 2)
+delayed init
+new foo(b = 2, a = 1)
diff --git a/test/files/run/t6481.scala b/test/files/run/t6481.scala
new file mode 100644
index 0000000000..125da3b15a
--- /dev/null
+++ b/test/files/run/t6481.scala
@@ -0,0 +1,13 @@
+abstract class foo(a: Int, b: Int) extends scala.DelayedInit {
+ def delayedInit(x: => Unit) {
+ println("delayed init");
+ x
+ }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ new foo(1, 2) { println("new foo(1, 2)") }
+ new foo(b = 2, a = 1) { println("new foo(b = 2, a = 1)") }
+ }
+}
diff --git a/test/files/run/t6488.check b/test/files/run/t6488.check
deleted file mode 100644
index 35821117c8..0000000000
--- a/test/files/run/t6488.check
+++ /dev/null
@@ -1 +0,0 @@
-Success
diff --git a/test/files/run/t6488.scala b/test/files/run/t6488.scala
index 487614ecfd..e234876fbe 100644
--- a/test/files/run/t6488.scala
+++ b/test/files/run/t6488.scala
@@ -1,11 +1,64 @@
-import sys.process._
+import scala.sys.process._
+import scala.util.Try
+import scala.util.Properties.{ javaHome, javaClassPath }
+import java.io.{ File, IOException }
+import java.util.concurrent.CountDownLatch
+import java.util.concurrent.TimeUnit._
+import java.util.concurrent.atomic._
+
object Test {
+ /*
// Program that prints "Success" if the command was successfully run then destroyed
// It will silently pass if the command "/bin/ls" does not exist
- // It will fail due to the uncatchable exception in t6488 race condition
+ // It will fail due to the uncatchable exception in t6488 race condition,
+ // i.e., if any uncaught exceptions on spawned threads are printed.
def main(args: Array[String]) {
try Process("/bin/ls").run(ProcessLogger { _ => () }).destroy
catch { case _ => () }
println("Success")
}
+ */
+
+ // Show that no uncaught exceptions are thrown on spawned I/O threads
+ // when the process is destroyed. The default handler will print
+ // stack traces in the failing case.
+ def main(args: Array[String]) {
+ if (args.nonEmpty && args(0) == "data")
+ data()
+ else
+ test() // args(0) == "jvm"
+ }
+
+ // fork the data spewer, wait for input, then destroy the process
+ def test() {
+ val f = new File(javaHome, "bin").listFiles.sorted filter (_.getName startsWith "java") find (_.canExecute) getOrElse {
+ // todo signal test runner that test is skipped
+ new File("/bin/ls") // innocuous
+ }
+ //Process(f.getAbsolutePath).run(ProcessLogger { _ => () }).destroy
+ val reading = new CountDownLatch(1)
+ val count = new AtomicInteger
+ def counted = count.get
+ val command = s"${f.getAbsolutePath} -classpath ${javaClassPath} Test data"
+ Try {
+ Process(command) run ProcessLogger { (s: String) =>
+ //Console println s"[[$s]]" // java help
+ count.getAndIncrement
+ reading.countDown
+ Thread.`yield`()
+ }
+ } foreach { (p: Process) =>
+ val ok = reading.await(10, SECONDS)
+ if (!ok) Console println "Timed out waiting for process output!"
+ p.destroy()
+ }
+ //Console println s"Read count $counted lines"
+ }
+
+ // spew something
+ def data() {
+ def filler = "." * 100
+ for (i <- 1 to 1000)
+ Console println s"Outputting data line $i $filler"
+ }
}
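The rewritten t6488 forks a second JVM, waits for its first output line, then destroys it, to show that no uncaught exceptions escape the I/O threads. For readers unfamiliar with the underlying API, a minimal scala.sys.process sketch (the echo commands are placeholders):

    import scala.sys.process._

    object ProcessDemo extends App {
      // Run a command and capture its stdout (throws if the exit code is non-zero).
      val out = Process(Seq("echo", "hello")).!!
      println(out.trim)

      // Run asynchronously, routing each output line through a ProcessLogger,
      // then block on the exit value.
      val p = Process(Seq("echo", "goodbye")).run(ProcessLogger(line => println(s"[out] $line")))
      println(s"exit code: ${p.exitValue()}")
    }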
diff --git a/test/files/run/t6555.check b/test/files/run/t6555.check
index 04117b7c2f..a18a8e8023 100644
--- a/test/files/run/t6555.check
+++ b/test/files/run/t6555.check
@@ -1,4 +1,4 @@
-[[syntax trees at end of specialize]] // newSource1
+[[syntax trees at end of specialize]] // newSource1.scala
package <empty> {
class Foo extends Object {
def <init>(): Foo = {
diff --git a/test/files/run/t657.scala b/test/files/run/t657.scala
index a9726092e7..e76b0292dd 100644
--- a/test/files/run/t657.scala
+++ b/test/files/run/t657.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ implicitConversions }
abstract class BaseList {
type Node <: NodeImpl;
implicit def convertNode(ni : NodeImpl) = ni.asInstanceOf[Node];
diff --git a/test/files/run/t6690.scala b/test/files/run/t6690.scala
index 43ede967a0..15b1817e87 100644
--- a/test/files/run/t6690.scala
+++ b/test/files/run/t6690.scala
@@ -1,5 +1,7 @@
import scala.collection.mutable
+import scala.language.{ reflectiveCalls }
+
object Test extends App {
def last0(ml: mutable.MutableList[Int]) =
ml.asInstanceOf[{def last0: mutable.LinkedList[Int]}].last0
diff --git a/test/files/run/t6715.scala b/test/files/run/t6715.scala
new file mode 100644
index 0000000000..07ff34218a
--- /dev/null
+++ b/test/files/run/t6715.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+
+class A {
+ def $$ = 1
+ def $times = 1
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val memberSet: Set[String] = typeOf[A].members.map{ _.toString }.toSet
+ assert(memberSet contains "method *")
+ assert(memberSet contains "method $$")
+ assert(! (memberSet contains "method"))
+ }
+}
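t6715 depends on the runtime universe decoding JVM-mangled member names, which is why $times shows up as "method *" while $$ stays as-is. The mangling itself is exposed by scala.reflect.NameTransformer; a two-line sketch:

    import scala.reflect.NameTransformer

    object NameManglingDemo extends App {
      println(NameTransformer.encode("*"))      // $times
      println(NameTransformer.decode("$times")) // *
    }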
diff --git a/test/files/run/t6731.scala b/test/files/run/t6731.scala
index 89d212e91e..12357b935e 100644
--- a/test/files/run/t6731.scala
+++ b/test/files/run/t6731.scala
@@ -12,7 +12,7 @@ abstract class MonoDynamic extends Dynamic {
def applyDynamic(name: String)(args: Any*): String = show(this + "." + name + mkArgs(args: _*))
def applyDynamicNamed(name: String)(args: (String, Any)*): String = show(this + "." + name + mkArgs(args: _*))
- override def toString = this.getClass.getName split '.' last
+ override def toString = (this.getClass.getName split '.').last
}
object Mono extends MonoDynamic {
diff --git a/test/files/run/t6793.scala b/test/files/run/t6793.scala
new file mode 100644
index 0000000000..0b1f1619af
--- /dev/null
+++ b/test/files/run/t6793.scala
@@ -0,0 +1,9 @@
+package a { class C1(private[a] val v0: String) }
+package b { class C2(v1: String) extends a.C1(v1) { def foo = v1 } }
+
+object Test extends App {
+ new b.C2("x")
+
+ val c2Fields = classOf[b.C2].getDeclaredFields
+ assert(c2Fields.size == 1, c2Fields.map(_.getName).toList)
+}
diff --git a/test/files/run/t6793b.scala b/test/files/run/t6793b.scala
new file mode 100644
index 0000000000..cb3f2fb2fa
--- /dev/null
+++ b/test/files/run/t6793b.scala
@@ -0,0 +1,11 @@
+package a {
+ class C1(val v0: String)
+ class C2(v1: String) extends a.C1(v1) { def foo = v1 }
+}
+
+object Test extends App {
+ new a.C2("x")
+
+ val c2Fields = classOf[a.C2].getDeclaredFields
+ assert(c2Fields.isEmpty, c2Fields.map(_.getName).mkString(", "))
+}
diff --git a/test/files/run/t6793c.scala b/test/files/run/t6793c.scala
new file mode 100644
index 0000000000..e28c7c81a1
--- /dev/null
+++ b/test/files/run/t6793c.scala
@@ -0,0 +1,11 @@
+package a {
+ class C1(private[a] val v0: String)
+ class C2(v1: String) extends a.C1(v1) { def foo = v1 }
+}
+
+object Test extends App {
+ new a.C2("x").foo
+
+ val c2Fields = classOf[a.C2].getDeclaredFields
+ assert(c2Fields.isEmpty, c2Fields.map(_.getName).toList)
+}
diff --git a/test/files/run/t6863.check b/test/files/run/t6863.check
new file mode 100644
index 0000000000..030cb8b265
--- /dev/null
+++ b/test/files/run/t6863.check
@@ -0,0 +1,12 @@
+t6863.scala:38: warning: comparing values of types Unit and Unit using `==' will always yield true
+ assert({ () => x}.apply == ())
+ ^
+t6863.scala:42: warning: comparing values of types Unit and Unit using `==' will always yield true
+ assert({ () => x}.apply == ())
+ ^
+t6863.scala:46: warning: comparing values of types Unit and Unit using `==' will always yield true
+ assert({ () => x}.apply == ())
+ ^
+t6863.scala:59: warning: comparing values of types Unit and Unit using `==' will always yield true
+ assert({ () => x }.apply == ())
+ ^
diff --git a/test/files/run/t6863.scala b/test/files/run/t6863.scala
index d77adb6af4..c4f3c2c885 100644
--- a/test/files/run/t6863.scala
+++ b/test/files/run/t6863.scala
@@ -51,7 +51,7 @@ object Test {
assert({ () => x }.apply == "42")
}
def tryCatch() = {
- var x = try { "42" } catch { case _ => "43" }
+ var x = try { "42" } catch { case _: Throwable => "43" }
assert({ () => x }.apply == "42")
}
def `if`() = {
@@ -85,7 +85,7 @@ object Test {
def nested() = {
var x = {
val y = 42
- if(true) try "42" catch {case _ => "43"}
+ if(true) try "42" catch {case _: Throwable => "43"}
else "44"
}
assert({ () => x }.apply == "42")
diff --git a/test/files/run/t6900.scala b/test/files/run/t6900.scala
new file mode 100644
index 0000000000..a29d388129
--- /dev/null
+++ b/test/files/run/t6900.scala
@@ -0,0 +1,36 @@
+import annotation.tailrec
+
+trait Universe {
+ type T <: AnyRef
+}
+
+final class Bug {
+ var i = 1
+ def stop() = { i -= 1; i < 0 }
+ // the alias bypasses the fast path in erasures InfoTransformer
+ // predicated on `TypeMap.noChangeToSymbols`
+ type Alias = Any
+
+ @tailrec
+ // So we get two symbols for `universe`, the original on the ValDef
+ // and a clone in the MethodType of `f`.
+ def f(universe: Universe, l: Alias): universe.T = {
+ if (stop()) null.asInstanceOf[universe.T] else f(universe, null)
+ }
+
+ @tailrec
+ def g(universe: Universe)(l: Alias): universe.T = {
+ if (stop()) null.asInstanceOf[universe.T] else g(universe)(l)
+ }
+
+ @tailrec
+ def h(universe: Universe)(l: List[universe.T]): List[universe.T] = {
+ if (stop()) Nil else h(universe)(l)
+ }
+}
+
+object Test extends App {
+ assert(new Bug().f(null, null) == null)
+ assert(new Bug().g(null)(null) == null)
+ assert(new Bug().h(null)(null) == Nil)
+} \ No newline at end of file
diff --git a/test/files/run/t6937.check b/test/files/run/t6937.check
new file mode 100644
index 0000000000..9a1fa4cfaf
--- /dev/null
+++ b/test/files/run/t6937.check
@@ -0,0 +1,26 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{universe=>ru}
+
+scala> import scala.reflect.runtime.{currentMirror => cm}
+import scala.reflect.runtime.{currentMirror=>cm}
+
+scala> import scala.reflect.api.{Universe => ApiUniverse}
+import scala.reflect.api.{Universe=>ApiUniverse}
+
+scala> class A
+defined class A
+
+scala> lazy val apiru = ru: ApiUniverse
+apiru: scala.reflect.api.Universe = <lazy>
+
+scala> apiru.typeTag[A].in(cm)
+res0: reflect.runtime.universe.TypeTag[A] = TypeTag[A]
+
+scala>
+
+scala>
diff --git a/test/files/run/t6937.scala b/test/files/run/t6937.scala
new file mode 100644
index 0000000000..4b30894bf3
--- /dev/null
+++ b/test/files/run/t6937.scala
@@ -0,0 +1,12 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ import scala.reflect.runtime.{universe => ru}
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.reflect.api.{Universe => ApiUniverse}
+ class A
+ lazy val apiru = ru: ApiUniverse
+ apiru.typeTag[A].in(cm)
+ """
+} \ No newline at end of file
diff --git a/test/files/run/t6969.scala b/test/files/run/t6969.scala
index 8cfc28c1e5..c4561b4424 100644
--- a/test/files/run/t6969.scala
+++ b/test/files/run/t6969.scala
@@ -1,3 +1,7 @@
+
+
+import scala.language.{ reflectiveCalls }
+
object Test {
private type Clearable = { def clear(): Unit }
private def choke() = {
diff --git a/test/files/run/t6989/JavaClass_1.java b/test/files/run/t6989/JavaClass_1.java
index eb26a08700..72ec4d6ab6 100644
--- a/test/files/run/t6989/JavaClass_1.java
+++ b/test/files/run/t6989/JavaClass_1.java
@@ -7,6 +7,8 @@ package foo;
// I'm leaving the incorrect results of FromJavaClassCompleters in the check
// file, so that we get notified when something changes there.
+// ^^^ It's not clear what those incorrect results were, but the fix for SI-7359
+// (backport of fix for SI-6548) has probably resolved some of these. OP, please revisit this comment.
class PackagePrivateJavaClass {
private int privateField = 0;
diff --git a/test/files/run/t7047.check b/test/files/run/t7047.check
new file mode 100644
index 0000000000..32bd581094
--- /dev/null
+++ b/test/files/run/t7047.check
@@ -0,0 +1,3 @@
+Test_2.scala:2: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ Macros.foo
+ ^
diff --git a/test/files/run/t7047/Impls_Macros_1.scala b/test/files/run/t7047/Impls_Macros_1.scala
new file mode 100644
index 0000000000..2992e3efe4
--- /dev/null
+++ b/test/files/run/t7047/Impls_Macros_1.scala
@@ -0,0 +1,19 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+class Foo
+
+object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+ try {
+ c.inferImplicitValue(typeOf[Foo], silent = false)
+ c.abort(c.enclosingPosition, "silent=false is not working")
+ } catch {
+ case _: Exception =>
+ }
+ c.literalNull
+ }
+
+ def foo = macro impl
+} \ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-c/Test_2.scala b/test/files/run/t7047/Test_2.scala
index 7dffc5107d..acfddae942 100644
--- a/test/files/run/macro-def-path-dependent-c/Test_2.scala
+++ b/test/files/run/t7047/Test_2.scala
@@ -1,3 +1,3 @@
object Test extends App {
- println("it works")
+ Macros.foo
} \ No newline at end of file
diff --git a/test/files/run/t7088.check b/test/files/run/t7088.check
new file mode 100644
index 0000000000..1191247b6d
--- /dev/null
+++ b/test/files/run/t7088.check
@@ -0,0 +1,2 @@
+1
+2
diff --git a/test/files/run/t7088.scala b/test/files/run/t7088.scala
new file mode 100644
index 0000000000..5f0114b940
--- /dev/null
+++ b/test/files/run/t7088.scala
@@ -0,0 +1,13 @@
+object Test {
+ type Tag[X] = {type Tag = X}
+ type TaggedArray[T] = Array[T] with Tag[Any]
+
+ def method[T: scala.reflect.ClassTag](a: TaggedArray[T], value: T) {
+ a.update(0, value)
+ a foreach println
+ }
+
+ def main(args: Array[String]): Unit = {
+ method(Array(1, 2).asInstanceOf[TaggedArray[Int]], 1)
+ }
+}
diff --git a/test/files/run/t7096.scala b/test/files/run/t7096.scala
index 2a93dcc571..2495102899 100644
--- a/test/files/run/t7096.scala
+++ b/test/files/run/t7096.scala
@@ -1,3 +1,6 @@
+/*
+ * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ */
import scala.tools.partest._
import scala.tools.nsc._
diff --git a/test/files/run/t7120b.scala b/test/files/run/t7120b.scala
index 9f6591aa06..0be4eb70a7 100644
--- a/test/files/run/t7120b.scala
+++ b/test/files/run/t7120b.scala
@@ -1,3 +1,6 @@
+
+import scala.language.higherKinds
+
trait Base[A] { type B = A; }
class C extends Base[String] {
class D {
diff --git a/test/files/run/t7151.check b/test/files/run/t7151.check
new file mode 100644
index 0000000000..d532d9589f
--- /dev/null
+++ b/test/files/run/t7151.check
@@ -0,0 +1,6 @@
+class Test$InnerObject$ isFinal = false
+class Test$InnerCase isFinal = true
+class Test$InnerNonCase isFinal = true
+class TopLevelObject$ isFinal = true
+class TopLevelCase isFinal = true
+class TopLevelNonCase isFinal = true
diff --git a/test/files/run/t7151.scala b/test/files/run/t7151.scala
new file mode 100644
index 0000000000..f6492ba43c
--- /dev/null
+++ b/test/files/run/t7151.scala
@@ -0,0 +1,24 @@
+import java.lang.reflect.Modifier.isFinal
+
+object Test {
+ object InnerObject
+ final case class InnerCase()
+ final class InnerNonCase()
+
+ def main(args: Array[String]) {
+ def checkFinal(clazz: Class[_]) =
+ println(s"${clazz} isFinal = ${isFinal(clazz.getModifiers())}")
+
+ checkFinal(InnerObject.getClass)
+ checkFinal(classOf[InnerCase])
+ checkFinal(classOf[InnerNonCase])
+
+ checkFinal(TopLevelObject.getClass)
+ checkFinal(classOf[TopLevelCase])
+ checkFinal(classOf[TopLevelNonCase])
+ }
+}
+
+object TopLevelObject
+final case class TopLevelCase()
+final case class TopLevelNonCase()
diff --git a/test/files/run/t7157.check b/test/files/run/t7157.check
new file mode 100644
index 0000000000..d00491fd7e
--- /dev/null
+++ b/test/files/run/t7157.check
@@ -0,0 +1 @@
+1
diff --git a/test/files/run/t7157/Impls_Macros_1.scala b/test/files/run/t7157/Impls_Macros_1.scala
new file mode 100644
index 0000000000..ad3d96eb85
--- /dev/null
+++ b/test/files/run/t7157/Impls_Macros_1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Macros {
+ object AImpl {
+ def a(ctx: Context)(args: ctx.Expr[Any]*): ctx.Expr[Unit] = {
+ import ctx.universe._
+ ctx.Expr[Unit](Apply(Ident(TermName("println")), List(Literal(Constant(1)))))
+ }
+ }
+
+ implicit class A(context: StringContext) {
+ def a(args: Any*): Unit = macro AImpl.a
+ }
+} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-c/Test_2.scala b/test/files/run/t7157/Test_2.scala
index e75a8ba101..cceb5ca177 100644
--- a/test/files/neg/macro-invalidimpl-c/Test_2.scala
+++ b/test/files/run/t7157/Test_2.scala
@@ -1,3 +1,5 @@
+import Macros._
+
object Test extends App {
- new Macros().foo(42)
+ a""
} \ No newline at end of file
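The new Test_2.scala for t7157 only type-checks because the implicit class A in Macros_1.scala enriches StringContext: the interpolated call a"" is rewritten to a call of `a` on a StringContext before the macro expands. The same wiring works without macros; a plain interpolator sketch (ShoutInterpolator and shout are invented names):

    object InterpolatorDemo extends App {
      // shout"hello $name" desugars to StringContext("hello ", "").shout(name),
      // with the implicit class below supplying the shout method.
      implicit class ShoutInterpolator(sc: StringContext) {
        def shout(args: Any*): String = sc.s(args: _*).toUpperCase
      }

      val name = "partest"
      println(shout"hello $name") // HELLO PARTEST
    }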
diff --git a/test/files/run/t7171.check b/test/files/run/t7171.check
new file mode 100644
index 0000000000..d826f6cb94
--- /dev/null
+++ b/test/files/run/t7171.check
@@ -0,0 +1,3 @@
+t7171.scala:2: warning: The outer reference in this type test cannot be checked at run time.
+ final case class A()
+ ^
diff --git a/test/files/run/t7171.flags b/test/files/run/t7171.flags
new file mode 100644
index 0000000000..c02e5f2461
--- /dev/null
+++ b/test/files/run/t7171.flags
@@ -0,0 +1 @@
+-unchecked
diff --git a/test/files/run/t7171.scala b/test/files/run/t7171.scala
index 97585b9860..e0a1192228 100644
--- a/test/files/run/t7171.scala
+++ b/test/files/run/t7171.scala
@@ -16,7 +16,7 @@ object Test extends App {
val a1 = new t1.A()
val a2 = new t1.A()
assert(t1.foo(a1))
- // as noted in the unchecked warning (tested in the corresponding neg test),
+ // as noted in the unchecked warning (also tested in the corresponding neg test),
// the outer pointer isn't checked
assert(t1.foo(a2))
}
diff --git a/test/files/run/t7198.check b/test/files/run/t7198.check
new file mode 100644
index 0000000000..6dad496f49
--- /dev/null
+++ b/test/files/run/t7198.check
@@ -0,0 +1,2 @@
+The quick brown fox jumped
+And ran away with the vixen.
diff --git a/test/files/run/t7198.scala b/test/files/run/t7198.scala
new file mode 100644
index 0000000000..26e1d8805a
--- /dev/null
+++ b/test/files/run/t7198.scala
@@ -0,0 +1,9 @@
+/* spew a few lines
+ * filter: Over the moon
+ */
+object Test extends App {
+ Console println "The quick brown fox jumped"
+ Console println "Over the moon"
+ Console println "And ran away with the vixen."
+ Console println "Java HotSpot(TM) 64-Bit Server VM warning: Failed to reserve shared memory (errno = 28)."
+}
diff --git a/test/files/run/t7200.scala b/test/files/run/t7200.scala
new file mode 100644
index 0000000000..ba342df14d
--- /dev/null
+++ b/test/files/run/t7200.scala
@@ -0,0 +1,34 @@
+import language.higherKinds
+
+object Test extends App {
+
+ // Slice of comonad is where this came up
+ trait Foo[F[_]] {
+ def coflatMap[A, B](f: F[A] => B): F[A] => F[B]
+ }
+
+ // A non-empty list
+ case class Nel[A](head: A, tail: List[A])
+
+ object NelFoo extends Foo[Nel] {
+
+ // It appears that the return type for recursive calls is not inferred
+ // properly, yet no warning is issued. Providing a return type or
+ // type arguments for the recursive call fixes the problem.
+
+ def coflatMap[A, B](f: Nel[A] => B) = // ok w/ return type
+ l => Nel(f(l), l.tail match {
+ case Nil => Nil
+ case h :: t => {
+ val r = coflatMap(f)(Nel(h, t)) // ok w/ type args
+ r.head :: r.tail
+ }
+ })
+ }
+
+ // Without a recursive call all is well, but with recursion we get a
+ // ClassCastException from Integer to Nothing
+ NelFoo.coflatMap[Int, Int](_.head + 1)(Nel(1, Nil)) // Ok
+ NelFoo.coflatMap[Int, Int](_.head + 1)(Nel(1, List(2))) // CCE
+
+}
diff --git a/test/files/run/t7240/Test_2.scala b/test/files/run/t7240/Test_2.scala
index 2450bdabf9..5cc2cc7f78 100644
--- a/test/files/run/t7240/Test_2.scala
+++ b/test/files/run/t7240/Test_2.scala
@@ -1,3 +1,3 @@
object Test extends App {
- bakery.Bakery.failure
-} \ No newline at end of file
+ val v = bakery.Bakery.failure
+}
diff --git a/test/files/run/t7271.check b/test/files/run/t7271.check
new file mode 100644
index 0000000000..f7a23018ca
--- /dev/null
+++ b/test/files/run/t7271.check
@@ -0,0 +1,12 @@
+[[syntax trees at end of parser]] // newSource1.scala
+[6]package [6]<empty> {
+ [6]class C extends [8][91]scala.AnyRef {
+ [8]def <init>() = [8]{
+ [NoPosition][NoPosition][NoPosition]super.<init>();
+ [8]()
+ };
+ [20]def quote = [28][28][28][28]StringContext([30]"foo", [40]"baz").s([35]this);
+ [55]def tripleQuote = [69][69][69][69]StringContext([71]"foo", [81]"baz").s([76]this)
+ }
+}
+
diff --git a/test/files/run/t7271.scala b/test/files/run/t7271.scala
new file mode 100644
index 0000000000..cb43331a29
--- /dev/null
+++ b/test/files/run/t7271.scala
@@ -0,0 +1,35 @@
+import scala.tools.partest._
+import java.io._
+import scala.tools.nsc._
+import scala.tools.nsc.util.CommandLineParser
+import scala.tools.nsc.{Global, Settings, CompilerCommand}
+import scala.tools.nsc.reporters.ConsoleReporter
+import scala.reflect.internal.Positions
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -Xprint:parser -Ystop-after:parser -d " + testOutput.path
+
+ override def code = """
+ class C {
+ def quote = s"foo${this}baz"
+ def tripleQuote = s"foo${this}baz"
+ }
+ """.trim
+
+ override def show(): Unit = {
+ // redirect err to out, for logging
+ val prevErr = System.err
+ System.setErr(System.out)
+ compile()
+ System.setErr(prevErr)
+ }
+
+ override def newCompiler(args: String*): Global = {
+
+ val settings = new Settings()
+ settings.Xprintpos.value = true
+ val command = new CompilerCommand((CommandLineParser tokenize extraSettings) ++ args.toList, settings)
+ new Global(command.settings, new ConsoleReporter(settings)) with Positions
+ }
+}
diff --git a/test/files/run/t7290.check b/test/files/run/t7290.check
new file mode 100644
index 0000000000..aff48abd4a
--- /dev/null
+++ b/test/files/run/t7290.check
@@ -0,0 +1,6 @@
+t7290.scala:4: warning: Pattern contains duplicate alternatives: 0
+ case 0 | 0 => 0
+ ^
+t7290.scala:5: warning: Pattern contains duplicate alternatives: 2, 3
+ case 2 | 2 | 2 | 3 | 2 | 3 => 0
+ ^
diff --git a/test/files/run/t7291.check b/test/files/run/t7291.check
new file mode 100644
index 0000000000..c07ba986a3
--- /dev/null
+++ b/test/files/run/t7291.check
@@ -0,0 +1,2 @@
+conjure
+traversable
diff --git a/test/files/run/t7291.scala b/test/files/run/t7291.scala
new file mode 100644
index 0000000000..d996c615ce
--- /dev/null
+++ b/test/files/run/t7291.scala
@@ -0,0 +1,22 @@
+
+import scala.language.{ higherKinds, implicitConversions }
+
+trait Fooable[T]
+object Fooable {
+ implicit def conjure[T]: Fooable[T] = {
+ println("conjure")
+ new Fooable[T]{}
+ }
+
+}
+
+object Test {
+ implicit def traversable[T, Coll[_] <: Traversable[_]](implicit
+elem: Fooable[T]): Fooable[Coll[T]] = {
+ println("traversable")
+ new Fooable[Coll[T]]{}
+ }
+ def main(args: Array[String]) {
+ implicitly[Fooable[List[Any]]]
+ }
+}
diff --git a/test/files/run/t7300.check b/test/files/run/t7300.check
new file mode 100644
index 0000000000..51993f072d
--- /dev/null
+++ b/test/files/run/t7300.check
@@ -0,0 +1,2 @@
+2
+2
diff --git a/test/files/run/t7300.scala b/test/files/run/t7300.scala
new file mode 100644
index 0000000000..ec841690df
--- /dev/null
+++ b/test/files/run/t7300.scala
@@ -0,0 +1,11 @@
+object Test extends App {
+ // single line comment in multi line comment
+ /*//*/ val x = 1 */*/
+ val x = 2
+ println(x)
+
+ // single line comment in nested multi line comment
+ /*/*//*/ val y = 1 */*/*/
+ val y = 2
+ println(y)
+}
diff --git a/test/files/run/t7319.check b/test/files/run/t7319.check
new file mode 100644
index 0000000000..b5081f2d00
--- /dev/null
+++ b/test/files/run/t7319.check
@@ -0,0 +1,45 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> class M[A]
+defined class M
+
+scala> implicit def ma0[A](a: A): M[A] = null
+warning: there were 1 feature warning(s); re-run with -feature for details
+ma0: [A](a: A)M[A]
+
+scala> implicit def ma1[A](a: A): M[A] = null
+warning: there were 1 feature warning(s); re-run with -feature for details
+ma1: [A](a: A)M[A]
+
+scala> def convert[F[X <: F[X]]](builder: F[_ <: F[_]]) = 0
+warning: there were 1 feature warning(s); re-run with -feature for details
+convert: [F[X <: F[X]]](builder: F[_ <: F[_]])Int
+
+scala> convert(Some[Int](0))
+<console>:12: error: no type parameters for method convert: (builder: F[_ <: F[_]])Int exist so that it can be applied to arguments (Some[Int])
+ --- because ---
+argument expression's type is not compatible with formal parameter type;
+ found : Some[Int]
+ required: ?F forSome { type _$1 <: ?F forSome { type _$2 } }
+ convert(Some[Int](0))
+ ^
+<console>:12: error: type mismatch;
+ found : Some[Int]
+ required: F[_ <: F[_]]
+ convert(Some[Int](0))
+ ^
+
+scala> Range(1,2).toArray: Seq[_]
+<console>:11: error: polymorphic expression cannot be instantiated to expected type;
+ found : [B >: Int]Array[B]
+ required: Seq[_]
+ Range(1,2).toArray: Seq[_]
+ ^
+
+scala> 0
+res2: Int = 0
+
+scala>
diff --git a/test/files/run/t7319.scala b/test/files/run/t7319.scala
new file mode 100644
index 0000000000..65a3ed922d
--- /dev/null
+++ b/test/files/run/t7319.scala
@@ -0,0 +1,14 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ // so we can provide the ambiguities, rather than relying in Predef implicits
+ override def extraSettings = "-Yno-predef"
+ override def code = """
+class M[A]
+implicit def ma0[A](a: A): M[A] = null
+implicit def ma1[A](a: A): M[A] = null
+def convert[F[X <: F[X]]](builder: F[_ <: F[_]]) = 0
+convert(Some[Int](0))
+Range(1,2).toArray: Seq[_]
+0""" // before the fix, this line, and all that followed, re-issued the implicit ambiguity error.
+}
diff --git a/test/files/run/t7325.check b/test/files/run/t7325.check
new file mode 100644
index 0000000000..3c7652f42c
--- /dev/null
+++ b/test/files/run/t7325.check
@@ -0,0 +1,19 @@
+%
+%%
+%%%
+%n
+%
+
+%%n
+%%
+
+%%%n
+%%%
+
+0
+0%d
+0%%d
+0
+
+0
+
diff --git a/test/files/run/t7325.scala b/test/files/run/t7325.scala
new file mode 100644
index 0000000000..26f6bc6ef7
--- /dev/null
+++ b/test/files/run/t7325.scala
@@ -0,0 +1,25 @@
+object Test extends App {
+ // println(f"%")
+ println(f"%%")
+ // println(f"%%%")
+ println(f"%%%%")
+ // println(f"%%%%%")
+ println(f"%%%%%%")
+
+ println(f"%%n")
+ println(f"%%%n")
+ println(f"%%%%n")
+ println(f"%%%%%n")
+ println(f"%%%%%%n")
+ println(f"%%%%%%%n")
+
+ // println(f"${0}%")
+ println(f"${0}%d")
+ println(f"${0}%%d")
+ // println(f"${0}%%%d")
+ println(f"${0}%%%%d")
+ // println(f"${0}%%%%%d")
+
+ println(f"${0}%n")
+ println(f"${0}%d%n")
+} \ No newline at end of file
diff --git a/test/files/run/t7337.check b/test/files/run/t7337.check
new file mode 100644
index 0000000000..dd2b31f23c
--- /dev/null
+++ b/test/files/run/t7337.check
@@ -0,0 +1 @@
+doesnotexist does not exist or is not a directory
diff --git a/test/files/run/t7337.scala b/test/files/run/t7337.scala
new file mode 100644
index 0000000000..d878182ed0
--- /dev/null
+++ b/test/files/run/t7337.scala
@@ -0,0 +1,19 @@
+import scala.tools.partest._
+import scala.tools.nsc._
+import util.{CommandLineParser}
+
+object Test extends DirectTest {
+ override def code = "class C"
+ override def newCompiler(args: String*): Global = {
+ val settings = newSettings((CommandLineParser tokenize ("-d doesnotexist " + extraSettings)) ++ args.toList)
+ newCompiler(settings)
+ }
+
+ override def show() {
+ try {
+ newCompiler()
+ } catch {
+ case fe: FatalError => println(fe.getMessage)
+ }
+ }
+}
diff --git a/test/files/run/t7341.check b/test/files/run/t7341.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/run/t7341.check
diff --git a/test/files/run/t7341.flags b/test/files/run/t7341.flags
new file mode 100755
index 0000000000..ae08446055
--- /dev/null
+++ b/test/files/run/t7341.flags
@@ -0,0 +1 @@
+-Xcheckinit \ No newline at end of file
diff --git a/test/files/run/t7341.scala b/test/files/run/t7341.scala
new file mode 100755
index 0000000000..dc526c6c19
--- /dev/null
+++ b/test/files/run/t7341.scala
@@ -0,0 +1,15 @@
+object Obj {
+ private var cache: Any = ()
+ def returning(f: () => Unit) = ()
+ def foo {
+ returning(() => cache = ())
+ }
+
+ def apply(): Any = {
+ cache
+ }
+}
+
+object Test extends App {
+ Obj()
+}
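t7341 runs under -Xcheckinit (the new .flags file), the option that makes the compiler wrap field reads in initialization checks. A hedged sketch of what the flag catches at runtime, assuming the snippet is compiled with -Xcheckinit (Early, lookahead and report are invented names):

    class Early {
      // report() reads `lookahead` before its initializer has run. Compiled with
      // -Xcheckinit the read throws scala.UninitializedFieldError; without the
      // flag it silently observes the default value 0.
      report()
      val lookahead: Int = 42
      def report(): Unit = println(s"lookahead = $lookahead")
    }

    object CheckInitDemo extends App {
      try new Early()
      catch { case e: UninitializedFieldError => println("caught: " + e.getMessage) }
    }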
diff --git a/test/files/run/t7359.check b/test/files/run/t7359.check
new file mode 100644
index 0000000000..9766475a41
--- /dev/null
+++ b/test/files/run/t7359.check
@@ -0,0 +1 @@
+ok
diff --git a/test/files/run/t7359/Cyclic_1.java b/test/files/run/t7359/Cyclic_1.java
new file mode 100644
index 0000000000..42b46c1aed
--- /dev/null
+++ b/test/files/run/t7359/Cyclic_1.java
@@ -0,0 +1,3 @@
+abstract class Cyclic {
+ static interface Inner<T extends Inner> { }
+} \ No newline at end of file
diff --git a/test/files/run/t7359/Test_2.scala b/test/files/run/t7359/Test_2.scala
new file mode 100644
index 0000000000..bb6f4cb2d9
--- /dev/null
+++ b/test/files/run/t7359/Test_2.scala
@@ -0,0 +1,6 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ typeOf[Cyclic].members
+ println("ok")
+} \ No newline at end of file
diff --git a/test/files/run/t7375a.check b/test/files/run/t7375a.check
new file mode 100644
index 0000000000..a0a15dfb2f
--- /dev/null
+++ b/test/files/run/t7375a.check
@@ -0,0 +1,4 @@
+C1
+C2
+C1
+C2
diff --git a/test/files/run/t7375a.scala b/test/files/run/t7375a.scala
new file mode 100644
index 0000000000..e46ad08f63
--- /dev/null
+++ b/test/files/run/t7375a.scala
@@ -0,0 +1,16 @@
+import scala.reflect.ClassTag
+
+class C1(val n: Int) extends AnyVal
+class C2(val n: Int) extends AnyRef
+
+object Test {
+ type F1 = C1
+ type F2 = C2
+
+ def main(args: Array[String]): Unit = {
+ println(implicitly[ClassTag[C1]])
+ println(implicitly[ClassTag[C2]])
+ println(implicitly[ClassTag[F1]])
+ println(implicitly[ClassTag[F2]])
+ }
+}
diff --git a/test/files/run/t7375b.check b/test/files/run/t7375b.check
new file mode 100644
index 0000000000..d7578e28ba
--- /dev/null
+++ b/test/files/run/t7375b.check
@@ -0,0 +1,4 @@
+Predef.this.classOf[C1]
+Predef.this.classOf[C2]
+Predef.this.classOf[C1]
+Predef.this.classOf[C2]
diff --git a/test/files/run/t7375b/Macros_1.scala b/test/files/run/t7375b/Macros_1.scala
new file mode 100644
index 0000000000..70e79cc2b4
--- /dev/null
+++ b/test/files/run/t7375b/Macros_1.scala
@@ -0,0 +1,18 @@
+import language.experimental.macros
+import scala.reflect.macros.Context
+
+class C1(val n: Int) extends AnyVal
+class C2(val n: Int) extends AnyRef
+
+object Macros {
+ type F1 = C1
+ type F2 = C2
+
+ def foo = macro impl
+ def impl(c: Context) = {
+ import c.universe._
+ def test[T: c.TypeTag] = reify(println(c.literal(c.reifyRuntimeClass(c.typeOf[T]).toString).splice)).tree
+ def tests = Block(List(test[C1], test[C2], test[F1], test[F2]), Literal(Constant(())))
+ c.Expr[Unit](tests)
+ }
+} \ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d1/Test_2.scala b/test/files/run/t7375b/Test_2.scala
index 7dffc5107d..acfddae942 100644
--- a/test/files/run/macro-def-path-dependent-d1/Test_2.scala
+++ b/test/files/run/t7375b/Test_2.scala
@@ -1,3 +1,3 @@
object Test extends App {
- println("it works")
+ Macros.foo
} \ No newline at end of file
diff --git a/test/files/run/t7398.scala b/test/files/run/t7398.scala
new file mode 100644
index 0000000000..dd59697b71
--- /dev/null
+++ b/test/files/run/t7398.scala
@@ -0,0 +1,28 @@
+import scala.tools.partest._
+
+object Test extends CompilerTest {
+ import global._
+
+ // This way we auto-pass on non-java8 since there's nothing to check
+ override lazy val units: List[CompilationUnit] = testUnderJavaAtLeast("1.8") {
+ javaCompilationUnits(global)(defaultMethodSource)
+ } otherwise {
+ Nil
+ }
+
+ private def defaultMethodSource = """
+public interface Iterator<E> {
+ boolean hasNext();
+ E next();
+ default void remove() {
+ throw new UnsupportedOperationException("remove");
+ }
+ default void forEachRemaining(Consumer<? super E> action) {
+ throw new UnsupportedOperationException("forEachRemaining");
+ }
+}
+ """
+
+ // We're only checking we can parse it.
+ def check(source: String, unit: global.CompilationUnit): Unit = ()
+}
diff --git a/test/files/run/t7436.scala b/test/files/run/t7436.scala
new file mode 100644
index 0000000000..867a931e05
--- /dev/null
+++ b/test/files/run/t7436.scala
@@ -0,0 +1,9 @@
+class A(val p: Int*)
+
+class B(val p1: Int) extends A(p1)
+
+object Test {
+ def main(args: Array[String]) {
+ new B(1).p1 // threw java.lang.ClassCastException: scala.collection.mutable.WrappedArray$ofInt cannot be cast to java.lang.Integer
+ }
+}
diff --git a/test/files/run/t7482a.check b/test/files/run/t7482a.check
new file mode 100644
index 0000000000..b26e685784
--- /dev/null
+++ b/test/files/run/t7482a.check
@@ -0,0 +1,14 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> val v: java.util.ArrayList[String] = new java.util.ArrayList[String](5)
+v: java.util.ArrayList[String] = []
+
+scala> val v: java.util.ArrayList[String] = new java.util.ArrayList[String](5)
+v: java.util.ArrayList[String] = []
+
+scala>
+
+scala>
diff --git a/test/files/run/t7482a.scala b/test/files/run/t7482a.scala
new file mode 100644
index 0000000000..d674558b98
--- /dev/null
+++ b/test/files/run/t7482a.scala
@@ -0,0 +1,8 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ override def code = """
+ val v: java.util.ArrayList[String] = new java.util.ArrayList[String](5)
+ val v: java.util.ArrayList[String] = new java.util.ArrayList[String](5)
+ """
+} \ No newline at end of file
diff --git a/test/files/run/t7498.scala b/test/files/run/t7498.scala
new file mode 100644
index 0000000000..1dbf0597e0
--- /dev/null
+++ b/test/files/run/t7498.scala
@@ -0,0 +1,20 @@
+
+
+
+
+
+
+
+object Test extends App {
+ import scala.collection.concurrent.TrieMap
+
+ class Collision(val idx: Int) {
+ override def hashCode = idx % 10
+ }
+
+ val tm = TrieMap[Collision, Unit]()
+ for (i <- 0 until 1000) tm(new Collision(i)) = ()
+
+ tm.par.foreach(kv => ())
+}
+
diff --git a/test/files/run/t7507.scala b/test/files/run/t7507.scala
new file mode 100644
index 0000000000..6c1959ddac
--- /dev/null
+++ b/test/files/run/t7507.scala
@@ -0,0 +1,31 @@
+trait Cake extends Slice
+
+// Minimization
+trait Slice { self: Cake => // must have self type that extends `Slice`
+ private[this] val bippy = () // must be private[this]
+ locally(bippy)
+}
+
+// Originally reported bug:
+trait Cake1 extends Slice1
+trait Slice1 { self: Cake1 =>
+ import java.lang.String // any import will do!
+ val Tuple2(x, y) = ((1, 2))
+}
+
+
+// Nesting
+trait Cake3 extends Outer.Slice3
+
+// Minimization
+object Outer {
+ private[this] val bippy = ()
+ trait Slice3 { self: Cake3 =>
+ locally(bippy)
+ }
+}
+
+object Test extends App {
+ val s1 = new Cake1 {}
+ assert((s1.x, s1.y) == (1, 2), (s1.x, s1.y))
+}
diff --git a/test/files/run/t7556.check b/test/files/run/t7556.check
new file mode 100644
index 0000000000..3328708a6d
--- /dev/null
+++ b/test/files/run/t7556.check
@@ -0,0 +1,2 @@
+class annotations: List(scala.reflect.ScalaLongSignature)
+3001 decls via runtime reflection
diff --git a/test/files/run/t7556/Test_2.scala b/test/files/run/t7556/Test_2.scala
new file mode 100644
index 0000000000..31848738ef
--- /dev/null
+++ b/test/files/run/t7556/Test_2.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+
+object Test {
+ def main(args: Array[String]) {
+ val mc = new MegaClass
+ val anns = mc.getClass.getAnnotations.map(_.annotationType.getName).toList.sorted
+ println(s"class annotations: $anns")
+ val N = typeTag[MegaClass].tpe.declarations.size // was: error reading Scala signature of MegaClass: 65935
+ println(s"$N decls via runtime reflection")
+ }
+}
diff --git a/test/files/run/t7556/mega-class_1.scala b/test/files/run/t7556/mega-class_1.scala
new file mode 100644
index 0000000000..dcc9ba8bcd
--- /dev/null
+++ b/test/files/run/t7556/mega-class_1.scala
@@ -0,0 +1,3002 @@
+class MegaClass {
+ def method0: Int = 0
+ def method1: Int = 0
+ def method2: Int = 0
+ def method3: Int = 0
+ def method4: Int = 0
+ def method5: Int = 0
+ def method6: Int = 0
+ def method7: Int = 0
+ def method8: Int = 0
+ def method9: Int = 0
+ def method10: Int = 0
+ def method11: Int = 0
+ def method12: Int = 0
+ def method13: Int = 0
+ def method14: Int = 0
+ def method15: Int = 0
+ def method16: Int = 0
+ def method17: Int = 0
+ def method18: Int = 0
+ def method19: Int = 0
+ def method20: Int = 0
+ def method21: Int = 0
+ def method22: Int = 0
+ def method23: Int = 0
+ def method24: Int = 0
+ def method25: Int = 0
+ def method26: Int = 0
+ def method27: Int = 0
+ def method28: Int = 0
+ def method29: Int = 0
+ def method30: Int = 0
+ def method31: Int = 0
+ def method32: Int = 0
+ def method33: Int = 0
+ def method34: Int = 0
+ def method35: Int = 0
+ def method36: Int = 0
+ def method37: Int = 0
+ def method38: Int = 0
+ def method39: Int = 0
+ def method40: Int = 0
+ def method41: Int = 0
+ def method42: Int = 0
+ def method43: Int = 0
+ def method44: Int = 0
+ def method45: Int = 0
+ def method46: Int = 0
+ def method47: Int = 0
+ def method48: Int = 0
+ def method49: Int = 0
+ def method50: Int = 0
+ def method51: Int = 0
+ def method52: Int = 0
+ def method53: Int = 0
+ def method54: Int = 0
+ def method55: Int = 0
+ def method56: Int = 0
+ def method57: Int = 0
+ def method58: Int = 0
+ def method59: Int = 0
+ def method60: Int = 0
+ def method61: Int = 0
+ def method62: Int = 0
+ def method63: Int = 0
+ def method64: Int = 0
+ def method65: Int = 0
+ def method66: Int = 0
+ def method67: Int = 0
+ def method68: Int = 0
+ def method69: Int = 0
+ def method70: Int = 0
+ def method71: Int = 0
+ def method72: Int = 0
+ def method73: Int = 0
+ def method74: Int = 0
+ def method75: Int = 0
+ def method76: Int = 0
+ def method77: Int = 0
+ def method78: Int = 0
+ def method79: Int = 0
+ def method80: Int = 0
+ def method81: Int = 0
+ def method82: Int = 0
+ def method83: Int = 0
+ def method84: Int = 0
+ def method85: Int = 0
+ def method86: Int = 0
+ def method87: Int = 0
+ def method88: Int = 0
+ def method89: Int = 0
+ def method90: Int = 0
+ def method91: Int = 0
+ def method92: Int = 0
+ def method93: Int = 0
+ def method94: Int = 0
+ def method95: Int = 0
+ def method96: Int = 0
+ def method97: Int = 0
+ def method98: Int = 0
+ def method99: Int = 0
+ def method100: Int = 0
+ def method101: Int = 0
+ def method102: Int = 0
+ def method103: Int = 0
+ def method104: Int = 0
+ def method105: Int = 0
+ def method106: Int = 0
+ def method107: Int = 0
+ def method108: Int = 0
+ def method109: Int = 0
+ def method110: Int = 0
+ def method111: Int = 0
+ def method112: Int = 0
+ def method113: Int = 0
+ def method114: Int = 0
+ def method115: Int = 0
+ def method116: Int = 0
+ def method117: Int = 0
+ def method118: Int = 0
+ def method119: Int = 0
+ def method120: Int = 0
+ def method121: Int = 0
+ def method122: Int = 0
+ def method123: Int = 0
+ def method124: Int = 0
+ def method125: Int = 0
+ def method126: Int = 0
+ def method127: Int = 0
+ def method128: Int = 0
+ def method129: Int = 0
+ def method130: Int = 0
+ def method131: Int = 0
+ def method132: Int = 0
+ def method133: Int = 0
+ def method134: Int = 0
+ def method135: Int = 0
+ def method136: Int = 0
+ def method137: Int = 0
+ def method138: Int = 0
+ def method139: Int = 0
+ def method140: Int = 0
+ def method141: Int = 0
+ def method142: Int = 0
+ def method143: Int = 0
+ def method144: Int = 0
+ def method145: Int = 0
+ def method146: Int = 0
+ def method147: Int = 0
+ def method148: Int = 0
+ def method149: Int = 0
+ def method150: Int = 0
+ def method151: Int = 0
+ def method152: Int = 0
+ def method153: Int = 0
+ def method154: Int = 0
+ def method155: Int = 0
+ def method156: Int = 0
+ def method157: Int = 0
+ def method158: Int = 0
+ def method159: Int = 0
+ def method160: Int = 0
+ def method161: Int = 0
+ def method162: Int = 0
+ def method163: Int = 0
+ def method164: Int = 0
+ def method165: Int = 0
+ def method166: Int = 0
+ def method167: Int = 0
+ def method168: Int = 0
+ def method169: Int = 0
+ def method170: Int = 0
+ def method171: Int = 0
+ def method172: Int = 0
+ def method173: Int = 0
+ def method174: Int = 0
+ def method175: Int = 0
+ def method176: Int = 0
+ def method177: Int = 0
+ def method178: Int = 0
+ def method179: Int = 0
+ def method180: Int = 0
+ def method181: Int = 0
+ def method182: Int = 0
+ def method183: Int = 0
+ def method184: Int = 0
+ def method185: Int = 0
+ def method186: Int = 0
+ def method187: Int = 0
+ def method188: Int = 0
+ def method189: Int = 0
+ def method190: Int = 0
+ def method191: Int = 0
+ def method192: Int = 0
+ def method193: Int = 0
+ def method194: Int = 0
+ def method195: Int = 0
+ def method196: Int = 0
+ def method197: Int = 0
+ def method198: Int = 0
+ def method199: Int = 0
+ def method200: Int = 0
+ def method201: Int = 0
+ def method202: Int = 0
+ def method203: Int = 0
+ def method204: Int = 0
+ def method205: Int = 0
+ def method206: Int = 0
+ def method207: Int = 0
+ def method208: Int = 0
+ def method209: Int = 0
+ def method210: Int = 0
+ def method211: Int = 0
+ def method212: Int = 0
+ def method213: Int = 0
+ def method214: Int = 0
+ def method215: Int = 0
+ def method216: Int = 0
+ def method217: Int = 0
+ def method218: Int = 0
+ def method219: Int = 0
+ def method220: Int = 0
+ def method221: Int = 0
+ def method222: Int = 0
+ def method223: Int = 0
+ def method224: Int = 0
+ def method225: Int = 0
+ def method226: Int = 0
+ def method227: Int = 0
+ def method228: Int = 0
+ def method229: Int = 0
+ def method230: Int = 0
+ def method231: Int = 0
+ def method232: Int = 0
+ def method233: Int = 0
+ def method234: Int = 0
+ def method235: Int = 0
+ def method236: Int = 0
+ def method237: Int = 0
+ def method238: Int = 0
+ def method239: Int = 0
+ def method240: Int = 0
+ def method241: Int = 0
+ def method242: Int = 0
+ def method243: Int = 0
+ def method244: Int = 0
+ def method245: Int = 0
+ def method246: Int = 0
+ def method247: Int = 0
+ def method248: Int = 0
+ def method249: Int = 0
+ def method250: Int = 0
+ def method251: Int = 0
+ def method252: Int = 0
+ def method253: Int = 0
+ def method254: Int = 0
+ def method255: Int = 0
+ def method256: Int = 0
+ def method257: Int = 0
+ def method258: Int = 0
+ def method259: Int = 0
+ def method260: Int = 0
+ def method261: Int = 0
+ def method262: Int = 0
+ def method263: Int = 0
+ def method264: Int = 0
+ def method265: Int = 0
+ def method266: Int = 0
+ def method267: Int = 0
+ def method268: Int = 0
+ def method269: Int = 0
+ def method270: Int = 0
+ def method271: Int = 0
+ def method272: Int = 0
+ def method273: Int = 0
+ def method274: Int = 0
+ def method275: Int = 0
+ def method276: Int = 0
+ def method277: Int = 0
+ def method278: Int = 0
+ def method279: Int = 0
+ def method280: Int = 0
+ def method281: Int = 0
+ def method282: Int = 0
+ def method283: Int = 0
+ def method284: Int = 0
+ def method285: Int = 0
+ def method286: Int = 0
+ def method287: Int = 0
+ def method288: Int = 0
+ def method289: Int = 0
+ def method290: Int = 0
+ def method291: Int = 0
+ def method292: Int = 0
+ def method293: Int = 0
+ def method294: Int = 0
+ def method295: Int = 0
+ def method296: Int = 0
+ def method297: Int = 0
+ def method298: Int = 0
+ def method299: Int = 0
+ def method300: Int = 0
+ def method301: Int = 0
+ def method302: Int = 0
+ def method303: Int = 0
+ def method304: Int = 0
+ def method305: Int = 0
+ def method306: Int = 0
+ def method307: Int = 0
+ def method308: Int = 0
+ def method309: Int = 0
+ def method310: Int = 0
+ def method311: Int = 0
+ def method312: Int = 0
+ def method313: Int = 0
+ def method314: Int = 0
+ def method315: Int = 0
+ def method316: Int = 0
+ def method317: Int = 0
+ def method318: Int = 0
+ def method319: Int = 0
+ def method320: Int = 0
+ def method321: Int = 0
+ def method322: Int = 0
+ def method323: Int = 0
+ def method324: Int = 0
+ def method325: Int = 0
+ def method326: Int = 0
+ def method327: Int = 0
+ def method328: Int = 0
+ def method329: Int = 0
+ def method330: Int = 0
+ def method331: Int = 0
+ def method332: Int = 0
+ def method333: Int = 0
+ def method334: Int = 0
+ def method335: Int = 0
+ def method336: Int = 0
+ def method337: Int = 0
+ def method338: Int = 0
+ def method339: Int = 0
+ def method340: Int = 0
+ def method341: Int = 0
+ def method342: Int = 0
+ def method343: Int = 0
+ def method344: Int = 0
+ def method345: Int = 0
+ def method346: Int = 0
+ def method347: Int = 0
+ def method348: Int = 0
+ def method349: Int = 0
+ def method350: Int = 0
+ def method351: Int = 0
+ def method352: Int = 0
+ def method353: Int = 0
+ def method354: Int = 0
+ def method355: Int = 0
+ def method356: Int = 0
+ def method357: Int = 0
+ def method358: Int = 0
+ def method359: Int = 0
+ def method360: Int = 0
+ def method361: Int = 0
+ def method362: Int = 0
+ def method363: Int = 0
+ def method364: Int = 0
+ def method365: Int = 0
+ def method366: Int = 0
+ def method367: Int = 0
+ def method368: Int = 0
+ def method369: Int = 0
+ def method370: Int = 0
+ def method371: Int = 0
+ def method372: Int = 0
+ def method373: Int = 0
+ def method374: Int = 0
+ def method375: Int = 0
+ def method376: Int = 0
+ def method377: Int = 0
+ def method378: Int = 0
+ def method379: Int = 0
+ def method380: Int = 0
+ def method381: Int = 0
+ def method382: Int = 0
+ def method383: Int = 0
+ def method384: Int = 0
+ def method385: Int = 0
+ def method386: Int = 0
+ def method387: Int = 0
+ def method388: Int = 0
+ def method389: Int = 0
+ def method390: Int = 0
+ def method391: Int = 0
+ def method392: Int = 0
+ def method393: Int = 0
+ def method394: Int = 0
+ def method395: Int = 0
+ def method396: Int = 0
+ def method397: Int = 0
+ def method398: Int = 0
+ def method399: Int = 0
+ def method400: Int = 0
+ def method401: Int = 0
+ def method402: Int = 0
+ def method403: Int = 0
+ def method404: Int = 0
+ def method405: Int = 0
+ def method406: Int = 0
+ def method407: Int = 0
+ def method408: Int = 0
+ def method409: Int = 0
+ def method410: Int = 0
+ def method411: Int = 0
+ def method412: Int = 0
+ def method413: Int = 0
+ def method414: Int = 0
+ def method415: Int = 0
+ def method416: Int = 0
+ def method417: Int = 0
+ def method418: Int = 0
+ def method419: Int = 0
+ def method420: Int = 0
+ def method421: Int = 0
+ def method422: Int = 0
+ def method423: Int = 0
+ def method424: Int = 0
+ def method425: Int = 0
+ def method426: Int = 0
+ def method427: Int = 0
+ def method428: Int = 0
+ def method429: Int = 0
+ def method430: Int = 0
+ def method431: Int = 0
+ def method432: Int = 0
+ def method433: Int = 0
+ def method434: Int = 0
+ def method435: Int = 0
+ def method436: Int = 0
+ def method437: Int = 0
+ def method438: Int = 0
+ def method439: Int = 0
+ def method440: Int = 0
+ def method441: Int = 0
+ def method442: Int = 0
+ def method443: Int = 0
+ def method444: Int = 0
+ def method445: Int = 0
+ def method446: Int = 0
+ def method447: Int = 0
+ def method448: Int = 0
+ def method449: Int = 0
+ def method450: Int = 0
+ def method451: Int = 0
+ def method452: Int = 0
+ def method453: Int = 0
+ def method454: Int = 0
+ def method455: Int = 0
+ def method456: Int = 0
+ def method457: Int = 0
+ def method458: Int = 0
+ def method459: Int = 0
+ def method460: Int = 0
+ def method461: Int = 0
+ def method462: Int = 0
+ def method463: Int = 0
+ def method464: Int = 0
+ def method465: Int = 0
+ def method466: Int = 0
+ def method467: Int = 0
+ def method468: Int = 0
+ def method469: Int = 0
+ def method470: Int = 0
+ def method471: Int = 0
+ def method472: Int = 0
+ def method473: Int = 0
+ def method474: Int = 0
+ def method475: Int = 0
+ def method476: Int = 0
+ def method477: Int = 0
+ def method478: Int = 0
+ def method479: Int = 0
+ def method480: Int = 0
+ def method481: Int = 0
+ def method482: Int = 0
+ def method483: Int = 0
+ def method484: Int = 0
+ def method485: Int = 0
+ def method486: Int = 0
+ def method487: Int = 0
+ def method488: Int = 0
+ def method489: Int = 0
+ def method490: Int = 0
+ def method491: Int = 0
+ def method492: Int = 0
+ def method493: Int = 0
+ def method494: Int = 0
+ def method495: Int = 0
+ def method496: Int = 0
+ def method497: Int = 0
+ def method498: Int = 0
+ def method499: Int = 0
+ def method500: Int = 0
+ def method501: Int = 0
+ def method502: Int = 0
+ def method503: Int = 0
+ def method504: Int = 0
+ def method505: Int = 0
+ def method506: Int = 0
+ def method507: Int = 0
+ def method508: Int = 0
+ def method509: Int = 0
+ def method510: Int = 0
+ def method511: Int = 0
+ def method512: Int = 0
+ def method513: Int = 0
+ def method514: Int = 0
+ def method515: Int = 0
+ def method516: Int = 0
+ def method517: Int = 0
+ def method518: Int = 0
+ def method519: Int = 0
+ def method520: Int = 0
+ def method521: Int = 0
+ def method522: Int = 0
+ def method523: Int = 0
+ def method524: Int = 0
+ def method525: Int = 0
+ def method526: Int = 0
+ def method527: Int = 0
+ def method528: Int = 0
+ def method529: Int = 0
+ def method530: Int = 0
+ def method531: Int = 0
+ def method532: Int = 0
+ def method533: Int = 0
+ def method534: Int = 0
+ def method535: Int = 0
+ def method536: Int = 0
+ def method537: Int = 0
+ def method538: Int = 0
+ def method539: Int = 0
+ def method540: Int = 0
+ def method541: Int = 0
+ def method542: Int = 0
+ def method543: Int = 0
+ def method544: Int = 0
+ def method545: Int = 0
+ def method546: Int = 0
+ def method547: Int = 0
+ def method548: Int = 0
+ def method549: Int = 0
+ def method550: Int = 0
+ def method551: Int = 0
+ def method552: Int = 0
+ def method553: Int = 0
+ def method554: Int = 0
+ def method555: Int = 0
+ def method556: Int = 0
+ def method557: Int = 0
+ def method558: Int = 0
+ def method559: Int = 0
+ def method560: Int = 0
+ def method561: Int = 0
+ def method562: Int = 0
+ def method563: Int = 0
+ def method564: Int = 0
+ def method565: Int = 0
+ def method566: Int = 0
+ def method567: Int = 0
+ def method568: Int = 0
+ def method569: Int = 0
+ def method570: Int = 0
+ def method571: Int = 0
+ def method572: Int = 0
+ def method573: Int = 0
+ def method574: Int = 0
+ def method575: Int = 0
+ def method576: Int = 0
+ def method577: Int = 0
+ def method578: Int = 0
+ def method579: Int = 0
+ def method580: Int = 0
+ def method581: Int = 0
+ def method582: Int = 0
+ def method583: Int = 0
+ def method584: Int = 0
+ def method585: Int = 0
+ def method586: Int = 0
+ def method587: Int = 0
+ def method588: Int = 0
+ def method589: Int = 0
+ def method590: Int = 0
+ def method591: Int = 0
+ def method592: Int = 0
+ def method593: Int = 0
+ def method594: Int = 0
+ def method595: Int = 0
+ def method596: Int = 0
+ def method597: Int = 0
+ def method598: Int = 0
+ def method599: Int = 0
+ def method600: Int = 0
+ def method601: Int = 0
+ def method602: Int = 0
+ def method603: Int = 0
+ def method604: Int = 0
+ def method605: Int = 0
+ def method606: Int = 0
+ def method607: Int = 0
+ def method608: Int = 0
+ def method609: Int = 0
+ def method610: Int = 0
+ def method611: Int = 0
+ def method612: Int = 0
+ def method613: Int = 0
+ def method614: Int = 0
+ def method615: Int = 0
+ def method616: Int = 0
+ def method617: Int = 0
+ def method618: Int = 0
+ def method619: Int = 0
+ def method620: Int = 0
+ def method621: Int = 0
+ def method622: Int = 0
+ def method623: Int = 0
+ def method624: Int = 0
+ def method625: Int = 0
+ def method626: Int = 0
+ def method627: Int = 0
+ def method628: Int = 0
+ def method629: Int = 0
+ def method630: Int = 0
+ def method631: Int = 0
+ def method632: Int = 0
+ def method633: Int = 0
+ def method634: Int = 0
+ def method635: Int = 0
+ def method636: Int = 0
+ def method637: Int = 0
+ def method638: Int = 0
+ def method639: Int = 0
+ def method640: Int = 0
+ def method641: Int = 0
+ def method642: Int = 0
+ def method643: Int = 0
+ def method644: Int = 0
+ def method645: Int = 0
+ def method646: Int = 0
+ def method647: Int = 0
+ def method648: Int = 0
+ def method649: Int = 0
+ def method650: Int = 0
+ def method651: Int = 0
+ def method652: Int = 0
+ def method653: Int = 0
+ def method654: Int = 0
+ def method655: Int = 0
+ def method656: Int = 0
+ def method657: Int = 0
+ def method658: Int = 0
+ def method659: Int = 0
+ def method660: Int = 0
+ def method661: Int = 0
+ def method662: Int = 0
+ def method663: Int = 0
+ def method664: Int = 0
+ def method665: Int = 0
+ def method666: Int = 0
+ def method667: Int = 0
+ def method668: Int = 0
+ def method669: Int = 0
+ def method670: Int = 0
+ def method671: Int = 0
+ def method672: Int = 0
+ def method673: Int = 0
+ def method674: Int = 0
+ def method675: Int = 0
+ def method676: Int = 0
+ def method677: Int = 0
+ def method678: Int = 0
+ def method679: Int = 0
+ def method680: Int = 0
+ def method681: Int = 0
+ def method682: Int = 0
+ def method683: Int = 0
+ def method684: Int = 0
+ def method685: Int = 0
+ def method686: Int = 0
+ def method687: Int = 0
+ def method688: Int = 0
+ def method689: Int = 0
+ def method690: Int = 0
+ def method691: Int = 0
+ def method692: Int = 0
+ def method693: Int = 0
+ def method694: Int = 0
+ def method695: Int = 0
+ def method696: Int = 0
+ def method697: Int = 0
+ def method698: Int = 0
+ def method699: Int = 0
+ def method700: Int = 0
+ def method701: Int = 0
+ def method702: Int = 0
+ def method703: Int = 0
+ def method704: Int = 0
+ def method705: Int = 0
+ def method706: Int = 0
+ def method707: Int = 0
+ def method708: Int = 0
+ def method709: Int = 0
+ def method710: Int = 0
+ def method711: Int = 0
+ def method712: Int = 0
+ def method713: Int = 0
+ def method714: Int = 0
+ def method715: Int = 0
+ def method716: Int = 0
+ def method717: Int = 0
+ def method718: Int = 0
+ def method719: Int = 0
+ def method720: Int = 0
+ def method721: Int = 0
+ def method722: Int = 0
+ def method723: Int = 0
+ def method724: Int = 0
+ def method725: Int = 0
+ def method726: Int = 0
+ def method727: Int = 0
+ def method728: Int = 0
+ def method729: Int = 0
+ def method730: Int = 0
+ def method731: Int = 0
+ def method732: Int = 0
+ def method733: Int = 0
+ def method734: Int = 0
+ def method735: Int = 0
+ def method736: Int = 0
+ def method737: Int = 0
+ def method738: Int = 0
+ def method739: Int = 0
+ def method740: Int = 0
+ def method741: Int = 0
+ def method742: Int = 0
+ def method743: Int = 0
+ def method744: Int = 0
+ def method745: Int = 0
+ def method746: Int = 0
+ def method747: Int = 0
+ def method748: Int = 0
+ def method749: Int = 0
+ def method750: Int = 0
+ def method751: Int = 0
+ def method752: Int = 0
+ def method753: Int = 0
+ def method754: Int = 0
+ def method755: Int = 0
+ def method756: Int = 0
+ def method757: Int = 0
+ def method758: Int = 0
+ def method759: Int = 0
+ def method760: Int = 0
+ def method761: Int = 0
+ def method762: Int = 0
+ def method763: Int = 0
+ def method764: Int = 0
+ def method765: Int = 0
+ def method766: Int = 0
+ def method767: Int = 0
+ def method768: Int = 0
+ def method769: Int = 0
+ def method770: Int = 0
+ def method771: Int = 0
+ def method772: Int = 0
+ def method773: Int = 0
+ def method774: Int = 0
+ def method775: Int = 0
+ def method776: Int = 0
+ def method777: Int = 0
+ def method778: Int = 0
+ def method779: Int = 0
+ def method780: Int = 0
+ def method781: Int = 0
+ def method782: Int = 0
+ def method783: Int = 0
+ def method784: Int = 0
+ def method785: Int = 0
+ def method786: Int = 0
+ def method787: Int = 0
+ def method788: Int = 0
+ def method789: Int = 0
+ def method790: Int = 0
+ def method791: Int = 0
+ def method792: Int = 0
+ def method793: Int = 0
+ def method794: Int = 0
+ def method795: Int = 0
+ def method796: Int = 0
+ def method797: Int = 0
+ def method798: Int = 0
+ def method799: Int = 0
+ def method800: Int = 0
+ def method801: Int = 0
+ def method802: Int = 0
+ def method803: Int = 0
+ def method804: Int = 0
+ def method805: Int = 0
+ def method806: Int = 0
+ def method807: Int = 0
+ def method808: Int = 0
+ def method809: Int = 0
+ def method810: Int = 0
+ def method811: Int = 0
+ def method812: Int = 0
+ def method813: Int = 0
+ def method814: Int = 0
+ def method815: Int = 0
+ def method816: Int = 0
+ def method817: Int = 0
+ def method818: Int = 0
+ def method819: Int = 0
+ def method820: Int = 0
+ def method821: Int = 0
+ def method822: Int = 0
+ def method823: Int = 0
+ def method824: Int = 0
+ def method825: Int = 0
+ def method826: Int = 0
+ def method827: Int = 0
+ def method828: Int = 0
+ def method829: Int = 0
+ def method830: Int = 0
+ def method831: Int = 0
+ def method832: Int = 0
+ def method833: Int = 0
+ def method834: Int = 0
+ def method835: Int = 0
+ def method836: Int = 0
+ def method837: Int = 0
+ def method838: Int = 0
+ def method839: Int = 0
+ def method840: Int = 0
+ def method841: Int = 0
+ def method842: Int = 0
+ def method843: Int = 0
+ def method844: Int = 0
+ def method845: Int = 0
+ def method846: Int = 0
+ def method847: Int = 0
+ def method848: Int = 0
+ def method849: Int = 0
+ def method850: Int = 0
+ def method851: Int = 0
+ def method852: Int = 0
+ def method853: Int = 0
+ def method854: Int = 0
+ def method855: Int = 0
+ def method856: Int = 0
+ def method857: Int = 0
+ def method858: Int = 0
+ def method859: Int = 0
+ def method860: Int = 0
+ def method861: Int = 0
+ def method862: Int = 0
+ def method863: Int = 0
+ def method864: Int = 0
+ def method865: Int = 0
+ def method866: Int = 0
+ def method867: Int = 0
+ def method868: Int = 0
+ def method869: Int = 0
+ def method870: Int = 0
+ def method871: Int = 0
+ def method872: Int = 0
+ def method873: Int = 0
+ def method874: Int = 0
+ def method875: Int = 0
+ def method876: Int = 0
+ def method877: Int = 0
+ def method878: Int = 0
+ def method879: Int = 0
+ def method880: Int = 0
+ def method881: Int = 0
+ def method882: Int = 0
+ def method883: Int = 0
+ def method884: Int = 0
+ def method885: Int = 0
+ def method886: Int = 0
+ def method887: Int = 0
+ def method888: Int = 0
+ def method889: Int = 0
+ def method890: Int = 0
+ def method891: Int = 0
+ def method892: Int = 0
+ def method893: Int = 0
+ def method894: Int = 0
+ def method895: Int = 0
+ def method896: Int = 0
+ def method897: Int = 0
+ def method898: Int = 0
+ def method899: Int = 0
+ def method900: Int = 0
+ def method901: Int = 0
+ def method902: Int = 0
+ def method903: Int = 0
+ def method904: Int = 0
+ def method905: Int = 0
+ def method906: Int = 0
+ def method907: Int = 0
+ def method908: Int = 0
+ def method909: Int = 0
+ def method910: Int = 0
+ def method911: Int = 0
+ def method912: Int = 0
+ def method913: Int = 0
+ def method914: Int = 0
+ def method915: Int = 0
+ def method916: Int = 0
+ def method917: Int = 0
+ def method918: Int = 0
+ def method919: Int = 0
+ def method920: Int = 0
+ def method921: Int = 0
+ def method922: Int = 0
+ def method923: Int = 0
+ def method924: Int = 0
+ def method925: Int = 0
+ def method926: Int = 0
+ def method927: Int = 0
+ def method928: Int = 0
+ def method929: Int = 0
+ def method930: Int = 0
+ def method931: Int = 0
+ def method932: Int = 0
+ def method933: Int = 0
+ def method934: Int = 0
+ def method935: Int = 0
+ def method936: Int = 0
+ def method937: Int = 0
+ def method938: Int = 0
+ def method939: Int = 0
+ def method940: Int = 0
+ def method941: Int = 0
+ def method942: Int = 0
+ def method943: Int = 0
+ def method944: Int = 0
+ def method945: Int = 0
+ def method946: Int = 0
+ def method947: Int = 0
+ def method948: Int = 0
+ def method949: Int = 0
+ def method950: Int = 0
+ def method951: Int = 0
+ def method952: Int = 0
+ def method953: Int = 0
+ def method954: Int = 0
+ def method955: Int = 0
+ def method956: Int = 0
+ def method957: Int = 0
+ def method958: Int = 0
+ def method959: Int = 0
+ def method960: Int = 0
+ def method961: Int = 0
+ def method962: Int = 0
+ def method963: Int = 0
+ def method964: Int = 0
+ def method965: Int = 0
+ def method966: Int = 0
+ def method967: Int = 0
+ def method968: Int = 0
+ def method969: Int = 0
+ def method970: Int = 0
+ def method971: Int = 0
+ def method972: Int = 0
+ def method973: Int = 0
+ def method974: Int = 0
+ def method975: Int = 0
+ def method976: Int = 0
+ def method977: Int = 0
+ def method978: Int = 0
+ def method979: Int = 0
+ def method980: Int = 0
+ def method981: Int = 0
+ def method982: Int = 0
+ def method983: Int = 0
+ def method984: Int = 0
+ def method985: Int = 0
+ def method986: Int = 0
+ def method987: Int = 0
+ def method988: Int = 0
+ def method989: Int = 0
+ def method990: Int = 0
+ def method991: Int = 0
+ def method992: Int = 0
+ def method993: Int = 0
+ def method994: Int = 0
+ def method995: Int = 0
+ def method996: Int = 0
+ def method997: Int = 0
+ def method998: Int = 0
+ def method999: Int = 0
+ def method1000: Int = 0
+ def method1001: Int = 0
+ def method1002: Int = 0
+ def method1003: Int = 0
+ def method1004: Int = 0
+ def method1005: Int = 0
+ def method1006: Int = 0
+ def method1007: Int = 0
+ def method1008: Int = 0
+ def method1009: Int = 0
+ def method1010: Int = 0
+ def method1011: Int = 0
+ def method1012: Int = 0
+ def method1013: Int = 0
+ def method1014: Int = 0
+ def method1015: Int = 0
+ def method1016: Int = 0
+ def method1017: Int = 0
+ def method1018: Int = 0
+ def method1019: Int = 0
+ def method1020: Int = 0
+ def method1021: Int = 0
+ def method1022: Int = 0
+ def method1023: Int = 0
+ def method1024: Int = 0
+ def method1025: Int = 0
+ def method1026: Int = 0
+ def method1027: Int = 0
+ def method1028: Int = 0
+ def method1029: Int = 0
+ def method1030: Int = 0
+ def method1031: Int = 0
+ def method1032: Int = 0
+ def method1033: Int = 0
+ def method1034: Int = 0
+ def method1035: Int = 0
+ def method1036: Int = 0
+ def method1037: Int = 0
+ def method1038: Int = 0
+ def method1039: Int = 0
+ def method1040: Int = 0
+ def method1041: Int = 0
+ def method1042: Int = 0
+ def method1043: Int = 0
+ def method1044: Int = 0
+ def method1045: Int = 0
+ def method1046: Int = 0
+ def method1047: Int = 0
+ def method1048: Int = 0
+ def method1049: Int = 0
+ def method1050: Int = 0
+ def method1051: Int = 0
+ def method1052: Int = 0
+ def method1053: Int = 0
+ def method1054: Int = 0
+ def method1055: Int = 0
+ def method1056: Int = 0
+ def method1057: Int = 0
+ def method1058: Int = 0
+ def method1059: Int = 0
+ def method1060: Int = 0
+ def method1061: Int = 0
+ def method1062: Int = 0
+ def method1063: Int = 0
+ def method1064: Int = 0
+ def method1065: Int = 0
+ def method1066: Int = 0
+ def method1067: Int = 0
+ def method1068: Int = 0
+ def method1069: Int = 0
+ def method1070: Int = 0
+ def method1071: Int = 0
+ def method1072: Int = 0
+ def method1073: Int = 0
+ def method1074: Int = 0
+ def method1075: Int = 0
+ def method1076: Int = 0
+ def method1077: Int = 0
+ def method1078: Int = 0
+ def method1079: Int = 0
+ def method1080: Int = 0
+ def method1081: Int = 0
+ def method1082: Int = 0
+ def method1083: Int = 0
+ def method1084: Int = 0
+ def method1085: Int = 0
+ def method1086: Int = 0
+ def method1087: Int = 0
+ def method1088: Int = 0
+ def method1089: Int = 0
+ def method1090: Int = 0
+ def method1091: Int = 0
+ def method1092: Int = 0
+ def method1093: Int = 0
+ def method1094: Int = 0
+ def method1095: Int = 0
+ def method1096: Int = 0
+ def method1097: Int = 0
+ def method1098: Int = 0
+ def method1099: Int = 0
+ def method1100: Int = 0
+ def method1101: Int = 0
+ def method1102: Int = 0
+ def method1103: Int = 0
+ def method1104: Int = 0
+ def method1105: Int = 0
+ def method1106: Int = 0
+ def method1107: Int = 0
+ def method1108: Int = 0
+ def method1109: Int = 0
+ def method1110: Int = 0
+ def method1111: Int = 0
+ def method1112: Int = 0
+ def method1113: Int = 0
+ def method1114: Int = 0
+ def method1115: Int = 0
+ def method1116: Int = 0
+ def method1117: Int = 0
+ def method1118: Int = 0
+ def method1119: Int = 0
+ def method1120: Int = 0
+ def method1121: Int = 0
+ def method1122: Int = 0
+ def method1123: Int = 0
+ def method1124: Int = 0
+ def method1125: Int = 0
+ def method1126: Int = 0
+ def method1127: Int = 0
+ def method1128: Int = 0
+ def method1129: Int = 0
+ def method1130: Int = 0
+ def method1131: Int = 0
+ def method1132: Int = 0
+ def method1133: Int = 0
+ def method1134: Int = 0
+ def method1135: Int = 0
+ def method1136: Int = 0
+ def method1137: Int = 0
+ def method1138: Int = 0
+ def method1139: Int = 0
+ def method1140: Int = 0
+ def method1141: Int = 0
+ def method1142: Int = 0
+ def method1143: Int = 0
+ def method1144: Int = 0
+ def method1145: Int = 0
+ def method1146: Int = 0
+ def method1147: Int = 0
+ def method1148: Int = 0
+ def method1149: Int = 0
+ def method1150: Int = 0
+ def method1151: Int = 0
+ def method1152: Int = 0
+ def method1153: Int = 0
+ def method1154: Int = 0
+ def method1155: Int = 0
+ def method1156: Int = 0
+ def method1157: Int = 0
+ def method1158: Int = 0
+ def method1159: Int = 0
+ def method1160: Int = 0
+ def method1161: Int = 0
+ def method1162: Int = 0
+ def method1163: Int = 0
+ def method1164: Int = 0
+ def method1165: Int = 0
+ def method1166: Int = 0
+ def method1167: Int = 0
+ def method1168: Int = 0
+ def method1169: Int = 0
+ def method1170: Int = 0
+ def method1171: Int = 0
+ def method1172: Int = 0
+ def method1173: Int = 0
+ def method1174: Int = 0
+ def method1175: Int = 0
+ def method1176: Int = 0
+ def method1177: Int = 0
+ def method1178: Int = 0
+ def method1179: Int = 0
+ def method1180: Int = 0
+ def method1181: Int = 0
+ def method1182: Int = 0
+ def method1183: Int = 0
+ def method1184: Int = 0
+ def method1185: Int = 0
+ def method1186: Int = 0
+ def method1187: Int = 0
+ def method1188: Int = 0
+ def method1189: Int = 0
+ def method1190: Int = 0
+ def method1191: Int = 0
+ def method1192: Int = 0
+ def method1193: Int = 0
+ def method1194: Int = 0
+ def method1195: Int = 0
+ def method1196: Int = 0
+ def method1197: Int = 0
+ def method1198: Int = 0
+ def method1199: Int = 0
+ def method1200: Int = 0
+ def method1201: Int = 0
+ def method1202: Int = 0
+ def method1203: Int = 0
+ def method1204: Int = 0
+ def method1205: Int = 0
+ def method1206: Int = 0
+ def method1207: Int = 0
+ def method1208: Int = 0
+ def method1209: Int = 0
+ def method1210: Int = 0
+ def method1211: Int = 0
+ def method1212: Int = 0
+ def method1213: Int = 0
+ def method1214: Int = 0
+ def method1215: Int = 0
+ def method1216: Int = 0
+ def method1217: Int = 0
+ def method1218: Int = 0
+ def method1219: Int = 0
+ def method1220: Int = 0
+ def method1221: Int = 0
+ def method1222: Int = 0
+ def method1223: Int = 0
+ def method1224: Int = 0
+ def method1225: Int = 0
+ def method1226: Int = 0
+ def method1227: Int = 0
+ def method1228: Int = 0
+ def method1229: Int = 0
+ def method1230: Int = 0
+ def method1231: Int = 0
+ def method1232: Int = 0
+ def method1233: Int = 0
+ def method1234: Int = 0
+ def method1235: Int = 0
+ def method1236: Int = 0
+ def method1237: Int = 0
+ def method1238: Int = 0
+ def method1239: Int = 0
+ def method1240: Int = 0
+ def method1241: Int = 0
+ def method1242: Int = 0
+ def method1243: Int = 0
+ def method1244: Int = 0
+ def method1245: Int = 0
+ def method1246: Int = 0
+ def method1247: Int = 0
+ def method1248: Int = 0
+ def method1249: Int = 0
+ def method1250: Int = 0
+ def method1251: Int = 0
+ def method1252: Int = 0
+ def method1253: Int = 0
+ def method1254: Int = 0
+ def method1255: Int = 0
+ def method1256: Int = 0
+ def method1257: Int = 0
+ def method1258: Int = 0
+ def method1259: Int = 0
+ def method1260: Int = 0
+ def method1261: Int = 0
+ def method1262: Int = 0
+ def method1263: Int = 0
+ def method1264: Int = 0
+ def method1265: Int = 0
+ def method1266: Int = 0
+ def method1267: Int = 0
+ def method1268: Int = 0
+ def method1269: Int = 0
+ def method1270: Int = 0
+ def method1271: Int = 0
+ def method1272: Int = 0
+ def method1273: Int = 0
+ def method1274: Int = 0
+ def method1275: Int = 0
+ def method1276: Int = 0
+ def method1277: Int = 0
+ def method1278: Int = 0
+ def method1279: Int = 0
+ def method1280: Int = 0
+ def method1281: Int = 0
+ def method1282: Int = 0
+ def method1283: Int = 0
+ def method1284: Int = 0
+ def method1285: Int = 0
+ def method1286: Int = 0
+ def method1287: Int = 0
+ def method1288: Int = 0
+ def method1289: Int = 0
+ def method1290: Int = 0
+ def method1291: Int = 0
+ def method1292: Int = 0
+ def method1293: Int = 0
+ def method1294: Int = 0
+ def method1295: Int = 0
+ def method1296: Int = 0
+ def method1297: Int = 0
+ def method1298: Int = 0
+ def method1299: Int = 0
+ def method1300: Int = 0
+ def method1301: Int = 0
+ def method1302: Int = 0
+ def method1303: Int = 0
+ def method1304: Int = 0
+ def method1305: Int = 0
+ def method1306: Int = 0
+ def method1307: Int = 0
+ def method1308: Int = 0
+ def method1309: Int = 0
+ def method1310: Int = 0
+ def method1311: Int = 0
+ def method1312: Int = 0
+ def method1313: Int = 0
+ def method1314: Int = 0
+ def method1315: Int = 0
+ def method1316: Int = 0
+ def method1317: Int = 0
+ def method1318: Int = 0
+ def method1319: Int = 0
+ def method1320: Int = 0
+ def method1321: Int = 0
+ def method1322: Int = 0
+ def method1323: Int = 0
+ def method1324: Int = 0
+ def method1325: Int = 0
+ def method1326: Int = 0
+ def method1327: Int = 0
+ def method1328: Int = 0
+ def method1329: Int = 0
+ def method1330: Int = 0
+ def method1331: Int = 0
+ def method1332: Int = 0
+ def method1333: Int = 0
+ def method1334: Int = 0
+ def method1335: Int = 0
+ def method1336: Int = 0
+ def method1337: Int = 0
+ def method1338: Int = 0
+ def method1339: Int = 0
+ def method1340: Int = 0
+ def method1341: Int = 0
+ def method1342: Int = 0
+ def method1343: Int = 0
+ def method1344: Int = 0
+ def method1345: Int = 0
+ def method1346: Int = 0
+ def method1347: Int = 0
+ def method1348: Int = 0
+ def method1349: Int = 0
+ def method1350: Int = 0
+ def method1351: Int = 0
+ def method1352: Int = 0
+ def method1353: Int = 0
+ def method1354: Int = 0
+ def method1355: Int = 0
+ def method1356: Int = 0
+ def method1357: Int = 0
+ def method1358: Int = 0
+ def method1359: Int = 0
+ def method1360: Int = 0
+ def method1361: Int = 0
+ def method1362: Int = 0
+ def method1363: Int = 0
+ def method1364: Int = 0
+ def method1365: Int = 0
+ def method1366: Int = 0
+ def method1367: Int = 0
+ def method1368: Int = 0
+ def method1369: Int = 0
+ def method1370: Int = 0
+ def method1371: Int = 0
+ def method1372: Int = 0
+ def method1373: Int = 0
+ def method1374: Int = 0
+ def method1375: Int = 0
+ def method1376: Int = 0
+ def method1377: Int = 0
+ def method1378: Int = 0
+ def method1379: Int = 0
+ def method1380: Int = 0
+ def method1381: Int = 0
+ def method1382: Int = 0
+ def method1383: Int = 0
+ def method1384: Int = 0
+ def method1385: Int = 0
+ def method1386: Int = 0
+ def method1387: Int = 0
+ def method1388: Int = 0
+ def method1389: Int = 0
+ def method1390: Int = 0
+ def method1391: Int = 0
+ def method1392: Int = 0
+ def method1393: Int = 0
+ def method1394: Int = 0
+ def method1395: Int = 0
+ def method1396: Int = 0
+ def method1397: Int = 0
+ def method1398: Int = 0
+ def method1399: Int = 0
+ def method1400: Int = 0
+ def method1401: Int = 0
+ def method1402: Int = 0
+ def method1403: Int = 0
+ def method1404: Int = 0
+ def method1405: Int = 0
+ def method1406: Int = 0
+ def method1407: Int = 0
+ def method1408: Int = 0
+ def method1409: Int = 0
+ def method1410: Int = 0
+ def method1411: Int = 0
+ def method1412: Int = 0
+ def method1413: Int = 0
+ def method1414: Int = 0
+ def method1415: Int = 0
+ def method1416: Int = 0
+ def method1417: Int = 0
+ def method1418: Int = 0
+ def method1419: Int = 0
+ def method1420: Int = 0
+ def method1421: Int = 0
+ def method1422: Int = 0
+ def method1423: Int = 0
+ def method1424: Int = 0
+ def method1425: Int = 0
+ def method1426: Int = 0
+ def method1427: Int = 0
+ def method1428: Int = 0
+ def method1429: Int = 0
+ def method1430: Int = 0
+ def method1431: Int = 0
+ def method1432: Int = 0
+ def method1433: Int = 0
+ def method1434: Int = 0
+ def method1435: Int = 0
+ def method1436: Int = 0
+ def method1437: Int = 0
+ def method1438: Int = 0
+ def method1439: Int = 0
+ def method1440: Int = 0
+ def method1441: Int = 0
+ def method1442: Int = 0
+ def method1443: Int = 0
+ def method1444: Int = 0
+ def method1445: Int = 0
+ def method1446: Int = 0
+ def method1447: Int = 0
+ def method1448: Int = 0
+ def method1449: Int = 0
+ def method1450: Int = 0
+ def method1451: Int = 0
+ def method1452: Int = 0
+ def method1453: Int = 0
+ def method1454: Int = 0
+ def method1455: Int = 0
+ def method1456: Int = 0
+ def method1457: Int = 0
+ def method1458: Int = 0
+ def method1459: Int = 0
+ def method1460: Int = 0
+ def method1461: Int = 0
+ def method1462: Int = 0
+ def method1463: Int = 0
+ def method1464: Int = 0
+ def method1465: Int = 0
+ def method1466: Int = 0
+ def method1467: Int = 0
+ def method1468: Int = 0
+ def method1469: Int = 0
+ def method1470: Int = 0
+ def method1471: Int = 0
+ def method1472: Int = 0
+ def method1473: Int = 0
+ def method1474: Int = 0
+ def method1475: Int = 0
+ def method1476: Int = 0
+ def method1477: Int = 0
+ def method1478: Int = 0
+ def method1479: Int = 0
+ def method1480: Int = 0
+ def method1481: Int = 0
+ def method1482: Int = 0
+ def method1483: Int = 0
+ def method1484: Int = 0
+ def method1485: Int = 0
+ def method1486: Int = 0
+ def method1487: Int = 0
+ def method1488: Int = 0
+ def method1489: Int = 0
+ def method1490: Int = 0
+ def method1491: Int = 0
+ def method1492: Int = 0
+ def method1493: Int = 0
+ def method1494: Int = 0
+ def method1495: Int = 0
+ def method1496: Int = 0
+ def method1497: Int = 0
+ def method1498: Int = 0
+ def method1499: Int = 0
+ def method1500: Int = 0
+ def method1501: Int = 0
+ def method1502: Int = 0
+ def method1503: Int = 0
+ def method1504: Int = 0
+ def method1505: Int = 0
+ def method1506: Int = 0
+ def method1507: Int = 0
+ def method1508: Int = 0
+ def method1509: Int = 0
+ def method1510: Int = 0
+ def method1511: Int = 0
+ def method1512: Int = 0
+ def method1513: Int = 0
+ def method1514: Int = 0
+ def method1515: Int = 0
+ def method1516: Int = 0
+ def method1517: Int = 0
+ def method1518: Int = 0
+ def method1519: Int = 0
+ def method1520: Int = 0
+ def method1521: Int = 0
+ def method1522: Int = 0
+ def method1523: Int = 0
+ def method1524: Int = 0
+ def method1525: Int = 0
+ def method1526: Int = 0
+ def method1527: Int = 0
+ def method1528: Int = 0
+ def method1529: Int = 0
+ def method1530: Int = 0
+ def method1531: Int = 0
+ def method1532: Int = 0
+ def method1533: Int = 0
+ def method1534: Int = 0
+ def method1535: Int = 0
+ def method1536: Int = 0
+ def method1537: Int = 0
+ def method1538: Int = 0
+ def method1539: Int = 0
+ def method1540: Int = 0
+ def method1541: Int = 0
+ def method1542: Int = 0
+ def method1543: Int = 0
+ def method1544: Int = 0
+ def method1545: Int = 0
+ def method1546: Int = 0
+ def method1547: Int = 0
+ def method1548: Int = 0
+ def method1549: Int = 0
+ def method1550: Int = 0
+ def method1551: Int = 0
+ def method1552: Int = 0
+ def method1553: Int = 0
+ def method1554: Int = 0
+ def method1555: Int = 0
+ def method1556: Int = 0
+ def method1557: Int = 0
+ def method1558: Int = 0
+ def method1559: Int = 0
+ def method1560: Int = 0
+ def method1561: Int = 0
+ def method1562: Int = 0
+ def method1563: Int = 0
+ def method1564: Int = 0
+ def method1565: Int = 0
+ def method1566: Int = 0
+ def method1567: Int = 0
+ def method1568: Int = 0
+ def method1569: Int = 0
+ def method1570: Int = 0
+ def method1571: Int = 0
+ def method1572: Int = 0
+ def method1573: Int = 0
+ def method1574: Int = 0
+ def method1575: Int = 0
+ def method1576: Int = 0
+ def method1577: Int = 0
+ def method1578: Int = 0
+ def method1579: Int = 0
+ def method1580: Int = 0
+ def method1581: Int = 0
+ def method1582: Int = 0
+ def method1583: Int = 0
+ def method1584: Int = 0
+ def method1585: Int = 0
+ def method1586: Int = 0
+ def method1587: Int = 0
+ def method1588: Int = 0
+ def method1589: Int = 0
+ def method1590: Int = 0
+ def method1591: Int = 0
+ def method1592: Int = 0
+ def method1593: Int = 0
+ def method1594: Int = 0
+ def method1595: Int = 0
+ def method1596: Int = 0
+ def method1597: Int = 0
+ def method1598: Int = 0
+ def method1599: Int = 0
+ def method1600: Int = 0
+ def method1601: Int = 0
+ def method1602: Int = 0
+ def method1603: Int = 0
+ def method1604: Int = 0
+ def method1605: Int = 0
+ def method1606: Int = 0
+ def method1607: Int = 0
+ def method1608: Int = 0
+ def method1609: Int = 0
+ def method1610: Int = 0
+ def method1611: Int = 0
+ def method1612: Int = 0
+ def method1613: Int = 0
+ def method1614: Int = 0
+ def method1615: Int = 0
+ def method1616: Int = 0
+ def method1617: Int = 0
+ def method1618: Int = 0
+ def method1619: Int = 0
+ def method1620: Int = 0
+ def method1621: Int = 0
+ def method1622: Int = 0
+ def method1623: Int = 0
+ def method1624: Int = 0
+ def method1625: Int = 0
+ def method1626: Int = 0
+ def method1627: Int = 0
+ def method1628: Int = 0
+ def method1629: Int = 0
+ def method1630: Int = 0
+ def method1631: Int = 0
+ def method1632: Int = 0
+ def method1633: Int = 0
+ def method1634: Int = 0
+ def method1635: Int = 0
+ def method1636: Int = 0
+ def method1637: Int = 0
+ def method1638: Int = 0
+ def method1639: Int = 0
+ def method1640: Int = 0
+ def method1641: Int = 0
+ def method1642: Int = 0
+ def method1643: Int = 0
+ def method1644: Int = 0
+ def method1645: Int = 0
+ def method1646: Int = 0
+ def method1647: Int = 0
+ def method1648: Int = 0
+ def method1649: Int = 0
+ def method1650: Int = 0
+ def method1651: Int = 0
+ def method1652: Int = 0
+ def method1653: Int = 0
+ def method1654: Int = 0
+ def method1655: Int = 0
+ def method1656: Int = 0
+ def method1657: Int = 0
+ def method1658: Int = 0
+ def method1659: Int = 0
+ def method1660: Int = 0
+ def method1661: Int = 0
+ def method1662: Int = 0
+ def method1663: Int = 0
+ def method1664: Int = 0
+ def method1665: Int = 0
+ def method1666: Int = 0
+ def method1667: Int = 0
+ def method1668: Int = 0
+ def method1669: Int = 0
+ def method1670: Int = 0
+ def method1671: Int = 0
+ def method1672: Int = 0
+ def method1673: Int = 0
+ def method1674: Int = 0
+ def method1675: Int = 0
+ def method1676: Int = 0
+ def method1677: Int = 0
+ def method1678: Int = 0
+ def method1679: Int = 0
+ def method1680: Int = 0
+ def method1681: Int = 0
+ def method1682: Int = 0
+ def method1683: Int = 0
+ def method1684: Int = 0
+ def method1685: Int = 0
+ def method1686: Int = 0
+ def method1687: Int = 0
+ def method1688: Int = 0
+ def method1689: Int = 0
+ def method1690: Int = 0
+ def method1691: Int = 0
+ def method1692: Int = 0
+ def method1693: Int = 0
+ def method1694: Int = 0
+ def method1695: Int = 0
+ def method1696: Int = 0
+ def method1697: Int = 0
+ def method1698: Int = 0
+ def method1699: Int = 0
+ def method1700: Int = 0
+ def method1701: Int = 0
+ def method1702: Int = 0
+ def method1703: Int = 0
+ def method1704: Int = 0
+ def method1705: Int = 0
+ def method1706: Int = 0
+ def method1707: Int = 0
+ def method1708: Int = 0
+ def method1709: Int = 0
+ def method1710: Int = 0
+ def method1711: Int = 0
+ def method1712: Int = 0
+ def method1713: Int = 0
+ def method1714: Int = 0
+ def method1715: Int = 0
+ def method1716: Int = 0
+ def method1717: Int = 0
+ def method1718: Int = 0
+ def method1719: Int = 0
+ def method1720: Int = 0
+ def method1721: Int = 0
+ def method1722: Int = 0
+ def method1723: Int = 0
+ def method1724: Int = 0
+ def method1725: Int = 0
+ def method1726: Int = 0
+ def method1727: Int = 0
+ def method1728: Int = 0
+ def method1729: Int = 0
+ def method1730: Int = 0
+ def method1731: Int = 0
+ def method1732: Int = 0
+ def method1733: Int = 0
+ def method1734: Int = 0
+ def method1735: Int = 0
+ def method1736: Int = 0
+ def method1737: Int = 0
+ def method1738: Int = 0
+ def method1739: Int = 0
+ def method1740: Int = 0
+ def method1741: Int = 0
+ def method1742: Int = 0
+ def method1743: Int = 0
+ def method1744: Int = 0
+ def method1745: Int = 0
+ def method1746: Int = 0
+ def method1747: Int = 0
+ def method1748: Int = 0
+ def method1749: Int = 0
+ def method1750: Int = 0
+ def method1751: Int = 0
+ def method1752: Int = 0
+ def method1753: Int = 0
+ def method1754: Int = 0
+ def method1755: Int = 0
+ def method1756: Int = 0
+ def method1757: Int = 0
+ def method1758: Int = 0
+ def method1759: Int = 0
+ def method1760: Int = 0
+ def method1761: Int = 0
+ def method1762: Int = 0
+ def method1763: Int = 0
+ def method1764: Int = 0
+ def method1765: Int = 0
+ def method1766: Int = 0
+ def method1767: Int = 0
+ def method1768: Int = 0
+ def method1769: Int = 0
+ def method1770: Int = 0
+ def method1771: Int = 0
+ def method1772: Int = 0
+ def method1773: Int = 0
+ def method1774: Int = 0
+ def method1775: Int = 0
+ def method1776: Int = 0
+ def method1777: Int = 0
+ def method1778: Int = 0
+ def method1779: Int = 0
+ def method1780: Int = 0
+ def method1781: Int = 0
+ def method1782: Int = 0
+ def method1783: Int = 0
+ def method1784: Int = 0
+ def method1785: Int = 0
+ def method1786: Int = 0
+ def method1787: Int = 0
+ def method1788: Int = 0
+ def method1789: Int = 0
+ def method1790: Int = 0
+ def method1791: Int = 0
+ def method1792: Int = 0
+ def method1793: Int = 0
+ def method1794: Int = 0
+ def method1795: Int = 0
+ def method1796: Int = 0
+ def method1797: Int = 0
+ def method1798: Int = 0
+ def method1799: Int = 0
+ def method1800: Int = 0
+ def method1801: Int = 0
+ def method1802: Int = 0
+ def method1803: Int = 0
+ def method1804: Int = 0
+ def method1805: Int = 0
+ def method1806: Int = 0
+ def method1807: Int = 0
+ def method1808: Int = 0
+ def method1809: Int = 0
+ def method1810: Int = 0
+ def method1811: Int = 0
+ def method1812: Int = 0
+ def method1813: Int = 0
+ def method1814: Int = 0
+ def method1815: Int = 0
+ def method1816: Int = 0
+ def method1817: Int = 0
+ def method1818: Int = 0
+ def method1819: Int = 0
+ def method1820: Int = 0
+ def method1821: Int = 0
+ def method1822: Int = 0
+ def method1823: Int = 0
+ def method1824: Int = 0
+ def method1825: Int = 0
+ def method1826: Int = 0
+ def method1827: Int = 0
+ def method1828: Int = 0
+ def method1829: Int = 0
+ def method1830: Int = 0
+ def method1831: Int = 0
+ def method1832: Int = 0
+ def method1833: Int = 0
+ def method1834: Int = 0
+ def method1835: Int = 0
+ def method1836: Int = 0
+ def method1837: Int = 0
+ def method1838: Int = 0
+ def method1839: Int = 0
+ def method1840: Int = 0
+ def method1841: Int = 0
+ def method1842: Int = 0
+ def method1843: Int = 0
+ def method1844: Int = 0
+ def method1845: Int = 0
+ def method1846: Int = 0
+ def method1847: Int = 0
+ def method1848: Int = 0
+ def method1849: Int = 0
+ def method1850: Int = 0
+ def method1851: Int = 0
+ def method1852: Int = 0
+ def method1853: Int = 0
+ def method1854: Int = 0
+ def method1855: Int = 0
+ def method1856: Int = 0
+ def method1857: Int = 0
+ def method1858: Int = 0
+ def method1859: Int = 0
+ def method1860: Int = 0
+ def method1861: Int = 0
+ def method1862: Int = 0
+ def method1863: Int = 0
+ def method1864: Int = 0
+ def method1865: Int = 0
+ def method1866: Int = 0
+ def method1867: Int = 0
+ def method1868: Int = 0
+ def method1869: Int = 0
+ def method1870: Int = 0
+ def method1871: Int = 0
+ def method1872: Int = 0
+ def method1873: Int = 0
+ def method1874: Int = 0
+ def method1875: Int = 0
+ def method1876: Int = 0
+ def method1877: Int = 0
+ def method1878: Int = 0
+ def method1879: Int = 0
+ def method1880: Int = 0
+ def method1881: Int = 0
+ def method1882: Int = 0
+ def method1883: Int = 0
+ def method1884: Int = 0
+ def method1885: Int = 0
+ def method1886: Int = 0
+ def method1887: Int = 0
+ def method1888: Int = 0
+ def method1889: Int = 0
+ def method1890: Int = 0
+ def method1891: Int = 0
+ def method1892: Int = 0
+ def method1893: Int = 0
+ def method1894: Int = 0
+ def method1895: Int = 0
+ def method1896: Int = 0
+ def method1897: Int = 0
+ def method1898: Int = 0
+ def method1899: Int = 0
+ def method1900: Int = 0
+ def method1901: Int = 0
+ def method1902: Int = 0
+ def method1903: Int = 0
+ def method1904: Int = 0
+ def method1905: Int = 0
+ def method1906: Int = 0
+ def method1907: Int = 0
+ def method1908: Int = 0
+ def method1909: Int = 0
+ def method1910: Int = 0
+ def method1911: Int = 0
+ def method1912: Int = 0
+ def method1913: Int = 0
+ def method1914: Int = 0
+ def method1915: Int = 0
+ def method1916: Int = 0
+ def method1917: Int = 0
+ def method1918: Int = 0
+ def method1919: Int = 0
+ def method1920: Int = 0
+ def method1921: Int = 0
+ def method1922: Int = 0
+ def method1923: Int = 0
+ def method1924: Int = 0
+ def method1925: Int = 0
+ def method1926: Int = 0
+ def method1927: Int = 0
+ def method1928: Int = 0
+ def method1929: Int = 0
+ def method1930: Int = 0
+ def method1931: Int = 0
+ def method1932: Int = 0
+ def method1933: Int = 0
+ def method1934: Int = 0
+ def method1935: Int = 0
+ def method1936: Int = 0
+ def method1937: Int = 0
+ def method1938: Int = 0
+ def method1939: Int = 0
+ def method1940: Int = 0
+ def method1941: Int = 0
+ def method1942: Int = 0
+ def method1943: Int = 0
+ def method1944: Int = 0
+ def method1945: Int = 0
+ def method1946: Int = 0
+ def method1947: Int = 0
+ def method1948: Int = 0
+ def method1949: Int = 0
+ def method1950: Int = 0
+ def method1951: Int = 0
+ def method1952: Int = 0
+ def method1953: Int = 0
+ def method1954: Int = 0
+ def method1955: Int = 0
+ def method1956: Int = 0
+ def method1957: Int = 0
+ def method1958: Int = 0
+ def method1959: Int = 0
+ def method1960: Int = 0
+ def method1961: Int = 0
+ def method1962: Int = 0
+ def method1963: Int = 0
+ def method1964: Int = 0
+ def method1965: Int = 0
+ def method1966: Int = 0
+ def method1967: Int = 0
+ def method1968: Int = 0
+ def method1969: Int = 0
+ def method1970: Int = 0
+ def method1971: Int = 0
+ def method1972: Int = 0
+ def method1973: Int = 0
+ def method1974: Int = 0
+ def method1975: Int = 0
+ def method1976: Int = 0
+ def method1977: Int = 0
+ def method1978: Int = 0
+ def method1979: Int = 0
+ def method1980: Int = 0
+ def method1981: Int = 0
+ def method1982: Int = 0
+ def method1983: Int = 0
+ def method1984: Int = 0
+ def method1985: Int = 0
+ def method1986: Int = 0
+ def method1987: Int = 0
+ def method1988: Int = 0
+ def method1989: Int = 0
+ def method1990: Int = 0
+ def method1991: Int = 0
+ def method1992: Int = 0
+ def method1993: Int = 0
+ def method1994: Int = 0
+ def method1995: Int = 0
+ def method1996: Int = 0
+ def method1997: Int = 0
+ def method1998: Int = 0
+ def method1999: Int = 0
+ def method2000: Int = 0
+ def method2001: Int = 0
+ def method2002: Int = 0
+ def method2003: Int = 0
+ def method2004: Int = 0
+ def method2005: Int = 0
+ def method2006: Int = 0
+ def method2007: Int = 0
+ def method2008: Int = 0
+ def method2009: Int = 0
+ def method2010: Int = 0
+ def method2011: Int = 0
+ def method2012: Int = 0
+ def method2013: Int = 0
+ def method2014: Int = 0
+ def method2015: Int = 0
+ def method2016: Int = 0
+ def method2017: Int = 0
+ def method2018: Int = 0
+ def method2019: Int = 0
+ def method2020: Int = 0
+ def method2021: Int = 0
+ def method2022: Int = 0
+ def method2023: Int = 0
+ def method2024: Int = 0
+ def method2025: Int = 0
+ def method2026: Int = 0
+ def method2027: Int = 0
+ def method2028: Int = 0
+ def method2029: Int = 0
+ def method2030: Int = 0
+ def method2031: Int = 0
+ def method2032: Int = 0
+ def method2033: Int = 0
+ def method2034: Int = 0
+ def method2035: Int = 0
+ def method2036: Int = 0
+ def method2037: Int = 0
+ def method2038: Int = 0
+ def method2039: Int = 0
+ def method2040: Int = 0
+ def method2041: Int = 0
+ def method2042: Int = 0
+ def method2043: Int = 0
+ def method2044: Int = 0
+ def method2045: Int = 0
+ def method2046: Int = 0
+ def method2047: Int = 0
+ def method2048: Int = 0
+ def method2049: Int = 0
+ def method2050: Int = 0
+ def method2051: Int = 0
+ def method2052: Int = 0
+ def method2053: Int = 0
+ def method2054: Int = 0
+ def method2055: Int = 0
+ def method2056: Int = 0
+ def method2057: Int = 0
+ def method2058: Int = 0
+ def method2059: Int = 0
+ def method2060: Int = 0
+ def method2061: Int = 0
+ def method2062: Int = 0
+ def method2063: Int = 0
+ def method2064: Int = 0
+ def method2065: Int = 0
+ def method2066: Int = 0
+ def method2067: Int = 0
+ def method2068: Int = 0
+ def method2069: Int = 0
+ def method2070: Int = 0
+ def method2071: Int = 0
+ def method2072: Int = 0
+ def method2073: Int = 0
+ def method2074: Int = 0
+ def method2075: Int = 0
+ def method2076: Int = 0
+ def method2077: Int = 0
+ def method2078: Int = 0
+ def method2079: Int = 0
+ def method2080: Int = 0
+ def method2081: Int = 0
+ def method2082: Int = 0
+ def method2083: Int = 0
+ def method2084: Int = 0
+ def method2085: Int = 0
+ def method2086: Int = 0
+ def method2087: Int = 0
+ def method2088: Int = 0
+ def method2089: Int = 0
+ def method2090: Int = 0
+ def method2091: Int = 0
+ def method2092: Int = 0
+ def method2093: Int = 0
+ def method2094: Int = 0
+ def method2095: Int = 0
+ def method2096: Int = 0
+ def method2097: Int = 0
+ def method2098: Int = 0
+ def method2099: Int = 0
+ def method2100: Int = 0
+ def method2101: Int = 0
+ def method2102: Int = 0
+ def method2103: Int = 0
+ def method2104: Int = 0
+ def method2105: Int = 0
+ def method2106: Int = 0
+ def method2107: Int = 0
+ def method2108: Int = 0
+ def method2109: Int = 0
+ def method2110: Int = 0
+ def method2111: Int = 0
+ def method2112: Int = 0
+ def method2113: Int = 0
+ def method2114: Int = 0
+ def method2115: Int = 0
+ def method2116: Int = 0
+ def method2117: Int = 0
+ def method2118: Int = 0
+ def method2119: Int = 0
+ def method2120: Int = 0
+ def method2121: Int = 0
+ def method2122: Int = 0
+ def method2123: Int = 0
+ def method2124: Int = 0
+ def method2125: Int = 0
+ def method2126: Int = 0
+ def method2127: Int = 0
+ def method2128: Int = 0
+ def method2129: Int = 0
+ def method2130: Int = 0
+ def method2131: Int = 0
+ def method2132: Int = 0
+ def method2133: Int = 0
+ def method2134: Int = 0
+ def method2135: Int = 0
+ def method2136: Int = 0
+ def method2137: Int = 0
+ def method2138: Int = 0
+ def method2139: Int = 0
+ def method2140: Int = 0
+ def method2141: Int = 0
+ def method2142: Int = 0
+ def method2143: Int = 0
+ def method2144: Int = 0
+ def method2145: Int = 0
+ def method2146: Int = 0
+ def method2147: Int = 0
+ def method2148: Int = 0
+ def method2149: Int = 0
+ def method2150: Int = 0
+ def method2151: Int = 0
+ def method2152: Int = 0
+ def method2153: Int = 0
+ def method2154: Int = 0
+ def method2155: Int = 0
+ def method2156: Int = 0
+ def method2157: Int = 0
+ def method2158: Int = 0
+ def method2159: Int = 0
+ def method2160: Int = 0
+ def method2161: Int = 0
+ def method2162: Int = 0
+ def method2163: Int = 0
+ def method2164: Int = 0
+ def method2165: Int = 0
+ def method2166: Int = 0
+ def method2167: Int = 0
+ def method2168: Int = 0
+ def method2169: Int = 0
+ def method2170: Int = 0
+ def method2171: Int = 0
+ def method2172: Int = 0
+ def method2173: Int = 0
+ def method2174: Int = 0
+ def method2175: Int = 0
+ def method2176: Int = 0
+ def method2177: Int = 0
+ def method2178: Int = 0
+ def method2179: Int = 0
+ def method2180: Int = 0
+ def method2181: Int = 0
+ def method2182: Int = 0
+ def method2183: Int = 0
+ def method2184: Int = 0
+ def method2185: Int = 0
+ def method2186: Int = 0
+ def method2187: Int = 0
+ def method2188: Int = 0
+ def method2189: Int = 0
+ def method2190: Int = 0
+ def method2191: Int = 0
+ def method2192: Int = 0
+ def method2193: Int = 0
+ def method2194: Int = 0
+ def method2195: Int = 0
+ def method2196: Int = 0
+ def method2197: Int = 0
+ def method2198: Int = 0
+ def method2199: Int = 0
+ def method2200: Int = 0
+ def method2201: Int = 0
+ def method2202: Int = 0
+ def method2203: Int = 0
+ def method2204: Int = 0
+ def method2205: Int = 0
+ def method2206: Int = 0
+ def method2207: Int = 0
+ def method2208: Int = 0
+ def method2209: Int = 0
+ def method2210: Int = 0
+ def method2211: Int = 0
+ def method2212: Int = 0
+ def method2213: Int = 0
+ def method2214: Int = 0
+ def method2215: Int = 0
+ def method2216: Int = 0
+ def method2217: Int = 0
+ def method2218: Int = 0
+ def method2219: Int = 0
+ def method2220: Int = 0
+ def method2221: Int = 0
+ def method2222: Int = 0
+ def method2223: Int = 0
+ def method2224: Int = 0
+ def method2225: Int = 0
+ def method2226: Int = 0
+ def method2227: Int = 0
+ def method2228: Int = 0
+ def method2229: Int = 0
+ def method2230: Int = 0
+ def method2231: Int = 0
+ def method2232: Int = 0
+ def method2233: Int = 0
+ def method2234: Int = 0
+ def method2235: Int = 0
+ def method2236: Int = 0
+ def method2237: Int = 0
+ def method2238: Int = 0
+ def method2239: Int = 0
+ def method2240: Int = 0
+ def method2241: Int = 0
+ def method2242: Int = 0
+ def method2243: Int = 0
+ def method2244: Int = 0
+ def method2245: Int = 0
+ def method2246: Int = 0
+ def method2247: Int = 0
+ def method2248: Int = 0
+ def method2249: Int = 0
+ def method2250: Int = 0
+ def method2251: Int = 0
+ def method2252: Int = 0
+ def method2253: Int = 0
+ def method2254: Int = 0
+ def method2255: Int = 0
+ def method2256: Int = 0
+ def method2257: Int = 0
+ def method2258: Int = 0
+ def method2259: Int = 0
+ def method2260: Int = 0
+ def method2261: Int = 0
+ def method2262: Int = 0
+ def method2263: Int = 0
+ def method2264: Int = 0
+ def method2265: Int = 0
+ def method2266: Int = 0
+ def method2267: Int = 0
+ def method2268: Int = 0
+ def method2269: Int = 0
+ def method2270: Int = 0
+ def method2271: Int = 0
+ def method2272: Int = 0
+ def method2273: Int = 0
+ def method2274: Int = 0
+ def method2275: Int = 0
+ def method2276: Int = 0
+ def method2277: Int = 0
+ def method2278: Int = 0
+ def method2279: Int = 0
+ def method2280: Int = 0
+ def method2281: Int = 0
+ def method2282: Int = 0
+ def method2283: Int = 0
+ def method2284: Int = 0
+ def method2285: Int = 0
+ def method2286: Int = 0
+ def method2287: Int = 0
+ def method2288: Int = 0
+ def method2289: Int = 0
+ def method2290: Int = 0
+ def method2291: Int = 0
+ def method2292: Int = 0
+ def method2293: Int = 0
+ def method2294: Int = 0
+ def method2295: Int = 0
+ def method2296: Int = 0
+ def method2297: Int = 0
+ def method2298: Int = 0
+ def method2299: Int = 0
+ def method2300: Int = 0
+ def method2301: Int = 0
+ def method2302: Int = 0
+ def method2303: Int = 0
+ def method2304: Int = 0
+ def method2305: Int = 0
+ def method2306: Int = 0
+ def method2307: Int = 0
+ def method2308: Int = 0
+ def method2309: Int = 0
+ def method2310: Int = 0
+ def method2311: Int = 0
+ def method2312: Int = 0
+ def method2313: Int = 0
+ def method2314: Int = 0
+ def method2315: Int = 0
+ def method2316: Int = 0
+ def method2317: Int = 0
+ def method2318: Int = 0
+ def method2319: Int = 0
+ def method2320: Int = 0
+ def method2321: Int = 0
+ def method2322: Int = 0
+ def method2323: Int = 0
+ def method2324: Int = 0
+ def method2325: Int = 0
+ def method2326: Int = 0
+ def method2327: Int = 0
+ def method2328: Int = 0
+ def method2329: Int = 0
+ def method2330: Int = 0
+ def method2331: Int = 0
+ def method2332: Int = 0
+ def method2333: Int = 0
+ def method2334: Int = 0
+ def method2335: Int = 0
+ def method2336: Int = 0
+ def method2337: Int = 0
+ def method2338: Int = 0
+ def method2339: Int = 0
+ def method2340: Int = 0
+ def method2341: Int = 0
+ def method2342: Int = 0
+ def method2343: Int = 0
+ def method2344: Int = 0
+ def method2345: Int = 0
+ def method2346: Int = 0
+ def method2347: Int = 0
+ def method2348: Int = 0
+ def method2349: Int = 0
+ def method2350: Int = 0
+ def method2351: Int = 0
+ def method2352: Int = 0
+ def method2353: Int = 0
+ def method2354: Int = 0
+ def method2355: Int = 0
+ def method2356: Int = 0
+ def method2357: Int = 0
+ def method2358: Int = 0
+ def method2359: Int = 0
+ def method2360: Int = 0
+ def method2361: Int = 0
+ def method2362: Int = 0
+ def method2363: Int = 0
+ def method2364: Int = 0
+ def method2365: Int = 0
+ def method2366: Int = 0
+ def method2367: Int = 0
+ def method2368: Int = 0
+ def method2369: Int = 0
+ def method2370: Int = 0
+ def method2371: Int = 0
+ def method2372: Int = 0
+ def method2373: Int = 0
+ def method2374: Int = 0
+ def method2375: Int = 0
+ def method2376: Int = 0
+ def method2377: Int = 0
+ def method2378: Int = 0
+ def method2379: Int = 0
+ def method2380: Int = 0
+ def method2381: Int = 0
+ def method2382: Int = 0
+ def method2383: Int = 0
+ def method2384: Int = 0
+ def method2385: Int = 0
+ def method2386: Int = 0
+ def method2387: Int = 0
+ def method2388: Int = 0
+ def method2389: Int = 0
+ def method2390: Int = 0
+ def method2391: Int = 0
+ def method2392: Int = 0
+ def method2393: Int = 0
+ def method2394: Int = 0
+ def method2395: Int = 0
+ def method2396: Int = 0
+ def method2397: Int = 0
+ def method2398: Int = 0
+ def method2399: Int = 0
+ def method2400: Int = 0
+ def method2401: Int = 0
+ def method2402: Int = 0
+ def method2403: Int = 0
+ def method2404: Int = 0
+ def method2405: Int = 0
+ def method2406: Int = 0
+ def method2407: Int = 0
+ def method2408: Int = 0
+ def method2409: Int = 0
+ def method2410: Int = 0
+ def method2411: Int = 0
+ def method2412: Int = 0
+ def method2413: Int = 0
+ def method2414: Int = 0
+ def method2415: Int = 0
+ def method2416: Int = 0
+ def method2417: Int = 0
+ def method2418: Int = 0
+ def method2419: Int = 0
+ def method2420: Int = 0
+ def method2421: Int = 0
+ def method2422: Int = 0
+ def method2423: Int = 0
+ def method2424: Int = 0
+ def method2425: Int = 0
+ def method2426: Int = 0
+ def method2427: Int = 0
+ def method2428: Int = 0
+ def method2429: Int = 0
+ def method2430: Int = 0
+ def method2431: Int = 0
+ def method2432: Int = 0
+ def method2433: Int = 0
+ def method2434: Int = 0
+ def method2435: Int = 0
+ def method2436: Int = 0
+ def method2437: Int = 0
+ def method2438: Int = 0
+ def method2439: Int = 0
+ def method2440: Int = 0
+ def method2441: Int = 0
+ def method2442: Int = 0
+ def method2443: Int = 0
+ def method2444: Int = 0
+ def method2445: Int = 0
+ def method2446: Int = 0
+ def method2447: Int = 0
+ def method2448: Int = 0
+ def method2449: Int = 0
+ def method2450: Int = 0
+ def method2451: Int = 0
+ def method2452: Int = 0
+ def method2453: Int = 0
+ def method2454: Int = 0
+ def method2455: Int = 0
+ def method2456: Int = 0
+ def method2457: Int = 0
+ def method2458: Int = 0
+ def method2459: Int = 0
+ def method2460: Int = 0
+ def method2461: Int = 0
+ def method2462: Int = 0
+ def method2463: Int = 0
+ def method2464: Int = 0
+ def method2465: Int = 0
+ def method2466: Int = 0
+ def method2467: Int = 0
+ def method2468: Int = 0
+ def method2469: Int = 0
+ def method2470: Int = 0
+ def method2471: Int = 0
+ def method2472: Int = 0
+ def method2473: Int = 0
+ def method2474: Int = 0
+ def method2475: Int = 0
+ def method2476: Int = 0
+ def method2477: Int = 0
+ def method2478: Int = 0
+ def method2479: Int = 0
+ def method2480: Int = 0
+ def method2481: Int = 0
+ def method2482: Int = 0
+ def method2483: Int = 0
+ def method2484: Int = 0
+ def method2485: Int = 0
+ def method2486: Int = 0
+ def method2487: Int = 0
+ def method2488: Int = 0
+ def method2489: Int = 0
+ def method2490: Int = 0
+ def method2491: Int = 0
+ def method2492: Int = 0
+ def method2493: Int = 0
+ def method2494: Int = 0
+ def method2495: Int = 0
+ def method2496: Int = 0
+ def method2497: Int = 0
+ def method2498: Int = 0
+ def method2499: Int = 0
+ def method2500: Int = 0
+ def method2501: Int = 0
+ def method2502: Int = 0
+ def method2503: Int = 0
+ def method2504: Int = 0
+ def method2505: Int = 0
+ def method2506: Int = 0
+ def method2507: Int = 0
+ def method2508: Int = 0
+ def method2509: Int = 0
+ def method2510: Int = 0
+ def method2511: Int = 0
+ def method2512: Int = 0
+ def method2513: Int = 0
+ def method2514: Int = 0
+ def method2515: Int = 0
+ def method2516: Int = 0
+ def method2517: Int = 0
+ def method2518: Int = 0
+ def method2519: Int = 0
+ def method2520: Int = 0
+ def method2521: Int = 0
+ def method2522: Int = 0
+ def method2523: Int = 0
+ def method2524: Int = 0
+ def method2525: Int = 0
+ def method2526: Int = 0
+ def method2527: Int = 0
+ def method2528: Int = 0
+ def method2529: Int = 0
+ def method2530: Int = 0
+ def method2531: Int = 0
+ def method2532: Int = 0
+ def method2533: Int = 0
+ def method2534: Int = 0
+ def method2535: Int = 0
+ def method2536: Int = 0
+ def method2537: Int = 0
+ def method2538: Int = 0
+ def method2539: Int = 0
+ def method2540: Int = 0
+ def method2541: Int = 0
+ def method2542: Int = 0
+ def method2543: Int = 0
+ def method2544: Int = 0
+ def method2545: Int = 0
+ def method2546: Int = 0
+ def method2547: Int = 0
+ def method2548: Int = 0
+ def method2549: Int = 0
+ def method2550: Int = 0
+ def method2551: Int = 0
+ def method2552: Int = 0
+ def method2553: Int = 0
+ def method2554: Int = 0
+ def method2555: Int = 0
+ def method2556: Int = 0
+ def method2557: Int = 0
+ def method2558: Int = 0
+ def method2559: Int = 0
+ def method2560: Int = 0
+ def method2561: Int = 0
+ def method2562: Int = 0
+ def method2563: Int = 0
+ def method2564: Int = 0
+ def method2565: Int = 0
+ def method2566: Int = 0
+ def method2567: Int = 0
+ def method2568: Int = 0
+ def method2569: Int = 0
+ def method2570: Int = 0
+ def method2571: Int = 0
+ def method2572: Int = 0
+ def method2573: Int = 0
+ def method2574: Int = 0
+ def method2575: Int = 0
+ def method2576: Int = 0
+ def method2577: Int = 0
+ def method2578: Int = 0
+ def method2579: Int = 0
+ def method2580: Int = 0
+ def method2581: Int = 0
+ def method2582: Int = 0
+ def method2583: Int = 0
+ def method2584: Int = 0
+ def method2585: Int = 0
+ def method2586: Int = 0
+ def method2587: Int = 0
+ def method2588: Int = 0
+ def method2589: Int = 0
+ def method2590: Int = 0
+ def method2591: Int = 0
+ def method2592: Int = 0
+ def method2593: Int = 0
+ def method2594: Int = 0
+ def method2595: Int = 0
+ def method2596: Int = 0
+ def method2597: Int = 0
+ def method2598: Int = 0
+ def method2599: Int = 0
+ def method2600: Int = 0
+ def method2601: Int = 0
+ def method2602: Int = 0
+ def method2603: Int = 0
+ def method2604: Int = 0
+ def method2605: Int = 0
+ def method2606: Int = 0
+ def method2607: Int = 0
+ def method2608: Int = 0
+ def method2609: Int = 0
+ def method2610: Int = 0
+ def method2611: Int = 0
+ def method2612: Int = 0
+ def method2613: Int = 0
+ def method2614: Int = 0
+ def method2615: Int = 0
+ def method2616: Int = 0
+ def method2617: Int = 0
+ def method2618: Int = 0
+ def method2619: Int = 0
+ def method2620: Int = 0
+ def method2621: Int = 0
+ def method2622: Int = 0
+ def method2623: Int = 0
+ def method2624: Int = 0
+ def method2625: Int = 0
+ def method2626: Int = 0
+ def method2627: Int = 0
+ def method2628: Int = 0
+ def method2629: Int = 0
+ def method2630: Int = 0
+ def method2631: Int = 0
+ def method2632: Int = 0
+ def method2633: Int = 0
+ def method2634: Int = 0
+ def method2635: Int = 0
+ def method2636: Int = 0
+ def method2637: Int = 0
+ def method2638: Int = 0
+ def method2639: Int = 0
+ def method2640: Int = 0
+ def method2641: Int = 0
+ def method2642: Int = 0
+ def method2643: Int = 0
+ def method2644: Int = 0
+ def method2645: Int = 0
+ def method2646: Int = 0
+ def method2647: Int = 0
+ def method2648: Int = 0
+ def method2649: Int = 0
+ def method2650: Int = 0
+ def method2651: Int = 0
+ def method2652: Int = 0
+ def method2653: Int = 0
+ def method2654: Int = 0
+ def method2655: Int = 0
+ def method2656: Int = 0
+ def method2657: Int = 0
+ def method2658: Int = 0
+ def method2659: Int = 0
+ def method2660: Int = 0
+ def method2661: Int = 0
+ def method2662: Int = 0
+ def method2663: Int = 0
+ def method2664: Int = 0
+ def method2665: Int = 0
+ def method2666: Int = 0
+ def method2667: Int = 0
+ def method2668: Int = 0
+ def method2669: Int = 0
+ def method2670: Int = 0
+ def method2671: Int = 0
+ def method2672: Int = 0
+ def method2673: Int = 0
+ def method2674: Int = 0
+ def method2675: Int = 0
+ def method2676: Int = 0
+ def method2677: Int = 0
+ def method2678: Int = 0
+ def method2679: Int = 0
+ def method2680: Int = 0
+ def method2681: Int = 0
+ def method2682: Int = 0
+ def method2683: Int = 0
+ def method2684: Int = 0
+ def method2685: Int = 0
+ def method2686: Int = 0
+ def method2687: Int = 0
+ def method2688: Int = 0
+ def method2689: Int = 0
+ def method2690: Int = 0
+ def method2691: Int = 0
+ def method2692: Int = 0
+ def method2693: Int = 0
+ def method2694: Int = 0
+ def method2695: Int = 0
+ def method2696: Int = 0
+ def method2697: Int = 0
+ def method2698: Int = 0
+ def method2699: Int = 0
+ def method2700: Int = 0
+ def method2701: Int = 0
+ def method2702: Int = 0
+ def method2703: Int = 0
+ def method2704: Int = 0
+ def method2705: Int = 0
+ def method2706: Int = 0
+ def method2707: Int = 0
+ def method2708: Int = 0
+ def method2709: Int = 0
+ def method2710: Int = 0
+ def method2711: Int = 0
+ def method2712: Int = 0
+ def method2713: Int = 0
+ def method2714: Int = 0
+ def method2715: Int = 0
+ def method2716: Int = 0
+ def method2717: Int = 0
+ def method2718: Int = 0
+ def method2719: Int = 0
+ def method2720: Int = 0
+ def method2721: Int = 0
+ def method2722: Int = 0
+ def method2723: Int = 0
+ def method2724: Int = 0
+ def method2725: Int = 0
+ def method2726: Int = 0
+ def method2727: Int = 0
+ def method2728: Int = 0
+ def method2729: Int = 0
+ def method2730: Int = 0
+ def method2731: Int = 0
+ def method2732: Int = 0
+ def method2733: Int = 0
+ def method2734: Int = 0
+ def method2735: Int = 0
+ def method2736: Int = 0
+ def method2737: Int = 0
+ def method2738: Int = 0
+ def method2739: Int = 0
+ def method2740: Int = 0
+ def method2741: Int = 0
+ def method2742: Int = 0
+ def method2743: Int = 0
+ def method2744: Int = 0
+ def method2745: Int = 0
+ def method2746: Int = 0
+ def method2747: Int = 0
+ def method2748: Int = 0
+ def method2749: Int = 0
+ def method2750: Int = 0
+ def method2751: Int = 0
+ def method2752: Int = 0
+ def method2753: Int = 0
+ def method2754: Int = 0
+ def method2755: Int = 0
+ def method2756: Int = 0
+ def method2757: Int = 0
+ def method2758: Int = 0
+ def method2759: Int = 0
+ def method2760: Int = 0
+ def method2761: Int = 0
+ def method2762: Int = 0
+ def method2763: Int = 0
+ def method2764: Int = 0
+ def method2765: Int = 0
+ def method2766: Int = 0
+ def method2767: Int = 0
+ def method2768: Int = 0
+ def method2769: Int = 0
+ def method2770: Int = 0
+ def method2771: Int = 0
+ def method2772: Int = 0
+ def method2773: Int = 0
+ def method2774: Int = 0
+ def method2775: Int = 0
+ def method2776: Int = 0
+ def method2777: Int = 0
+ def method2778: Int = 0
+ def method2779: Int = 0
+ def method2780: Int = 0
+ def method2781: Int = 0
+ def method2782: Int = 0
+ def method2783: Int = 0
+ def method2784: Int = 0
+ def method2785: Int = 0
+ def method2786: Int = 0
+ def method2787: Int = 0
+ def method2788: Int = 0
+ def method2789: Int = 0
+ def method2790: Int = 0
+ def method2791: Int = 0
+ def method2792: Int = 0
+ def method2793: Int = 0
+ def method2794: Int = 0
+ def method2795: Int = 0
+ def method2796: Int = 0
+ def method2797: Int = 0
+ def method2798: Int = 0
+ def method2799: Int = 0
+ def method2800: Int = 0
+ def method2801: Int = 0
+ def method2802: Int = 0
+ def method2803: Int = 0
+ def method2804: Int = 0
+ def method2805: Int = 0
+ def method2806: Int = 0
+ def method2807: Int = 0
+ def method2808: Int = 0
+ def method2809: Int = 0
+ def method2810: Int = 0
+ def method2811: Int = 0
+ def method2812: Int = 0
+ def method2813: Int = 0
+ def method2814: Int = 0
+ def method2815: Int = 0
+ def method2816: Int = 0
+ def method2817: Int = 0
+ def method2818: Int = 0
+ def method2819: Int = 0
+ def method2820: Int = 0
+ def method2821: Int = 0
+ def method2822: Int = 0
+ def method2823: Int = 0
+ def method2824: Int = 0
+ def method2825: Int = 0
+ def method2826: Int = 0
+ def method2827: Int = 0
+ def method2828: Int = 0
+ def method2829: Int = 0
+ def method2830: Int = 0
+ def method2831: Int = 0
+ def method2832: Int = 0
+ def method2833: Int = 0
+ def method2834: Int = 0
+ def method2835: Int = 0
+ def method2836: Int = 0
+ def method2837: Int = 0
+ def method2838: Int = 0
+ def method2839: Int = 0
+ def method2840: Int = 0
+ def method2841: Int = 0
+ def method2842: Int = 0
+ def method2843: Int = 0
+ def method2844: Int = 0
+ def method2845: Int = 0
+ def method2846: Int = 0
+ def method2847: Int = 0
+ def method2848: Int = 0
+ def method2849: Int = 0
+ def method2850: Int = 0
+ def method2851: Int = 0
+ def method2852: Int = 0
+ def method2853: Int = 0
+ def method2854: Int = 0
+ def method2855: Int = 0
+ def method2856: Int = 0
+ def method2857: Int = 0
+ def method2858: Int = 0
+ def method2859: Int = 0
+ def method2860: Int = 0
+ def method2861: Int = 0
+ def method2862: Int = 0
+ def method2863: Int = 0
+ def method2864: Int = 0
+ def method2865: Int = 0
+ def method2866: Int = 0
+ def method2867: Int = 0
+ def method2868: Int = 0
+ def method2869: Int = 0
+ def method2870: Int = 0
+ def method2871: Int = 0
+ def method2872: Int = 0
+ def method2873: Int = 0
+ def method2874: Int = 0
+ def method2875: Int = 0
+ def method2876: Int = 0
+ def method2877: Int = 0
+ def method2878: Int = 0
+ def method2879: Int = 0
+ def method2880: Int = 0
+ def method2881: Int = 0
+ def method2882: Int = 0
+ def method2883: Int = 0
+ def method2884: Int = 0
+ def method2885: Int = 0
+ def method2886: Int = 0
+ def method2887: Int = 0
+ def method2888: Int = 0
+ def method2889: Int = 0
+ def method2890: Int = 0
+ def method2891: Int = 0
+ def method2892: Int = 0
+ def method2893: Int = 0
+ def method2894: Int = 0
+ def method2895: Int = 0
+ def method2896: Int = 0
+ def method2897: Int = 0
+ def method2898: Int = 0
+ def method2899: Int = 0
+ def method2900: Int = 0
+ def method2901: Int = 0
+ def method2902: Int = 0
+ def method2903: Int = 0
+ def method2904: Int = 0
+ def method2905: Int = 0
+ def method2906: Int = 0
+ def method2907: Int = 0
+ def method2908: Int = 0
+ def method2909: Int = 0
+ def method2910: Int = 0
+ def method2911: Int = 0
+ def method2912: Int = 0
+ def method2913: Int = 0
+ def method2914: Int = 0
+ def method2915: Int = 0
+ def method2916: Int = 0
+ def method2917: Int = 0
+ def method2918: Int = 0
+ def method2919: Int = 0
+ def method2920: Int = 0
+ def method2921: Int = 0
+ def method2922: Int = 0
+ def method2923: Int = 0
+ def method2924: Int = 0
+ def method2925: Int = 0
+ def method2926: Int = 0
+ def method2927: Int = 0
+ def method2928: Int = 0
+ def method2929: Int = 0
+ def method2930: Int = 0
+ def method2931: Int = 0
+ def method2932: Int = 0
+ def method2933: Int = 0
+ def method2934: Int = 0
+ def method2935: Int = 0
+ def method2936: Int = 0
+ def method2937: Int = 0
+ def method2938: Int = 0
+ def method2939: Int = 0
+ def method2940: Int = 0
+ def method2941: Int = 0
+ def method2942: Int = 0
+ def method2943: Int = 0
+ def method2944: Int = 0
+ def method2945: Int = 0
+ def method2946: Int = 0
+ def method2947: Int = 0
+ def method2948: Int = 0
+ def method2949: Int = 0
+ def method2950: Int = 0
+ def method2951: Int = 0
+ def method2952: Int = 0
+ def method2953: Int = 0
+ def method2954: Int = 0
+ def method2955: Int = 0
+ def method2956: Int = 0
+ def method2957: Int = 0
+ def method2958: Int = 0
+ def method2959: Int = 0
+ def method2960: Int = 0
+ def method2961: Int = 0
+ def method2962: Int = 0
+ def method2963: Int = 0
+ def method2964: Int = 0
+ def method2965: Int = 0
+ def method2966: Int = 0
+ def method2967: Int = 0
+ def method2968: Int = 0
+ def method2969: Int = 0
+ def method2970: Int = 0
+ def method2971: Int = 0
+ def method2972: Int = 0
+ def method2973: Int = 0
+ def method2974: Int = 0
+ def method2975: Int = 0
+ def method2976: Int = 0
+ def method2977: Int = 0
+ def method2978: Int = 0
+ def method2979: Int = 0
+ def method2980: Int = 0
+ def method2981: Int = 0
+ def method2982: Int = 0
+ def method2983: Int = 0
+ def method2984: Int = 0
+ def method2985: Int = 0
+ def method2986: Int = 0
+ def method2987: Int = 0
+ def method2988: Int = 0
+ def method2989: Int = 0
+ def method2990: Int = 0
+ def method2991: Int = 0
+ def method2992: Int = 0
+ def method2993: Int = 0
+ def method2994: Int = 0
+ def method2995: Int = 0
+ def method2996: Int = 0
+ def method2997: Int = 0
+ def method2998: Int = 0
+ def method2999: Int = 0
+}
diff --git a/test/files/run/t7558.scala b/test/files/run/t7558.scala
new file mode 100644
index 0000000000..bfcaaba5cc
--- /dev/null
+++ b/test/files/run/t7558.scala
@@ -0,0 +1,9 @@
+object Test extends App {
+ val cm = reflect.runtime.currentMirror
+ val u = cm.universe
+ import scala.tools.reflect.ToolBox
+ val tb = cm.mkToolBox()
+ val t = { var x = "ab".toList; u.reify { x = x.reverse; x }.tree }
+ val evaluated = tb.eval(t)
+ assert(evaluated == "ba".toList, evaluated)
+}
diff --git a/test/files/run/t874.scala b/test/files/run/t874.scala
index 41d124f728..7e70fc34eb 100644
--- a/test/files/run/t874.scala
+++ b/test/files/run/t874.scala
@@ -1,3 +1,5 @@
+
+import scala.language.{ reflectiveCalls }
object Test {
abstract class Base {
val U: {
diff --git a/test/files/run/tailcalls.check b/test/files/run/tailcalls.check
index f123bc8f25..10384ac46e 100644
--- a/test/files/run/tailcalls.check
+++ b/test/files/run/tailcalls.check
@@ -1,3 +1,4 @@
+#partest !avian
test Object .f was successful
test Final .f was successful
test Class .f raised exception java.lang.StackOverflowError
@@ -51,3 +52,58 @@ test TailCall.b2 was successful
test FancyTailCalls.tcTryLocal was successful
test FancyTailCalls.differentInstance was successful
test PolyObject.tramp was successful
+
+#partest avian
+test Object .f was successful
+test Final .f was successful
+test Class .f was successful
+test SubClass .f was successful
+test Sealed .f was successful
+test SubSealed.f was successful
+
+test O .f was successful
+test c .f was successful
+test O.O .f was successful
+test O.c .f was successful
+test c.O .f was successful
+test c.c .f was successful
+test O.O.O .f was successful
+test O.O.c .f was successful
+test O.c.O .f was successful
+test O.c.c .f was successful
+test c.O.O .f was successful
+test c.O.c .f was successful
+test c.c.O .f was successful
+test c.c.c .f was successful
+test O.O.O.O.f was successful
+test O.O.O.c.f was successful
+test O.O.c.O.f was successful
+test O.O.c.c.f was successful
+test O.c.O.O.f was successful
+test O.c.O.c.f was successful
+test O.c.c.O.f was successful
+test O.c.c.c.f was successful
+test c.O.O.O.f was successful
+test c.O.O.c.f was successful
+test c.O.c.O.f was successful
+test c.O.c.c.f was successful
+test c.c.O.O.f was successful
+test c.c.O.c.f was successful
+test c.c.c.O.f was successful
+test c.c.c.c.f was successful
+
+test TailCall.f1 was successful
+test TailCall.f2 was successful
+test TailCall.f3 was successful
+test TailCall.g1 was successful
+test TailCall.g2 was successful
+test TailCall.g3 was successful
+test TailCall.h1 was successful
+
+test NonTailCall.f1 0 1 2 was successful
+test NonTailCall.f2
+test TailCall.b1 was successful
+test TailCall.b2 was successful
+test FancyTailCalls.tcTryLocal was successful
+test FancyTailCalls.differentInstance was successful
+test PolyObject.tramp was successful
\ No newline at end of file
diff --git a/test/files/run/tailcalls.scala b/test/files/run/tailcalls.scala
index 7d06a7e69d..1d4124e138 100644
--- a/test/files/run/tailcalls.scala
+++ b/test/files/run/tailcalls.scala
@@ -307,7 +307,7 @@ object Test {
def main(args: Array[String]) {
// compute min and max iteration number
val min = 16;
- val max = calibrate;
+ val max = if (scala.tools.partest.utils.Properties.isAvian) 10000 else calibrate
// test tail calls in different contexts
val Final = new Final()
diff --git a/test/files/run/tcpoly_monads.scala b/test/files/run/tcpoly_monads.scala
index cffbcc963b..6372851451 100644
--- a/test/files/run/tcpoly_monads.scala
+++ b/test/files/run/tcpoly_monads.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ higherKinds, implicitConversions }
+
trait Monads {
/**
* class Monad m where
diff --git a/test/files/run/tcpoly_overriding.scala b/test/files/run/tcpoly_overriding.scala
index 86ba89f530..32174ad8bf 100644
--- a/test/files/run/tcpoly_overriding.scala
+++ b/test/files/run/tcpoly_overriding.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ higherKinds }
+
abstract class A[t[x]] {
def b: t[Int]
}
diff --git a/test/files/run/tcpoly_parseridioms.check b/test/files/run/tcpoly_parseridioms.check
index 5fff2efb15..ab829e0a0e 100644
--- a/test/files/run/tcpoly_parseridioms.check
+++ b/test/files/run/tcpoly_parseridioms.check
@@ -1 +1,21 @@
+tcpoly_parseridioms.scala:18: warning: match may not be exhaustive.
+It would fail on the following input: ParseResult()
+ case Success(next, x) => b(next) match {
+ ^
+tcpoly_parseridioms.scala:17: warning: match may not be exhaustive.
+It would fail on the following input: ParseResult()
+ def apply(in: Input): ParseResult[Pair[T, U]] = a(in) match {
+ ^
+tcpoly_parseridioms.scala:30: warning: match may not be exhaustive.
+It would fail on the following input: ParseResult()
+ case Failure(_, _) => b(in) match {
+ ^
+tcpoly_parseridioms.scala:28: warning: match may not be exhaustive.
+It would fail on the following input: ParseResult()
+ def apply(in: Input): ParseResult[T] = a(in) match {
+ ^
+tcpoly_parseridioms.scala:39: warning: match may not be exhaustive.
+It would fail on the following input: ParseResult()
+ def apply(in: Input): ParseResult[U] = a(in) match {
+ ^
Success(List(),Plus(1,2))
diff --git a/test/files/run/tcpoly_parseridioms.scala b/test/files/run/tcpoly_parseridioms.scala
index 634240e44d..ec17e062db 100644
--- a/test/files/run/tcpoly_parseridioms.scala
+++ b/test/files/run/tcpoly_parseridioms.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ higherKinds, implicitConversions, postfixOps }
+
trait Parsers {
type Input = List[Char]
diff --git a/test/files/run/toolbox_console_reporter.scala b/test/files/run/toolbox_console_reporter.scala
index d672ccb9cb..ce28086c25 100644
--- a/test/files/run/toolbox_console_reporter.scala
+++ b/test/files/run/toolbox_console_reporter.scala
@@ -4,10 +4,10 @@ import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.{ToolBox, mkConsoleFrontEnd}
object Test extends App {
- val oldErr = Console.err;
- val baos = new java.io.ByteArrayOutputStream();
- Console.setErr(new java.io.PrintStream(baos));
- try {
+ //val oldErr = Console.err;
+ val baos = new java.io.ByteArrayOutputStream()
+ val errs = new java.io.PrintStream(baos)
+ (Console withErr errs) {
val toolbox = cm.mkToolBox(frontEnd = mkConsoleFrontEnd(), options = "-deprecation")
toolbox.eval(reify{
object Utils {
@@ -18,12 +18,11 @@ object Test extends App {
Utils.foo
}.tree)
println("============compiler console=============")
+ errs.flush()
println(baos.toString);
println("=========================================")
println("============compiler messages============")
toolbox.frontEnd.infos.foreach(println(_))
println("=========================================")
- } finally {
- Console.setErr(oldErr);
}
-}
\ No newline at end of file
+}
diff --git a/test/files/run/tpeCache-tyconCache.check b/test/files/run/tpeCache-tyconCache.check
new file mode 100644
index 0000000000..a892f5477a
--- /dev/null
+++ b/test/files/run/tpeCache-tyconCache.check
@@ -0,0 +1,19 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> :power
+** Power User mode enabled - BEEP WHIR GYVE **
+** :phase has been set to 'typer'. **
+** scala.tools.nsc._ has been imported **
+** global._, definitions._ also imported **
+** Try :help, :vals, power.<tab> **
+
+scala>
+
+scala> AnyRefClass.tpe eq AnyRefClass.typeConstructor
+res0: Boolean = true
+
+scala> AnyRefClass.tpe eq AnyRefClass.typeConstructor
+res1: Boolean = true
+
+scala>
diff --git a/test/files/run/tpeCache-tyconCache.scala b/test/files/run/tpeCache-tyconCache.scala
new file mode 100644
index 0000000000..f907167a33
--- /dev/null
+++ b/test/files/run/tpeCache-tyconCache.scala
@@ -0,0 +1,10 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ override def code = """
+:power
+
+AnyRefClass.tpe eq AnyRefClass.typeConstructor
+AnyRefClass.tpe eq AnyRefClass.typeConstructor
+ """.trim
+}
diff --git a/test/files/run/transform.scala b/test/files/run/transform.scala
index 5cc1c49d6f..82f924950d 100644
--- a/test/files/run/transform.scala
+++ b/test/files/run/transform.scala
@@ -1,5 +1,5 @@
object Test {
- val x = 1 to 10 toBuffer
+ val x = (1 to 10).toBuffer
def main(args: Array[String]): Unit = {
x transform (_ * 2)
diff --git a/test/files/run/try-2.check b/test/files/run/try-2.check
index 6c4a024c93..987d3462df 100644
--- a/test/files/run/try-2.check
+++ b/test/files/run/try-2.check
@@ -1,3 +1,6 @@
+try-2.scala:41: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 10;
+ ^
exception happened
Nothin
diff --git a/test/files/run/try-2.scala b/test/files/run/try-2.scala
index da321f2668..b55977ba8b 100644
--- a/test/files/run/try-2.scala
+++ b/test/files/run/try-2.scala
@@ -12,42 +12,42 @@ object Test {
throw new Error();
}
catch {
- case _ => Console.println("exception happened\n");
+ case _: Throwable => Console.println("exception happened\n");
}
def tryUnitAll: Unit =
try {
Console.println("Nothin");
} catch {
- case _ => sys.error("Bad, bad, lama!");
+ case _: Throwable => sys.error("Bad, bad, lama!");
}
def tryAllAll: Unit =
try {
throw new Error();
} catch {
- case _ => sys.error("Bad, bad, lama!");
+ case _: Throwable => sys.error("Bad, bad, lama!");
}
def tryUnitUnit: Unit =
try {
Console.println("Nothin");
} catch {
- case _ => Console.println("Nothin");
+ case _: Throwable => Console.println("Nothin");
}
def tryIntUnit: Unit =
try {
10;
} catch {
- case _ => Console.println("Huh?");
+ case _: Throwable => Console.println("Huh?");
}
def execute(f: => Unit) = try {
f;
} catch {
- case _ => ();
+ case _: Throwable => ();
}
diff --git a/test/files/run/try-catch-unify.scala b/test/files/run/try-catch-unify.scala
index 8cb14d060e..151e549e5f 100644
--- a/test/files/run/try-catch-unify.scala
+++ b/test/files/run/try-catch-unify.scala
@@ -9,7 +9,7 @@ object Test {
try {
catching(classOf[NumberFormatException]) withTry (sys.error("O NOES"))
} catch {
- case t => println(t.getMessage)
+ case t: Throwable => println(t.getMessage)
}
println(nonFatalCatch withTry ("Hi".toDouble))
}
diff --git a/test/files/run/try.check b/test/files/run/try.check
index 3983e26060..f742ccb0df 100644
--- a/test/files/run/try.check
+++ b/test/files/run/try.check
@@ -1,3 +1,6 @@
+try.scala:65: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 1+1;
+ ^
1 + 1 = 2
1 + 1 = 2
1 + 1 = 2
diff --git a/test/files/run/try.scala b/test/files/run/try.scala
index e393c0b4b1..a4fdfd796b 100644
--- a/test/files/run/try.scala
+++ b/test/files/run/try.scala
@@ -45,7 +45,7 @@ object Test extends AnyRef with App {
instance = try {
"" //new String();
} catch {
- case _ =>
+ case _: Throwable =>
val cs = "aaa";
if (cs.length() > 0) {
"" //new String();
@@ -65,13 +65,13 @@ object Test extends AnyRef with App {
1+1;
()
} catch {
- case _ =>
+ case _: Throwable =>
Console.println("2");
sys.error("for good");
}
Console.println("a");
} catch {
- case _ => ();
+ case _: Throwable => ();
}
class A {
@@ -95,7 +95,7 @@ object Test extends AnyRef with App {
try {
null
} catch {
- case _ => null
+ case _: Throwable => null
}
new AnyRef {
diff --git a/test/files/run/tuple-zipped.scala b/test/files/run/tuple-zipped.scala
index b197183844..c8ba41f042 100644
--- a/test/files/run/tuple-zipped.scala
+++ b/test/files/run/tuple-zipped.scala
@@ -1,3 +1,6 @@
+
+import scala.language.postfixOps
+
object Test {
val xs1 = List.range(1, 100)
val xs2 = xs1.view
diff --git a/test/files/run/tuples.scala b/test/files/run/tuples.scala
index 4854e36c96..a4ea9ddbcc 100644
--- a/test/files/run/tuples.scala
+++ b/test/files/run/tuples.scala
@@ -6,6 +6,7 @@ object Test extends App {
Console.println(xyz)
xyz match {
case (1, "abc", true) => Console.println("OK")
+ case _ => ???
}
def func(x: Int, y: String, z: Double) {
Console.println("x = " + x + "; y = " + y + "; z = " + z);
diff --git a/test/files/run/type-currying.scala b/test/files/run/type-currying.scala
index f9764c64f0..5e31fef403 100644
--- a/test/files/run/type-currying.scala
+++ b/test/files/run/type-currying.scala
@@ -1,3 +1,6 @@
+
+
+import scala.language.{ higherKinds, reflectiveCalls }
import scala.collection.{ mutable, immutable, generic }
import generic.CanBuildFrom
diff --git a/test/files/run/typetags_without_scala_reflect_typetag_lookup.check b/test/files/run/typetags_without_scala_reflect_typetag_lookup.check
index 8c558ced60..84e5435afe 100644
--- a/test/files/run/typetags_without_scala_reflect_typetag_lookup.check
+++ b/test/files/run/typetags_without_scala_reflect_typetag_lookup.check
@@ -1,2 +1,2 @@
-pos: source-newSource1,line-9,offset=466 could not find implicit value for evidence parameter of type reflect.runtime.package.universe.TypeTag[Int] ERROR
+pos: source-newSource1.scala,line-9,offset=466 could not find implicit value for evidence parameter of type reflect.runtime.package.universe.TypeTag[Int] ERROR
diff --git a/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check
index acfecce628..8c9d07d836 100644
--- a/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check
+++ b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check
@@ -1,2 +1,2 @@
-pos: source-newSource1,line-9,offset=479 No Manifest available for App.this.T. ERROR
+pos: source-newSource1.scala,line-9,offset=479 No Manifest available for App.this.T. ERROR
diff --git a/test/files/run/unapply.check b/test/files/run/unapply.check
new file mode 100644
index 0000000000..847e3b381e
--- /dev/null
+++ b/test/files/run/unapply.check
@@ -0,0 +1,3 @@
+unapply.scala:57: warning: comparing values of types Null and Null using `==' will always yield true
+ assert(doMatch2(b) == null)
+ ^
diff --git a/test/files/run/unittest_io.scala b/test/files/run/unittest_io.scala
index 2cadb9b1df..2c3dacdf91 100644
--- a/test/files/run/unittest_io.scala
+++ b/test/files/run/unittest_io.scala
@@ -1,3 +1,5 @@
+
+@deprecated("Suppress warnings", since="2.11")
object Test {
def main(args: Array[String]) {
diff --git a/test/files/run/unreachable.scala b/test/files/run/unreachable.scala
index 50a8d88b7c..99ac730746 100644
--- a/test/files/run/unreachable.scala
+++ b/test/files/run/unreachable.scala
@@ -1,3 +1,6 @@
+import scala.util.Random.nextInt
+import scala.sys.error
+
object Test extends App {
def unreachableNormalExit: Int = {
return 42
@@ -6,14 +9,14 @@ object Test extends App {
def unreachableIf: Int = {
return 42
- if (util.Random.nextInt % 2 == 0)
+ if (nextInt % 2 == 0)
0
else
1
}
def unreachableIfBranches: Int = {
- if (util.Random.nextInt % 2 == 0)
+ if (nextInt % 2 == 0)
return 42
else
return 42
@@ -22,14 +25,14 @@ object Test extends App {
}
def unreachableOneLegIf: Int = {
- if (util.Random.nextInt % 2 == 0)
+ if (nextInt % 2 == 0)
return 42
return 42
}
def unreachableLeftBranch: Int = {
- val result = if (util.Random.nextInt % 2 == 0)
+ val result = if (nextInt % 2 == 0)
return 42
else
42
@@ -38,7 +41,7 @@ object Test extends App {
}
def unreachableRightBranch: Int = {
- val result = if (util.Random.nextInt % 2 == 0)
+ val result = if (nextInt % 2 == 0)
42
else
return 42
@@ -89,7 +92,7 @@ object Test extends App {
def unreachableSwitch: Int = {
return 42
- val x = util.Random.nextInt % 2
+ val x = nextInt % 2
x match {
case 0 => return 0
case 1 => return 1
@@ -99,7 +102,7 @@ object Test extends App {
}
def unreachableAfterSwitch: Int = {
- val x = util.Random.nextInt % 2
+ val x = nextInt % 2
x match {
case 0 => return 42
case 1 => return 41 + x
@@ -122,4 +125,4 @@ object Test extends App {
check(unreachableAfterFinally)
check(unreachableSwitch)
check(unreachableAfterSwitch)
-}
\ No newline at end of file
+}
diff --git a/test/files/run/valueclasses-classmanifest-basic.scala b/test/files/run/valueclasses-classmanifest-basic.scala
index c2aa08ef86..50addda359 100644
--- a/test/files/run/valueclasses-classmanifest-basic.scala
+++ b/test/files/run/valueclasses-classmanifest-basic.scala
@@ -1,5 +1,6 @@
class Foo(val x: Int) extends AnyVal
+@deprecated("Suppress warnings", since="2.11")
object Test extends App {
println(classManifest[Foo])
-}
\ No newline at end of file
+}
diff --git a/test/files/run/valueclasses-classmanifest-existential.scala b/test/files/run/valueclasses-classmanifest-existential.scala
index 11999df678..6bcd7cf942 100644
--- a/test/files/run/valueclasses-classmanifest-existential.scala
+++ b/test/files/run/valueclasses-classmanifest-existential.scala
@@ -1,5 +1,6 @@
class Foo[T](val x: T) extends AnyVal
+@deprecated("Suppress warnings", since="2.11")
object Test extends App {
println(classManifest[Foo[_]])
-}
\ No newline at end of file
+}
diff --git a/test/files/run/valueclasses-classmanifest-generic.scala b/test/files/run/valueclasses-classmanifest-generic.scala
index 280152dc1d..5efcaed959 100644
--- a/test/files/run/valueclasses-classmanifest-generic.scala
+++ b/test/files/run/valueclasses-classmanifest-generic.scala
@@ -1,5 +1,6 @@
class Foo[T](val x: T) extends AnyVal
+@deprecated("Suppress warnings", since="2.11")
object Test extends App {
println(classManifest[Foo[String]])
-}
\ No newline at end of file
+}
diff --git a/test/files/run/vector1.scala b/test/files/run/vector1.scala
index b37cfe82e8..2e335aded4 100644
--- a/test/files/run/vector1.scala
+++ b/test/files/run/vector1.scala
@@ -102,7 +102,7 @@ object Test {
seqBack()
}
} catch {
- case ex =>
+ case ex: Throwable =>
//println("----------------")
//a.debug
throw ex
diff --git a/test/files/run/view-iterator-stream.scala b/test/files/run/view-iterator-stream.scala
index f91407f92c..c172b5cb99 100644
--- a/test/files/run/view-iterator-stream.scala
+++ b/test/files/run/view-iterator-stream.scala
@@ -1,3 +1,6 @@
+
+import scala.language.postfixOps
+
import scala.collection.{ mutable, immutable, generic }
import collection.TraversableView
diff --git a/test/files/run/virtpatmat_alts.check b/test/files/run/virtpatmat_alts.check
index 7a4ad0a741..f39e292fef 100644
--- a/test/files/run/virtpatmat_alts.check
+++ b/test/files/run/virtpatmat_alts.check
@@ -1 +1,7 @@
+virtpatmat_alts.scala:5: warning: match may not be exhaustive.
+ (true, true) match {
+ ^
+virtpatmat_alts.scala:9: warning: match may not be exhaustive.
+ List(5) match {
+ ^
OK 5
diff --git a/test/files/run/virtpatmat_alts.flags b/test/files/run/virtpatmat_alts.flags
deleted file mode 100644
index 3f5a3100e4..0000000000
--- a/test/files/run/virtpatmat_alts.flags
+++ /dev/null
@@ -1 +0,0 @@
- -Xexperimental
diff --git a/test/files/run/virtpatmat_alts.scala b/test/files/run/virtpatmat_alts.scala
index b7717524e2..d1dfa8a4a1 100644
--- a/test/files/run/virtpatmat_alts.scala
+++ b/test/files/run/virtpatmat_alts.scala
@@ -1,3 +1,6 @@
+/*
+ * filter: It would fail on the following input
+ */
object Test extends App {
(true, true) match {
case (true, true) | (false, false) => 1
@@ -9,4 +12,4 @@ object Test extends App {
case 7 :: Nil => println("FAILED")
case Nil => println("FAILED")
}
-}
\ No newline at end of file
+}
diff --git a/test/files/run/virtpatmat_nested_lists.check b/test/files/run/virtpatmat_nested_lists.check
index d8263ee986..ddf68eeedd 100644
--- a/test/files/run/virtpatmat_nested_lists.check
+++ b/test/files/run/virtpatmat_nested_lists.check
@@ -1 +1,4 @@
-2
\ No newline at end of file
+virtpatmat_nested_lists.scala:5: warning: match may not be exhaustive.
+ List(List(1), List(2)) match { case x :: (y :: Nil) :: Nil => println(y) }
+ ^
+2
diff --git a/test/files/run/virtpatmat_nested_lists.flags b/test/files/run/virtpatmat_nested_lists.flags
deleted file mode 100644
index 3f5a3100e4..0000000000
--- a/test/files/run/virtpatmat_nested_lists.flags
+++ /dev/null
@@ -1 +0,0 @@
- -Xexperimental
diff --git a/test/files/run/virtpatmat_nested_lists.scala b/test/files/run/virtpatmat_nested_lists.scala
index fef74cea15..d1aa68ea93 100644
--- a/test/files/run/virtpatmat_nested_lists.scala
+++ b/test/files/run/virtpatmat_nested_lists.scala
@@ -1,3 +1,6 @@
+/*
+ * filter: It would fail on the following input
+ */
object Test extends App {
List(List(1), List(2)) match { case x :: (y :: Nil) :: Nil => println(y) }
}
diff --git a/test/files/run/virtpatmat_opt_sharing.check b/test/files/run/virtpatmat_opt_sharing.check
index d00491fd7e..78ec61f19d 100644
--- a/test/files/run/virtpatmat_opt_sharing.check
+++ b/test/files/run/virtpatmat_opt_sharing.check
@@ -1 +1,4 @@
+virtpatmat_opt_sharing.scala:7: warning: match may not be exhaustive.
+ List(1, 3, 4, 7) match {
+ ^
1
diff --git a/test/files/run/virtpatmat_opt_sharing.flags b/test/files/run/virtpatmat_opt_sharing.flags
deleted file mode 100644
index 3f5a3100e4..0000000000
--- a/test/files/run/virtpatmat_opt_sharing.flags
+++ /dev/null
@@ -1 +0,0 @@
- -Xexperimental
diff --git a/test/files/run/virtpatmat_opt_sharing.scala b/test/files/run/virtpatmat_opt_sharing.scala
index 119e3050ea..d2c42cab48 100644
--- a/test/files/run/virtpatmat_opt_sharing.scala
+++ b/test/files/run/virtpatmat_opt_sharing.scala
@@ -1,3 +1,6 @@
+/*
+ * filter: It would fail on the following input
+ */
object Test extends App {
virtMatch()
def virtMatch() = {
@@ -7,4 +10,4 @@ object Test extends App {
case 1 :: 3 :: 4 :: 7 :: x => println(1)
}
}
-}
\ No newline at end of file
+}
diff --git a/test/files/run/virtpatmat_staging.scala b/test/files/run/virtpatmat_staging.scala
index c17b45043b..d444829b02 100644
--- a/test/files/run/virtpatmat_staging.scala
+++ b/test/files/run/virtpatmat_staging.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ higherKinds, implicitConversions }
+
trait Intf {
type Rep[+T]
type M[+T] = Rep[Maybe[T]]
@@ -9,7 +12,7 @@ trait Intf {
def zero: M[Nothing]
def one[T](x: Rep[T]): M[T]
- def guard[T](cond: Rep[Boolean], then: => Rep[T]): M[T]
+ def guard[T](cond: Rep[Boolean], dann: => Rep[T]): M[T]
def isSuccess[T, U](x: Rep[T])(f: Rep[T] => M[U]): Rep[Boolean] // used for isDefinedAt
}
@@ -33,7 +36,7 @@ trait Impl extends Intf {
def runOrElse[T, U](in: Rep[T])(matcher: Rep[T] => M[U]): Rep[U] = ("runOrElse("+ in +", ?" + matcher("?") + ")")
def zero: M[Nothing] = "zero"
def one[T](x: Rep[T]): M[T] = "one("+x.toString+")"
- def guard[T](cond: Rep[Boolean], then: => Rep[T]): M[T] = "guard("+cond+","+then+")"
+ def guard[T](cond: Rep[Boolean], dann: => Rep[T]): M[T] = s"guard($cond,$dann)"
def isSuccess[T, U](x: Rep[T])(f: Rep[T] => M[U]): Rep[Boolean] = ("isSuccess("+x+", ?" + f("?") + ")")
}
diff --git a/test/files/run/virtpatmat_stringinterp.scala b/test/files/run/virtpatmat_stringinterp.scala
index 213712f17a..c6c951e6e5 100644
--- a/test/files/run/virtpatmat_stringinterp.scala
+++ b/test/files/run/virtpatmat_stringinterp.scala
@@ -1,3 +1,6 @@
+
+import scala.language.{ implicitConversions }
+
object Test extends App {
case class Node(x: Int)
@@ -10,4 +13,4 @@ object Test extends App {
val x: Node = Node(0)
x match { case xml"""<foo arg=$a/>""" => println(a) }
-}
\ No newline at end of file
+}
diff --git a/test/files/run/virtpatmat_try.scala b/test/files/run/virtpatmat_try.scala
index 46e67cb72e..dab2c89227 100644
--- a/test/files/run/virtpatmat_try.scala
+++ b/test/files/run/virtpatmat_try.scala
@@ -8,7 +8,7 @@ object Test extends App {
} catch { // this should emit a "catch-switch"
case y: A => println(y.x)
case (_ : A | _ : B) => println("B")
- case _ => println("other")
+ case _: Throwable => println("other")
}
try {
@@ -17,7 +17,7 @@ object Test extends App {
// case A(x) => println(x)
case y: A => println(y.x)
case x@((_ : A) | (_ : B)) => println(x)
- case _ => println("other")
+ case _: Throwable => println("other")
}
def simpleTry {
@@ -34,7 +34,7 @@ object Test extends App {
}
def wildcardTry {
- try { bla } catch { case _ => bla }
+ try { bla } catch { case _: Throwable => bla }
}
def tryPlusFinally {
@@ -44,4 +44,4 @@ object Test extends App {
def catchAndPassToLambda {
try { bla } catch { case ex: Exception => val f = () => ex }
}
-}
\ No newline at end of file
+}
diff --git a/test/files/run/virtpatmat_typed.check b/test/files/run/virtpatmat_typed.check
index cec2740d18..9924d84148 100644
--- a/test/files/run/virtpatmat_typed.check
+++ b/test/files/run/virtpatmat_typed.check
@@ -1 +1,4 @@
+virtpatmat_typed.scala:5: warning: unreachable code
+ case x: String => println("FAILED")
+ ^
OK foo
diff --git a/test/files/run/xml-loop-bug.scala b/test/files/run/xml-loop-bug.scala
index 67637674b2..dc155dbb02 100644
--- a/test/files/run/xml-loop-bug.scala
+++ b/test/files/run/xml-loop-bug.scala
@@ -1,8 +1,14 @@
+import java.io.{ Console => _, _ }
+import scala.io._
+import scala.xml.parsing._
object Test {
def main(args: Array[String]): Unit = {
- val sink = new java.io.PrintStream(new java.io.ByteArrayOutputStream())
- Console setOut sink
- Console setErr sink
- scala.xml.parsing.ConstructingParser.fromSource(scala.io.Source.fromString("<!DOCTYPE xmeml SYSTEM> <xmeml> <sequence> </sequence> </xmeml> "), true).document.docElem
+ val xml = "<!DOCTYPE xmeml SYSTEM> <xmeml> <sequence> </sequence> </xmeml> "
+ val sink = new PrintStream(new ByteArrayOutputStream())
+ (Console withOut sink) {
+ (Console withErr sink) {
+ ConstructingParser.fromSource((Source fromString xml), true).document.docElem
+ }
+ }
}
}
diff --git a/test/files/scalacheck/HashTrieSplit.scala b/test/files/scalacheck/HashTrieSplit.scala
deleted file mode 100644
index 908c878f54..0000000000
--- a/test/files/scalacheck/HashTrieSplit.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-
-
-
-
-
-import collection._
-
-
-
-
-// checks whether hash tries split their iterators correctly
-// even after some elements have been traversed
-object Test {
- def main(args: Array[String]) {
- doesSplitOk
- }
-
- def doesSplitOk = {
- val sz = 2000
- var ht = new parallel.immutable.ParHashMap[Int, Int]
- // println("creating trie")
- for (i <- 0 until sz) ht += ((i + sz, i))
- // println("created trie")
- for (n <- 0 until (sz - 1)) {
- // println("---------> n = " + n)
- val pit = ht.splitter
- val pit2 = ht.splitter
- var i = 0
- while (i < n) {
- pit.next
- pit2.next
- i += 1
- }
- // println("splitting")
- val pits = pit.split
- val fst = pits(0).toSet
- val snd = pits(1).toSet
- val orig = pit2.toSet
- if (orig.size != (fst.size + snd.size) || orig != (fst ++ snd)) {
- println("Original: " + orig)
- println("First: " + fst)
- println("Second: " + snd)
- assert(false)
- }
- }
- }
-}
diff --git a/test/files/scalacheck/avl.scala b/test/files/scalacheck/avl.scala
index af79ad49e3..02003bd271 100644
--- a/test/files/scalacheck/avl.scala
+++ b/test/files/scalacheck/avl.scala
@@ -2,14 +2,12 @@ import org.scalacheck.Gen
import org.scalacheck.Prop.forAll
import org.scalacheck.Properties
-import util.logging.ConsoleLogger
-
package scala.collection.mutable {
/**
* Property of an AVL Tree : Any node of the tree has a balance value beetween in [-1; 1]
*/
- abstract class AVLTreeTest(name: String) extends Properties(name) with ConsoleLogger {
+ abstract class AVLTreeTest(name: String) extends Properties(name) {
def `2^`(n: Int) = (1 to n).fold(1)((a, b) => b*2)
diff --git a/test/files/scalacheck/parallel-collections/pc.scala b/test/files/scalacheck/parallel-collections/pc.scala
index 0a91977da0..e6b6b4856d 100644
--- a/test/files/scalacheck/parallel-collections/pc.scala
+++ b/test/files/scalacheck/parallel-collections/pc.scala
@@ -1,12 +1,11 @@
-
-
-
+/*
+ * scalac: -deprecation
+ * scalacheck: -workers 1 -minSize 0 -maxSize 4000 -minSuccessfulTests 5
+ */
import org.scalacheck._
-
import scala.collection.parallel._
-
class ParCollProperties extends Properties("Parallel collections") {
/* Collections */
@@ -35,8 +34,8 @@ class ParCollProperties extends Properties("Parallel collections") {
include(immutable.IntParallelVectorCheck)
}
-
-object Test {
+object Test extends ParCollProperties {
+ /*
def main(args: Array[String]) {
val pc = new ParCollProperties
org.scalacheck.Test.checkProperties(
@@ -51,4 +50,5 @@ object Test {
pc
)
}
+ */
}
diff --git a/test/files/scalap/abstractClass/result.test b/test/files/scalap/abstractClass.check
index ef1daac23d..ef1daac23d 100644
--- a/test/files/scalap/abstractClass/result.test
+++ b/test/files/scalap/abstractClass.check
diff --git a/test/files/scalap/abstractClass/A.scala b/test/files/scalap/abstractClass.scala
index 19a528d5a1..19a528d5a1 100644
--- a/test/files/scalap/abstractClass/A.scala
+++ b/test/files/scalap/abstractClass.scala
diff --git a/test/files/scalap/abstractMethod/result.test b/test/files/scalap/abstractMethod.check
index 40fa02d408..40fa02d408 100644
--- a/test/files/scalap/abstractMethod/result.test
+++ b/test/files/scalap/abstractMethod.check
diff --git a/test/files/scalap/abstractMethod/A.scala b/test/files/scalap/abstractMethod.scala
index 4bedb377b3..4bedb377b3 100644
--- a/test/files/scalap/abstractMethod/A.scala
+++ b/test/files/scalap/abstractMethod.scala
diff --git a/test/files/scalap/caseClass/result.test b/test/files/scalap/caseClass.check
index 7d7aa4fd8f..7d7aa4fd8f 100644
--- a/test/files/scalap/caseClass/result.test
+++ b/test/files/scalap/caseClass.check
diff --git a/test/files/scalap/caseClass/A.scala b/test/files/scalap/caseClass.scala
index 95f9984519..95f9984519 100644
--- a/test/files/scalap/caseClass/A.scala
+++ b/test/files/scalap/caseClass.scala
diff --git a/test/files/scalap/caseObject/result.test b/test/files/scalap/caseObject.check
index 867a4b2162..867a4b2162 100644
--- a/test/files/scalap/caseObject/result.test
+++ b/test/files/scalap/caseObject.check
diff --git a/test/files/scalap/caseObject/A.scala b/test/files/scalap/caseObject.scala
index 6a3ff10d75..6a3ff10d75 100644
--- a/test/files/scalap/caseObject/A.scala
+++ b/test/files/scalap/caseObject.scala
diff --git a/test/files/scalap/cbnParam/result.test b/test/files/scalap/cbnParam.check
index 52ecb6ae66..52ecb6ae66 100644
--- a/test/files/scalap/cbnParam/result.test
+++ b/test/files/scalap/cbnParam.check
diff --git a/test/files/scalap/cbnParam/A.scala b/test/files/scalap/cbnParam.scala
index 2f366df64a..2f366df64a 100644
--- a/test/files/scalap/cbnParam/A.scala
+++ b/test/files/scalap/cbnParam.scala
diff --git a/test/files/scalap/classPrivate/result.test b/test/files/scalap/classPrivate.check
index ab2d40cdaf..ab2d40cdaf 100644
--- a/test/files/scalap/classPrivate/result.test
+++ b/test/files/scalap/classPrivate.check
diff --git a/test/files/scalap/classPrivate/A.scala b/test/files/scalap/classPrivate.scala
index 9f1bd34a6a..9f1bd34a6a 100644
--- a/test/files/scalap/classPrivate/A.scala
+++ b/test/files/scalap/classPrivate.scala
diff --git a/test/files/scalap/classWithExistential/result.test b/test/files/scalap/classWithExistential.check
index caee3fd6de..caee3fd6de 100644
--- a/test/files/scalap/classWithExistential/result.test
+++ b/test/files/scalap/classWithExistential.check
diff --git a/test/files/scalap/classWithExistential/A.scala b/test/files/scalap/classWithExistential.scala
index 4a5213f963..4a5213f963 100644
--- a/test/files/scalap/classWithExistential/A.scala
+++ b/test/files/scalap/classWithExistential.scala
diff --git a/test/files/scalap/classWithSelfAnnotation/result.test b/test/files/scalap/classWithSelfAnnotation.check
index 82bbd9e8df..82bbd9e8df 100644
--- a/test/files/scalap/classWithSelfAnnotation/result.test
+++ b/test/files/scalap/classWithSelfAnnotation.check
diff --git a/test/files/scalap/classWithSelfAnnotation/A.scala b/test/files/scalap/classWithSelfAnnotation.scala
index 9e0398622a..9e0398622a 100644
--- a/test/files/scalap/classWithSelfAnnotation/A.scala
+++ b/test/files/scalap/classWithSelfAnnotation.scala
diff --git a/test/files/scalap/covariantParam/result.test b/test/files/scalap/covariantParam.check
index f7a3c98966..f7a3c98966 100644
--- a/test/files/scalap/covariantParam/result.test
+++ b/test/files/scalap/covariantParam.check
diff --git a/test/files/scalap/covariantParam/A.scala b/test/files/scalap/covariantParam.scala
index 5b2c24d6fa..5b2c24d6fa 100644
--- a/test/files/scalap/covariantParam/A.scala
+++ b/test/files/scalap/covariantParam.scala
diff --git a/test/files/scalap/defaultParameter/result.test b/test/files/scalap/defaultParameter.check
index 0c775ea7b5..0c775ea7b5 100644
--- a/test/files/scalap/defaultParameter/result.test
+++ b/test/files/scalap/defaultParameter.check
diff --git a/test/files/scalap/defaultParameter/A.scala b/test/files/scalap/defaultParameter.scala
index d3514952f4..d3514952f4 100644
--- a/test/files/scalap/defaultParameter/A.scala
+++ b/test/files/scalap/defaultParameter.scala
diff --git a/test/files/scalap/implicitParam/result.test b/test/files/scalap/implicitParam.check
index a2cfd6092d..a2cfd6092d 100644
--- a/test/files/scalap/implicitParam/result.test
+++ b/test/files/scalap/implicitParam.check
diff --git a/test/files/scalap/implicitParam/A.scala b/test/files/scalap/implicitParam.scala
index 80657218d9..80657218d9 100644
--- a/test/files/scalap/implicitParam/A.scala
+++ b/test/files/scalap/implicitParam.scala
diff --git a/test/files/scalap/packageObject/result.test b/test/files/scalap/packageObject.check
index 5732d92958..5732d92958 100644
--- a/test/files/scalap/packageObject/result.test
+++ b/test/files/scalap/packageObject.check
diff --git a/test/files/scalap/packageObject/A.scala b/test/files/scalap/packageObject.scala
index 7e429c9935..7e429c9935 100644
--- a/test/files/scalap/packageObject/A.scala
+++ b/test/files/scalap/packageObject.scala
diff --git a/test/files/scalap/paramClauses/result.test b/test/files/scalap/paramClauses.check
index 3a141e8faf..3a141e8faf 100644
--- a/test/files/scalap/paramClauses/result.test
+++ b/test/files/scalap/paramClauses.check
diff --git a/test/files/scalap/paramClauses/A.scala b/test/files/scalap/paramClauses.scala
index f9d1917402..f9d1917402 100644
--- a/test/files/scalap/paramClauses/A.scala
+++ b/test/files/scalap/paramClauses.scala
diff --git a/test/files/scalap/paramNames/result.test b/test/files/scalap/paramNames.check
index 85e37f858d..85e37f858d 100644
--- a/test/files/scalap/paramNames/result.test
+++ b/test/files/scalap/paramNames.check
diff --git a/test/files/scalap/paramNames/A.scala b/test/files/scalap/paramNames.scala
index 7ba9ff0feb..7ba9ff0feb 100644
--- a/test/files/scalap/paramNames/A.scala
+++ b/test/files/scalap/paramNames.scala
diff --git a/test/files/scalap/sequenceParam/result.test b/test/files/scalap/sequenceParam.check
index 142d92fea3..142d92fea3 100644
--- a/test/files/scalap/sequenceParam/result.test
+++ b/test/files/scalap/sequenceParam.check
diff --git a/test/files/scalap/sequenceParam/A.scala b/test/files/scalap/sequenceParam.scala
index 86e13340b9..86e13340b9 100644
--- a/test/files/scalap/sequenceParam/A.scala
+++ b/test/files/scalap/sequenceParam.scala
diff --git a/test/files/scalap/simpleClass/result.test b/test/files/scalap/simpleClass.check
index 4fdf25d1cf..4fdf25d1cf 100644
--- a/test/files/scalap/simpleClass/result.test
+++ b/test/files/scalap/simpleClass.check
diff --git a/test/files/scalap/simpleClass/A.scala b/test/files/scalap/simpleClass.scala
index fa82e62680..fa82e62680 100644
--- a/test/files/scalap/simpleClass/A.scala
+++ b/test/files/scalap/simpleClass.scala
diff --git a/test/files/scalap/traitObject/result.test b/test/files/scalap/traitObject.check
index 104ba14f1a..104ba14f1a 100644
--- a/test/files/scalap/traitObject/result.test
+++ b/test/files/scalap/traitObject.check
diff --git a/test/files/scalap/traitObject/A.scala b/test/files/scalap/traitObject.scala
index d5f43181c1..d5f43181c1 100644
--- a/test/files/scalap/traitObject/A.scala
+++ b/test/files/scalap/traitObject.scala
diff --git a/test/files/scalap/typeAnnotations/result.test b/test/files/scalap/typeAnnotations.check
index 407b0235c6..407b0235c6 100644
--- a/test/files/scalap/typeAnnotations/result.test
+++ b/test/files/scalap/typeAnnotations.check
diff --git a/test/files/scalap/typeAnnotations/A.scala b/test/files/scalap/typeAnnotations.scala
index ff2445edc9..ff2445edc9 100644
--- a/test/files/scalap/typeAnnotations/A.scala
+++ b/test/files/scalap/typeAnnotations.scala
diff --git a/test/files/scalap/valAndVar/result.test b/test/files/scalap/valAndVar.check
index e940da9801..e940da9801 100644
--- a/test/files/scalap/valAndVar/result.test
+++ b/test/files/scalap/valAndVar.check
diff --git a/test/files/scalap/valAndVar/A.scala b/test/files/scalap/valAndVar.scala
index 2d89348401..2d89348401 100644
--- a/test/files/scalap/valAndVar/A.scala
+++ b/test/files/scalap/valAndVar.scala
diff --git a/test/files/scalap/wildcardType/result.test b/test/files/scalap/wildcardType.check
index e43261db32..e43261db32 100644
--- a/test/files/scalap/wildcardType/result.test
+++ b/test/files/scalap/wildcardType.check
diff --git a/test/files/scalap/wildcardType/A.scala b/test/files/scalap/wildcardType.scala
index 4bb0d14de5..4bb0d14de5 100644
--- a/test/files/scalap/wildcardType/A.scala
+++ b/test/files/scalap/wildcardType.scala
diff --git a/test/files/specialized/spec-matrix-old.scala b/test/files/specialized/spec-matrix-old.scala
index 98735c8c03..83941e80a7 100644
--- a/test/files/specialized/spec-matrix-old.scala
+++ b/test/files/specialized/spec-matrix-old.scala
@@ -1,6 +1,7 @@
/** Test matrix multiplication with specialization.
*/
+@deprecated("Suppress warnings", since="2.11")
class Matrix[@specialized A: ClassManifest](val rows: Int, val cols: Int) {
private val arr: Array[Array[A]] = Array.ofDim[A](rows, cols)
@@ -25,6 +26,7 @@ class Matrix[@specialized A: ClassManifest](val rows: Int, val cols: Int) {
}
}
+@deprecated("Suppress warnings", since="2.11")
object Test {
def main(args: Array[String]) {
val m = randomMatrix(200, 100)
diff --git a/test/files/specialized/spec-super.check b/test/files/specialized/spec-super.check
index 4be83ca9e6..2f4d60018b 100644
--- a/test/files/specialized/spec-super.check
+++ b/test/files/specialized/spec-super.check
@@ -1,3 +1,6 @@
+spec-super.scala:18: warning: class Base must be a trait. Specialized version of class Extended will inherit generic Base[Int]
+class Extended [@specialized(Int) T](t: T) extends Base[T](t) {
+ ^
s
1
-2
\ No newline at end of file
+2
diff --git a/test/files/specialized/spec-t3896.scala b/test/files/specialized/spec-t3896.scala
index 605ed0df9d..3a3be3da2b 100644
--- a/test/files/specialized/spec-t3896.scala
+++ b/test/files/specialized/spec-t3896.scala
@@ -12,7 +12,7 @@ object Test {
def main(args: Array[String]): Unit = {
val e = new AtomicBoolean(false)
val x = e.f( (a : Boolean) => !a ) // ok
- println( e.f( (a : Boolean) => !a ) toString ) // ok
+ println( e.f( (a : Boolean) => !a ).toString ) // ok
println( e.f( (a : Boolean) => !a) ) // compiler crash
println(runtime.BoxesRunTime.integerBoxCount)
diff --git a/test/files/specialized/tb3651.check b/test/files/specialized/tb3651.check
index c227083464..8a3f686ef5 100644
--- a/test/files/specialized/tb3651.check
+++ b/test/files/specialized/tb3651.check
@@ -1 +1,4 @@
-0
\ No newline at end of file
+tb3651.scala:8: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ lk.a
+ ^
+0
diff --git a/test/files/specialized/tc3651.check b/test/files/specialized/tc3651.check
index c227083464..e2dbadf22c 100644
--- a/test/files/specialized/tc3651.check
+++ b/test/files/specialized/tc3651.check
@@ -1 +1,4 @@
-0
\ No newline at end of file
+tc3651.scala:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ lk.a
+ ^
+0
diff --git a/test/files/specialized/td3651.check b/test/files/specialized/td3651.check
index 9aea9e0ce5..1a709fd0a7 100644
--- a/test/files/specialized/td3651.check
+++ b/test/files/specialized/td3651.check
@@ -1,2 +1,8 @@
+td3651.scala:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ b.a
+ ^
+td3651.scala:16: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ der.a
+ ^
+0
0
-0 \ No newline at end of file
diff --git a/test/junit/scala/reflect/internal/util/WeakHashSetTest.scala b/test/junit/scala/reflect/internal/util/WeakHashSetTest.scala
new file mode 100644
index 0000000000..7e3b35c7d6
--- /dev/null
+++ b/test/junit/scala/reflect/internal/util/WeakHashSetTest.scala
@@ -0,0 +1,171 @@
+package scala.reflect.internal.util
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class WeakHashSetTest {
+
+ // a class guaranteed to provide hash collisions
+ case class Collider(x : String) extends Comparable[Collider] with Serializable {
+ override def hashCode = 0
+ def compareTo(y : Collider) = this.x compareTo y.x
+ }
+
+ // basic emptiness check
+ @Test
+ def checkEmpty {
+ val hs = new WeakHashSet[String]()
+ assert(hs.size == 0)
+ hs.diagnostics.fullyValidate
+ }
+
+ // make sure += works
+ @Test
+ def checkPlusEquals {
+ val hs = new WeakHashSet[String]()
+ val elements = List("hello", "goodbye")
+ elements foreach (hs += _)
+ assert(hs.size == 2)
+ assert(hs contains "hello")
+ assert(hs contains "goodbye")
+ hs.diagnostics.fullyValidate
+ }
+
+ // make sure += works when there are collisions
+ @Test
+ def checkPlusEqualsCollisions {
+ val hs = new WeakHashSet[Collider]()
+ val elements = List("hello", "goodbye") map Collider
+ elements foreach (hs += _)
+ assert(hs.size == 2)
+ assert(hs contains Collider("hello"))
+ assert(hs contains Collider("goodbye"))
+ hs.diagnostics.fullyValidate
+ }
+
+ // add a large number of elements to force rehashing and then validate
+ @Test
+ def checkRehashing {
+ val size = 200
+ val hs = new WeakHashSet[String]()
+ val elements = (0 until size).toList map ("a" + _)
+ elements foreach (hs += _)
+ elements foreach {i => assert(hs contains i)}
+ hs.diagnostics.fullyValidate
+ }
+
+ // make sure rehashing works properly when all the elements collide
+ @Test
+ def checkRehashCollisions {
+ val size = 200
+ val hs = new WeakHashSet[Collider]()
+ val elements = (0 until size).toList map {x => Collider("a" + x)}
+ elements foreach (hs += _)
+ elements foreach {i => assert(hs contains i)}
+ hs.diagnostics.fullyValidate
+ }
+
+ // test that unreferenced objects are removed
+ // not run in an automated environment because gc behavior can't be relied on
+ //@Test
+ def checkRemoveUnreferencedObjects {
+ val size = 200
+ val hs = new WeakHashSet[Collider]()
+ val elements = (0 until size).toList map {x => Collider("a" + x)}
+ elements foreach (hs += _)
+ // don't throw the following into a retained collection so gc
+ // can remove them
+ for (i <- 0 until size) {
+ hs += Collider("b" + i)
+ }
+ System.gc()
+ Thread.sleep(1000)
+ assert(hs.size == 200)
+ elements foreach {i => assert(hs contains i)}
+ for (i <- 0 until size) {
+ assert(!(hs contains Collider("b" + i)))
+ }
+ hs.diagnostics.fullyValidate
+ }
+
+ // make sure findEntryOrUpdate returns the originally entered element
+ @Test
+ def checkFindOrUpdate {
+ val size = 200
+ val hs = new WeakHashSet[Collider]()
+ val elements = (0 until size).toList map {x => Collider("a" + x)}
+ elements foreach {x => assert(hs findEntryOrUpdate x eq x)}
+ for (i <- 0 until size) {
+ // when we do a lookup the result should be the same reference we
+ // originally put in
+ assert(hs findEntryOrUpdate(Collider("a" + i)) eq elements(i))
+ }
+ hs.diagnostics.fullyValidate
+ }
+
+ // check -= functionality
+ @Test
+ def checkMinusEquals {
+ val hs = new WeakHashSet[String]()
+ val elements = List("hello", "goodbye")
+ elements foreach (hs += _)
+ hs -= "goodbye"
+ assert(hs.size == 1)
+ assert(hs contains "hello")
+ assert(!(hs contains "goodbye"))
+ hs.diagnostics.fullyValidate
+ }
+
+ // check -= when there are collisions
+ @Test
+ def checkMinusEqualsCollisions {
+ val hs = new WeakHashSet[Collider]
+ val elements = List(Collider("hello"), Collider("goodbye"))
+ elements foreach (hs += _)
+ hs -= Collider("goodbye")
+ assert(hs.size == 1)
+ assert(hs contains Collider("hello"))
+ assert(!(hs contains Collider("goodbye")))
+ hs -= Collider("hello")
+ assert(hs.size == 0)
+ assert(!(hs contains Collider("hello")))
+ hs.diagnostics.fullyValidate
+ }
+
+ // check that the clear method actually cleans everything
+ @Test
+ def checkClear {
+ val size = 200
+ val hs = new WeakHashSet[String]()
+ val elements = (0 until size).toList map ("a" + _)
+ elements foreach (hs += _)
+ hs.clear()
+ assert(hs.size == 0)
+ elements foreach {i => assert(!(hs contains i))}
+ hs.diagnostics.fullyValidate
+ }
+
+ // check that the iterator covers all the contents
+ @Test
+ def checkIterator {
+ val hs = new WeakHashSet[String]()
+ val elements = (0 until 20).toList map ("a" + _)
+ elements foreach (hs += _)
+ assert(elements.iterator.toList.sorted == elements.sorted)
+ hs.diagnostics.fullyValidate
+ }
+
+ // check that the iterator covers all the contents even when there is a collision
+ @Test
+ def checkIteratorCollisions {
+ val hs = new WeakHashSet[Collider]
+ val elements = (0 until 20).toList map {x => Collider("a" + x)}
+ elements foreach (hs += _)
+ assert(elements.iterator.toList.sorted == elements.sorted)
+ hs.diagnostics.fullyValidate
+ }
+
+}
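The new test above also doubles as a usage reference for scala.reflect.internal.util.WeakHashSet. A minimal sketch, not part of the patch, assuming only the members exercised by the test (+=, -=, size, contains, diagnostics.fullyValidate):

object WeakHashSetSketch {
  def main(args: Array[String]): Unit = {
    val hs = new scala.reflect.internal.util.WeakHashSet[String]()
    hs += "hello"
    hs += "goodbye"
    assert(hs.size == 2 && (hs contains "hello"))
    hs -= "goodbye"
    assert(hs.size == 1 && !(hs contains "goodbye"))
    // entries are held only weakly, so elements with no remaining strong
    // reference may vanish after a GC -- which is why the
    // checkRemoveUnreferencedObjects test above is not run automatically
    hs.diagnostics.fullyValidate
  }
}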
diff --git a/test/junit/scala/tools/nsc/SampleTest.scala b/test/junit/scala/tools/nsc/SampleTest.scala
new file mode 100644
index 0000000000..8e026da1ea
--- /dev/null
+++ b/test/junit/scala/tools/nsc/SampleTest.scala
@@ -0,0 +1,17 @@
+package scala.tools.nsc
+package test
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+/** Sample JUnit test that shows that all pieces
+ of JUnit infrastructure work correctly */
+@RunWith(classOf[JUnit4])
+class SampleTest {
+ @Test
+ def testMath: Unit = {
+ assert(2+2 == 4, "you didn't get the math right fellow")
+ }
+}
diff --git a/test/partest b/test/partest
index ec66f5c048..99a731a49b 100755
--- a/test/partest
+++ b/test/partest
@@ -1,7 +1,8 @@
-#!/bin/sh
+#!/usr/bin/env bash
+#
##############################################################################
-# Scala test runner 2.8.0
+# Scala test runner 2.10.0
##############################################################################
# (c) 2002-2013 LAMP/EPFL
#
@@ -10,6 +11,16 @@
# PARTICULAR PURPOSE.
##############################################################################
+# Use tput to detect color-capable terminal.
+term_colors=$(tput colors 2>/dev/null)
+if [[ $? == 0 ]] && [[ $term_colors -gt 2 ]]; then
+ git_diff_options="--color=always --word-diff"
+ color_opts="-Dpartest.colors=$term_colors"
+else
+ unset color_opts
+ git_diff_options="--nocolor"
+fi
+
cygwin=false;
darwin=false;
case "`uname`" in
@@ -64,6 +75,22 @@ if [ -z "$EXT_CLASSPATH" ] ; then
fi
fi
+# Locate a javac command
+# Try: JAVA_HOME, sibling to specific JAVACMD, or PATH
+# Don't fail if there is no javac, since not all tests require it.
+if [ -z "$JAVAC_CMD" ] ; then
+ if [ -n "${JAVA_HOME}" ] && [ -f "${JAVA_HOME}/bin/javac" ] ; then
+ JAVAC_CMD="${JAVA_HOME}/bin/javac"
+ fi
+ if [ -z "$JAVAC_CMD" ] && [ -n "$JAVACMD" ] ; then
+ JDIR=`dirname "${JAVACMD}"`
+ JAVAC_CMD="${JDIR}/javac"
+ fi
+ if [ -z "$JAVAC_CMD" ] ; then
+ JAVAC_CMD=`type -p javac`
+ fi
+fi
+
if $cygwin; then
if [ "$OS" = "Windows_NT" ] && cygpath -m .>/dev/null 2>/dev/null ; then
format=mixed
@@ -76,6 +103,9 @@ if $cygwin; then
if [ -n "${JAVACMD}" ] ; then
JAVACMD=`cygpath --$format "$JAVACMD"`
fi
+ if [ -n "${JAVAC_CMD}" ] ; then
+ JAVAC_CMD=`cygpath --$format "$JAVAC_CMD"`
+ fi
SCALA_HOME=`cygpath --$format "$SCALA_HOME"`
EXT_CLASSPATH=`cygpath --path --$format "$EXT_CLASSPATH"`
fi
@@ -86,8 +116,8 @@ fi
JAVA_OPTS="-Xmx1024M -Xms64M -XX:MaxPermSize=128M $JAVA_OPTS"
# the ant task doesn't supply any options by default,
-# so don't to that here either -- note that you may want to pass -optimise
-# to mimic what happens during nightlies
+# so don't do that here either -- note that you may want to pass -optimise
+# to mimic what happens during nightlies.
# [ -n "$SCALAC_OPTS" ] || SCALAC_OPTS="-deprecation"
partestDebugStr=""
@@ -98,9 +128,11 @@ fi
"${JAVACMD:=java}" \
$JAVA_OPTS -cp "$EXT_CLASSPATH" \
${partestDebugStr} \
+ "$color_opts" \
+ -Dfile.encoding=UTF-8 \
-Dscala.home="${SCALA_HOME}" \
-Dpartest.javacmd="${JAVACMD}" \
-Dpartest.java_opts="${JAVA_OPTS}" \
-Dpartest.scalac_opts="${SCALAC_OPTS}" \
- -Dpartest.javac_cmd="${JAVA_HOME}/bin/javac" \
+ -Dpartest.javac_cmd="${JAVAC_CMD}" \
scala.tools.partest.nest.NestRunner "$@"
diff --git a/test/pending/neg/plugin-after-terminal.flags b/test/pending/neg/plugin-after-terminal.flags
deleted file mode 100644
index 6a44376213..0000000000
--- a/test/pending/neg/plugin-after-terminal.flags
+++ /dev/null
@@ -1,2 +0,0 @@
--Xplugin:files/neg/plugin-after-terminal/lib/plugins.jar
-
diff --git a/test/pending/neg/plugin-after-terminal/lib/plugins.jar.desired.sha1 b/test/pending/neg/plugin-after-terminal/lib/plugins.jar.desired.sha1
deleted file mode 100644
index 3e382f3a12..0000000000
--- a/test/pending/neg/plugin-after-terminal/lib/plugins.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f174c50c4363c492362a05c72dd45b0da18fdcd8 ?plugins.jar
diff --git a/test/pending/neg/plugin-after-terminal/misc/build.sh b/test/pending/neg/plugin-after-terminal/misc/build.sh
deleted file mode 100755
index 8899009d7f..0000000000
--- a/test/pending/neg/plugin-after-terminal/misc/build.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-SCALAC="../../../../build/pack/bin/scalac -deprecation -cp ../../../../build/quick/classes/compiler/"
-
-BASE=`pwd`
-
-if [[ -d "${BASE}/src" ]] ; then
-
- mkdir -p build
- ${SCALAC} -d build src/*.scala
- jar cf lib/plugins.jar -C misc/ scalac-plugin.xml -C build .
- rm -rf build
-fi
-
diff --git a/test/pending/neg/plugin-after-terminal/testsource.scala b/test/pending/neg/plugin-after-terminal/testsource.scala
deleted file mode 100644
index 519d162fdf..0000000000
--- a/test/pending/neg/plugin-after-terminal/testsource.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends Application {
- println("afterterminal")
-}
-
diff --git a/test/pending/neg/plugin-before-parser.flags b/test/pending/neg/plugin-before-parser.flags
deleted file mode 100644
index 632530922c..0000000000
--- a/test/pending/neg/plugin-before-parser.flags
+++ /dev/null
@@ -1,2 +0,0 @@
--Xplugin:files/neg/plugin-before-parser/lib/plugins.jar
-
diff --git a/test/pending/neg/plugin-before-parser/lib/plugins.jar.desired.sha1 b/test/pending/neg/plugin-before-parser/lib/plugins.jar.desired.sha1
deleted file mode 100644
index e82eed76ce..0000000000
--- a/test/pending/neg/plugin-before-parser/lib/plugins.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d7b100ad483484b598b7cd643424bd2e33898a0d ?plugins.jar
diff --git a/test/pending/neg/plugin-before-parser/misc/build.sh b/test/pending/neg/plugin-before-parser/misc/build.sh
deleted file mode 100755
index 8899009d7f..0000000000
--- a/test/pending/neg/plugin-before-parser/misc/build.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-SCALAC="../../../../build/pack/bin/scalac -deprecation -cp ../../../../build/quick/classes/compiler/"
-
-BASE=`pwd`
-
-if [[ -d "${BASE}/src" ]] ; then
-
- mkdir -p build
- ${SCALAC} -d build src/*.scala
- jar cf lib/plugins.jar -C misc/ scalac-plugin.xml -C build .
- rm -rf build
-fi
-
diff --git a/test/pending/neg/plugin-before-parser/testsource.scala b/test/pending/neg/plugin-before-parser/testsource.scala
deleted file mode 100644
index 9928aaa83c..0000000000
--- a/test/pending/neg/plugin-before-parser/testsource.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends Application {
- println("beforeparser")
-}
-
diff --git a/test/pending/neg/plugin-cyclic-dependency.check b/test/pending/neg/plugin-cyclic-dependency.check
deleted file mode 100644
index a29bc3f5be..0000000000
--- a/test/pending/neg/plugin-cyclic-dependency.check
+++ /dev/null
@@ -1,2 +0,0 @@
-error: fatal error: Cycle in compiler phase dependencies detected, phase cyclicdependency1 reacted twice!
-one error found
diff --git a/test/pending/neg/plugin-cyclic-dependency.flags b/test/pending/neg/plugin-cyclic-dependency.flags
deleted file mode 100644
index 8716aaa65f..0000000000
--- a/test/pending/neg/plugin-cyclic-dependency.flags
+++ /dev/null
@@ -1,2 +0,0 @@
--Xplugin:files/neg/plugin-cyclic-dependency/lib/plugins.jar
-
diff --git a/test/pending/neg/plugin-cyclic-dependency/lib/plugins.jar.desired.sha1 b/test/pending/neg/plugin-cyclic-dependency/lib/plugins.jar.desired.sha1
deleted file mode 100644
index 7e565e9e61..0000000000
--- a/test/pending/neg/plugin-cyclic-dependency/lib/plugins.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7e6be9e33a87194e7061f94f6be115619f91ada2 ?plugins.jar
diff --git a/test/pending/neg/plugin-cyclic-dependency/misc/build.sh b/test/pending/neg/plugin-cyclic-dependency/misc/build.sh
deleted file mode 100755
index 8899009d7f..0000000000
--- a/test/pending/neg/plugin-cyclic-dependency/misc/build.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-SCALAC="../../../../build/pack/bin/scalac -deprecation -cp ../../../../build/quick/classes/compiler/"
-
-BASE=`pwd`
-
-if [[ -d "${BASE}/src" ]] ; then
-
- mkdir -p build
- ${SCALAC} -d build src/*.scala
- jar cf lib/plugins.jar -C misc/ scalac-plugin.xml -C build .
- rm -rf build
-fi
-
diff --git a/test/pending/neg/plugin-cyclic-dependency/testsource.scala b/test/pending/neg/plugin-cyclic-dependency/testsource.scala
deleted file mode 100644
index f1513ec9a0..0000000000
--- a/test/pending/neg/plugin-cyclic-dependency/testsource.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends Application {
- println("cyclicdependency")
-}
-
diff --git a/test/pending/neg/plugin-multiple-rafter.flags b/test/pending/neg/plugin-multiple-rafter.flags
deleted file mode 100644
index dcae7f2f96..0000000000
--- a/test/pending/neg/plugin-multiple-rafter.flags
+++ /dev/null
@@ -1,2 +0,0 @@
--Xplugin:files/neg/plugin-multiple-rafter/lib/plugins.jar
-
diff --git a/test/pending/neg/plugin-multiple-rafter/lib/plugins.jar.desired.sha1 b/test/pending/neg/plugin-multiple-rafter/lib/plugins.jar.desired.sha1
deleted file mode 100644
index f4905fcbd4..0000000000
--- a/test/pending/neg/plugin-multiple-rafter/lib/plugins.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2bda582b574287429ad5ee2e1d9a3effc88b0a5f ?plugins.jar
diff --git a/test/pending/neg/plugin-multiple-rafter/misc/build.sh b/test/pending/neg/plugin-multiple-rafter/misc/build.sh
deleted file mode 100755
index 8899009d7f..0000000000
--- a/test/pending/neg/plugin-multiple-rafter/misc/build.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-SCALAC="../../../../build/pack/bin/scalac -deprecation -cp ../../../../build/quick/classes/compiler/"
-
-BASE=`pwd`
-
-if [[ -d "${BASE}/src" ]] ; then
-
- mkdir -p build
- ${SCALAC} -d build src/*.scala
- jar cf lib/plugins.jar -C misc/ scalac-plugin.xml -C build .
- rm -rf build
-fi
-
diff --git a/test/pending/neg/plugin-multiple-rafter/testsource.scala b/test/pending/neg/plugin-multiple-rafter/testsource.scala
deleted file mode 100644
index f73db1eb60..0000000000
--- a/test/pending/neg/plugin-multiple-rafter/testsource.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends Application {
- println("multi-rafter")
-}
-
diff --git a/test/pending/neg/plugin-rafter-before-1.check b/test/pending/neg/plugin-rafter-before-1.check
deleted file mode 100644
index 19ed4d2fba..0000000000
--- a/test/pending/neg/plugin-rafter-before-1.check
+++ /dev/null
@@ -1,2 +0,0 @@
-error: fatal error: phase erasure want to run right after explicitouter, but some phase has declared to run before erasure. Re-run with -Xgenerate-phase-graph <filename> to better see the problem.
-one error found
diff --git a/test/pending/neg/plugin-rafter-before-1.flags b/test/pending/neg/plugin-rafter-before-1.flags
deleted file mode 100644
index 8bf03145b9..0000000000
--- a/test/pending/neg/plugin-rafter-before-1.flags
+++ /dev/null
@@ -1,2 +0,0 @@
--Xplugin:files/neg/plugin-rafter-before-1/lib/plugins.jar
-
diff --git a/test/pending/neg/plugin-rafter-before-1/lib/plugins.jar.desired.sha1 b/test/pending/neg/plugin-rafter-before-1/lib/plugins.jar.desired.sha1
deleted file mode 100644
index 8ad591b6ea..0000000000
--- a/test/pending/neg/plugin-rafter-before-1/lib/plugins.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-af91fd67ccef349e7f8ea662615e17796a339485 ?plugins.jar
diff --git a/test/pending/neg/plugin-rafter-before-1/misc/build.sh b/test/pending/neg/plugin-rafter-before-1/misc/build.sh
deleted file mode 100755
index 8899009d7f..0000000000
--- a/test/pending/neg/plugin-rafter-before-1/misc/build.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-SCALAC="../../../../build/pack/bin/scalac -deprecation -cp ../../../../build/quick/classes/compiler/"
-
-BASE=`pwd`
-
-if [[ -d "${BASE}/src" ]] ; then
-
- mkdir -p build
- ${SCALAC} -d build src/*.scala
- jar cf lib/plugins.jar -C misc/ scalac-plugin.xml -C build .
- rm -rf build
-fi
-
diff --git a/test/pending/neg/plugin-rafter-before-1/testsource.scala b/test/pending/neg/plugin-rafter-before-1/testsource.scala
deleted file mode 100644
index 836459db22..0000000000
--- a/test/pending/neg/plugin-rafter-before-1/testsource.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends Application {
- println("rafter-before-1")
-}
-
diff --git a/test/pending/neg/plugin-rightafter-terminal.flags b/test/pending/neg/plugin-rightafter-terminal.flags
deleted file mode 100644
index 948a318668..0000000000
--- a/test/pending/neg/plugin-rightafter-terminal.flags
+++ /dev/null
@@ -1,2 +0,0 @@
--Xplugin:files/neg/plugin-rightafter-terminal/lib/plugins.jar
-
diff --git a/test/pending/neg/plugin-rightafter-terminal/lib/plugins.jar.desired.sha1 b/test/pending/neg/plugin-rightafter-terminal/lib/plugins.jar.desired.sha1
deleted file mode 100644
index c2e2b9cd43..0000000000
--- a/test/pending/neg/plugin-rightafter-terminal/lib/plugins.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-8cccde4914da2058dca893783c231cda23855603 ?plugins.jar
diff --git a/test/pending/neg/plugin-rightafter-terminal/misc/build.sh b/test/pending/neg/plugin-rightafter-terminal/misc/build.sh
deleted file mode 100755
index 8899009d7f..0000000000
--- a/test/pending/neg/plugin-rightafter-terminal/misc/build.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-SCALAC="../../../../build/pack/bin/scalac -deprecation -cp ../../../../build/quick/classes/compiler/"
-
-BASE=`pwd`
-
-if [[ -d "${BASE}/src" ]] ; then
-
- mkdir -p build
- ${SCALAC} -d build src/*.scala
- jar cf lib/plugins.jar -C misc/ scalac-plugin.xml -C build .
- rm -rf build
-fi
-
diff --git a/test/pending/neg/plugin-rightafter-terminal/misc/scalac-plugin.xml b/test/pending/neg/plugin-rightafter-terminal/misc/scalac-plugin.xml
deleted file mode 100644
index 90ff27dc2a..0000000000
--- a/test/pending/neg/plugin-rightafter-terminal/misc/scalac-plugin.xml
+++ /dev/null
@@ -1,5 +0,0 @@
-<plugin>
- <name>beforeparser</name>
- <classname>scala.test.plugins.ThePlugin</classname>
-</plugin>
-
diff --git a/test/pending/neg/plugin-rightafter-terminal/testsource.scala b/test/pending/neg/plugin-rightafter-terminal/testsource.scala
deleted file mode 100644
index 7af767b638..0000000000
--- a/test/pending/neg/plugin-rightafter-terminal/testsource.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends Application {
- println("rightafterterminal")
-}
-
diff --git a/test/pending/neg/t7441.check b/test/pending/neg/t7441.check
new file mode 100644
index 0000000000..f259457197
--- /dev/null
+++ b/test/pending/neg/t7441.check
@@ -0,0 +1,6 @@
+t7441.scala:4: error: type mismatch;
+ found : Int(1)
+ required: List[Any]
+ def test = apply(1)
+ ^
+one error found
diff --git a/test/pending/neg/t7441.scala b/test/pending/neg/t7441.scala
new file mode 100644
index 0000000000..dad7421e3f
--- /dev/null
+++ b/test/pending/neg/t7441.scala
@@ -0,0 +1,7 @@
+object Test {
+ object Bar {
+ def apply(xs: List[Any]): Int = 0
+ def test = apply(1)
+ }
+ implicit def foo = 1
+}
diff --git a/test/pending/neg/plugin-cyclic-dependency/src/ThePlugin.scala b/test/pending/neg/t7494-cyclic-dependency/ThePlugin.scala
index bd94ce60d7..bd94ce60d7 100644
--- a/test/pending/neg/plugin-cyclic-dependency/src/ThePlugin.scala
+++ b/test/pending/neg/t7494-cyclic-dependency/ThePlugin.scala
diff --git a/test/pending/neg/t7494-cyclic-dependency/sample_2.flags b/test/pending/neg/t7494-cyclic-dependency/sample_2.flags
new file mode 100644
index 0000000000..db25b88a12
--- /dev/null
+++ b/test/pending/neg/t7494-cyclic-dependency/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xplugin-require:cyclicdependency
diff --git a/test/pending/neg/t7494-cyclic-dependency/sample_2.scala b/test/pending/neg/t7494-cyclic-dependency/sample_2.scala
new file mode 100644
index 0000000000..73cdc64e40
--- /dev/null
+++ b/test/pending/neg/t7494-cyclic-dependency/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/pending/neg/t7494-cyclic-dependency/scalac-plugin.xml b/test/pending/neg/t7494-cyclic-dependency/scalac-plugin.xml
new file mode 100644
index 0000000000..2558d6fd03
--- /dev/null
+++ b/test/pending/neg/t7494-cyclic-dependency/scalac-plugin.xml
@@ -0,0 +1,5 @@
+<plugin>
+ <name>ignored</name>
+ <classname>scala.test.plugins.ThePlugin</classname>
+</plugin>
+
diff --git a/test/pending/pos/t1786.scala b/test/pending/pos/t1786.scala
deleted file mode 100644
index dca2edaab4..0000000000
--- a/test/pending/pos/t1786.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/** This a consequence of the current type checking algorithm, where bounds
- * are checked only after variables are instantiated. I believe this will change once we go to contraint-based type inference. Assigning low priority until then.
- *
- *
- */
-class SomeClass(val intValue:Int)
-class MyClass[T <: SomeClass](val myValue:T)
-
-object Test extends Application {
- def myMethod(i:MyClass[_]) {
- i.myValue.intValue/2 // << error i is of type Any
- }
-
- def myMethod(i:MyClass[_ <: SomeClass]) {
- i.myValue.intValue/2 // << works
- }
-}
-/*
-The below code shows a compiler flaw in that the wildcard "_" as value for a bounded type parameter either breaks the boundry - as it result in Any - or doesnt (as id hoped it to be) evaluates to the boundy.
-*/
diff --git a/test/files/pos/t7234.scala b/test/pending/pos/t7234.scala
index 59a233d835..59a233d835 100644
--- a/test/files/pos/t7234.scala
+++ b/test/pending/pos/t7234.scala
diff --git a/test/files/pos/t7234b.scala b/test/pending/pos/t7234b.scala
index fee98e87a8..fee98e87a8 100644
--- a/test/files/pos/t7234b.scala
+++ b/test/pending/pos/t7234b.scala
diff --git a/test/pending/pos/t7486.scala b/test/pending/pos/t7486.scala
new file mode 100644
index 0000000000..6dd7f4c4ac
--- /dev/null
+++ b/test/pending/pos/t7486.scala
@@ -0,0 +1,8 @@
+object Test{
+ var locker = 0
+ // remove implicit, or change to `locker = locker + 1` to make it compile.
+ implicit val davyJones0 = {
+ locker += 0
+ 0
+ }
+}
diff --git a/test/pending/run/macro-term-declared-in-anonymous-explicit-import.check b/test/pending/run/macro-term-declared-in-anonymous-explicit-import.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/pending/run/macro-term-declared-in-anonymous-explicit-import.check
diff --git a/test/pending/run/macro-term-declared-in-anonymous-explicit-import/Impls_1.scala b/test/pending/run/macro-term-declared-in-anonymous-explicit-import/Impls_1.scala
new file mode 100644
index 0000000000..348f3420f2
--- /dev/null
+++ b/test/pending/run/macro-term-declared-in-anonymous-explicit-import/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
+ c.Expr[Unit](body)
+ }
+} \ No newline at end of file
diff --git a/test/files/run/macro-declared-in-anonymous/Macros_Test_2.scala b/test/pending/run/macro-term-declared-in-anonymous-explicit-import/Macros_Test_2.scala
index 8bd8c172c9..dd2317b1b7 100644
--- a/test/files/run/macro-declared-in-anonymous/Macros_Test_2.scala
+++ b/test/pending/run/macro-term-declared-in-anonymous-explicit-import/Macros_Test_2.scala
@@ -1,3 +1,5 @@
+import language.experimental.macros
+
object Test extends App {
val macros = new { def foo = macro Impls.foo }
macros.foo
diff --git a/test/files/run/t6387.check b/test/pending/run/t6387.check
index 83b33d238d..83b33d238d 100644
--- a/test/files/run/t6387.check
+++ b/test/pending/run/t6387.check
diff --git a/test/files/run/t6387.scala b/test/pending/run/t6387.scala
index bbebb5f511..bbebb5f511 100644
--- a/test/files/run/t6387.scala
+++ b/test/pending/run/t6387.scala
diff --git a/test/scaladoc/run/SI-191-deprecated.scala b/test/scaladoc/run/SI-191-deprecated.scala
deleted file mode 100755
index 4ed24ff8d1..0000000000
--- a/test/scaladoc/run/SI-191-deprecated.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-import scala.tools.nsc.doc.model._
-import scala.tools.nsc.doc.base._
-import scala.tools.nsc.doc.base.comment._
-import scala.tools.partest.ScaladocModelTest
-import java.net.{URI, URL}
-import java.io.File
-
-object Test extends ScaladocModelTest {
-
- override def code =
- """
- /** See:
- * - [[scala.collection.Map]] Simple linking
- * - [[scala.collection.immutable.::]] Linking with symbolic name
- * - [[scala.Int]].toLong Linking to a class
- * - [[scala.Predef]] Linking to an object
- * - [[scala.Int.toLong]] Linking to a method
- * - [[scala]] Linking to a package
- * - [[scala.AbstractMethodError]] Linking to a member in the package object
- * - [[scala.Predef.String]] Linking to a member in an object
- *
- * Don't look at:
- * - [[scala.NoLink]] Not linking :)
- */
- object Test {
- def foo(param: Any) {}
- def barr(l: scala.collection.immutable.List[Any]) {}
- def bar(l: List[String]) {} // TODO: Should be able to link to type aliases
- def baz(d: java.util.Date) {} // Should not be resolved
- }
- """
-
- def scalaURL = "http://bog.us"
-
- override def scaladocSettings = "-no-link-warnings -external-urls scala=" + scalaURL
-
- def testModel(rootPackage: Package) {
- import access._
- val test = rootPackage._object("Test")
-
- def check(memberDef: Def, expected: Int) {
- val externals = memberDef.valueParams(0)(0).resultType.refEntity collect {
- case (_, (LinkToExternal(name, url), _)) => assert(url.contains(scalaURL)); name
- }
- assert(externals.size == expected)
- }
-
- check(test._method("foo"), 1)
- check(test._method("bar"), 0)
- check(test._method("barr"), 2)
- check(test._method("baz"), 0)
-
- val expectedUrls = collection.mutable.Set[String](
- "scala.collection.Map",
- "scala.collection.immutable.::",
- "scala.Int",
- "scala.Predef$",
- "scala.Int@toLong:Long",
- "scala.package",
- "scala.package@AbstractMethodError=AbstractMethodError",
- "scala.Predef$@String=String"
- ).map(scalaURL + "/index.html#" + _)
-
- def isExpectedExternalLink(l: EntityLink) = l.link match {
- case LinkToExternal(name, url) => assert(expectedUrls contains url, url); true
- case _ => false
- }
-
- assert(countLinks(test.comment.get, isExpectedExternalLink) == 8,
- countLinks(test.comment.get, isExpectedExternalLink) + " == 8")
- }
-}
diff --git a/test/scaladoc/run/SI-6715.check b/test/scaladoc/run/SI-6715.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/SI-6715.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-6715.scala b/test/scaladoc/run/SI-6715.scala
new file mode 100644
index 0000000000..92d3376234
--- /dev/null
+++ b/test/scaladoc/run/SI-6715.scala
@@ -0,0 +1,15 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+ def scaladocSettings = ""
+
+ override def code = "object A { def $$ = 123 }"
+
+ def testModel(rootPackage: Package) = {
+ import access._
+
+ val method = rootPackage._object("A")._method("$$")
+ assert(method != null)
+ }
+}
diff --git a/test/scaladoc/run/SI-191-deprecated.check b/test/scaladoc/run/SI-7367.check
index 3925a0d464..3925a0d464 100755
--- a/test/scaladoc/run/SI-191-deprecated.check
+++ b/test/scaladoc/run/SI-7367.check
diff --git a/test/scaladoc/run/SI-7367.scala b/test/scaladoc/run/SI-7367.scala
new file mode 100755
index 0000000000..6e5a317932
--- /dev/null
+++ b/test/scaladoc/run/SI-7367.scala
@@ -0,0 +1,25 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+ override def code = """
+ class annot() extends annotation.StaticAnnotation {
+ def this(a: Any) = this()
+ }
+
+ @annot(0)
+ class B
+ """
+
+ def scaladocSettings = ""
+
+ def testModel(root: Package) = {
+ import access._
+ val annotations = root._class("B").annotations
+ assert(annotations.size == 1)
+ assert(annotations(0).annotationClass == root._class("annot"))
+ val args = annotations(0).arguments
+ assert(args.size == 1)
+ assert(args(0).value.expression == "0")
+ }
+}
diff --git a/test/scaladoc/run/t5527.check b/test/scaladoc/run/t5527.check
index ab2aeb2d67..bfaa4ad0ed 100644
--- a/test/scaladoc/run/t5527.check
+++ b/test/scaladoc/run/t5527.check
@@ -1,13 +1,13 @@
-newSource1:17: warning: discarding unmoored doc comment
- /** Testing 123 */
- ^
-newSource1:27: warning: discarding unmoored doc comment
- /** Calculate this result. */
+newSource1.scala:47: warning: discarding unmoored doc comment
+ /** Document this crucial constant for posterity.
^
-newSource1:34: warning: discarding unmoored doc comment
- /** Another digit is a giveaway. */
+newSource1.scala:64: warning: discarding unmoored doc comment
+ /*************************\
^
-[[syntax trees at end of parser]] // newSource1
+newSource1.scala:73: warning: discarding unmoored doc comment
+ val i = 10 */** Important!
+ ^
+[[syntax trees at end of parser]] // newSource1.scala
package <empty> {
object UselessComments extends scala.AnyRef {
def <init>() = {
@@ -48,6 +48,26 @@ package <empty> {
val test4 = 'a' match {
case ('0'| '1'| '2'| '3'| '4'| '5'| '6'| '7'| '8'| '9') => true
case _ => false
+ };
+ def test5: scala.Unit = if (true)
+ $qmark$qmark$qmark
+ else
+ ();
+ def test6 = {
+ val u = 4;
+ 0.to(u).foreach(((i) => println(i)))
+ };
+ def test7 = {
+ val u = 4;
+ 0.to(u).foreach(((i) => println(i)))
+ };
+ def test8 = {
+ val z = "fancy";
+ z.replace("fanc", "arts")
+ };
+ def test9 = {
+ val i = 10.$times(10);
+ assert(i.$eq$eq(100))
}
};
/** comments that we should keep */
@@ -75,7 +95,7 @@ package <empty> {
()
};
/** T */
- type T >: _root_.scala.Nothing <: _root_.scala.Any;
+ type T;
/** f */
def f(i: Int): scala.Unit;
/** v */
@@ -102,7 +122,11 @@ package <empty> {
super.<init>();
()
}
- }
+ };
+ /** Get the simple value.
+ * @return the default value
+ */
+ def value: Int = 7
}
}
diff --git a/test/scaladoc/run/t5527.scala b/test/scaladoc/run/t5527.scala
index 2449ff60c3..60ae23c1a7 100644
--- a/test/scaladoc/run/t5527.scala
+++ b/test/scaladoc/run/t5527.scala
@@ -49,6 +49,47 @@ object Test extends DirectTest {
case _ =>
false
}
+
+ def test5 {
+ /** @martin is this right? It shouldn't flag me as scaladoc. */
+ if (true) ???
+ }
+
+ def test6 = {
+ /** Document this crucial constant for posterity.
+ * Don't forget to dedoc this comment if you refactor to a local.
+ * @author Paul Phillips
+ */
+ val u = 4
+ for (i <- 0 to u)
+ println(i)
+ }
+ def test7 = {
+ /** Some standard tags are tolerated locally and shouldn't trigger a warning.
+ * @note Don't change this unless you know what you're doing. This means you.
+ */
+ val u = 4
+ for (i <- 0 to u)
+ println(i)
+ }
+ def test8 = {
+ /*************************\
+ * Fancy ASCII Art Block *
+ * @author som-snytt *
+ \*************************/
+ // this is just a local
+ val z = "fancy"
+ z replace ("fanc", "arts")
+ }
+ def test9 = {
+ val i = 10 */** Important!
+ * We have to multiply here!
+ * @author community
+ * @see SI-1234
+ */
+ 10
+ assert(i == 100)
+ }
}
/** comments that we should keep */
@@ -85,6 +126,13 @@ object Test extends DirectTest {
/** class D */
@deprecated("use ... instead", "2.10.0")
class D
+
+ /** Get the simple value.
+ * @return the default value
+ */
+ // an intervening line comment
+ /* I had more to say, but didn't want to pollute the scaladoc. */
+ def value: Int = 7
}
""".trim
diff --git a/test/scaladoc/scalacheck/CommentFactoryTest.scala b/test/scaladoc/scalacheck/CommentFactoryTest.scala
index 96174d29d1..ff64a25602 100644
--- a/test/scaladoc/scalacheck/CommentFactoryTest.scala
+++ b/test/scaladoc/scalacheck/CommentFactoryTest.scala
@@ -25,10 +25,10 @@ class Factory(val g: Global, val s: doc.Settings)
}
def parseComment(s: String): Option[Inline] =
- strip(parse(s, "", scala.tools.nsc.util.NoPosition))
+ strip(parse(s, "", scala.tools.nsc.util.NoPosition, null))
def createBody(s: String) =
- parse(s, "", scala.tools.nsc.util.NoPosition).body
+ parse(s, "", scala.tools.nsc.util.NoPosition, null).body
}
object Test extends Properties("CommentFactory") {
@@ -45,7 +45,7 @@ object Test extends Properties("CommentFactory") {
with MemberLookup)
}
- def parse(src: String, dst: Inline) = {
+ def parse(src: String, dst: Inline): Boolean = {
factory.parseComment(src) match {
case Some(inline) =>
inline == dst
diff --git a/test/scaladoc/scalacheck/HtmlFactoryTest.scala b/test/scaladoc/scalacheck/HtmlFactoryTest.scala
index d7b5e48288..03348b81d2 100644
--- a/test/scaladoc/scalacheck/HtmlFactoryTest.scala
+++ b/test/scaladoc/scalacheck/HtmlFactoryTest.scala
@@ -2,6 +2,8 @@ import org.scalacheck._
import org.scalacheck.Prop._
import java.net.{URLClassLoader, URLDecoder}
+import scala.collection.mutable
+import scala.xml.NodeSeq
object XMLUtil {
import scala.xml._
@@ -34,21 +36,24 @@ object Test extends Properties("HtmlFactory") {
// this test previously relied on the assumption that the current thread's classloader is an url classloader and contains all the classpaths
// does partest actually guarantee this? to quote Leonard Nimoy: The answer, of course, is no.
// this test _will_ fail again some time in the future.
- val paths = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader].getURLs.map(u => URLDecoder.decode(u.getPath))
- val morepaths = Thread.currentThread.getContextClassLoader.getParent.asInstanceOf[URLClassLoader].getURLs.map(u => URLDecoder.decode(u.getPath))
- (paths ++ morepaths).mkString(java.io.File.pathSeparator)
+ // Footnote: java.lang.ClassCastException: org.apache.tools.ant.loader.AntClassLoader5 cannot be cast to java.net.URLClassLoader
+ val loader = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader]
+ val paths = loader.getURLs.map(u => URLDecoder.decode(u.getPath))
+ paths mkString java.io.File.pathSeparator
}
def createFactory = {
val settings = new Settings({Console.err.println(_)})
+ settings.scaladocQuietRun = true
+ settings.nowarn.value = true
settings.classpath.value = getClasspath
val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
new DocFactory(reporter, settings)
}
- def createTemplates(basename: String) = {
- val result = scala.collection.mutable.Map[String, scala.xml.NodeSeq]()
+ def createTemplates(basename: String): collection.Map[String, NodeSeq] = {
+ val result = mutable.Map[String, NodeSeq]()
createFactory.makeUniverse(Left(List(RESOURCES+basename))) match {
case Some(universe) => {
@@ -57,7 +62,7 @@ object Test extends Properties("HtmlFactory") {
result += (page.absoluteLinkTo(page.path) -> page.body)
})
}
- case _ => ;
+ case _ =>
}
result
diff --git a/test/scaladoc/scalacheck/IndexScriptTest.scala b/test/scaladoc/scalacheck/IndexScriptTest.scala
index 37f6947aaa..b8b9f92965 100644
--- a/test/scaladoc/scalacheck/IndexScriptTest.scala
+++ b/test/scaladoc/scalacheck/IndexScriptTest.scala
@@ -8,14 +8,20 @@ import java.net.{URLClassLoader, URLDecoder}
object Test extends Properties("IndexScript") {
def getClasspath = {
- val loader = Thread.currentThread.getContextClassLoader
- val paths = loader.asInstanceOf[URLClassLoader].getURLs
- val morepaths = loader.getParent.asInstanceOf[URLClassLoader].getURLs
- (paths ++ morepaths).map(u => URLDecoder.decode(u.getPath)).mkString(java.io.File.pathSeparator)
+ // these things can be tricky
+ // this test previously relied on the assumption that the current thread's classloader is an url classloader and contains all the classpaths
+ // does partest actually guarantee this? to quote Leonard Nimoy: The answer, of course, is no.
+ // this test _will_ fail again some time in the future.
+ // Footnote: java.lang.ClassCastException: org.apache.tools.ant.loader.AntClassLoader5 cannot be cast to java.net.URLClassLoader
+ val loader = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader]
+ val paths = loader.getURLs.map(u => URLDecoder.decode(u.getPath))
+ paths mkString java.io.File.pathSeparator
}
val docFactory = {
val settings = new doc.Settings({Console.err.println(_)})
+ settings.scaladocQuietRun = true
+ settings.nowarn.value = true
settings.classpath.value = getClasspath
val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
new doc.DocFactory(reporter, settings)
diff --git a/test/scaladoc/scalacheck/IndexTest.scala b/test/scaladoc/scalacheck/IndexTest.scala
index dc4ab126d4..abc0e5da01 100644
--- a/test/scaladoc/scalacheck/IndexTest.scala
+++ b/test/scaladoc/scalacheck/IndexTest.scala
@@ -12,19 +12,19 @@ object Test extends Properties("Index") {
// this test previously relied on the assumption that the current thread's classloader is an url classloader and contains all the classpaths
// does partest actually guarantee this? to quote Leonard Nimoy: The answer, of course, is no.
// this test _will_ fail again some time in the future.
- val paths = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader].getURLs.map(u => URLDecoder.decode(u.getPath))
- val morepaths = Thread.currentThread.getContextClassLoader.getParent.asInstanceOf[URLClassLoader].getURLs.map(u => URLDecoder.decode(u.getPath))
- (paths ++ morepaths).mkString(java.io.File.pathSeparator)
+ // Footnote: java.lang.ClassCastException: org.apache.tools.ant.loader.AntClassLoader5 cannot be cast to java.net.URLClassLoader
+ val loader = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader]
+ val paths = loader.getURLs.map(u => URLDecoder.decode(u.getPath))
+ paths mkString java.io.File.pathSeparator
}
val docFactory = {
val settings = new doc.Settings({Console.err.println(_)})
-
+ settings.scaladocQuietRun = true
+ settings.nowarn.value = true
settings.classpath.value = getClasspath
- println(settings.classpath.value)
val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
-
new doc.DocFactory(reporter, settings)
}